-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: encoder.py
More file actions
33 lines (25 loc) · 1.17 KB
/
encoder.py
File metadata and controls
33 lines (25 loc) · 1.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
import tensorflow as tf
RECURRENT_INITIALISER = 'glorot_uniform'
class Encoder(tf.keras.Model):
    """Seq2seq encoder: embeds integer token ids and runs them through a GRU.

    Args (to ``__init__``):
        vocabulary: size of the input vocabulary (number of embedding rows).
        dimension: embedding output dimension.
        encdoding: GRU hidden-state size.  NOTE: the name is a typo for
            "encoding"; it is kept because the parameter and the
            ``self.encdoding`` attribute are part of the public interface.
        batchSize: batch size used to build the zero initial hidden state.
    """

    # Default embedding dimension; kept for backward compatibility with any
    # code reading ``Encoder.dimension``.
    dimension = 256

    def __init__(self, vocabulary, dimension, encdoding, batchSize):
        super(Encoder, self).__init__()
        self.encdoding = encdoding
        self.batchSize = batchSize
        # GRU - Gated Recurrent Unit, a recurrent layer.
        # return_sequences/return_state so call() can return both the full
        # output sequence and the final state.
        # 'glorot_uniform' draws samples from a uniform distribution.
        self.gru = tf.keras.layers.GRU(
            encdoding,
            return_sequences=True,
            return_state=True,
            recurrent_initializer=RECURRENT_INITIALISER)
        # BUG FIX: the embedding dimension was hard-coded to 256, silently
        # ignoring the ``dimension`` argument; use the parameter instead.
        # Word embedding fed into the recurrent layer (GRU).
        self.embedding = tf.keras.layers.Embedding(vocabulary, dimension)

    def call(self, x, hiddenState):
        """Embed token ids ``x`` and run the GRU from ``hiddenState``.

        Returns:
            (output, state): the per-timestep output sequence and the final
            GRU hidden state.
        """
        x = self.embedding(x)
        output, state = self.gru(x, initial_state=hiddenState)
        return output, state

    def hiddenStateInit(self):
        """Return an all-zero initial hidden state, shape (batchSize, encdoding)."""
        return tf.zeros((self.batchSize, self.encdoding))
# Inspired and modified from TensorFlow example
# TensorFlow Addons Networks : Sequence-to-Sequence NMT with Attention Mechanism
# 2021