Update AttentionLayer.py
Samsomyajit committed Dec 28, 2019
1 parent 04718a3 commit 8622e22
Showing 1 changed file with 3 additions and 4 deletions.
7 changes: 3 additions & 4 deletions s-atmech/AttentionLayer.py
@@ -6,9 +6,8 @@

 class AttentionLayer(Layer):
     """
-    This class implements Bahdanau attention (https://arxiv.org/pdf/1409.0473.pdf).
-    There are three sets of weights introduced W_a, U_a, and V_a
-    """
+    There are three sets of weights introduced W_a, U_a, and V_a
+    """
 
     def __init__(self, **kwargs):
         super(AttentionLayer, self).__init__(**kwargs)
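
Aside, not part of the diff: the three weight sets named in the docstring are the parameters of Bahdanau's additive score, e_ij = V_a^T tanh(W_a s_(i-1) + U_a h_j), with a softmax over source positions turning the energies into attention weights. A minimal sketch of that computation in Keras-backend style; the function name and shapes are illustrative assumptions, not this layer's API:

    from tensorflow.keras import backend as K

    def bahdanau_attention_weights(decoder_state, encoder_outputs, W_a, U_a, V_a):
        # Hypothetical standalone helper, for illustration only.
        # decoder_state:   (batch, dec_dim)           s_(i-1)
        # encoder_outputs: (batch, src_len, enc_dim)  h_1 .. h_T
        # W_a: (dec_dim, units), U_a: (enc_dim, units), V_a: (units, 1)
        query = K.expand_dims(K.dot(decoder_state, W_a), 1)  # (batch, 1, units)
        keys = K.dot(encoder_outputs, U_a)                   # (batch, src_len, units)
        energies = K.squeeze(K.dot(K.tanh(query + keys), V_a), axis=-1)  # (batch, src_len)
        return K.softmax(energies)  # one attention weight per source position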
@@ -91,7 +90,7 @@ def context_step(inputs, states):
             return c_i, [c_i]
 
         def create_inital_state(inputs, hidden_size):
-            # We are not using initial states, but need to pass something to K.rnn funciton
+
             fake_state = K.zeros_like(inputs)  # <= (batch_size, enc_seq_len, latent_dim)
             fake_state = K.sum(fake_state, axis=[1, 2])  # <= (batch_size)
             fake_state = K.expand_dims(fake_state)  # <= (batch_size, 1)
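Aside, not part of the diff: the fake_state lines in the second hunk are a common trick for handing K.rnn an initial state whose batch dimension is only known symbolically: build zeros shaped like the input, collapse the known axes, and expand back out. The diff is truncated at this point, so the sketch below is a hedged reconstruction of the full pattern; the final K.tile step is an assumption, not something shown above:

    from tensorflow.keras import backend as K

    def fake_initial_state(inputs, hidden_size):
        # inputs: (batch_size, enc_seq_len, latent_dim), batch_size symbolic
        state = K.zeros_like(inputs)       # (batch_size, enc_seq_len, latent_dim)
        state = K.sum(state, axis=[1, 2])  # (batch_size,)
        state = K.expand_dims(state)       # (batch_size, 1)
        # Assumed final step: broadcast the zeros to the width K.rnn expects
        return K.tile(state, [1, hidden_size])  # (batch_size, hidden_size)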
