
Fix typos in encoder variable names (#72)
mattdangerw authored Mar 28, 2022
1 parent 44933f7 commit a266926
Showing 2 changed files with 8 additions and 8 deletions.
8 changes: 4 additions & 4 deletions keras_nlp/layers/transformer_decoder.py
@@ -121,7 +121,7 @@ def _build(self, input_shape):
         self._enc_dec_attention_layernorm = keras.layers.LayerNormalization()
         self._feedforward_layernorm = keras.layers.LayerNormalization()

-        self._self_attentiondropout = keras.layers.Dropout(rate=self.dropout)
+        self._self_attention_dropout = keras.layers.Dropout(rate=self.dropout)
         self._enc_dec_attentiondropout = keras.layers.Dropout(
             rate=self.dropout,
         )
@@ -140,15 +140,15 @@ def _build(self, input_shape):
             kernel_initializer=self.kernel_initializer,
             bias_initializer=self.bias_initializer,
         )
-        self._outputdropout = keras.layers.Dropout(rate=self.dropout)
+        self._output_dropout = keras.layers.Dropout(rate=self.dropout)

     def _add_and_norm(self, input1, input2, norm_layer):
         return norm_layer(input1 + input2)

     def _feed_forward(self, input):
         x = self._intermediate_dense(input)
         x = self._output_dense(x)
-        return self._outputdropout(x)
+        return self._output_dropout(x)

     def call(
         self,
@@ -206,7 +206,7 @@ def call(
             decoder_sequence,
             attention_mask=decoder_mask,
         )
-        self_attended = self._self_attentiondropout(self_attended)
+        self_attended = self._self_attention_dropout(self_attended)
         self_attended = self._add_and_norm(
             self_attended, decoder_sequence, self._decoder_attention_layernorm
         )
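
For orientation only, here is a minimal usage sketch of the public TransformerDecoder layer this file implements; it is not part of the commit. The diff above confirms intermediate_dim, dropout, and the renamed dropout attributes, while num_heads and the decoder(decoder_sequence, encoder_sequence) call convention are assumptions about the keras_nlp API of this era.

import tensorflow as tf
import keras_nlp

# Hypothetical hyperparameters for illustration; only `intermediate_dim` and
# `dropout` are visible in the diff above, `num_heads` is assumed.
decoder = keras_nlp.layers.TransformerDecoder(
    intermediate_dim=64,
    num_heads=4,
    dropout=0.1,  # rate used by the renamed `_self_attention_dropout` and `_output_dropout`
)

# Assumed call convention: target sequence first, encoder output second.
decoder_sequence = tf.random.uniform(shape=(2, 10, 32))  # (batch, target_len, feature_dim)
encoder_sequence = tf.random.uniform(shape=(2, 12, 32))  # (batch, source_len, feature_dim)
outputs = decoder(decoder_sequence, encoder_sequence)    # same shape as decoder_sequence
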
8 changes: 4 additions & 4 deletions keras_nlp/layers/transformer_encoder.py
@@ -107,7 +107,7 @@ def _build(self, input_shape):
         self._attention_layernorm = keras.layers.LayerNormalization()
         self._feedforward_layernorm = keras.layers.LayerNormalization()

-        self._attentiondropout = keras.layers.Dropout(rate=self.dropout)
+        self._attention_dropout = keras.layers.Dropout(rate=self.dropout)

         self._intermediate_dense = keras.layers.Dense(
             self.intermediate_dim,
@@ -120,15 +120,15 @@ def _build(self, input_shape):
             kernel_initializer=self.kernel_initializer,
             bias_initializer=self.bias_initializer,
         )
-        self._outputdropout = keras.layers.Dropout(rate=self.dropout)
+        self._output_dropout = keras.layers.Dropout(rate=self.dropout)

     def _add_and_norm(self, input1, input2, norm_layer):
         return norm_layer(input1 + input2)

     def _feed_forward(self, input):
         x = self._intermediate_dense(input)
         x = self._output_dense(x)
-        return self._outputdropout(x)
+        return self._output_dropout(x)

     def call(self, inputs, padding_mask=None, attention_mask=None):
         """Forward pass of the TransformerEncoder.
@@ -161,7 +161,7 @@ def call(self, inputs, padding_mask=None, attention_mask=None):
         attended = self._multi_head_attention_layer(
             inputs, inputs, inputs, attention_mask=mask
         )
-        attended = self._attentiondropout(attended)
+        attended = self._attention_dropout(attended)
         attended = self._add_and_norm(
             inputs,
             attended,
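
Likewise, a minimal usage sketch for TransformerEncoder, not part of the commit. The call(inputs, padding_mask=None, attention_mask=None) signature and the intermediate_dim and dropout arguments are visible in the diff; num_heads is an assumed constructor argument.

import tensorflow as tf
import keras_nlp

encoder = keras_nlp.layers.TransformerEncoder(
    intermediate_dim=64,  # width of the feed-forward intermediate Dense layer
    num_heads=4,          # assumed constructor argument
    dropout=0.1,          # rate shared by the renamed `_attention_dropout` and `_output_dropout`
)

inputs = tf.random.uniform(shape=(2, 10, 32))  # (batch, sequence_length, feature_dim)
outputs = encoder(inputs)                      # output keeps the input shape
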
