From a26692644c95cbd3280335f358b8cfae4f427de9 Mon Sep 17 00:00:00 2001
From: Matt Watson <1389937+mattdangerw@users.noreply.github.com>
Date: Mon, 28 Mar 2022 14:30:57 -0700
Subject: [PATCH] Fix typos in encoder variable names (#72)

---
 keras_nlp/layers/transformer_decoder.py | 8 ++++----
 keras_nlp/layers/transformer_encoder.py | 8 ++++----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/keras_nlp/layers/transformer_decoder.py b/keras_nlp/layers/transformer_decoder.py
index 65e02fbc76..2df066ac6a 100644
--- a/keras_nlp/layers/transformer_decoder.py
+++ b/keras_nlp/layers/transformer_decoder.py
@@ -121,7 +121,7 @@ def _build(self, input_shape):
         self._enc_dec_attention_layernorm = keras.layers.LayerNormalization()
         self._feedforward_layernorm = keras.layers.LayerNormalization()

-        self._self_attentiondropout = keras.layers.Dropout(rate=self.dropout)
+        self._self_attention_dropout = keras.layers.Dropout(rate=self.dropout)
         self._enc_dec_attentiondropout = keras.layers.Dropout(
             rate=self.dropout,
         )
@@ -140,7 +140,7 @@ def _build(self, input_shape):
             kernel_initializer=self.kernel_initializer,
             bias_initializer=self.bias_initializer,
         )
-        self._outputdropout = keras.layers.Dropout(rate=self.dropout)
+        self._output_dropout = keras.layers.Dropout(rate=self.dropout)

     def _add_and_norm(self, input1, input2, norm_layer):
         return norm_layer(input1 + input2)
@@ -148,7 +148,7 @@ def _add_and_norm(self, input1, input2, norm_layer):
     def _feed_forward(self, input):
         x = self._intermediate_dense(input)
         x = self._output_dense(x)
-        return self._outputdropout(x)
+        return self._output_dropout(x)

     def call(
         self,
@@ -206,7 +206,7 @@ def call(
             decoder_sequence,
             attention_mask=decoder_mask,
         )
-        self_attended = self._self_attentiondropout(self_attended)
+        self_attended = self._self_attention_dropout(self_attended)
         self_attended = self._add_and_norm(
             self_attended, decoder_sequence, self._decoder_attention_layernorm
         )
diff --git a/keras_nlp/layers/transformer_encoder.py b/keras_nlp/layers/transformer_encoder.py
index 17366429bc..ef057aeb04 100644
--- a/keras_nlp/layers/transformer_encoder.py
+++ b/keras_nlp/layers/transformer_encoder.py
@@ -107,7 +107,7 @@ def _build(self, input_shape):
         self._attention_layernorm = keras.layers.LayerNormalization()
         self._feedforward_layernorm = keras.layers.LayerNormalization()

-        self._attentiondropout = keras.layers.Dropout(rate=self.dropout)
+        self._attention_dropout = keras.layers.Dropout(rate=self.dropout)

         self._intermediate_dense = keras.layers.Dense(
             self.intermediate_dim,
@@ -120,7 +120,7 @@ def _build(self, input_shape):
             kernel_initializer=self.kernel_initializer,
             bias_initializer=self.bias_initializer,
         )
-        self._outputdropout = keras.layers.Dropout(rate=self.dropout)
+        self._output_dropout = keras.layers.Dropout(rate=self.dropout)

     def _add_and_norm(self, input1, input2, norm_layer):
         return norm_layer(input1 + input2)
@@ -128,7 +128,7 @@ def _add_and_norm(self, input1, input2, norm_layer):
     def _feed_forward(self, input):
         x = self._intermediate_dense(input)
         x = self._output_dense(x)
-        return self._outputdropout(x)
+        return self._output_dropout(x)

     def call(self, inputs, padding_mask=None, attention_mask=None):
         """Forward pass of the TransformerEncoder.
@@ -161,7 +161,7 @@ def call(self, inputs, padding_mask=None, attention_mask=None):
         attended = self._multi_head_attention_layer(
             inputs, inputs, inputs, attention_mask=mask
         )
-        attended = self._attentiondropout(attended)
+        attended = self._attention_dropout(attended)
         attended = self._add_and_norm(
             inputs,
             attended,
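
A note beyond the patch itself: the renamed *_dropout attributes are the Dropout sublayers that TransformerEncoder and TransformerDecoder apply after attention and after the feed-forward block inside call(). Below is a minimal usage sketch of the two layers touched by this patch; the constructor arguments (intermediate_dim, num_heads, dropout) and the tensor shapes are assumptions based on the layers' public signatures at the time, not something this patch changes.

# Hypothetical usage sketch, not part of the patch above.
import tensorflow as tf
import keras_nlp

# Constructor arguments are assumed from the layers' documented signatures.
encoder = keras_nlp.layers.TransformerEncoder(
    intermediate_dim=64, num_heads=2, dropout=0.1
)
decoder = keras_nlp.layers.TransformerDecoder(
    intermediate_dim=64, num_heads=2, dropout=0.1
)

# Inputs of shape (batch, sequence_length, feature_dim).
encoder_input = tf.random.uniform(shape=(2, 10, 32))
decoder_input = tf.random.uniform(shape=(2, 10, 32))

# Encoder call: self-attention + feed-forward, with the renamed
# _attention_dropout / _output_dropout applied internally.
encoder_output = encoder(encoder_input)

# Decoder call: self-attention (uses the renamed _self_attention_dropout),
# cross-attention over the encoder output, then feed-forward.
decoder_output = decoder(decoder_input, encoder_output)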