
Commit a266926

Fix typos in encoder variable names (#72)
1 parent 44933f7 commit a266926

2 files changed: +8 -8 lines changed

2 files changed

+8
-8
lines changed

keras_nlp/layers/transformer_decoder.py

Lines changed: 4 additions & 4 deletions
@@ -121,7 +121,7 @@ def _build(self, input_shape):
         self._enc_dec_attention_layernorm = keras.layers.LayerNormalization()
         self._feedforward_layernorm = keras.layers.LayerNormalization()

-        self._self_attentiondropout = keras.layers.Dropout(rate=self.dropout)
+        self._self_attention_dropout = keras.layers.Dropout(rate=self.dropout)
         self._enc_dec_attentiondropout = keras.layers.Dropout(
             rate=self.dropout,
         )
@@ -140,15 +140,15 @@ def _build(self, input_shape):
             kernel_initializer=self.kernel_initializer,
             bias_initializer=self.bias_initializer,
         )
-        self._outputdropout = keras.layers.Dropout(rate=self.dropout)
+        self._output_dropout = keras.layers.Dropout(rate=self.dropout)

     def _add_and_norm(self, input1, input2, norm_layer):
         return norm_layer(input1 + input2)

     def _feed_forward(self, input):
         x = self._intermediate_dense(input)
         x = self._output_dense(x)
-        return self._outputdropout(x)
+        return self._output_dropout(x)

     def call(
         self,
@@ -206,7 +206,7 @@ def call(
             decoder_sequence,
             attention_mask=decoder_mask,
         )
-        self_attended = self._self_attentiondropout(self_attended)
+        self_attended = self._self_attention_dropout(self_attended)
         self_attended = self._add_and_norm(
             self_attended, decoder_sequence, self._decoder_attention_layernorm
         )
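
For context: the renamed attributes sit inside the decoder's standard sub-block ordering of attention or feed-forward output, then dropout, then residual add and layer norm, visible above through the `_add_and_norm` and `_feed_forward` helpers. Below is a minimal, illustrative sketch of that pattern in plain Keras; the `ResidualSubBlock` class and its names are hypothetical and not part of keras_nlp, only the dropout/add/norm ordering mirrors the diff.

import tensorflow as tf
from tensorflow import keras


class ResidualSubBlock(keras.layers.Layer):
    """Illustrative sub-block: wrapped layer -> dropout -> residual add & norm."""

    def __init__(self, sublayer, dropout=0.1, **kwargs):
        super().__init__(**kwargs)
        self._sublayer = sublayer
        # snake_case attribute name, matching the convention this commit restores.
        self._sublayer_dropout = keras.layers.Dropout(rate=dropout)
        self._layernorm = keras.layers.LayerNormalization()

    def call(self, inputs):
        x = self._sublayer(inputs)
        x = self._sublayer_dropout(x)
        # Residual connection followed by layer norm, as in
        # `_add_and_norm(input1, input2, norm_layer)` above.
        return self._layernorm(inputs + x)


# Example: wrap a feed-forward projection, roughly what `_feed_forward` does with Dense layers.
block = ResidualSubBlock(keras.layers.Dense(64), dropout=0.1)
out = block(tf.random.uniform((2, 10, 64)))  # shape preserved: (2, 10, 64)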

keras_nlp/layers/transformer_encoder.py

Lines changed: 4 additions & 4 deletions
@@ -107,7 +107,7 @@ def _build(self, input_shape):
         self._attention_layernorm = keras.layers.LayerNormalization()
         self._feedforward_layernorm = keras.layers.LayerNormalization()

-        self._attentiondropout = keras.layers.Dropout(rate=self.dropout)
+        self._attention_dropout = keras.layers.Dropout(rate=self.dropout)

         self._intermediate_dense = keras.layers.Dense(
             self.intermediate_dim,
@@ -120,15 +120,15 @@ def _build(self, input_shape):
             kernel_initializer=self.kernel_initializer,
             bias_initializer=self.bias_initializer,
         )
-        self._outputdropout = keras.layers.Dropout(rate=self.dropout)
+        self._output_dropout = keras.layers.Dropout(rate=self.dropout)

     def _add_and_norm(self, input1, input2, norm_layer):
         return norm_layer(input1 + input2)

     def _feed_forward(self, input):
         x = self._intermediate_dense(input)
         x = self._output_dense(x)
-        return self._outputdropout(x)
+        return self._output_dropout(x)

     def call(self, inputs, padding_mask=None, attention_mask=None):
         """Forward pass of the TransformerEncoder.
@@ -161,7 +161,7 @@ def call(self, inputs, padding_mask=None, attention_mask=None):
         attended = self._multi_head_attention_layer(
             inputs, inputs, inputs, attention_mask=mask
         )
-        attended = self._attentiondropout(attended)
+        attended = self._attention_dropout(attended)
         attended = self._add_and_norm(
             inputs,
             attended,
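
Since only private attribute names change, the encoder's public interface (`call(self, inputs, padding_mask=None, attention_mask=None)`, shown above) is untouched. A minimal usage sketch, assuming the layer's constructor takes `intermediate_dim`, `num_heads`, and `dropout` (the shapes and values below are illustrative):

import tensorflow as tf
import keras_nlp

# Hypothetical toy input: batch of 2 sequences, length 10, feature size 64.
inputs = tf.random.uniform(shape=(2, 10, 64))

encoder = keras_nlp.layers.TransformerEncoder(
    intermediate_dim=128,
    num_heads=4,
    dropout=0.1,  # rate used by the `_attention_dropout` / `_output_dropout` layers
)

outputs = encoder(inputs)  # `padding_mask` / `attention_mask` are optional
print(outputs.shape)  # (2, 10, 64)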
