1 parent 2786381 commit de3e79c
paddlenlp/transformers/skep/modeling.py
@@ -342,7 +342,7 @@ def forward(self,
         """
         if attention_mask is None:
             attention_mask = paddle.unsqueeze(
-                (input_ids == self.pad_token_id
+                (input_ids.astype("int64") == self.pad_token_id
                 ).astype(self.pooler.dense.weight.dtype) * -1e9,
                 axis=[1, 2])
         embedding_output = self.embeddings(
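
The hunk builds the default attention mask from input_ids: positions equal to pad_token_id get a large negative bias (-1e9) so softmax attention ignores them, and the added .astype("int64") cast normalizes the dtype of input_ids before the equality comparison. Below is a minimal, self-contained sketch of that mask construction, assuming PaddlePaddle is installed; pad_token_id = 0 and the float32 dtype are illustrative stand-ins for the model's own attributes.

import paddle

# Illustrative stand-ins (assumptions, not values read from the model):
pad_token_id = 0       # plays the role of self.pad_token_id
mask_dtype = "float32" # plays the role of self.pooler.dense.weight.dtype

# A hypothetical batch of token ids in a non-int64 dtype.
input_ids = paddle.to_tensor([[5, 7, 0, 0]], dtype="int32")

# Mirror of the patched line: cast, compare against the pad id,
# convert to float, scale to a large negative bias, and broadcastable
# shape [batch, 1, 1, seq_len] via unsqueeze on axes 1 and 2.
attention_mask = paddle.unsqueeze(
    (input_ids.astype("int64") == pad_token_id).astype(mask_dtype) * -1e9,
    axis=[1, 2])

print(attention_mask.shape)    # [1, 1, 1, 4]
print(attention_mask.numpy())  # 0.0 at real tokens, -1e9 at padding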