1 parent f28e6b3 commit 122cf27
src/lightning/pytorch/demos/transformer.py
@@ -54,7 +54,7 @@ def __init__(
 
         self.ninp = ninp
         self.vocab_size = vocab_size
-        self.src_mask = None
+        self.src_mask: Optional[Tensor] = None
 
     def generate_square_subsequent_mask(self, size: int) -> Tensor:
         """Generate a square mask for the sequence to prevent future tokens from being seen."""