1 parent 060cc7b commit 834562b
fastvideo/v1/layers/layernorm.py
@@ -37,7 +37,6 @@ def __init__(
         self.weight = torch.ones(hidden_size)
         if self.has_weight:
             self.weight = nn.Parameter(self.weight)
-
 
     # if we do fully_shard(model.layer_norm), and we call layer_form.forward_native(input) instead of layer_norm(input),
     # we need to call model.layer_norm.register_fsdp_forward_method(model, "forward_native") to make sure fsdp2 hooks are triggered
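
The comment preserved in this hunk points at an FSDP2 subtlety: wrapping a module with fully_shard installs its pre/post-forward hooks on __call__, so invoking an alternative entry point such as forward_native directly will skip the parameter all-gather unless that method is registered. The sketch below is not part of the commit; it assumes PyTorch >= 2.6 (where fully_shard and register_fsdp_forward_method are exported from torch.distributed.fsdp), an already-initialized process group, and a hypothetical RMSNorm stand-in rather than fastvideo's actual class.

```python
# Minimal sketch of the FSDP2 forward-method registration described above.
# Assumes torch.distributed has been initialized (e.g. via torchrun) before
# fully_shard is called; the RMSNorm module is illustrative only.
import torch
import torch.nn as nn
from torch.distributed.fsdp import fully_shard, register_fsdp_forward_method


class RMSNorm(nn.Module):
    def __init__(self, hidden_size: int, eps: float = 1e-6) -> None:
        super().__init__()
        self.eps = eps
        self.weight = nn.Parameter(torch.ones(hidden_size))

    def forward_native(self, x: torch.Tensor) -> torch.Tensor:
        # Reference implementation, sometimes called directly instead of __call__.
        variance = x.pow(2).mean(-1, keepdim=True)
        return self.weight * x * torch.rsqrt(variance + self.eps)

    forward = forward_native


norm = RMSNorm(1024)
fully_shard(norm)  # wrap with FSDP2; parameters become sharded DTensors
# norm(x) goes through __call__, so FSDP2's pre/post-forward hooks
# (all-gather, reshard) run automatically. norm.forward_native(x) bypasses
# those hooks unless the method is registered explicitly:
register_fsdp_forward_method(norm, "forward_native")
```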