1 parent bde9888 commit 26e5e6a
torchrec/distributed/test_utils/test_sharding.py
@@ -764,7 +764,7 @@ def sharding_single_rank_test_single_process(
     global_model_named_params_as_dict = dict(global_model.named_parameters())
     local_model_named_params_as_dict = dict(local_model.named_parameters())
-
+    # Registers a hook to update parameters in the backward pass, when gradients are computed.
     if apply_optimizer_in_backward_config is not None:
         for apply_optim_name, (
             optimizer_type,
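For context, the comment added in this hunk describes the apply-optimizer-in-backward pattern: each parameter gets a hook that runs its optimizer update as soon as that parameter's gradient has been accumulated, instead of waiting for a separate `optimizer.step()` after `backward()`. The sketch below is a minimal illustration of that idea in plain PyTorch, not the TorchRec code path; it assumes PyTorch >= 2.1 for `Tensor.register_post_accumulate_grad_hook` and uses a hand-rolled SGD update for clarity.

```python
# Minimal sketch (illustration only, not the TorchRec implementation) of applying an
# optimizer step inside the backward pass via per-parameter hooks.
import torch

model = torch.nn.Linear(8, 4)
lr = 0.1  # hypothetical learning rate for the hand-rolled SGD step


def make_hook(lr: float):
    def hook(param: torch.Tensor) -> None:
        # Called after param.grad has been fully accumulated for this backward pass.
        with torch.no_grad():
            param.add_(param.grad, alpha=-lr)  # plain SGD update
        param.grad = None  # free the gradient immediately; no optimizer.step() later
    return hook


# Register one hook per parameter so each is updated as its gradient becomes ready.
for p in model.parameters():
    p.register_post_accumulate_grad_hook(make_hook(lr))

# Usage: a single forward/backward updates the parameters in place.
loss = model(torch.randn(2, 8)).sum()
loss.backward()
```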