1 parent efd714b · commit ad0e1fd
megatron/optimizer/__init__.py
@@ -113,7 +113,7 @@ def get_megatron_optimizer(model,
 
     # Determine whether the params have main-grad field.
     params_have_main_grad = False
-    if args.DDP_impl == 'local':
+    if args.use_contiguous_buffers_in_local_ddp:
         params_have_main_grad = True
 
     # Mixed precision optimizer.
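For context, a minimal sketch of why this gate matters, assuming a simplified mixed-precision wrapper. The class SimpleMixedPrecisionOptimizer below and its method names are hypothetical and are not Megatron-LM's actual implementation; only params_have_main_grad, main_grad, and the use_contiguous_buffers_in_local_ddp flag come from the diff above. The sketch illustrates that main_grad is only meaningful when the fused contiguous gradient buffer is allocated, so the flag should follow the buffer setting rather than the DDP implementation alone.

# Minimal sketch (hypothetical, not Megatron-LM's actual optimizer code):
# a mixed-precision wrapper that reads `param.main_grad` only when the
# contiguous grad buffer of local DDP is in use.
import torch


class SimpleMixedPrecisionOptimizer:
    """Keeps fp32 master params and copies model grads into them before a step."""

    def __init__(self, params, params_have_main_grad):
        self.params = list(params)
        self.params_have_main_grad = params_have_main_grad
        # fp32 master copies of the (possibly fp16) model params.
        self.main_params = [
            torch.nn.Parameter(p.detach().clone().float()) for p in self.params
        ]

    def copy_model_grads_to_main_grads(self):
        for model_param, main_param in zip(self.params, self.main_params):
            # `main_grad` is a view into the fused contiguous buffer; it only
            # exists when that buffer was allocated, so gate on the flag.
            if self.params_have_main_grad:
                grad = model_param.main_grad
            else:
                grad = model_param.grad
            main_param.grad = grad.float()


if __name__ == "__main__":
    # Path without contiguous buffers: gradients live in `param.grad`.
    p = torch.nn.Parameter(torch.ones(4, dtype=torch.float16))
    p.grad = torch.full_like(p, 0.5)
    opt = SimpleMixedPrecisionOptimizer([p], params_have_main_grad=False)
    opt.copy_model_grads_to_main_grads()
    print(opt.main_params[0].grad)  # fp32 tensor of 0.5

    # Path with contiguous buffers: gradients live in `param.main_grad` instead.
    q = torch.nn.Parameter(torch.ones(4, dtype=torch.float16))
    q.main_grad = torch.full(q.shape, 0.25, dtype=torch.float32)
    opt = SimpleMixedPrecisionOptimizer([q], params_have_main_grad=True)
    opt.copy_model_grads_to_main_grads()
    print(opt.main_params[0].grad)  # fp32 tensor of 0.25

Under this reading, tying the flag to use_contiguous_buffers_in_local_ddp (rather than to DDP_impl == 'local' alone) presumably prevents the optimizer from reading a main_grad field that was never populated when local DDP runs without contiguous buffers.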