1 parent 1e5324f commit 732b445
onmt/utils/optimizers.py
@@ -331,7 +331,7 @@ def step(self):
         if hasattr(self._optimizer, "clip_master_grads") and \
                 self._max_grad_norm > 0:
             import apex
-            torch.nn.utils.glip_grad_norm_(
+            torch.nn.utils.clip_grad_norm_(
                 apex.amp.master_params(self), self._max_grad_norm)
         for group in self._optimizer.param_groups:
             group['lr'] = learning_rate
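For reference, the corrected call follows the standard apex AMP gradient-clipping recipe: clip the FP32 master copies of the gradients rather than the FP16 model parameters. The sketch below is illustrative, not the OpenNMT-py code itself; the model, optimizer, and `max_grad_norm` value are placeholders, and it assumes apex is installed and a CUDA device is available.

```python
import torch
import apex

# Placeholder model/optimizer; OpenNMT-py builds these from its own config.
model = torch.nn.Linear(512, 512).cuda()
optimizer = torch.optim.SGD(model.parameters(), lr=1.0)
model, optimizer = apex.amp.initialize(model, optimizer, opt_level="O2")

max_grad_norm = 5.0  # hypothetical value, stands in for self._max_grad_norm

loss = model(torch.randn(8, 512, device="cuda")).sum()
with apex.amp.scale_loss(loss, optimizer) as scaled_loss:
    scaled_loss.backward()

# clip_grad_norm_ rescales gradients in place so their total L2 norm is at
# most max_grad_norm; under apex AMP it must see the master (FP32) params.
torch.nn.utils.clip_grad_norm_(
    apex.amp.master_params(optimizer), max_grad_norm)
optimizer.step()
```

The original typo, `glip_grad_norm_`, would raise an `AttributeError` the first time this branch executed, since `torch.nn.utils` has no function by that name.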