
Commit 4914da1

Merge pull request #16744 from velconia/locl_rel_1_4_imperative_fix_growing_dict
Fix auto growth bug of optimizer in dygraph mode
2 parents df339c0 + c7cca0a commit 4914da1

File tree

1 file changed: 20 additions, 9 deletions

python/paddle/fluid/optimizer.py

@@ -275,15 +275,26 @@ def _create_optimization_pass(self, parameters_and_grads):
         self._create_global_learning_rate()
 
         optimize_ops = []
-        for param_and_grad in parameters_and_grads:
-            if param_and_grad[1] is None:
-                continue
-            with param_and_grad[0].block.program._optimized_guard(
-                    param_and_grad), name_scope("optimizer"):
-                if param_and_grad[0].trainable is True:
-                    optimize_op = self._append_optimize_op(global_block,
-                                                           param_and_grad)
-                    optimize_ops.append(optimize_op)
+        if framework._in_dygraph_mode():
+            for param_and_grad in parameters_and_grads:
+                if param_and_grad[1] is None:
+                    continue
+                with param_and_grad[0].block.program._optimized_guard(
+                        param_and_grad):
+                    if param_and_grad[0].trainable is True:
+                        optimize_op = self._append_optimize_op(global_block,
+                                                               param_and_grad)
+                        optimize_ops.append(optimize_op)
+        else:
+            for param_and_grad in parameters_and_grads:
+                if param_and_grad[1] is None:
+                    continue
+                with param_and_grad[0].block.program._optimized_guard(
+                        param_and_grad), name_scope("optimizer"):
+                    if param_and_grad[0].trainable is True:
+                        optimize_op = self._append_optimize_op(global_block,
+                                                               param_and_grad)
+                        optimize_ops.append(optimize_op)
 
         # Get custom finish ops for subclasses
         # FIXME: Need to fix this once we figure out how to handle dependencies
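The branch name (imperative_fix_growing_dict) and the shape of the patch suggest the failure mode: in dygraph (imperative) mode, _create_optimization_pass re-runs on every minimize() call, so re-entering name_scope("optimizer") each iteration apparently keeps appending entries to the scope-tracking dict, and the patched path therefore skips the name scope entirely in dygraph mode. The sketch below is a toy illustration of that assumed pattern only; toy_name_scope, _children, and optimization_pass are hypothetical names invented for this example, and the real internals of fluid.name_scope may differ.

# Toy sketch of the assumed growth pattern, not PaddlePaddle's actual internals.
from contextlib import contextmanager

_children = {}                       # prefix -> list of generated scope names

@contextmanager
def toy_name_scope(prefix):
    # Each entry registers a fresh, counter-suffixed name, so repeated
    # entries grow the dict without bound.
    names = _children.setdefault(prefix, [])
    names.append(prefix if not names else "%s_%d" % (prefix, len(names)))
    yield names[-1]

def optimization_pass(dygraph_mode):
    # Mirrors the patched branch: the dygraph path never enters the scope,
    # because this pass runs once per training iteration in that mode.
    if dygraph_mode:
        pass                         # no name scope, so no per-step growth
    else:
        with toy_name_scope("optimizer"):
            pass                     # optimize ops would be appended here

for _ in range(1000):                # 1000 "training iterations"
    optimization_pass(dygraph_mode=False)    # old behaviour: scope every step
print(len(_children["optimizer"]))           # 1000 entries accumulated

_children.clear()
for _ in range(1000):
    optimization_pass(dygraph_mode=True)     # patched dygraph path
print(len(_children.get("optimizer", [])))   # 0

Under that reading, the fix avoids unbounded growth by never entering the scope on the per-iteration dygraph path, while static graph mode keeps the name_scope("optimizer") annotation unchanged.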
