Commit 55edfca

revert unused change
1 parent fec0b19 commit 55edfca

1 file changed: +15 -17 lines changed

python/paddle/fluid/optimizer.py

Lines changed: 15 additions & 17 deletions
```diff
@@ -292,28 +292,26 @@ def minimize(self,
         This method combines interface `append_backward()` and
         `create_optimization_pass()` into one.
         """
-        with program_guard(loss.block.program, startup_program):
+        params_grads = append_backward(loss, parameter_list, no_grad_set,
+                                       [error_clip_callback])
 
-            params_grads = append_backward(loss, parameter_list, no_grad_set,
-                                           [error_clip_callback])
+        params_grads = sorted(params_grads, key=lambda x: x[0].name)
 
-            params_grads = sorted(params_grads, key=lambda x: x[0].name)
+        params_grads, table_param_and_grad, table_optimize_op = \
+            self._process_distribute_lookuptable(params_grads, loss, startup_program)
 
-            params_grads, table_param_and_grad, table_optimize_op = \
-                self._process_distribute_lookuptable(params_grads, loss, startup_program)
+        params_grads = append_gradient_clip_ops(params_grads)
 
-            params_grads = append_gradient_clip_ops(params_grads)
+        # Add regularization if any
+        params_grads = append_regularization_ops(params_grads,
+                                                 self.regularization)
 
-            # Add regularization if any
-            params_grads = append_regularization_ops(params_grads,
-                                                     self.regularization)
-
-            optimize_ops = self._create_optimization_pass(params_grads, loss,
-                                                          startup_program)
-            if table_optimize_op is not None:
-                optimize_ops.append(table_optimize_op)
-                params_grads.append(table_param_and_grad)
-            return optimize_ops, params_grads
+        optimize_ops = self._create_optimization_pass(params_grads, loss,
+                                                      startup_program)
+        if table_optimize_op is not None:
+            optimize_ops.append(table_optimize_op)
+            params_grads.append(table_param_and_grad)
+        return optimize_ops, params_grads
 
 
 class SGDOptimizer(Optimizer):
```
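The reverted change had wrapped the body of `minimize` in `program_guard`, which temporarily switches the default main/startup programs so that ops created inside the `with` block land in the given programs. The commit message calls this change unused, presumably because the helpers here already receive `loss` and `startup_program` explicitly and operate on the program that holds `loss`. A minimal sketch of `program_guard` semantics against the fluid API (the program and layer names are illustrative, not part of this commit):

```python
import paddle.fluid as fluid

# Two fresh programs; without the guard, ops would instead be appended
# to the global default main/startup programs.
main_prog = fluid.Program()
startup_prog = fluid.Program()

# Inside the guard, variables and ops created by layer calls go into
# main_prog, and their parameter initializers into startup_prog.
with fluid.program_guard(main_prog, startup_prog):
    x = fluid.layers.data(name='x', shape=[13], dtype='float32')
    hidden = fluid.layers.fc(input=x, size=32)
```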

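For orientation, `minimize` is the user-facing entry point of fluid optimizers: as the hunk shows, it chains `append_backward`, the distributed lookup-table pass, gradient clipping, regularization, and `_create_optimization_pass`, then returns the optimization ops and the (param, grad) pairs. A hedged usage sketch against the fluid API of this era (the toy regression network is illustrative):

```python
import paddle.fluid as fluid

# A toy linear-regression network.
x = fluid.layers.data(name='x', shape=[13], dtype='float32')
y = fluid.layers.data(name='y', shape=[1], dtype='float32')
y_pred = fluid.layers.fc(input=x, size=1)
loss = fluid.layers.mean(
    fluid.layers.square_error_cost(input=y_pred, label=y))

sgd = fluid.optimizer.SGD(learning_rate=0.01)
# minimize() appends backward and optimization ops to the program that
# holds `loss` and returns (optimize_ops, params_grads), per the diff.
optimize_ops, params_grads = sgd.minimize(loss)
```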