Skip to content

Commit 1c19f1a

Browse files
committed
Do not change API in doc PR
1 parent 7747e01 commit 1c19f1a

File tree

4 files changed

+6
-6
lines changed

4 files changed

+6
-6
lines changed

python/paddle/fluid/clip.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -215,7 +215,7 @@ def set_gradient_clip(clip, param_list=None, program=None):
215  215      def append_gradient_clip_ops(param_grad):
216  216          context = dict()
217  217          for p, g in param_grad:
218       -          with p.block.program.optimization_guard(p):
     218  +          with p.block.program.optimized_guard(p):
219  219              clip_attr = getattr(p, 'gradient_clip_attr', NullGradientClipAttr())
220  220              if clip_attr is None:
221  221                  clip_attr = NullGradientClipAttr()
@@ -228,7 +228,7 @@ def append_gradient_clip_ops(param_grad):
228  228
229  229          res = []
230  230          for p, g in param_grad:
231       -          with p.block.program.optimization_guard(p):
     231  +          with p.block.program.optimized_guard(p):
232  232              res.append(clip_attr.create_operators(param=p, grad=g))
233  233
234  234          return res

python/paddle/fluid/framework.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1103,7 +1103,7 @@ def set_op_role_var(self, var_name):
1103  1103          self._op_role_var = [var_name]
1104  1104
1105  1105      @contextlib.contextmanager
1106        -      def optimization_guard(self, var):
      1106  +      def optimized_guard(self, var):
1107  1107          """
1108  1108          A with guard to set :code:`Optimization` :code:`OpRole` and
1109  1109          :code:`OpRoleVar` automatically.
@@ -1116,7 +1116,7 @@ def optimized_guard(self, var):
1116  1116          Examples:
1117  1117
1118  1118          >>> p, g = backward(...)
1119        -          >>> with program.optimization_guard(p):
      1119  +          >>> with program.optimized_guard(p):
1120  1120          >>> p = p - 0.001 * g
1121  1121          """
1122  1122          OpRole = core.op_proto_and_checker_maker.OpRole

python/paddle/fluid/optimizer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -226,7 +226,7 @@ def create_optimization_pass(self,
226  226
227  227          optimize_ops = []
228  228          for param_and_grad in parameters_and_grads:
229       -          with param_and_grad[0].block.program.optimization_guard(
     229  +          with param_and_grad[0].block.program.optimized_guard(
230  230                  param_and_grad[0]):
231  231              if param_and_grad[0].trainable is True and param_and_grad[
232  232                      1] is not None:

python/paddle/fluid/regularizer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ def append_regularization_ops(parameters_and_grads, regularization=None):
43  43      """
44  44      params_and_grads = []
45  45      for param, grad in parameters_and_grads:
46      -      with param.block.program.optimization_guard(param):
    46  +      with param.block.program.optimized_guard(param):
47  47          # If no gradient then we don't need to do anything
48  48          if grad is None:
49  49              params_and_grads.append((param, grad))

0 commit comments

Comments (0)