
Commit efafc72

Hide program APIs (#12315)
* hide program APIs
* fix merge error
* update
1 parent c9e5c1e · commit efafc72
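
Every hunk below applies the same mechanical change: Program methods that are implementation details gain a leading underscore (Python's convention for internal APIs), and all in-tree call sites are updated to match. A minimal before/after sketch of the pair renamed most often in this diff:

    import paddle.fluid as fluid

    prog = fluid.Program()

    # Before this commit (public, now dropped from the API surface):
    #   sub = prog.create_block()
    #   prog.rollback()

    # After this commit (leading underscore marks the methods internal):
    sub = prog._create_block()   # push a new current block
    prog._rollback()             # pop back to the parent block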

File tree

14 files changed, +46 −53 lines

paddle/fluid/API.spec

Lines changed: 0 additions & 7 deletions
@@ -1,17 +1,10 @@
 paddle.fluid.Program.__init__ ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.block ArgSpec(args=['self', 'index'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.clone ArgSpec(args=['self', 'for_test'], varargs=None, keywords=None, defaults=(False,))
-paddle.fluid.Program.copy_data_info_from ArgSpec(args=['self', 'other'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Program.create_block ArgSpec(args=['self', 'parent_idx'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.Program.current_block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Program.get_desc ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.global_block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Program.inference_optimize ArgSpec(args=['self', 'export_for_deployment'], varargs=None, keywords=None, defaults=(True,))
 paddle.fluid.Program.list_vars ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Program.optimized_guard ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
 paddle.fluid.Program.parse_from_string ArgSpec(args=['binary_str'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Program.prune ArgSpec(args=['self', 'targets'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Program.rollback ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.to_string ArgSpec(args=['self', 'throw_on_error', 'with_details'], varargs=None, keywords=None, defaults=(False,))
 paddle.fluid.Operator.__init__ ArgSpec(args=['self', 'block', 'desc', 'type', 'inputs', 'outputs', 'attrs'], varargs=None, keywords=None, defaults=(None, None, None, None))
 paddle.fluid.Operator.all_attrs ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
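
API.spec pins PaddlePaddle's public Python surface: each line is the repr of an inspect ArgSpec for one exported callable, so deleting these seven entries is what actually removes the methods from the tracked public API. A sketch of how such an entry can be rendered (an assumption for illustration; the repository's real spec generator may differ):

    import inspect
    import paddle.fluid as fluid

    def api_spec_entry(qualified_name, func):
        # inspect.getargspec returns a namedtuple whose repr matches the
        # "ArgSpec(args=..., varargs=..., keywords=..., defaults=...)"
        # form used in API.spec (Python 2 / early Python 3 era).
        return "{} {!r}".format(qualified_name, inspect.getargspec(func))

    print(api_spec_entry("paddle.fluid.Program.clone", fluid.Program.clone))
    # -> paddle.fluid.Program.clone ArgSpec(args=['self', 'for_test'], ...)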

python/paddle/fluid/backward.py

Lines changed: 2 additions & 2 deletions
@@ -347,7 +347,7 @@ def _append_backward_ops_(block,
         # If the op has its own sub-block, deal with the sub-block first
         if op.has_attr("sub_block"):
             sub_block = program.block(op.block_attr_id("sub_block"))
-            grad_sub_block = program.create_block()
+            grad_sub_block = program._create_block()
             grad_sub_block._set_forward_block_idx(sub_block.idx)
             cb = _callback_lookup_(op)
             if cb is not None:
@@ -361,7 +361,7 @@ def _append_backward_ops_(block,
             _append_backward_ops_(sub_block, sub_block.ops, grad_sub_block,
                                   no_grad_dict, grad_to_var, callbacks)
 
-            program.rollback()
+            program._rollback()
             grad_sub_block_list.append(grad_sub_block.desc)
 
         # Getting op's corresponding grad_op
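
The two renamed calls are a push/pop pair: _create_block() makes a fresh block the program's current block, so the recursively appended gradient ops land in it, and _rollback() restores the parent as the current block. A condensed sketch of the pairing (surrounding names as in the hunk above):

    grad_sub_block = program._create_block()      # new current block
    grad_sub_block._set_forward_block_idx(sub_block.idx)
    # ... gradient ops for the forward sub-block are appended here,
    # into program.current_block() == grad_sub_block ...
    program._rollback()                           # parent is current again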

python/paddle/fluid/clip.py

Lines changed: 2 additions & 2 deletions
@@ -331,7 +331,7 @@ def append_gradient_clip_ops(param_grads):
     for p, g in param_grads:
         if g is None:
             continue
-        with p.block.program.optimized_guard([p, g]):
+        with p.block.program._optimized_guard([p, g]):
             clip_attr = getattr(p, 'gradient_clip_attr', NullGradientClipAttr())
             if clip_attr is None:
                 clip_attr = NullGradientClipAttr()
@@ -346,7 +346,7 @@ def append_gradient_clip_ops(param_grads):
     for p, g in param_grads:
         if g is None:
             continue
-        with p.block.program.optimized_guard([p, g]):
+        with p.block.program._optimized_guard([p, g]):
             res.append(clip_attr._create_operators(param=p, grad=g))
 
     return res

python/paddle/fluid/concurrency.py

Lines changed: 2 additions & 2 deletions
@@ -126,7 +126,7 @@ def __init__(self,
         self.channel = channel
 
     def __enter__(self):
-        self.block = self.main_program.create_block()
+        self.block = self.main_program._create_block()
 
     def construct_op(self):
         main_program = self.helper.main_program
@@ -187,7 +187,7 @@ def construct_op(self):
                       if self.value else '')
 
     def __exit__(self, exc_type, exc_val, exc_tb):
-        self.main_program.rollback()
+        self.main_program._rollback()
         if exc_type is not None:
             return False  # re-raise exception
         return True

python/paddle/fluid/framework.py

Lines changed: 11 additions & 11 deletions
@@ -935,7 +935,7 @@ class Block(object):
 
     Notes:
         The constructor of Block should not be invoked directly. Please
-        use `Program.create_block()` to create a block.
+        use `Program._create_block()` to create a block.
 
     Examples:
         .. code-block:: python
@@ -1483,7 +1483,7 @@ def set_op_role_var(self, var_name):
         self._op_role_var = [var_name]
 
     @contextlib.contextmanager
-    def optimized_guard(self, param_and_grads):
+    def _optimized_guard(self, param_and_grads):
         """
         A with guard to set :code:`Optimization` :code:`OpRole` and
         :code:`OpRoleVar` automatically.
@@ -1496,7 +1496,7 @@ def optimized_guard(self, param_and_grads):
         Examples:
 
             >>> p, g = backward(...)
-            >>> with program.optimized_guard([p,g]):
+            >>> with program._optimized_guard([p,g]):
             >>>     p = p - 0.001 * g
         """
         OpRole = core.op_proto_and_checker_maker.OpRole
@@ -1554,7 +1554,7 @@ def to_string(self, throw_on_error, with_details=False):
         res_str = _debug_string_(proto, throw_on_error)
         return res_str
 
-    def get_desc(self):
+    def _get_desc(self):
         """
         Get the C++ side of `ProgramDesc` object pointer. The C++ object is
         exposed by :code:`pybind`.
@@ -1647,7 +1647,7 @@ def clone(self, for_test=False):
         The two code snippets above will generate same programs.
         """
         if for_test:
-            p = self.inference_optimize(export_for_deployment=False)
+            p = self._inference_optimize(export_for_deployment=False)
         else:
             p = Program()
             p.current_block_idx = self.current_block_idx
@@ -1663,10 +1663,10 @@ def clone(self, for_test=False):
             p._sync_with_cpp()
 
         p._copy_param_info_from(self)
-        p.copy_data_info_from(self)
+        p._copy_data_info_from(self)
         return p
 
-    def prune(self, targets):
+    def _prune(self, targets):
         """
         Prune operators and variables which are not needed to generate
         :code:`targets`.
@@ -1717,7 +1717,7 @@ def prune(self, targets):
         res._sync_with_cpp()
         return res
 
-    def inference_optimize(self, export_for_deployment=True):
+    def _inference_optimize(self, export_for_deployment=True):
         """
         This method will create a new program and do following adjustments on it:
         1. Remove all reader variables and their creator ops if exist.
@@ -1841,7 +1841,7 @@ def current_block(self):
         """
         return self.blocks[self.current_block_idx]
 
-    def create_block(self, parent_idx=None):
+    def _create_block(self, parent_idx=None):
         """
         Create a new block with the :code:`parent_idx` and change the current block
         to new block.
@@ -1860,7 +1860,7 @@ def create_block(self, parent_idx=None):
         self.blocks.append(Block(self, self.current_block_idx))
         return self.current_block()
 
-    def rollback(self):
+    def _rollback(self):
         """
         Exit a code block, i.e., roll back to the parent block.
         Returns:
@@ -1906,7 +1906,7 @@ def _copy_param_info_from(self, other):
                 "program, with represent the same topology")
         self.global_block()._copy_param_info_from(other.global_block())
 
-    def copy_data_info_from(self, other):
+    def _copy_data_info_from(self, other):
         """
         Copy the information of data variables from other program.
 
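With prune and inference_optimize now internal, the supported public route to a test-time program is clone(for_test=True), which this diff shows delegating to _inference_optimize(export_for_deployment=False). A minimal sketch (layer names are illustrative, not part of this commit):

    import paddle.fluid as fluid

    train_prog = fluid.Program()
    startup_prog = fluid.Program()
    with fluid.program_guard(train_prog, startup_prog):
        x = fluid.layers.data(name='x', shape=[13], dtype='float32')
        y = fluid.layers.fc(input=x, size=1)

    # Public API; internally calls _inference_optimize(export_for_deployment=False).
    test_prog = train_prog.clone(for_test=True)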

python/paddle/fluid/io.py

Lines changed: 4 additions & 4 deletions
@@ -515,8 +515,8 @@ def get_inference_program(target_vars, main_program=None):
             vars.extend(var.metrics)
         else:
             vars.append(var)
-    pruned_program = main_program.prune(targets=vars)
-    inference_program = pruned_program.inference_optimize()
+    pruned_program = main_program._prune(targets=vars)
+    inference_program = pruned_program._inference_optimize()
     return inference_program
 
 
@@ -644,8 +644,8 @@ def save_inference_model(dirname,
             global_block._remove_op(i)
     copy_program.desc.flush()
 
-    pruned_program = copy_program.prune(targets=target_vars)
-    inference_program = pruned_program.inference_optimize(
+    pruned_program = copy_program._prune(targets=target_vars)
+    inference_program = pruned_program._inference_optimize(
         export_for_deployment=export_for_deployment)
     fetch_var_names = [v.name for v in target_vars]
 
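Callers should not need _prune or _inference_optimize directly; save_inference_model wraps the prune-then-optimize pipeline shown above. A usage sketch (directory and variable names hypothetical):

    import paddle.fluid as fluid

    exe = fluid.Executor(fluid.CPUPlace())
    # Internally: _prune(targets=...) followed by _inference_optimize(...).
    fluid.io.save_inference_model(
        dirname='./infer_model',
        feeded_var_names=['x'],
        target_vars=[prediction],   # `prediction` assumed built earlier
        executor=exe)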

python/paddle/fluid/layers/control_flow.py

Lines changed: 2 additions & 2 deletions
@@ -217,10 +217,10 @@ def __init__(self, main_program):
         self.main_program = main_program
 
     def __enter__(self):
-        self.main_program.create_block()
+        self.main_program._create_block()
 
     def __exit__(self, exc_type, exc_val, exc_tb):
-        self.main_program.rollback()
+        self.main_program._rollback()
         if exc_type is not None:
             return False  # re-raise exception
         return True
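
BlockGuard packages the _create_block()/_rollback() pair as a context manager so control-flow layers can scope their ops to a sub-block. A usage sketch of the guard shown above (the body is illustrative):

    prog = fluid.default_main_program()
    with BlockGuard(prog):
        # Ops appended here land in the newly created sub-block,
        # since layer helpers write to prog.current_block().
        pass
    # On exit, _rollback() has made the parent block current again.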

python/paddle/fluid/layers/io.py

Lines changed: 2 additions & 2 deletions
@@ -1008,9 +1008,9 @@ def _is_completed(self):
     @contextlib.contextmanager
     def block(self):
         self.status = Preprocessor.IN_SUB_BLOCK
-        self.sub_block = self.main_prog.create_block()
+        self.sub_block = self.main_prog._create_block()
         yield
-        self.main_prog.rollback()
+        self.main_prog._rollback()
         self.status = Preprocessor.AFTER_SUB_BLOCK
         if not self._is_completed():
             raise RuntimeError(

python/paddle/fluid/optimizer.py

Lines changed: 4 additions & 4 deletions
@@ -236,7 +236,7 @@ def _create_optimization_pass(self,
         for param_and_grad in parameters_and_grads:
             if param_and_grad[1] is None:
                 continue
-            with param_and_grad[0].block.program.optimized_guard(
+            with param_and_grad[0].block.program._optimized_guard(
                     param_and_grad), name_scope("optimizer"):
                 if param_and_grad[0].trainable is True:
                     optimize_op = self._append_optimize_op(loss.block,
@@ -580,7 +580,7 @@ def _finish_update(self, block, param_and_grads):
         for param, grad in param_and_grads:
             if grad is None:
                 continue
-            with param.block.program.optimized_guard([param, grad]):
+            with param.block.program._optimized_guard([param, grad]):
                 beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str,
                                                       param)
                 beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str,
@@ -709,7 +709,7 @@ def _finish_update(self, block, parameters_and_grads):
         for param, grad in parameters_and_grads:
             if grad is None:
                 continue
-            with param.block.program.optimized_guard([param, grad]):
+            with param.block.program._optimized_guard([param, grad]):
                 beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str,
                                                       param)
                 main_block.append_op(
@@ -1198,7 +1198,7 @@ def __init__(self,
         for param, grad in self.params_grads:
             if grad is None:
                 continue
-            with param.block.program.optimized_guard([param, grad]):
+            with param.block.program._optimized_guard([param, grad]):
                 self._append_average_accumulate_op(param)
 
         self.apply_program = Program()

python/paddle/fluid/regularizer.py

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@ def append_regularization_ops(parameters_and_grads, regularization=None):
         if grad is None:
             params_and_grads.append((param, grad))
             continue
-        with param.block.program.optimized_guard([param, grad]):
+        with param.block.program._optimized_guard([param, grad]):
             regularization_term = None
             if param.regularizer is not None:
                 # Add variable for regularization term in grad block
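
clip.py, optimizer.py, and regularizer.py all use the guard the same way; a condensed sketch of the shared pattern (params_and_grads hypothetical):

    for param, grad in params_and_grads:
        if grad is None:
            continue
        with param.block.program._optimized_guard([param, grad]):
            # Ops appended inside the guard are automatically tagged with
            # the Optimize OpRole and [param, grad] as the OpRoleVar.
            pass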
