Skip to content

Commit 4f82d33

Browse files
authored
Merge pull request #15685 from sneaxiy/release/1.3
Cherry-pick signature error to Release/1.3
2 parents 9720a13 + 4b2794c commit 4f82d33

File tree

16 files changed

+76
-45
lines changed

16 files changed

+76
-45
lines changed

paddle/fluid/API.spec

Lines changed: 12 additions & 12 deletions
Original file line number · Diff line number · Diff line change
@@ -8,13 +8,13 @@ paddle.fluid.Program.parse_from_string ArgSpec(args=['binary_str'], varargs=None
88
paddle.fluid.Program.to_string ArgSpec(args=['self', 'throw_on_error', 'with_details'], varargs=None, keywords=None, defaults=(False,))
99
paddle.fluid.default_startup_program ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
1010
paddle.fluid.default_main_program ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
11-
paddle.fluid.program_guard ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
12-
paddle.fluid.name_scope ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
11+
paddle.fluid.program_guard ArgSpec(args=['main_program', 'startup_program'], varargs=None, keywords=None, defaults=(None,))
12+
paddle.fluid.name_scope ArgSpec(args=['prefix'], varargs=None, keywords=None, defaults=(None,))
1313
paddle.fluid.Executor.__init__ ArgSpec(args=['self', 'place'], varargs=None, keywords=None, defaults=None)
1414
paddle.fluid.Executor.close ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
1515
paddle.fluid.Executor.run ArgSpec(args=['self', 'program', 'feed', 'fetch_list', 'feed_var_name', 'fetch_var_name', 'scope', 'return_numpy', 'use_program_cache'], varargs=None, keywords=None, defaults=(None, None, None, 'feed', 'fetch', None, True, False))
1616
paddle.fluid.global_scope ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
17-
paddle.fluid.scope_guard ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
17+
paddle.fluid.scope_guard ArgSpec(args=['scope'], varargs=None, keywords=None, defaults=None)
1818
paddle.fluid.DistributeTranspiler.__init__ ArgSpec(args=['self', 'config'], varargs=None, keywords=None, defaults=(None,))
1919
paddle.fluid.DistributeTranspiler.get_pserver_program ArgSpec(args=['self', 'endpoint'], varargs=None, keywords=None, defaults=None)
2020
paddle.fluid.DistributeTranspiler.get_pserver_programs ArgSpec(args=['self', 'endpoint'], varargs=None, keywords=None, defaults=None)
@@ -66,7 +66,7 @@ paddle.fluid.initializer.XavierInitializer.__init__ ArgSpec(args=['self', 'unifo
6666
paddle.fluid.initializer.BilinearInitializer.__init__ ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
6767
paddle.fluid.initializer.MSRAInitializer.__init__ ArgSpec(args=['self', 'uniform', 'fan_in', 'seed'], varargs=None, keywords=None, defaults=(True, None, 0))
6868
paddle.fluid.initializer.force_init_on_cpu ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
69-
paddle.fluid.initializer.init_on_cpu ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
69+
paddle.fluid.initializer.init_on_cpu ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
7070
paddle.fluid.initializer.NumpyArrayInitializer.__init__ ArgSpec(args=['self', 'value'], varargs=None, keywords=None, defaults=None)
7171
paddle.fluid.layers.fc ArgSpec(args=['input', 'size', 'num_flatten_dims', 'param_attr', 'bias_attr', 'act', 'is_test', 'name'], varargs=None, keywords=None, defaults=(1, None, None, None, False, None))
7272
paddle.fluid.layers.embedding ArgSpec(args=['input', 'size', 'is_sparse', 'is_distributed', 'padding_idx', 'param_attr', 'dtype'], varargs=None, keywords=None, defaults=(False, False, None, None, 'float32'))
@@ -229,7 +229,7 @@ paddle.fluid.layers.random_data_generator ArgSpec(args=['low', 'high', 'shapes',
229229
paddle.fluid.layers.py_reader ArgSpec(args=['capacity', 'shapes', 'dtypes', 'lod_levels', 'name', 'use_double_buffer'], varargs=None, keywords=None, defaults=(None, None, True))
230230
paddle.fluid.layers.create_py_reader_by_data ArgSpec(args=['capacity', 'feed_list', 'name', 'use_double_buffer'], varargs=None, keywords=None, defaults=(None, True))
231231
paddle.fluid.layers.Preprocessor.__init__ ArgSpec(args=['self', 'reader', 'name'], varargs=None, keywords=None, defaults=(None,))
232-
paddle.fluid.layers.Preprocessor.block ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
232+
paddle.fluid.layers.Preprocessor.block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
233233
paddle.fluid.layers.Preprocessor.inputs ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
234234
paddle.fluid.layers.Preprocessor.outputs ArgSpec(args=['self'], varargs='outs', keywords=None, defaults=None)
235235
paddle.fluid.layers.load ArgSpec(args=['out', 'file_path', 'load_as_fp16'], varargs=None, keywords=None, defaults=(None,))
@@ -270,7 +270,7 @@ paddle.fluid.layers.IfElse.input ArgSpec(args=['self', 'x'], varargs=None, keywo
270270
paddle.fluid.layers.IfElse.output ArgSpec(args=['self'], varargs='outs', keywords=None, defaults=None)
271271
paddle.fluid.layers.IfElse.true_block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
272272
paddle.fluid.layers.DynamicRNN.__init__ ArgSpec(args=['self', 'name'], varargs=None, keywords=None, defaults=(None,))
273-
paddle.fluid.layers.DynamicRNN.block ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
273+
paddle.fluid.layers.DynamicRNN.block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
274274
paddle.fluid.layers.DynamicRNN.memory ArgSpec(args=['self', 'init', 'shape', 'value', 'need_reorder', 'dtype'], varargs=None, keywords=None, defaults=(None, None, 0.0, False, 'float32'))
275275
paddle.fluid.layers.DynamicRNN.output ArgSpec(args=['self'], varargs='outputs', keywords=None, defaults=None)
276276
paddle.fluid.layers.DynamicRNN.static_input ArgSpec(args=['self', 'x'], varargs=None, keywords=None, defaults=None)
@@ -346,12 +346,12 @@ paddle.fluid.contrib.StateCell.set_state ArgSpec(args=['self', 'state_name', 'st
346346
paddle.fluid.contrib.StateCell.state_updater ArgSpec(args=['self', 'updater'], varargs=None, keywords=None, defaults=None)
347347
paddle.fluid.contrib.StateCell.update_states ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
348348
paddle.fluid.contrib.TrainingDecoder.__init__ ArgSpec(args=['self', 'state_cell', 'name'], varargs=None, keywords=None, defaults=(None,))
349-
paddle.fluid.contrib.TrainingDecoder.block ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
349+
paddle.fluid.contrib.TrainingDecoder.block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
350350
paddle.fluid.contrib.TrainingDecoder.output ArgSpec(args=['self'], varargs='outputs', keywords=None, defaults=None)
351351
paddle.fluid.contrib.TrainingDecoder.static_input ArgSpec(args=['self', 'x'], varargs=None, keywords=None, defaults=None)
352352
paddle.fluid.contrib.TrainingDecoder.step_input ArgSpec(args=['self', 'x'], varargs=None, keywords=None, defaults=None)
353353
paddle.fluid.contrib.BeamSearchDecoder.__init__ ArgSpec(args=['self', 'state_cell', 'init_ids', 'init_scores', 'target_dict_dim', 'word_dim', 'input_var_dict', 'topk_size', 'sparse_emb', 'max_len', 'beam_size', 'end_id', 'name'], varargs=None, keywords=None, defaults=({}, 50, True, 100, 1, 1, None))
354-
paddle.fluid.contrib.BeamSearchDecoder.block ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
354+
paddle.fluid.contrib.BeamSearchDecoder.block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
355355
paddle.fluid.contrib.BeamSearchDecoder.decode ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
356356
paddle.fluid.contrib.BeamSearchDecoder.early_stop ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
357357
paddle.fluid.contrib.BeamSearchDecoder.read_array ArgSpec(args=['self', 'init', 'is_ids', 'is_scores'], varargs=None, keywords=None, defaults=(False, False))
@@ -456,7 +456,7 @@ paddle.fluid.optimizer.AdadeltaOptimizer.apply_gradients ArgSpec(args=['self', '
456456
paddle.fluid.optimizer.AdadeltaOptimizer.backward ArgSpec(args=['self', 'loss', 'startup_program', 'parameter_list', 'no_grad_set', 'callbacks'], varargs=None, keywords=None, defaults=(None, None, None, None))
457457
paddle.fluid.optimizer.AdadeltaOptimizer.minimize ArgSpec(args=['self', 'loss', 'startup_program', 'parameter_list', 'no_grad_set'], varargs=None, keywords=None, defaults=(None, None, None))
458458
paddle.fluid.optimizer.ModelAverage.__init__ ArgSpec(args=['self', 'average_window_rate', 'min_average_window', 'max_average_window', 'regularization', 'name'], varargs=None, keywords=None, defaults=(10000, 10000, None, None))
459-
paddle.fluid.optimizer.ModelAverage.apply ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
459+
paddle.fluid.optimizer.ModelAverage.apply ArgSpec(args=['self', 'executor', 'need_restore'], varargs=None, keywords=None, defaults=(True,))
460460
paddle.fluid.optimizer.ModelAverage.apply_gradients ArgSpec(args=['self', 'params_grads'], varargs=None, keywords=None, defaults=None)
461461
paddle.fluid.optimizer.ModelAverage.backward ArgSpec(args=['self', 'loss', 'startup_program', 'parameter_list', 'no_grad_set', 'callbacks'], varargs=None, keywords=None, defaults=(None, None, None, None))
462462
paddle.fluid.optimizer.ModelAverage.minimize ArgSpec(args=['self', 'loss', 'startup_program', 'parameter_list', 'no_grad_set'], varargs=None, keywords=None, defaults=(None, None, None))
@@ -491,14 +491,14 @@ paddle.fluid.clip.ErrorClipByValue.__init__ ArgSpec(args=['self', 'max', 'min'],
491491
paddle.fluid.clip.GradientClipByValue.__init__ ArgSpec(args=['self', 'max', 'min'], varargs=None, keywords=None, defaults=(None,))
492492
paddle.fluid.clip.GradientClipByNorm.__init__ ArgSpec(args=['self', 'clip_norm'], varargs=None, keywords=None, defaults=None)
493493
paddle.fluid.clip.GradientClipByGlobalNorm.__init__ ArgSpec(args=['self', 'clip_norm', 'group_name'], varargs=None, keywords=None, defaults=('default_group',))
494-
paddle.fluid.profiler.cuda_profiler ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
494+
paddle.fluid.profiler.cuda_profiler ArgSpec(args=['output_file', 'output_mode', 'config'], varargs=None, keywords=None, defaults=(None, None))
495495
paddle.fluid.profiler.reset_profiler ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
496-
paddle.fluid.profiler.profiler ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
496+
paddle.fluid.profiler.profiler ArgSpec(args=['state', 'sorted_key', 'profile_path'], varargs=None, keywords=None, defaults=(None, '/tmp/profile'))
497497
paddle.fluid.profiler.start_profiler ArgSpec(args=['state'], varargs=None, keywords=None, defaults=None)
498498
paddle.fluid.profiler.stop_profiler ArgSpec(args=['sorted_key', 'profile_path'], varargs=None, keywords=None, defaults=(None, '/tmp/profile'))
499499
paddle.fluid.unique_name.generate ArgSpec(args=['key'], varargs=None, keywords=None, defaults=None)
500500
paddle.fluid.unique_name.switch ArgSpec(args=['new_generator'], varargs=None, keywords=None, defaults=(None,))
501-
paddle.fluid.unique_name.guard ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
501+
paddle.fluid.unique_name.guard ArgSpec(args=['new_generator'], varargs=None, keywords=None, defaults=(None,))
502502
paddle.fluid.recordio_writer.convert_reader_to_recordio_file ArgSpec(args=['filename', 'reader_creator', 'feeder', 'compressor', 'max_num_records', 'feed_order'], varargs=None, keywords=None, defaults=(Compressor.Snappy, 1000, None))
503503
paddle.fluid.recordio_writer.convert_reader_to_recordio_files ArgSpec(args=['filename', 'batch_per_file', 'reader_creator', 'feeder', 'compressor', 'max_num_records', 'feed_order'], varargs=None, keywords=None, defaults=(Compressor.Snappy, 1000, None))
504504
paddle.fluid.Scope Scope() -> paddle.fluid.core._Scope

python/paddle/fluid/contrib/decoder/beam_search_decoder.py

Lines changed: 3 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -22,7 +22,7 @@
2222

2323
from __future__ import print_function
2424

25-
import contextlib
25+
from ...wrapped_decorator import signature_safe_contextmanager
2626
import numpy as np
2727
import six
2828

@@ -419,7 +419,7 @@ def __init__(self, state_cell, name=None):
419419
self._state_cell = state_cell
420420
self._state_cell._enter_decoder(self)
421421

422-
@contextlib.contextmanager
422+
@signature_safe_contextmanager
423423
def block(self):
424424
"""
425425
Define the behavior of the decoder for each RNN time step.
@@ -613,7 +613,7 @@ def __init__(self,
613613
self._word_dim = word_dim
614614
self._input_var_dict = input_var_dict
615615

616-
@contextlib.contextmanager
616+
@signature_safe_contextmanager
617617
def block(self):
618618
"""
619619
Define the behavior of the decoder for each RNN time step.

python/paddle/fluid/contrib/inferencer.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -14,7 +14,7 @@
1414

1515
from __future__ import print_function
1616

17-
import contextlib
17+
from ..wrapped_decorator import signature_safe_contextmanager
1818

1919
from .. import core
2020

@@ -105,7 +105,7 @@ def infer(self, inputs, return_numpy=True):
105105

106106
return results
107107

108-
@contextlib.contextmanager
108+
@signature_safe_contextmanager
109109
def _prog_and_scope_guard(self):
110110
with framework.program_guard(main_program=self.inference_program):
111111
with executor.scope_guard(self.scope):

python/paddle/fluid/contrib/trainer.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -14,7 +14,7 @@
1414

1515
from __future__ import print_function
1616

17-
import contextlib
17+
from ..wrapped_decorator import signature_safe_contextmanager
1818
import os
1919
import errno
2020
import shutil
@@ -453,7 +453,7 @@ def save_inference_model(self, param_path, feeded_var_names,
453453
io.save_inference_model(param_path, feeded_var_names, target_vars,
454454
exe)
455455

456-
@contextlib.contextmanager
456+
@signature_safe_contextmanager
457457
def _prog_and_scope_guard(self):
458458
with framework.program_guard(
459459
main_program=self.train_program,

python/paddle/fluid/executor.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -17,7 +17,7 @@
1717
import os
1818
import multiprocessing
1919
import numpy as np
20-
import contextlib
20+
from .wrapped_decorator import signature_safe_contextmanager
2121
import six
2222
from .framework import Program, default_main_program, Variable
2323
from . import core
@@ -49,7 +49,7 @@ def _switch_scope(scope):
4949
return ex
5050

5151

52-
@contextlib.contextmanager
52+
@signature_safe_contextmanager
5353
def scope_guard(scope):
5454
"""
5555
Change the global/default scope instance by Python `with` statement. All

python/paddle/fluid/framework.py

Lines changed: 7 additions & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -16,7 +16,7 @@
1616

1717
import collections
1818
from collections import defaultdict
19-
import contextlib
19+
from .wrapped_decorator import signature_safe_contextmanager
2020
import os
2121
import re
2222
import traceback
@@ -111,7 +111,7 @@ def name(self):
111111
_name_scope = NameScope()
112112

113113

114-
@contextlib.contextmanager
114+
@signature_safe_contextmanager
115115
def name_scope(prefix=None):
116116
"""
117117
Generate hierarchical name prefix for the operators.
@@ -1775,7 +1775,7 @@ def op_role_var(self):
17751775
def set_op_role_var(self, var_name):
17761776
self._op_role_var = [var_name]
17771777

1778-
@contextlib.contextmanager
1778+
@signature_safe_contextmanager
17791779
def _optimized_guard(self, param_and_grads):
17801780
"""
17811781
A with guard to set :code:`Optimization` :code:`OpRole` and
@@ -1805,7 +1805,7 @@ def _optimized_guard(self, param_and_grads):
18051805
self._op_role_var = tmp_var
18061806
self._current_role = tmp_role
18071807

1808-
@contextlib.contextmanager
1808+
@signature_safe_contextmanager
18091809
def _lr_schedule_guard(self, is_with_opt=False):
18101810
"""
18111811
A with guard to set :code:`LRSched` :code:`OpRole` and
@@ -2459,7 +2459,7 @@ def switch_startup_program(program):
24592459
return prev_program
24602460

24612461

2462-
@contextlib.contextmanager
2462+
@signature_safe_contextmanager
24632463
def program_guard(main_program, startup_program=None):
24642464
"""
24652465
Change the global main program and startup program with `with` statement.
@@ -2524,7 +2524,7 @@ def _get_var(name, program=None):
25242524
return program.global_block().var(name)
25252525

25262526

2527-
@contextlib.contextmanager
2527+
@signature_safe_contextmanager
25282528
def _imperative_guard(tracer):
25292529
global _imperative_tracer_
25302530
tmp_trace = _imperative_tracer_
@@ -2535,7 +2535,7 @@ def _imperative_guard(tracer):
25352535
_imperative_tracer_ = tmp_trace
25362536

25372537

2538-
@contextlib.contextmanager
2538+
@signature_safe_contextmanager
25392539
def _imperative_place_guard(place):
25402540
global _imperative_current_expected_place_
25412541
tmp_place = _imperative_current_expected_place_

python/paddle/fluid/imperative/base.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -11,7 +11,7 @@
1111
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
14-
import contextlib
14+
from ..wrapped_decorator import signature_safe_contextmanager
1515
import numpy as np
1616

1717
from paddle.fluid import core
@@ -24,7 +24,7 @@ def enabled():
2424
return framework._in_imperative_mode()
2525

2626

27-
@contextlib.contextmanager
27+
@signature_safe_contextmanager
2828
def guard(place=None):
2929
train = framework.Program()
3030
startup = framework.Program()

python/paddle/fluid/initializer.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -16,7 +16,7 @@
1616

1717
from . import framework
1818
import numpy as np
19-
import contextlib
19+
from .wrapped_decorator import signature_safe_contextmanager
2020
from .core import VarDesc
2121
from . import unique_name
2222

@@ -49,7 +49,7 @@ def force_init_on_cpu():
4949
return _force_init_on_cpu_
5050

5151

52-
@contextlib.contextmanager
52+
@signature_safe_contextmanager
5353
def init_on_cpu():
5454
"""
5555
Force the variable to be inited on CPU.

python/paddle/fluid/layers/control_flow.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -13,7 +13,7 @@
1313
# limitations under the License.
1414

1515
from __future__ import print_function
16-
import contextlib
16+
from ..wrapped_decorator import signature_safe_contextmanager
1717

1818
from .layer_function_generator import autodoc, templatedoc
1919
from .tensor import assign, fill_constant
@@ -1532,7 +1532,7 @@ def static_input(self, x):
15321532
outputs={'Out': [x_reordered]})
15331533
return shrink_memory(x_reordered, self.step_idx, self.lod_rank_table)
15341534

1535-
@contextlib.contextmanager
1535+
@signature_safe_contextmanager
15361536
def block(self):
15371537
"""
15381538
The block for user to define operators in RNN. See the class docstring

python/paddle/fluid/layers/io.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -13,7 +13,7 @@
1313
# limitations under the License.
1414

1515
from __future__ import print_function
16-
import contextlib
16+
from ..wrapped_decorator import signature_safe_contextmanager
1717
import multiprocessing
1818
import os
1919
import six
@@ -1116,7 +1116,7 @@ def __init__(self, reader, name=None):
11161116
def _is_completed(self):
11171117
return self.sub_block and self.source_var_names and self.sink_var_names
11181118

1119-
@contextlib.contextmanager
1119+
@signature_safe_contextmanager
11201120
def block(self):
11211121
self.status = Preprocessor.IN_SUB_BLOCK
11221122
self.sub_block = self.main_prog._create_block()

0 commit comments

Comments (0)