
Commit 2c01c22

Merge pull request #13531 from gongweibao/generator2
Hide kwargs
2 parents 0be1582 + 91bc80d commit 2c01c22

6 files changed, +341 -129 lines changed


paddle/fluid/API.spec

Lines changed: 7 additions & 7 deletions
@@ -153,6 +153,13 @@ paddle.fluid.layers.elementwise_mul ArgSpec(args=['x', 'y', 'out', 'axis', 'use_
 paddle.fluid.layers.elementwise_max ArgSpec(args=['x', 'y', 'out', 'axis', 'use_mkldnn', 'act', 'name'], varargs=None, keywords=None, defaults=(None, -1, False, None, None))
 paddle.fluid.layers.elementwise_min ArgSpec(args=['x', 'y', 'out', 'axis', 'use_mkldnn', 'act', 'name'], varargs=None, keywords=None, defaults=(None, -1, False, None, None))
 paddle.fluid.layers.elementwise_pow ArgSpec(args=['x', 'y', 'out', 'axis', 'use_mkldnn', 'act', 'name'], varargs=None, keywords=None, defaults=(None, -1, False, None, None))
+paddle.fluid.layers.uniform_random_batch_size_like ArgSpec(args=['input', 'shape', 'dtype', 'input_dim_idx', 'output_dim_idx', 'min', 'max', 'seed'], varargs=None, keywords=None, defaults=('float32', 0, 0, -1.0, 1.0, 0))
+paddle.fluid.layers.gaussian_random ArgSpec(args=['shape', 'mean', 'std', 'seed', 'dtype', 'use_mkldnn'], varargs=None, keywords=None, defaults=(0.0, 1.0, 0, 'float32', False))
+paddle.fluid.layers.sampling_id ArgSpec(args=['x', 'min', 'max', 'seed', 'dtype'], varargs=None, keywords=None, defaults=(0.0, 1.0, 0, 'float32'))
+paddle.fluid.layers.gaussian_random_batch_size_like ArgSpec(args=['input', 'shape', 'input_dim_idx', 'output_dim_idx', 'mean', 'std', 'seed', 'dtype'], varargs=None, keywords=None, defaults=(0, 0, 0.0, 1.0, 0, 'float32'))
+paddle.fluid.layers.sum ArgSpec(args=['x', 'use_mkldnn'], varargs=None, keywords=None, defaults=(False,))
+paddle.fluid.layers.slice ArgSpec(args=['input', 'axes', 'starts', 'ends'], varargs=None, keywords=None, defaults=None)
+paddle.fluid.layers.shape ArgSpec(args=['input'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.layers.data ArgSpec(args=['name', 'shape', 'append_batch_size', 'dtype', 'lod_level', 'type', 'stop_gradient'], varargs=None, keywords=None, defaults=(True, 'float32', 0, VarType.LOD_TENSOR, True))
 paddle.fluid.layers.open_files ArgSpec(args=['filenames', 'shapes', 'lod_levels', 'dtypes', 'thread_num', 'buffer_size', 'pass_num', 'is_test'], varargs=None, keywords=None, defaults=(None, None, 1, None))
 paddle.fluid.layers.read_file ArgSpec(args=['reader'], varargs=None, keywords=None, defaults=None)
@@ -224,13 +231,6 @@ paddle.fluid.layers.logical_and ArgSpec(args=[], varargs='args', keywords='kwarg
 paddle.fluid.layers.logical_or ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.logical_xor ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.logical_not ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.uniform_random_batch_size_like ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.gaussian_random ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.sampling_id ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.gaussian_random_batch_size_like ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.sum ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.slice ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.shape ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.maxout ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.sigmoid ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.logsigmoid ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
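
These seven layers previously exposed only the generic *args/**kwargs signature; the new ArgSpec entries pin down explicit argument names and defaults. Below is a minimal usage sketch of a few of the explicit signatures (assuming a PaddlePaddle Fluid build from around this commit; the tensor names, shapes, and values are illustrative, not taken from the diff):

import paddle.fluid as fluid

# Data layer whose leading batch dimension is resolved at run time.
x = fluid.layers.data(name='x', shape=[32], dtype='float32')

# Keyword arguments follow the explicit ArgSpec entries added above.
noise_a = fluid.layers.gaussian_random(shape=[4, 32], mean=0.0, std=1.0,
                                       seed=0, dtype='float32')
noise_b = fluid.layers.gaussian_random(shape=[4, 32], mean=0.0, std=2.0,
                                       seed=0, dtype='float32')
total = fluid.layers.sum([noise_a, noise_b])

# Batch-size-aware random tensor: the batch dimension is copied from x.
rand = fluid.layers.uniform_random_batch_size_like(
    input=x, shape=[1, 32], dtype='float32', min=-1.0, max=1.0, seed=0)

Calls like these were already possible through the old **kwargs wrappers; what changes is that the argument names and defaults are now visible in the recorded ArgSpec.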

paddle/fluid/operators/sampling_id_op.cc

Lines changed: 8 additions & 7 deletions
@@ -53,15 +53,16 @@ class SamplingIdOpMaker : public framework::OpProtoAndCheckerMaker {
 SamplingId Operator.
 A layer for sampling id from multinomial distribution from the
  input. Sampling one id for one sample.)DOC");
-    AddAttr<float>("min", "Minimum value of random. [default 0.0].")
+    AddAttr<float>("min", "Minimum value of random. (float, default 0.0).")
         .SetDefault(0.0f);
-    AddAttr<float>("max", "Maximun value of random. [default 1.0].")
+    AddAttr<float>("max", "Maximun value of random. (float, default 1.0).")
         .SetDefault(1.0f);
-    AddAttr<int>("seed",
-                 "Random seed used for the random number engine. "
-                 "0 means use a seed generated by the system."
-                 "Note that if seed is not 0, this operator will always "
-                 "generate the same random numbers every time. [default 0].")
+    AddAttr<int>(
+        "seed",
+        "Random seed used for the random number engine. "
+        "0 means use a seed generated by the system."
+        "Note that if seed is not 0, this operator will always "
+        "generate the same random numbers every time. (int, default 0).")
         .SetDefault(0);
   }
 };
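
The reworded attribute comments above correspond to the min, max, and seed arguments of the Python-side sampling_id layer now listed in API.spec. A hedged usage sketch (the tensor name and shape are illustrative, not from this commit):

import paddle.fluid as fluid

# Each row of 'probs' holds per-class probabilities for one sample.
probs = fluid.layers.data(name='probs', shape=[10], dtype='float32')
# Draw one id per sample from the multinomial distribution defined by
# probs; min, max, and seed map onto the op attributes documented above,
# and seed=0 requests a system-generated seed.
ids = fluid.layers.sampling_id(x=probs, min=0.0, max=1.0, seed=0)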

python/paddle/fluid/layers/detection.py

Lines changed: 3 additions & 3 deletions
@@ -284,7 +284,7 @@ class number, M is number of bounding boxes. For each category
         target_box=loc,
         code_type='decode_center_size')
     compile_shape = scores.shape
-    run_shape = ops.shape(scores)
+    run_shape = nn.shape(scores)
     scores = nn.flatten(x=scores, axis=2)
     scores = nn.softmax(input=scores)
     scores = nn.reshape(x=scores, shape=compile_shape, actual_shape=run_shape)
@@ -697,7 +697,7 @@ def ssd_loss(location,
         raise ValueError("Only support mining_type == max_negative now.")

     num, num_prior, num_class = confidence.shape
-    conf_shape = ops.shape(confidence)
+    conf_shape = nn.shape(confidence)

     def __reshape_to_2d(var):
         return nn.flatten(x=var, axis=2)
@@ -724,7 +724,7 @@ def __reshape_to_2d(var):
     target_label.stop_gradient = True
     conf_loss = nn.softmax_with_cross_entropy(confidence, target_label)
     # 3. Mining hard examples
-    actual_shape = ops.slice(conf_shape, axes=[0], starts=[0], ends=[2])
+    actual_shape = nn.slice(conf_shape, axes=[0], starts=[0], ends=[2])
     actual_shape.stop_gradient = True
     conf_loss = nn.reshape(
         x=conf_loss, shape=(num, num_prior), actual_shape=actual_shape)
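
Both call sites switch from the auto-generated ops module to the explicit wrappers now recorded in API.spec. Outside of detection.py, the same shape/slice pattern looks roughly like this (a sketch only; 'confidence' merely stands in for the tensor used in ssd_loss, and its shape is illustrative):

import paddle.fluid as fluid

# Stand-in for the detection confidence tensor.
confidence = fluid.layers.data(name='confidence', shape=[100, 21],
                               dtype='float32')
# Runtime shape as a 1-D tensor, then keep only the first two dimensions,
# mirroring the nn.shape / nn.slice calls above.
conf_shape = fluid.layers.shape(confidence)
actual_shape = fluid.layers.slice(conf_shape, axes=[0], starts=[0], ends=[2])
actual_shape.stop_gradient = True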
