Commit d23c3ff

Merge pull request #13636 from gongweibao/generator2
Cherry pick "hide args" to release1.0.0
2 parents 4aed00e + e7ad64b

6 files changed: +341 −129 lines changed

paddle/fluid/API.spec

Lines changed: 7 additions & 7 deletions
@@ -170,6 +170,13 @@ paddle.fluid.layers.elementwise_mul ArgSpec(args=['x', 'y', 'out', 'axis', 'use_
 paddle.fluid.layers.elementwise_max ArgSpec(args=['x', 'y', 'out', 'axis', 'use_mkldnn', 'act', 'name'], varargs=None, keywords=None, defaults=(None, -1, False, None, None))
 paddle.fluid.layers.elementwise_min ArgSpec(args=['x', 'y', 'out', 'axis', 'use_mkldnn', 'act', 'name'], varargs=None, keywords=None, defaults=(None, -1, False, None, None))
 paddle.fluid.layers.elementwise_pow ArgSpec(args=['x', 'y', 'out', 'axis', 'use_mkldnn', 'act', 'name'], varargs=None, keywords=None, defaults=(None, -1, False, None, None))
+paddle.fluid.layers.uniform_random_batch_size_like ArgSpec(args=['input', 'shape', 'dtype', 'input_dim_idx', 'output_dim_idx', 'min', 'max', 'seed'], varargs=None, keywords=None, defaults=('float32', 0, 0, -1.0, 1.0, 0))
+paddle.fluid.layers.gaussian_random ArgSpec(args=['shape', 'mean', 'std', 'seed', 'dtype', 'use_mkldnn'], varargs=None, keywords=None, defaults=(0.0, 1.0, 0, 'float32', False))
+paddle.fluid.layers.sampling_id ArgSpec(args=['x', 'min', 'max', 'seed', 'dtype'], varargs=None, keywords=None, defaults=(0.0, 1.0, 0, 'float32'))
+paddle.fluid.layers.gaussian_random_batch_size_like ArgSpec(args=['input', 'shape', 'input_dim_idx', 'output_dim_idx', 'mean', 'std', 'seed', 'dtype'], varargs=None, keywords=None, defaults=(0, 0, 0.0, 1.0, 0, 'float32'))
+paddle.fluid.layers.sum ArgSpec(args=['x', 'use_mkldnn'], varargs=None, keywords=None, defaults=(False,))
+paddle.fluid.layers.slice ArgSpec(args=['input', 'axes', 'starts', 'ends'], varargs=None, keywords=None, defaults=None)
+paddle.fluid.layers.shape ArgSpec(args=['input'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.layers.data ArgSpec(args=['name', 'shape', 'append_batch_size', 'dtype', 'lod_level', 'type', 'stop_gradient'], varargs=None, keywords=None, defaults=(True, 'float32', 0, VarType.LOD_TENSOR, True))
 paddle.fluid.layers.open_files ArgSpec(args=['filenames', 'shapes', 'lod_levels', 'dtypes', 'thread_num', 'buffer_size', 'pass_num', 'is_test'], varargs=None, keywords=None, defaults=(None, None, 1, None))
 paddle.fluid.layers.read_file ArgSpec(args=['reader'], varargs=None, keywords=None, defaults=None)
@@ -241,13 +248,6 @@ paddle.fluid.layers.logical_and ArgSpec(args=[], varargs='args', keywords='kwarg
 paddle.fluid.layers.logical_or ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.logical_xor ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.logical_not ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.uniform_random_batch_size_like ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.gaussian_random ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.sampling_id ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.gaussian_random_batch_size_like ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.sum ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.slice ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.shape ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.maxout ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.sigmoid ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.logsigmoid ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
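
The two hunks above move these layers from catch-all *args/**kwargs entries to explicit signatures, so the generated layers now have documented Python arguments and defaults. As a rough illustration (not part of the commit), the snippet below exercises a few of the pinned signatures inside fluid's default program; the variable names, shapes, and values are invented.

# Illustrative sketch only: calls follow the ArgSpec lines added above.
import paddle.fluid.layers as layers

x = layers.data(name='x', shape=[32], dtype='float32')              # [batch, 32]
# uniform_random_batch_size_like(input, shape, dtype='float32',
#     input_dim_idx=0, output_dim_idx=0, min=-1.0, max=1.0, seed=0)
noise = layers.uniform_random_batch_size_like(input=x, shape=[-1, 32])
# gaussian_random(shape, mean=0.0, std=1.0, seed=0, dtype='float32', use_mkldnn=False)
g = layers.gaussian_random(shape=[10, 32])
# sum(x, use_mkldnn=False) adds a list of tensors elementwise
total = layers.sum([x, noise])
# slice(input, axes, starts, ends) and shape(input) per the new entries
first_half = layers.slice(total, axes=[1], starts=[0], ends=[16])
dims = layers.shape(first_half)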

paddle/fluid/operators/sampling_id_op.cc

Lines changed: 8 additions & 7 deletions
@@ -53,15 +53,16 @@ class SamplingIdOpMaker : public framework::OpProtoAndCheckerMaker {
 SamplingId Operator.
 A layer for sampling id from multinomial distribution from the
 input. Sampling one id for one sample.)DOC");
-    AddAttr<float>("min", "Minimum value of random. [default 0.0].")
+    AddAttr<float>("min", "Minimum value of random. (float, default 0.0).")
         .SetDefault(0.0f);
-    AddAttr<float>("max", "Maximun value of random. [default 1.0].")
+    AddAttr<float>("max", "Maximun value of random. (float, default 1.0).")
         .SetDefault(1.0f);
-    AddAttr<int>("seed",
-                 "Random seed used for the random number engine. "
-                 "0 means use a seed generated by the system."
-                 "Note that if seed is not 0, this operator will always "
-                 "generate the same random numbers every time. [default 0].")
+    AddAttr<int>(
+        "seed",
+        "Random seed used for the random number engine. "
+        "0 means use a seed generated by the system."
+        "Note that if seed is not 0, this operator will always "
+        "generate the same random numbers every time. (int, default 0).")
         .SetDefault(0);
   }
 };
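
The reworded attribute comments above are what surfaces in the generated API documentation. For context, here is a minimal hypothetical Python-side use of sampling_id that follows its new ArgSpec entry in API.spec; the input name and shape are invented.

# Hypothetical sketch: sample one class id per row of a probability matrix,
# using the min/max/seed attributes documented in SamplingIdOpMaker above.
import paddle.fluid.layers as layers

logits = layers.data(name='logits', shape=[10], dtype='float32')    # [batch, 10]
probs = layers.softmax(input=logits)                                 # rows sum to 1
ids = layers.sampling_id(probs, min=0.0, max=1.0, seed=0)            # defaults per API.spec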

python/paddle/fluid/layers/detection.py

Lines changed: 3 additions & 3 deletions
@@ -284,7 +284,7 @@ class number, M is number of bounding boxes. For each category
         target_box=loc,
         code_type='decode_center_size')
     compile_shape = scores.shape
-    run_shape = ops.shape(scores)
+    run_shape = nn.shape(scores)
     scores = nn.flatten(x=scores, axis=2)
     scores = nn.softmax(input=scores)
     scores = nn.reshape(x=scores, shape=compile_shape, actual_shape=run_shape)
@@ -697,7 +697,7 @@ def ssd_loss(location,
         raise ValueError("Only support mining_type == max_negative now.")
 
     num, num_prior, num_class = confidence.shape
-    conf_shape = ops.shape(confidence)
+    conf_shape = nn.shape(confidence)
 
     def __reshape_to_2d(var):
         return nn.flatten(x=var, axis=2)
@@ -724,7 +724,7 @@ def __reshape_to_2d(var):
     target_label.stop_gradient = True
     conf_loss = nn.softmax_with_cross_entropy(confidence, target_label)
     # 3. Mining hard examples
-    actual_shape = ops.slice(conf_shape, axes=[0], starts=[0], ends=[2])
+    actual_shape = nn.slice(conf_shape, axes=[0], starts=[0], ends=[2])
     actual_shape.stop_gradient = True
     conf_loss = nn.reshape(
         x=conf_loss, shape=(num, num_prior), actual_shape=actual_shape)
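
These three hunks only swap ops.shape/ops.slice for their nn counterparts now that shape and slice are exposed as regular layers. A stripped-down sketch of the pattern they feed (names and shapes invented, not from the file): keep the compile-time shape for static inference and pass the runtime shape through actual_shape so the reshape tracks the real batch size.

# Hypothetical sketch of the compile-shape / run-shape reshape pattern above.
import paddle.fluid.layers as layers   # stands in for the nn module used in detection.py

scores = layers.data(name='scores', shape=[21, 4], dtype='float32')  # [batch, 21, 4]
compile_shape = scores.shape                  # static shape, may contain -1
run_shape = layers.shape(scores)              # 1-D int tensor resolved at run time
scores = layers.flatten(x=scores, axis=2)     # [batch * 21, 4]
scores = layers.softmax(input=scores)
scores = layers.reshape(x=scores, shape=compile_shape, actual_shape=run_shape)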
