
Commit 6724801

fix conflict

Merge of 2 parents: 3ad3635 + a54c423

10 files changed: +310 −30 lines

paddle/fluid/API.spec

Lines changed: 19 additions & 19 deletions
@@ -167,6 +167,9 @@ paddle.fluid.layers.stanh ArgSpec(args=['x', 'scale_a', 'scale_b', 'name'], vara
 paddle.fluid.layers.hard_sigmoid ArgSpec(args=['x', 'slope', 'offset', 'name'], varargs=None, keywords=None, defaults=(0.2, 0.5, None))
 paddle.fluid.layers.swish ArgSpec(args=['x', 'beta', 'name'], varargs=None, keywords=None, defaults=(1.0, None))
 paddle.fluid.layers.prelu ArgSpec(args=['x', 'mode', 'param_attr', 'name'], varargs=None, keywords=None, defaults=(None, None))
+paddle.fluid.layers.brelu ArgSpec(args=['x', 't_min', 't_max', 'name'], varargs=None, keywords=None, defaults=(0.0, 24.0, None))
+paddle.fluid.layers.leaky_relu ArgSpec(args=['x', 'alpha', 'name'], varargs=None, keywords=None, defaults=(0.02, None))
+paddle.fluid.layers.soft_relu ArgSpec(args=['x', 'threshold', 'name'], varargs=None, keywords=None, defaults=(40.0, None))
 paddle.fluid.layers.flatten ArgSpec(args=['x', 'axis', 'name'], varargs=None, keywords=None, defaults=(1, None))
 paddle.fluid.layers.sequence_mask ArgSpec(args=['x', 'maxlen', 'dtype', 'name'], varargs=None, keywords=None, defaults=(None, 'int64', None))
 paddle.fluid.layers.stack ArgSpec(args=['x', 'axis'], varargs=None, keywords=None, defaults=(0,))
@@ -262,26 +265,23 @@ paddle.fluid.layers.sum ArgSpec(args=[], varargs='args', keywords='kwargs', defa
 paddle.fluid.layers.slice ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.shape ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.maxout ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.sigmoid ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.logsigmoid ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.exp ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.tanh ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.tanh_shrink ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
 paddle.fluid.layers.softshrink ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.sqrt ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.abs ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.ceil ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.floor ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.cos ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.sin ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.round ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.reciprocal ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.square ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.softplus ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.softsign ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.brelu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.leaky_relu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
-paddle.fluid.layers.soft_relu ArgSpec(args=[], varargs='args', keywords='kwargs', defaults=None)
+paddle.fluid.layers.sigmoid ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.logsigmoid ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.exp ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.tanh ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.tanh_shrink ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.sqrt ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.abs ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.ceil ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.floor ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.cos ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.sin ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.round ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.reciprocal ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.square ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.softplus ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.layers.softsign ArgSpec(args=['x', 'name'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.uniform_random ArgSpec(args=['shape', 'dtype', 'min', 'max', 'seed'], varargs=None, keywords=None, defaults=(None, None, None, None))
 paddle.fluid.layers.hard_shrink ArgSpec(args=['x', 'threshold'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.layers.cumsum ArgSpec(args=['x', 'axis', 'exclusive', 'reverse'], varargs=None, keywords=None, defaults=(None, None, None))
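
The API.spec change is the user-visible summary of this commit: the listed activations move from the generic *args/**kwargs signature to an explicit (x, name) signature, and brelu, leaky_relu, and soft_relu get dedicated entries with documented defaults. A minimal sketch of the resulting call style follows; the input variable and names are illustrative only, not part of the commit.

    import paddle.fluid as fluid

    # Hypothetical input variable, used only for illustration.
    x = fluid.layers.data(name='x', shape=[32], dtype='float32')

    # Matches the new ArgSpec(args=['x', 'name'], ...) entries.
    y = fluid.layers.sigmoid(x)

    # Matches the new brelu ArgSpec with defaults (0.0, 24.0, None).
    z = fluid.layers.brelu(x, t_min=0.0, t_max=24.0)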

paddle/fluid/framework/op_desc.cc

Lines changed: 5 additions & 0 deletions
@@ -54,6 +54,10 @@ class CompileTimeInferShapeContext : public InferShapeContext {
                 size_t j = 0) const override {
     PADDLE_ENFORCE_LT(i, Inputs(in).size());
     PADDLE_ENFORCE_LT(j, Outputs(out).size());
+    PADDLE_ENFORCE(Inputs(in)[i] != framework::kEmptyVarName,
+                   "The %s[%d] is @EMPTY@", in, i);
+    PADDLE_ENFORCE(Outputs(out)[j] != framework::kEmptyVarName,
+                   "The %s[%d] is @EMPTY@", out, j);
     auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
     auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
     if (in_var->GetType() != proto::VarType::LOD_TENSOR) {
@@ -63,6 +67,7 @@ class CompileTimeInferShapeContext : public InferShapeContext {
     PADDLE_ENFORCE_EQ(in_var->GetType(), proto::VarType::LOD_TENSOR,
                       "The %d-th output of Output(%s) must be LoDTensor.", j,
                       out);
+
     out_var->SetLoDLevel(in_var->GetLoDLevel());
   }

paddle/fluid/framework/shape_inference.cc

Lines changed: 10 additions & 0 deletions
@@ -46,6 +46,16 @@ std::vector<DDim> InferShapeContext::GetReaderDims(
   return this->GetRepeatedDims(arg_names[0]);
 }
 
+void InferShapeContext::ShareLoDs(const std::string &in,
+                                  const std::string &out) const {
+  PADDLE_ENFORCE_EQ(Inputs(in).size(), Outputs(out).size(),
+                    "The number of arguments in %s and %s is not equal.", in,
+                    out);
+  for (size_t i = 0; i < in.size(); ++i) {
+    ShareLoD(in, out, i, i);
+  }
+}
+
 DDim InferShapeContext::GetInputsElementDim(const std::string &name,
                                             int idx) const {
   const std::vector<std::string> &names = Inputs(name);

paddle/fluid/framework/shape_inference.h

Lines changed: 2 additions & 0 deletions
@@ -56,6 +56,8 @@ class InferShapeContext {
   virtual const std::vector<std::string> &Outputs(
       const std::string &name) const = 0;
 
+  void ShareLoDs(const std::string &in, const std::string &out) const;
+
   virtual void ShareLoD(const std::string &in, const std::string &out,
                         size_t i = 0, size_t j = 0) const = 0;

paddle/fluid/operators/concat_op.cc

Lines changed: 14 additions & 2 deletions
@@ -94,8 +94,20 @@ class ConcatOpGrad : public framework::OperatorWithKernel {
       : OperatorWithKernel(type, inputs, outputs, attrs) {}
 
   void InferShape(framework::InferShapeContext *ctx) const override {
-    ctx->SetOutputsDim(framework::GradVarName("X"), ctx->GetInputsDim("X"));
-    ctx->ShareLoD("X", framework::GradVarName("X"));
+    auto in_x = "X";
+    auto out_x_g_n = framework::GradVarName(in_x);
+    ctx->SetOutputsDim(out_x_g_n, ctx->GetInputsDim(in_x));
+    auto &in_names = ctx->Inputs(in_x);
+    auto &out_names = ctx->Outputs(out_x_g_n);
+    PADDLE_ENFORCE_EQ(
+        in_names.size(), out_names.size(),
+        "The number of arguments in %s[%d] and %s[%d] is not equal.", in_x,
+        in_names.size(), out_x_g_n, out_names.size());
+    for (size_t i = 0; i < in_names.size(); ++i) {
+      if (out_names[i] != framework::kEmptyVarName) {
+        ctx->ShareLoD(in_x, out_x_g_n, i, i);
+      }
+    }
   }
 };
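
The per-slot check matters because a slot of X@GRAD can hold the empty variable name, for example when one of the concatenated inputs does not need a gradient; the old unconditional ShareLoD("X", GradVarName("X")) would touch that missing variable, while the new loop skips it. A hedged Python sketch of such a graph follows; the variable names, shapes, and the assumption that the plain data input ends up with an empty gradient slot are illustrative, not taken from the commit.

    import paddle.fluid as fluid

    a = fluid.layers.data(name='a', shape=[4], dtype='float32')
    b = fluid.layers.data(name='b', shape=[4], dtype='float32')

    # The fc output depends on parameters and therefore needs a gradient; the
    # raw data variable b typically does not, so its slot in concat_grad's
    # X@GRAD output may be the empty variable name, which the InferShape loop
    # above now skips instead of failing on.
    h = fluid.layers.fc(input=a, size=4)
    out = fluid.layers.concat([h, b], axis=1)

    loss = fluid.layers.mean(out)
    fluid.backward.append_backward(loss)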

python/paddle/fluid/clip.py

Lines changed: 1 addition & 1 deletion
@@ -280,7 +280,7 @@ def _create_operators(self, param, grad):
         group_scale_name = self.group_name + "_scale"
         if group_scale_name not in self.context:
             group_norm_var = layers.sums(input=self.context[self.group_name])
-            layers.sqrt(x=group_norm_var, out=group_norm_var)
+            group_norm_var = layers.sqrt(x=group_norm_var)
             clip_var = self.context[self.group_name + "_clip"]
             group_scale_var = layers.elementwise_div(
                 x=clip_var,
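
This one-line fix follows from the regenerated signature: sqrt, like the other no-attribute activations, now takes only x and an optional name, allocates its own output, and returns it, so the caller rebinds the result instead of passing an out target. A minimal sketch of the new convention; the input variable is illustrative.

    import paddle.fluid as fluid

    norm = fluid.layers.data(name='norm', shape=[1], dtype='float32')

    # Old style wrote into an existing variable:  layers.sqrt(x=norm, out=norm)
    # New style rebinds the returned temporary variable.
    norm = fluid.layers.sqrt(norm)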

python/paddle/fluid/layers/layer_function_generator.py

Lines changed: 27 additions & 1 deletion
@@ -23,7 +23,10 @@
 from ..framework import OpProtoHolder, Variable
 from ..layer_helper import LayerHelper
 
-__all__ = ['deprecated', 'generate_layer_fn', 'autodoc', 'templatedoc']
+__all__ = [
+    'deprecated', 'generate_layer_fn', 'generate_layer_fn_noattr', 'autodoc',
+    'templatedoc'
+]
 
 
 def _convert_(name):
@@ -212,6 +215,29 @@ def func(*args, **kwargs):
     return func
 
 
+def generate_layer_fn_noattr(op_type):
+    """Register the Python layer for an Operator without Attribute.
+
+    Args:
+       op_type: The name of the operator to be created.
+
+    This function takes in the operator type (sigmoid, exp , tanh etc) and
+    creates the operator functionality.
+
+    """
+    op_proto = OpProtoHolder.instance().get_op_proto(op_type)
+
+    def func(x, name=None):
+        helper = LayerHelper(op_type, **locals())
+        output = helper.create_tmp_variable(dtype=x.dtype)
+        helper.append_op(type=op_type, inputs={"X": x}, outputs={"Out": output})
+        return output
+
+    func.__name__ = op_type
+    func.__doc__ = _generate_doc_string_(op_proto)
+    return func
+
+
 def deprecated(func_or_class):
     """
     Deprecated warning decorator. It will result a warning message.
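
For context, this is roughly how the new generator is consumed (see the ops.py hunk below): each registered op name becomes a module-level function with the fixed (x, name=None) signature and a docstring taken from the op proto. The snippet below is a sketch, not part of the commit, and assumes the module path shown in the diff header.

    import paddle.fluid as fluid
    from paddle.fluid.layers.layer_function_generator import generate_layer_fn_noattr

    # Build a layer function for the 'exp' operator, which has no attributes.
    exp_layer = generate_layer_fn_noattr('exp')
    print(exp_layer.__name__)   # 'exp', with a docstring generated from the op proto

    x = fluid.layers.data(name='x', shape=[4], dtype='float32')
    y = exp_layer(x)            # appends one 'exp' op: inputs {'X': x} -> outputs {'Out': y}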

python/paddle/fluid/layers/nn.py

Lines changed: 71 additions & 0 deletions
@@ -114,6 +114,9 @@
     'hard_sigmoid',
     'swish',
     'prelu',
+    'brelu',
+    'leaky_relu',
+    'soft_relu',
     'flatten',
     'sequence_mask',
     'stack',
@@ -6104,6 +6107,74 @@ def prelu(x, mode, param_attr=None, name=None):
     return out
 
 
+@templatedoc()
+def brelu(x, t_min=0.0, t_max=24.0, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        t_min(${t_min_type}|0.0): ${t_min_comment}
+        t_max(${t_max_type}|24.0): ${t_max_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('brelu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='brelu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'t_min': t_min,
+               't_max': t_max})
+    return out
+
+
+@templatedoc()
+def leaky_relu(x, alpha=0.02, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        alpha(${alpha_type}|0.02): ${alpha_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('leaky_relu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='leaky_relu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'alpha': alpha})
+    return out
+
+
+@templatedoc()
+def soft_relu(x, threshold=40.0, name=None):
+    """
+    ${comment}
+    Args:
+        x(${x_type}): ${x_comment}
+        threshold(${threshold_type}|40.0): ${threshold_comment}
+        name(str|None): A name for this layer(optional). If set None, the layer
+                        will be named automatically.
+    Returns:
+        output(${out_type}): ${out_comment}
+    """
+    helper = LayerHelper('soft_relu', **locals())
+    out = helper.create_tmp_variable(dtype=x.dtype)
+    helper.append_op(
+        type='soft_relu',
+        inputs={'X': x},
+        outputs={'Out': out},
+        attrs={'threshold': threshold})
+    return out
+
+
 def flatten(x, axis=1, name=None):
     """
     **Flatten layer**
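
Each of the three new layers is a thin wrapper that appends a single activation op with its attributes (t_min/t_max, alpha, threshold). A usage sketch with the defaults from the ArgSpecs above; the input variable is illustrative.

    import paddle.fluid as fluid

    x = fluid.layers.data(name='x', shape=[16], dtype='float32')

    # Bounded relu: output is clipped to the range [t_min, t_max].
    y1 = fluid.layers.brelu(x, t_min=0.0, t_max=24.0)
    # Leaky relu: negative inputs are scaled by alpha instead of zeroed.
    y2 = fluid.layers.leaky_relu(x, alpha=0.02)
    # Soft relu: a smooth, thresholded variant of relu.
    y3 = fluid.layers.soft_relu(x, threshold=40.0)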

python/paddle/fluid/layers/ops.py

Lines changed: 9 additions & 7 deletions
@@ -13,15 +13,14 @@
 # limitations under the License.
 
 from __future__ import print_function
-from .layer_function_generator import generate_layer_fn
+from .layer_function_generator import generate_layer_fn, generate_layer_fn_noattr
 
-__activations__ = [
+__activations_noattr__ = [
     'sigmoid',
     'logsigmoid',
     'exp',
     'tanh',
     'tanh_shrink',
-    'softshrink',
     'sqrt',
     'abs',
     'ceil',
@@ -33,9 +32,6 @@
     'square',
     'softplus',
     'softsign',
-    'brelu',
-    'leaky_relu',
-    'soft_relu',
 ]
 
 __all__ = [
@@ -56,7 +52,8 @@
     'slice',
     'shape',
     'maxout',
-] + __activations__
+    'softshrink',
+]
 
 for _OP in set(__all__):
     globals()[_OP] = generate_layer_fn(_OP)
@@ -66,6 +63,11 @@
 # e.g.: test_program_code.py, test_dist_train.py
 globals()['_scale'] = generate_layer_fn('scale')
 
+__all__ += __activations_noattr__
+
+for _OP in set(__activations_noattr__):
+    globals()[_OP] = generate_layer_fn_noattr(_OP)
+
 __all__ += ["uniform_random"]
 
 _uniform_random_ = generate_layer_fn('uniform_random')
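
After these registration loops run at import time, the no-attribute activations are plain functions in paddle.fluid.layers with the (x, name=None) signature, while softshrink, which still carries an op attribute, moves into __all__ and stays on the generic generate_layer_fn path. A short sketch of the resulting API; the input variable is illustrative and the keyword form for softshrink assumes the generic generator's usual input mapping.

    import paddle.fluid as fluid

    x = fluid.layers.data(name='x', shape=[4], dtype='float32')

    # Registered via generate_layer_fn_noattr: explicit (x, name) signature.
    y = fluid.layers.tanh(x, name='tanh_out')

    # Still registered via the generic generate_layer_fn, since it keeps an
    # op attribute.
    z = fluid.layers.softshrink(x=x)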
