Skip to content

Commit 4b3e22b

Browse files
authored
Merge pull request #7574 from lcy-seso/wraper_for_l2_normalize
add python wrapper for l2 normalize layer.
2 parents 1d89866 + e043c2c commit 4b3e22b

File tree

9 files changed

+279
-68
lines changed

9 files changed

+279
-68
lines changed

doc/api/v2/fluid/layers.rst

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -499,3 +499,8 @@ swish
499499
------
500500
.. autofunction:: paddle.v2.fluid.layers.swish
501501
:noindex:
502+
503+
l2_normalize
504+
------------
505+
.. autofunction:: paddle.v2.fluid.layers.l2_normalize
506+
:noindex:

paddle/operators/clip_op.cc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -51,8 +51,8 @@ class ClipOpMaker : public framework::OpProtoAndCheckerMaker {
5151
AddComment(R"DOC(
5252
Clip Operator.
5353
54-
The clip operator limits the value of given input within an interval. The interval is
55-
specified with arguments 'min' and 'max':
54+
The clip operator limits the value of given input within an interval. The
55+
interval is specified with arguments 'min' and 'max':
5656
5757
$$
5858
Out = \min(\max(X, min), max)

paddle/operators/elementwise_op.h

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -26,9 +26,9 @@ class ElementwiseOp : public framework::OperatorWithKernel {
2626
using Tensor = framework::Tensor;
2727
void InferShape(framework::InferShapeContext* ctx) const override {
2828
PADDLE_ENFORCE(ctx->HasInput("X"),
29-
"Input(X) of elementwise op should not be null");
29+
"Input(X) of elementwise op should not be null.");
3030
PADDLE_ENFORCE(ctx->HasInput("Y"),
31-
"Input(Y) of elementwise op should not be null");
31+
"Input(Y) of elementwise op should not be null.");
3232
PADDLE_ENFORCE(ctx->HasOutput("Out"),
3333
"Output(Out) of elementwise op should not be null.");
3434

@@ -45,32 +45,31 @@ class ElementwiseOpMaker : public framework::OpProtoAndCheckerMaker {
4545
public:
4646
ElementwiseOpMaker(OpProto* proto, OpAttrChecker* op_checker)
4747
: OpProtoAndCheckerMaker(proto, op_checker) {
48-
AddInput("X", "(Tensor) The first input tensor of elementwise op");
49-
AddInput("Y", "(Tensor) The second input tensor of elementwise op");
50-
AddOutput("Out", "The output of elementwise op");
48+
AddInput("X", "(Tensor), The first input tensor of elementwise op.");
49+
AddInput("Y", "(Tensor), The second input tensor of elementwise op.");
50+
AddOutput("Out", "The output of elementwise op.");
5151
AddAttr<int>("axis",
52-
"(int, default -1) The starting dimension index "
53-
"for broadcasting Y onto X")
52+
"(int, default -1). The start dimension index "
53+
"for broadcasting Y onto X.")
5454
.SetDefault(-1)
5555
.EqualGreaterThan(-1);
5656
comment_ = R"DOC(
5757
Limited Elementwise {name} Operator.
5858
5959
The equation is:
6060
61-
.. math::
62-
{equation}
61+
$${equation}$$
6362
64-
X is a tensor of any dimension and the dimensions of tensor Y must be smaller than
65-
or equal to the dimensions of X.
63+
$X$ is a tensor of any dimension and the dimensions of tensor $Y$ must be
64+
smaller than or equal to the dimensions of $X$.
6665
6766
There are two cases for this operator:
68-
1. The shape of Y is same with X;
69-
2. The shape of Y is a subset of X.
67+
1. The shape of $Y$ is same with $X$;
68+
2. The shape of $Y$ is a subset of $X$.
7069
7170
For case 2:
72-
Y will be broadcasted to match the shape of X and axis should be
73-
the starting dimension index for broadcasting Y onto X.
71+
$Y$ will be broadcasted to match the shape of $X$ and axis should be
72+
set to the index of the start dimension to broadcast $Y$ onto $X$.
7473
7574
For example
7675
.. code-block:: python
@@ -81,7 +80,8 @@ For example
8180
shape(X) = (2, 3, 4, 5), shape(Y) = (3, 4), with axis=1
8281
shape(X) = (2, 3, 4, 5), shape(Y) = (2), with axis=0
8382
84-
Either of the inputs X and Y or none can carry the LoD (Level of Details) information. However, the output only shares the LoD information with input X.
83+
Either of the inputs $X$ and $Y$ or none can carry the LoD (Level of Details)
84+
information. However, the output only shares the LoD information with input $X$.
8585
8686
)DOC";
8787
AddComment(comment_);

paddle/operators/expand_op.cc

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -58,21 +58,21 @@ class ExpandOpMaker : public framework::OpProtoAndCheckerMaker {
5858
ExpandOpMaker(OpProto* proto, OpAttrChecker* op_checker)
5959
: OpProtoAndCheckerMaker(proto, op_checker) {
6060
AddInput("X",
61-
"(Tensor, default Tensor<float>) A tensor with rank in [1, 6]."
62-
"X is the input tensor to be expanded.");
61+
"(Tensor, default Tensor<float>). A tensor with rank in [1, 6]."
62+
"X is the input to be expanded.");
6363
AddOutput("Out",
64-
"(Tensor, default Tensor<float>) A tensor with rank in [1, 6]."
65-
"The rank of Output(Out) is same as Input(X) except that each "
66-
"dimension size of Output(Out) is equal to corresponding "
67-
"dimension size of Input(X) multiplying corresponding value of "
68-
"Attr(expand_times).");
64+
"(Tensor, default Tensor<float>). A tensor with rank in [1, 6]."
65+
"The rank of Output(Out) is the same as that of Input(X). "
66+
"After expanding, size of each dimension of Output(Out) is equal "
67+
"to size of the corresponding dimension of Input(X) multiplying "
68+
"the corresponding value given by Attr(expand_times).");
6969
AddAttr<std::vector<int>>("expand_times",
7070
"Expand times number for each dimension.");
7171
AddComment(R"DOC(
7272
Expand operator tiles the input by given times number. You should set times
7373
number for each dimension by providing attribute 'expand_times'. The rank of X
74-
should be in [1, 6]. Please notice that size of 'expand_times' must be same with
75-
X's rank. Following is a using case:
74+
should be in [1, 6]. Please note that the size of 'expand_times' must be the same
75+
as X's rank. The following is a usage example:
7676
7777
Input(X) is a 3-D tensor with shape [2, 3, 1]:
7878

python/paddle/trainer_config_helpers/evaluators.py

Lines changed: 16 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,13 +16,22 @@
1616
from default_decorators import *
1717

1818
__all__ = [
19-
"evaluator_base", "classification_error_evaluator", "auc_evaluator",
20-
"pnpair_evaluator", "precision_recall_evaluator", "ctc_error_evaluator",
21-
"chunk_evaluator", "sum_evaluator", "column_sum_evaluator",
22-
"value_printer_evaluator", "gradient_printer_evaluator",
23-
"maxid_printer_evaluator", "maxframe_printer_evaluator",
24-
"seqtext_printer_evaluator", "classification_error_printer_evaluator",
25-
"detection_map_evaluator"
19+
"evaluator_base",
20+
"classification_error_evaluator",
21+
"auc_evaluator",
22+
"pnpair_evaluator",
23+
"precision_recall_evaluator",
24+
"ctc_error_evaluator",
25+
"chunk_evaluator",
26+
"sum_evaluator",
27+
"column_sum_evaluator",
28+
"value_printer_evaluator",
29+
"gradient_printer_evaluator",
30+
"maxid_printer_evaluator",
31+
"maxframe_printer_evaluator",
32+
"seqtext_printer_evaluator",
33+
"classification_error_printer_evaluator",
34+
"detection_map_evaluator",
2635
]
2736

2837

python/paddle/v2/fluid/framework.py

Lines changed: 13 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -116,8 +116,8 @@ def _debug_string_(proto, throw_on_error=True):
116116
"""
117117
error_fields = list()
118118
if not proto.IsInitialized(error_fields) and throw_on_error:
119-
raise ValueError("{0} are not initialized\nThe message is {1}".format(
120-
error_fields, proto))
119+
raise ValueError("{0} are not initialized.\nThe message is {1}:\n".
120+
format(error_fields, proto))
121121
return proto.__str__()
122122

123123

@@ -374,12 +374,13 @@ def __init__(self,
374374
>>> outputs={"Out": [var1]})
375375
376376
Args:
377-
block(Block): The block has the current operator
378-
desc(core.OpDesc): The protobuf description
377+
block(Block): The block has the current operator.
378+
desc(core.OpDesc): The protobuf description.
379379
type(str): The type of operator.
380380
inputs(dict): The input dictionary. Key is the input parameter name.
381381
Value is a list of variables.
382-
outputs(dict): The output dictionary. Has same format with inputs
382+
outputs(dict): The output dictionary which has the same format with
383+
inputs.
383384
attrs(dict): The attributes dictionary. Key is attribute name. Value
384385
is the attribute value. The attribute type should be as same as
385386
the type registered in C++
@@ -436,10 +437,11 @@ def find_name(var_list, name):
436437
for m in proto.outputs:
437438
need.add(m.name)
438439
if not given == need:
439-
raise ValueError(
440-
"Incorrect setting for output(s) of operator \"%s\". Need: [%s] Given: [%s]"
441-
% (type, ", ".join(str(e) for e in need), ", ".join(
442-
str(e) for e in given)))
440+
raise ValueError(("Incorrect setting for output(s) of "
441+
"operator \"%s\". Need: [%s] Given: [%s]") %
442+
(type, ", ".join(str(e)
443+
for e in need), ", ".join(
444+
str(e) for e in given)))
443445

444446
for out_proto in proto.outputs:
445447
out_args = outputs[out_proto.name]
@@ -818,9 +820,8 @@ def prune(self, targets):
818820
if isinstance(t, Variable):
819821
t = t.op
820822
else:
821-
raise ValueError(
822-
"All targets of prune() can only be Variable or Operator."
823-
)
823+
raise ValueError(("All targets of prune() can only be "
824+
"Variable or Operator."))
824825

825826
targets_idx.append([t.block.idx, t.idx])
826827
res = Program()

python/paddle/v2/fluid/layers/io.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,9 +28,9 @@ def data(name,
2828
**Data Layer**
2929
3030
This function takes in the input and based on whether data has
31-
to be returned back as a minibatch, it creates the global variable using
31+
to be returned back as a minibatch, it creates the global variable by using
3232
the helper functions. The global variables can be accessed by all the
33-
following operations and layers in the graph.
33+
following operators in the graph.
3434
3535
All the input variables of this function are passed in as local variables
3636
to the LayerHelper constructor.

0 commit comments

Comments
 (0)