Commit 8ecf5dd

Authored by chengduo
Merge pull request #11553 from chengduoZH/fix_doc_param_attr
Fix ParamAttr Doc
2 parents 3f8d9b0 + 491bb6a commit 8ecf5dd

1 file changed: 102 additions, 4 deletions

python/paddle/fluid/param_attr.py

Lines changed: 102 additions & 4 deletions
@@ -22,14 +22,43 @@


 class ParamAttr(object):
+    """
+    Parameter attributes object. To fine-tune the network training process, users
+    can set a parameter's attributes to control training details such as the learning
+    rate, regularization, trainability, model averaging and the initialization method.
+
+    Args:
+        name(str): The parameter's name. Default None.
+        initializer(Initializer): The method to initialize this parameter. Default None.
+        learning_rate(float): The parameter's learning rate. The effective learning
+            rate during optimization is :math:`global\_lr * parameter\_lr * scheduler\_factor`.
+            Default 1.0.
+        regularizer(WeightDecayRegularizer): Regularization factor. Default None.
+        trainable(bool): Whether this parameter is trainable. Default True.
+        gradient_clip(BaseGradientClipAttr): The method to clip this parameter's
+            gradient. Default None.
+        do_model_average(bool): Whether this parameter should do model average.
+            Default False.
+
+    Examples:
+        .. code-block:: python
+
+            w_param_attrs = fluid.ParamAttr(name="fc_weight",
+                                            learning_rate=0.5,
+                                            regularizer=fluid.L2Decay(1.0),
+                                            trainable=True)
+            y_predict = fluid.layers.fc(input=x, size=10, param_attr=w_param_attrs)
+    """
+
     def __init__(self,
                  name=None,
                  initializer=None,
                  learning_rate=1.0,
                  regularizer=None,
                  trainable=True,
                  gradient_clip=None,
-                 do_model_average=None):
+                 do_model_average=False):
         self.name = name
         self.initializer = initializer
         self.learning_rate = learning_rate
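As a side note on the learning_rate attribute documented above, the effective rate is just the product of the three factors in the formula; a quick worked example (plain Python arithmetic, not part of this commit):

    # Worked example of the effective learning-rate formula in the new docstring.
    global_lr = 0.1          # learning rate passed to the optimizer
    parameter_lr = 0.5       # ParamAttr(learning_rate=0.5)
    scheduler_factor = 1.0   # no learning-rate scheduler in use
    effective_lr = global_lr * parameter_lr * scheduler_factor
    print(effective_lr)      # 0.05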
@@ -39,6 +68,16 @@ def __init__(self,
         self.model_average = do_model_average

     def set_default_initializer(self, initializer):
+        """
+        Set the default initializer; the initializer should be Constant,
+        Uniform, Normal, Xavier or MSRA.
+
+        Args:
+            initializer(Initializer): the initializer to set.
+
+        Returns:
+            None
+        """
         if initializer is None:
             if self.initializer is None:
                 raise ValueError("ParamAttr.initializer is not set")
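For context, a minimal usage sketch of the method documented above (illustrative only, not part of the commit; it assumes paddle.fluid and its initializer module are importable as shown):

    import paddle.fluid as fluid

    # Hypothetical usage of set_default_initializer: attach an initializer to a
    # ParamAttr after construction. Passing None when no initializer has been
    # set yet raises the ValueError shown in this hunk.
    attr = fluid.ParamAttr(name="fc_weight")
    attr.set_default_initializer(fluid.initializer.Normal(loc=0.0, scale=0.02))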
@@ -50,13 +89,45 @@ def set_default_initializer(self, initializer):
         self.initializer = initializer

     def set_default_param_initializer(self):
+        """
+        Set the default initializer for the parameter to Xavier.
+
+        Args:
+            None.
+
+        Returns:
+            None.
+        """
         self.set_default_initializer(Xavier())

     def set_default_bias_initializer(self):
+        """
+        Set the default initializer for the bias to Constant(0.0).
+
+        Args:
+            None.
+
+        Returns:
+            None.
+        """
         self.set_default_initializer(Constant(0.0))

     @staticmethod
     def to_attr(arg):
+        """
+        Create ParamAttr[s].
+
+        Args:
+            arg: Arguments used to initialize ParamAttr[s]. arg's type can be
+                str, Initializer, float, WeightDecayRegularizer, BaseGradientClipAttr,
+                bool, ParamAttr, or a list of any of these types.
+
+        Returns:
+            ParamAttr[s]: ParamAttr[s] initialized with arg.
+
+        Raises:
+            TypeError: if arg cannot be converted to a ParamAttr.
+        """
         if arg is None:
             return ParamAttr()
         elif isinstance(arg, list) or isinstance(arg, tuple):
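A short illustrative sketch of the conversions to_attr performs, limited to the branches visible in this diff (the handling of the other argument types listed in the docstring is not shown in this hunk):

    import paddle.fluid as fluid

    default_attr = fluid.ParamAttr.to_attr(None)            # a default ParamAttr
    attr_list = fluid.ParamAttr.to_attr(["fc_w", "fc_b"])   # element-wise conversion
    try:
        fluid.ParamAttr.to_attr(object())                   # unsupported type
    except TypeError as exc:
        print(exc)                                           # "... cast to ParamAttr"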
@@ -75,6 +146,15 @@ def to_attr(arg):
             raise TypeError("{0} cast to ParamAttr".format(type(arg)))

     def to_kwargs(self, with_initializer=False):
+        """
+        Returns the attributes of this parameter.
+
+        Args:
+            with_initializer(bool): Whether to include the initializer attribute.
+
+        Returns:
+            Parameter attributes(map): The attributes of this parameter.
+        """
         kwargs = {
             'name': self.name,
             'optimize_attr': {
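For reference, a hypothetical call to the method documented above; only the 'name' and 'optimize_attr' keys are visible in this diff, so the contents of the rest of the returned map are an assumption:

    import paddle.fluid as fluid

    w = fluid.ParamAttr(name="fc_weight", learning_rate=0.5)
    kwargs = w.to_kwargs(with_initializer=False)
    print(kwargs['name'])            # 'fc_weight'
    print(kwargs['optimize_attr'])   # assumed to carry the per-parameter learning rate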
@@ -92,9 +172,27 @@ def to_kwargs(self, with_initializer=False):

 class WeightNormParamAttr(ParamAttr):
     """
-    Used for weight normalization. Any field in ParamAttr can also be set here.
-    Besides, an extra field dim can be set to indicate the dimension except
-    which to normalize.
+    Used for weight normalization. Weight normalization is a reparameterization
+    of the weight vectors in a neural network that decouples the length of those
+    weight vectors from their direction. It is implemented as described in the
+    paper `Weight Normalization: A Simple Reparameterization to Accelerate
+    Training of Deep Neural Networks
+    <https://arxiv.org/pdf/1602.07868.pdf>`_.
+
+    Args:
+        dim(int): The dimension except which the weight is normalized. Default None.
+        kwargs: Any field in ParamAttr can also be set here. Default None.
+
+    Examples:
+        .. code-block:: python
+
+            data = fluid.layers.data(name="data", shape=[3, 32, 32], dtype="float32")
+            fc = fluid.layers.fc(input=data,
+                                 size=1000,
+                                 param_attr=WeightNormParamAttr(
+                                     dim=None,
+                                     name='weight_norm_param'))
+
     """
     # List to record the parameters reparameterized by weight normalization.
     # If these parameters are treated as Variable rather than Parameter,
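To make the reparameterization described in the new WeightNormParamAttr docstring concrete, here is a minimal NumPy sketch of the formula from the cited paper, w = g * v / ||v|| (illustrative only, with made-up shapes; not part of this commit):

    import numpy as np

    # Weight normalization: decouple the magnitude (g) from the direction (v).
    v = np.random.randn(1000, 3 * 32 * 32).astype("float32")   # direction, one row per output unit
    g = np.ones((1000, 1), dtype="float32")                    # learned magnitudes
    w = g * v / np.linalg.norm(v, axis=1, keepdims=True)       # reparameterized weight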
