Commit cfb4617

add Doc param attr
1 parent 5ea039b commit cfb4617

File tree: 1 file changed, +84 -4 lines

python/paddle/fluid/param_attr.py

Lines changed: 84 additions & 4 deletions
@@ -22,14 +22,43 @@
 
 
 class ParamAttr(object):
+    """
+    Parameter attributes object. To fine-tune the network training process, the user
+    can set a parameter's attributes to control training details, such as the learning
+    rate, regularization, trainability, model averaging, and the method used to
+    initialize the parameter.
+
+    Args:
+        name(str): The parameter's name. Default None.
+        initializer(Initializer): The method used to initialize this parameter.
+            Default None.
+        learning_rate(float): The parameter's learning rate. The effective learning
+            rate during optimization is :math:`global\_lr * parameter\_lr * scheduler\_factor`.
+            Default 1.0.
+        regularizer(WeightDecayRegularizer): Regularization method. Default None.
+        trainable(bool): Whether this parameter is trainable. Default True.
+        gradient_clip(BaseGradientClipAttr): The method used to clip this parameter's
+            gradient. Default None.
+        do_model_average(bool): Whether this parameter should participate in model
+            averaging. Default False.
+
+    Examples:
+        .. code-block:: python
+
+            w_param_attrs = fluid.ParamAttr(name="fc_weight",
+                                            learning_rate=0.5,
+                                            regularizer=fluid.L2Decay(1.0),
+                                            trainable=True)
+            y_predict = fluid.layers.fc(input=x, size=10, param_attr=w_param_attrs)
+    """
+
     def __init__(self,
                  name=None,
                  initializer=None,
                  learning_rate=1.0,
                  regularizer=None,
                  trainable=True,
                  gradient_clip=None,
-                 do_model_average=None):
+                 do_model_average=False):
         self.name = name
         self.initializer = initializer
         self.learning_rate = learning_rate
@@ -39,6 +68,10 @@ def __init__(self,
         self.model_average = do_model_average
 
     def set_default_initializer(self, initializer):
+        """
+        Set the default initializer; the initializer should be one of Constant,
+        Uniform, Normal, Xavier, or MSRA.
+        """
         if initializer is None:
             if self.initializer is None:
                 raise ValueError("ParamAttr.initializer is not set")
@@ -50,13 +83,33 @@ def set_default_initializer(self, initializer):
         self.initializer = initializer
 
     def set_default_param_initializer(self):
+        """
+        Set the default initializer of the parameter to Xavier.
+        """
         self.set_default_initializer(Xavier())
 
     def set_default_bias_initializer(self):
+        """
+        Set the default initializer of the bias to Constant(0.0).
+        """
         self.set_default_initializer(Constant(0.0))
 
     @staticmethod
     def to_attr(arg):
+        """
+        Create ParamAttr[s].
+
+        Args:
+            arg: Arguments used to initialize ParamAttr[s]. arg's type can be
+                str, Initializer, float, WeightDecayRegularizer, BaseGradientClipAttr,
+                bool, ParamAttr, or a list of any of these types.
+
+        Returns:
+            ParamAttr[s]: ParamAttr[s] initialized with arg.
+
+        Raises:
+            TypeError: If arg cannot be used to initialize a ParamAttr.
+        """
         if arg is None:
             return ParamAttr()
         elif isinstance(arg, list) or isinstance(arg, tuple):
@@ -75,6 +128,15 @@ def to_attr(arg):
             raise TypeError("{0} cast to ParamAttr".format(type(arg)))
 
     def to_kwargs(self, with_initializer=False):
+        """
+        Return the attributes of this parameter.
+
+        Args:
+            with_initializer(bool): Whether to also include the initializer attribute.
+
+        Returns:
+            dict: The attributes of this parameter.
+        """
         kwargs = {
             'name': self.name,
             'optimize_attr': {
@@ -92,9 +154,27 @@ def to_kwargs(self, with_initializer=False):
 
 class WeightNormParamAttr(ParamAttr):
     """
-    Used for weight normalization. Any field in ParamAttr can also be set here.
-    Besides, an extra field dim can be set to indicate the dimension except
-    which to normalize.
+    Used for weight normalization. Weight normalization is a reparameterization of
+    the weight vectors in a neural network that decouples the length of those weight
+    vectors from their direction. It is implemented as described in the paper
+    `Weight Normalization: A Simple Reparameterization to Accelerate Training of
+    Deep Neural Networks <https://arxiv.org/pdf/1602.07868.pdf>`_.
+
+    Args:
+        dim(int): The dimension except which the weight is normalized. Default None.
+        kwargs: Any field in ParamAttr. Default None.
+
+    Examples:
+        .. code-block:: python
+
+            data = fluid.layers.data(name="data", shape=[3, 32, 32], dtype="float32")
+            fc = fluid.layers.fc(input=data,
+                                 size=1000,
+                                 param_attr=WeightNormParamAttr(
+                                     dim=None,
+                                     name='weight_norm_param'))
+
     """
     # List to record the parameters reparameterized by weight normalization.
     # If these parameters are treated as Variable rather than Parameter,
