Skip to content

Commit 6046ab5

Browse files
committed
Add doc reference to Variable and Parameter
1 parent 0329ee7 commit 6046ab5

File tree

1 file changed

+62
-20
lines changed

1 file changed

+62
-20
lines changed

python/paddle/fluid/framework.py

Lines changed: 62 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -120,37 +120,55 @@ def _debug_string_(proto, throw_on_error=True):
120120

121121
class Variable(object):
122122
"""
123-
Python variable. Every input and output of an operator is a variable. Every
124-
variable belongs to a block. The variable has a name and two variables in
125-
different blocks could have the same name.
123+
In Fluid, every input and output of an operator is a variable. In most
124+
cases, variables are used for holding different kinds of data or training
125+
labels. A variable belongs to a block. Every variable has its own name and
126+
two variables in different blocks could have the same name.
126127
127-
There are many kinds of variables. Please reference the framework.proto for
128-
details.
128+
There are many kinds of variables. Each kind has its own attributes
129+
and usages. Please reference the framework.proto for details.
130+
131+
Most of a Variable's member variables can be set to None. It means
132+
it is not available or will be specified later.
129133
130134
Notes: The constructor of Variable should not be invoked directly. Please
131135
use `Block.create_var` to create a variable.
132136
133-
>>> cur_program = Program()
134-
>>> cur_block = cur_program.current_block()
135-
>>> new_variable = cur_block.create_var(
136-
>>> name="X", shape=[-1, 23, 48], dtype='float32')
137+
.. code-block:: python
138+
cur_program = Program()
139+
cur_block = cur_program.current_block()
140+
new_variable = cur_block.create_var(
141+
name="X", shape=[-1, 23, 48], dtype='float32')
137142
138-
Args:
139-
block(Block): The associated block. It will be passed by
140-
`Block.create_var` automatically.
143+
Member variables:
144+
block(Block): The block that the variable belongs to.
141145
type(core.VarDesc.VarType): Variable type. Please reference the
142146
framework.proto for details.
143-
shape(tuple|list|None): The shape of variable. -1 means the batch size.
147+
name(str|None): The name of the variable. If set to None, it will be
148+
generated automatically.
149+
Default: None
150+
shape(tuple|list|None): The shape of the variable. -1 means the batch size.
144151
Some kinds of variable do not contain shape, just set it to None.
145-
dtype(np.dtype|core.VarDesc.VarType|str): The data type of variable.
146-
lod_level(int): The level of lod tensor. 0 means it is not a time
152+
Default: None
153+
dtype(np.dtype|core.VarDesc.VarType|str|None): The data type of variable.
154+
Default: None
155+
lod_level(int|None): The level of lod tensor. 0 means it is not a time
147156
series data.
148-
capacity(int): The capacity of Channel variable. Ignored
157+
Default: None
158+
capacity(int|None): The capacity of Channel variable. Ignored
149159
for other types.
150-
persistable(bool): True if the variable should be saved as check point.
151-
Defaults to False.
152-
stop_gradient(bool): True if the variable will stop to calculate
153-
gradients when backward. Defaults to False.
160+
Default: None
161+
persistable(bool|None): True if the variable is persistable. A persistable
162+
variable will not be deleted after an iteration ending.
163+
Default: None
164+
error_clip(BaseErrorClipAttr|None): The error clip attributes of the
165+
corresponding gradient variable.
166+
Default: None
167+
stop_gradient(bool): True if the variable will stop calculating its
168+
gradients when backward.
169+
Default: False.
170+
is_data(bool): True if the variable is input data.
171+
Default: False
154172
"""
155173

156174
def __init__(self,
@@ -1270,6 +1288,30 @@ def list_vars(self):
12701288

12711289

12721290
class Parameter(Variable):
1291+
"""
1292+
Parameter is derived from Variable. A parameter is a persistable
1293+
Variable, and will be updated by optimizers after each iteration.
1294+
The training of a neural network is essentially the updating of
1295+
its parameters.
1296+
1297+
Compared to a general Variable, a Parameter has several of its own
1298+
member variables:
1299+
1300+
trainable(bool): True if the parameter needs to be updated after
1301+
iterations.
1302+
optimize_attr(map): Parameter attributes related with optimizing.
1303+
Currently, it only contains 'learning_rate'.
1304+
Default: {'learning_rate': 1.0}
1305+
regularizer(WeightDecayRegularizer): The Regularizer which will
1306+
be applied on the parameter.
1307+
Default: None
1308+
gradient_clip_attr(BaseGradientClipAttr): The gradient clip strategy
1309+
which will be applied on the parameter.
1310+
Default: None
1311+
do_model_average(bool): True if the model average strategy will
1312+
be applied on this parameter.
1313+
"""
1314+
12731315
def __init__(self, block, shape, dtype, **kwargs):
12741316
if shape is None or dtype is None:
12751317
raise ValueError("Parameter must set shape and dtype")

0 commit comments

Comments
 (0)