@@ -120,37 +120,55 @@ def _debug_string_(proto, throw_on_error=True):
class Variable(object):
    """
-    Python variable. Every input and output of an operator is a variable. Every
-    variable belongs to a block. The variable has a name and two variables in
-    different blocks could have the same name.
+    In Fluid, every input and output of an operator is a variable. In most
+    cases, variables are used to hold different kinds of data or training
+    labels. A variable belongs to a block. Every variable has its own name, and
+    two variables in different blocks can have the same name.

-    There are many kinds of variables. Please reference the framework.proto for
-    details.
+    There are many kinds of variables. Each kind has its own attributes and
+    usages. Please refer to framework.proto for details.
+
+    Most of a Variable's member variables can be set to None, which means that
+    the member is not available yet or will be specified later.

    Notes: The constructor of Variable should not be invoked directly. Please
        use `Block.create_var` to create a variable.

-        >>> cur_program = Program()
-        >>> cur_block = cur_program.current_block()
-        >>> new_variable = cur_block.create_var(
-        >>>                  name="X", shape=[-1, 23, 48], dtype='float32')
+    .. code-block:: python
+
+        cur_program = Program()
+        cur_block = cur_program.current_block()
+        new_variable = cur_block.create_var(
+            name="X", shape=[-1, 23, 48], dtype='float32')

-    Args:
-        block(Block): The associated block. It will be passed by
-            `Block.create_var` automatically.
+    Member variables:
+        block(Block): The block that the variable belongs to.
        type(core.VarDesc.VarType): Variable type. Please reference the
            framework.proto for details.
-        shape(tuple|list|None): The shape of variable. -1 means the batch size.
+        name(str|None): The name of the variable. If set to None, it will be
+            generated automatically.
+            Default: None
+        shape(tuple|list|None): The shape of the variable. -1 means the batch size.
            Some kinds of variable do not contain shape, just set it to None.
-        dtype(np.dtype|core.VarDesc.VarType|str): The data type of variable.
-        lod_level(int): The level of lod tensor. 0 means it is not a time
+            Default: None
+        dtype(np.dtype|core.VarDesc.VarType|str|None): The data type of the variable.
+            Default: None
+        lod_level(int|None): The level of the LoD tensor. 0 means it is not time
            series data.
-        capacity(int): The capacity of Channel variable. Ignored
+            Default: None
+        capacity(int|None): The capacity of a Channel variable. Ignored
            for other types.
-        persistable(bool): True if the variable should be saved as check point.
-            Defaults to False.
-        stop_gradient(bool): True if the variable will stop to calculate
-            gradients when backward. Defaults to False.
+            Default: None
+        persistable(bool|None): True if the variable is persistable. A persistable
+            variable will not be deleted after an iteration ends.
+            Default: None
+        error_clip(BaseErrorClipAttr|None): The error clip attributes of the
+            corresponding gradient variable.
+            Default: None
+        stop_gradient(bool): True if the variable will stop calculating its
+            gradients during the backward pass.
+            Default: False
+        is_data(bool): True if the variable is input data.
+            Default: False
    """

    def __init__(self,
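
A minimal usage sketch of the docstring above (editorial, not part of the diff). It assumes the usual `import paddle.fluid as fluid` entry point and that the documented member variables are readable as plain attributes; only `Program()`, `current_block()` and `create_var(...)` come from the example in the docstring itself.

.. code-block:: python

    import paddle.fluid as fluid

    cur_program = fluid.Program()
    cur_block = cur_program.current_block()

    # Only name, shape and dtype are supplied here; the remaining member
    # variables (lod_level, capacity, error_clip, ...) are left unspecified,
    # i.e. "not available or specified later", as the docstring puts it.
    new_variable = cur_block.create_var(
        name="X", shape=[-1, 23, 48], dtype='float32')

    # Assumed attribute access on the documented member variables.
    print(new_variable.name, new_variable.shape)
    print(new_variable.persistable, new_variable.stop_gradient)
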
@@ -1306,6 +1324,30 @@ def list_vars(self):
class Parameter(Variable):
+    """
+    Parameter is derived from Variable. A parameter is a persistable
+    Variable, and it will be updated by optimizers after each iteration.
+    The training of a neural network is essentially the updating of
+    its parameters.
+
+    Relative to a general Variable, a Parameter has several member
+    variables of its own:
+
+    trainable(bool): True if the parameter needs to be updated after
+        each iteration.
+    optimize_attr(map): Parameter attributes related to optimization.
+        Currently, it only contains 'learning_rate'.
+        Default: {'learning_rate': 1.0}
+    regularizer(WeightDecayRegularizer): The regularizer which will
+        be applied to the parameter.
+        Default: None
+    gradient_clip_attr(BaseGradientClipAttr): The gradient clip strategy
+        which will be applied to the parameter.
+        Default: None
+    do_model_average(bool): True if the model average strategy will
+        be applied to this parameter.
+    """
+
    def __init__(self, block, shape, dtype, **kwargs):
        if shape is None or dtype is None:
            raise ValueError("Parameter must set shape and dtype")
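
To make the new Parameter docstring concrete, here is a hedged sketch of how parameters usually come into existence: a layer that owns weights creates them, and `ParamAttr` is the public knob for the member variables listed above. The calls `fluid.layers.data`, `fluid.layers.fc`, `fluid.ParamAttr`, `fluid.regularizer.L2Decay` and `Block.all_parameters()` are assumed from the Fluid API of the same era and are not shown in this diff.

.. code-block:: python

    import paddle.fluid as fluid

    # Parameters are normally created indirectly by layers, not by calling
    # Parameter() directly. ParamAttr maps onto the member variables
    # documented above (trainable, optimize_attr via learning_rate,
    # regularizer, ...).
    x = fluid.layers.data(name='x', shape=[13], dtype='float32')
    y = fluid.layers.fc(
        input=x,
        size=1,
        param_attr=fluid.ParamAttr(
            name='fc_w',
            learning_rate=0.5,
            regularizer=fluid.regularizer.L2Decay(1e-4),
            trainable=True))

    # Every parameter is a persistable Variable registered in the global
    # block of the program.
    for p in fluid.default_main_program().global_block().all_parameters():
        print(p.name, p.trainable, p.optimize_attr)
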