Commit 8ea54e2

Add docs
1 parent 0329ee7 commit 8ea54e2

File tree

3 files changed: +646 −113 lines changed

python/paddle/fluid/average.py

Lines changed: 19 additions & 0 deletions
@@ -36,6 +36,25 @@ def _is_number_or_matrix_(var):


 class WeightedAverage(object):
+    """
+    Calculate weighted average.
+
+    The average is calculated entirely in Python. It does not
+    change Paddle's Program, nor does it do anything to modify
+    the NN model's configuration. It is purely a wrapper around
+    Python functions.
+
+    Examples:
+        .. code-block:: python
+
+            avg = fluid.average.WeightedAverage()
+            avg.add(value=2.0, weight=1)
+            avg.add(value=4.0, weight=2)
+            avg.eval()
+
+            # The result is 3.333333333.
+            # For (2.0 * 1 + 4.0 * 2) / (1 + 2) = 3.333333333
+    """
     def __init__(self):
         warnings.warn(
             "The %s is deprecated, please use fluid.metrics.Accuracy instead." %

python/paddle/fluid/backward.py

Lines changed: 59 additions & 12 deletions
@@ -147,15 +147,15 @@ def _addup_repetitive_outputs_(op_descs):
         else:
             if len(renamed_vars[var_name]) == 1:
                 new_name = var_name + "@RENAME@" + \
-                    str(var_rename_count[var_name])
+                           str(var_rename_count[var_name])
                 var_rename_count[var_name] += 1
                 # rename original var_name
                 renamed_vars[var_name][0] = new_name
                 _rename_arg_(op_descs, var_name, new_name, 0, idx)
                 _rename_arg_(pending_sum_ops, var_name, new_name)

             new_name = var_name + "@RENAME@" + \
-                str(var_rename_count[var_name])
+                       str(var_rename_count[var_name])
             var_rename_count[var_name] += 1
             op_desc.rename_output(var_name, new_name)
             renamed_vars[var_name].append(new_name)
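This hunk only re-aligns the continuation lines. The surrounding logic gives each repeated output of a variable a unique name by appending an "@RENAME@" suffix plus a counter, so the renamed copies can later be summed back together. A standalone sketch of that naming scheme, with a made-up variable name:

# Illustration of the "@RENAME@" naming scheme; "x@GRAD" is
# an example variable name, not taken from the diff.
var_rename_count = {"x@GRAD": 0}
var_name = "x@GRAD"

for _ in range(2):  # pretend two ops write the same output
    new_name = var_name + "@RENAME@" + \
               str(var_rename_count[var_name])
    var_rename_count[var_name] += 1
    print(new_name)

# Prints:
#   x@GRAD@RENAME@0
#   x@GRAD@RENAME@1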
@@ -434,18 +434,65 @@ def _get_stop_gradients_(program):
 def append_backward(loss, parameter_list=None, no_grad_set=None,
                     callbacks=None):
     """
-    Append backward part to main_program
+    Append backward part to main_program.

-    Args:
-        loss(Variable): The variable generated by cost function.
-        parameter_list(list[string]): Parameters that need to be updated by
-            optimizer. If None, it means all parameters need to be updated.
-        no_grad_set(set): Variables that have no gradients in Block 0.
-            All variables with `step_gradient=True` from all blocks will be
-            automatically added.
+    A complete neural network training is made up of forward and backward
+    propagation. However, when we configure a network, we only need to
+    specify its forward part. The backward part is generated automatically
+    from the forward part by this function.

-    Return:
-        (list[(Variable,Variable)]): list of (parameter, gradient) pair.
+    In most cases, users do not need to invoke this function manually. It
+    will be automatically invoked by the optimizer's `minimize` function.
+
+    Args:
+        loss(Variable): The loss variable of the network.
+        parameter_list(list[string]|None): Names of the parameters to be
+            updated by optimizers. If it is None, all parameters will be
+            updated. Default: None.
+        no_grad_set(set|None): Variables in Block 0 whose gradients should
+            be ignored. All variables with `stop_gradient=True` from all
+            blocks will be automatically added into this set.
+            Default: None.
+        callbacks(list[callable object]|None): Callbacks used to do custom
+            jobs during backward part building. Every callable object in
+            the list is invoked once each time a new gradient operator is
+            added into the program. A callable object must take two input
+            parameters: 'block' and 'context'. 'block' is the block to
+            which the new gradient operator will be added. 'context' is a
+            map whose keys are gradient variable names and whose values
+            are the corresponding original variables. In addition,
+            'context' has one special key-value pair: the key is the
+            string '__current_op_desc__' and the value is the op_desc of
+            the gradient operator that has just triggered the callable
+            object. Default: None.
+
+    Returns:
+        list[(Variable, Variable)]: Pairs of (parameter, gradient)
+            variables, one pair for each updated parameter.
+
+    Raises:
+        AssertionError: If `loss` is not an instance of Variable.
+
+    Examples:
+        .. code-block:: python
+
+            # network configuration code
+            # ...
+            avg_loss = fluid.layers.mean(loss)
+            param_grad_list = fluid.backward.append_backward(loss=avg_loss)
     """
     assert isinstance(loss, framework.Variable)
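The new docstring pins down the callback protocol precisely enough to sketch one. Below is a minimal example of a callback compatible with that description; the function name and print formats are hypothetical, and `block.idx`, `op_desc.type()`, and `orig_var.name` are assumptions about the objects Paddle passes in:

# Sketch of a callback following the documented (block, context) protocol.
def log_new_grad_op(block, context):
    # The special key holds the op_desc of the gradient op just added.
    op_desc = context['__current_op_desc__']
    print("new gradient op %s added to block %d" % (op_desc.type(), block.idx))
    # Every other entry maps a gradient variable name to its original variable.
    for grad_name, orig_var in context.items():
        if grad_name != '__current_op_desc__':
            print("  %s is the gradient of %s" % (grad_name, orig_var.name))

# Passed through the callbacks argument of append_backward:
param_grad_list = fluid.backward.append_backward(
    loss=avg_loss, callbacks=[log_new_grad_op])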
