
Commit 86ca31a

CaiZixin and swtkiwi authored

English API Docs Optimization Part 1 (#24536)

* test=develop, test=document_fix
* test=develop, test=document_fix

Co-authored-by: swtkiwi <[email protected]>

1 parent 2d0f849 commit 86ca31a

22 files changed: +622 additions, -1 deletion

python/paddle/fluid/backward.py

Lines changed: 4 additions & 0 deletions

@@ -1196,6 +1196,8 @@ def append_backward(loss,
                     callbacks=None,
                     checkpoints=None):
     """
+    :api_attr: Static Graph
+
     This function appends backward part to main_program.

     A complete neural network training is made up of forward and backward
@@ -1724,6 +1726,8 @@ def calc_gradient(targets, inputs, target_gradients=None, no_grad_set=None):

 def gradients(targets, inputs, target_gradients=None, no_grad_set=None):
     """
+    :api_attr: Static Graph
+
     Backpropagate the gradients of targets to inputs.

     Args:
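
For reference, a minimal sketch of how the two static-graph APIs documented above are called; the small network, names, and shapes are illustrative assumptions, not part of this diff:

import paddle.fluid as fluid

# Build a small static-graph network (shapes are illustrative).
x = fluid.data(name='x', shape=[None, 13], dtype='float32')
y = fluid.data(name='y', shape=[None, 1], dtype='float32')
pred = fluid.layers.fc(input=x, size=1)
loss = fluid.layers.mean(fluid.layers.square_error_cost(input=pred, label=y))

# append_backward appends the backward ops to the main program and
# returns the (parameter, gradient) variable pairs it created.
param_grads = fluid.backward.append_backward(loss)

# gradients backpropagates from targets to inputs and returns the
# gradient variables of the inputs.
x_grad = fluid.gradients(targets=[loss], inputs=[x])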

python/paddle/fluid/clip.py

Lines changed: 14 additions & 0 deletions

@@ -158,6 +158,10 @@ def _create_operators(self, param, grad):

 class GradientClipByValue(GradientClipBase):
     """
+    :alias_main: paddle.nn.GradientClipByValue
+    :alias: paddle.nn.GradientClipByValue,paddle.nn.clip.GradientClipByValue
+    :old_api: paddle.fluid.clip.GradientClipByValue
+
     Limit the value of multi-dimensional Tensor :math:`X` to the range [min, max].

     - Any values less than min are set to ``min``.
@@ -296,6 +300,10 @@ def _create_operators(self, param, grad):

 class GradientClipByNorm(GradientClipBase):
     """
+    :alias_main: paddle.nn.GradientClipByNorm
+    :alias: paddle.nn.GradientClipByNorm,paddle.nn.clip.GradientClipByNorm
+    :old_api: paddle.fluid.clip.GradientClipByNorm
+
     Limit the l2 norm of multi-dimensional Tensor :math:`X` to ``clip_norm`` .

     - If the l2 norm of :math:`X` is greater than ``clip_norm`` , :math:`X` will be compressed by a ratio.
@@ -447,6 +455,10 @@ def _create_operators(self, param, grad):

 class GradientClipByGlobalNorm(GradientClipBase):
     """
+    :alias_main: paddle.nn.GradientClipByGlobalNorm
+    :alias: paddle.nn.GradientClipByGlobalNorm,paddle.nn.clip.GradientClipByGlobalNorm
+    :old_api: paddle.fluid.clip.GradientClipByGlobalNorm
+
     Given a list of Tensor :math:`t\_list` , calculate the global norm for the elements of all tensors in
     :math:`t\_list` , and limit it to ``clip_norm`` .

@@ -691,6 +703,8 @@ def _create_operators(self, param, grad):
 @framework.dygraph_not_support
 def set_gradient_clip(clip, param_list=None, program=None):
     """
+    :api_attr: Static Graph
+
     Warning:

         This API must be used after building network, and before ``minimize`` ,
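
A minimal sketch of the clipping workflow these docstrings describe; the tiny network and clip thresholds are illustrative assumptions:

import paddle.fluid as fluid

x = fluid.data(name='x', shape=[None, 784], dtype='float32')
loss = fluid.layers.mean(fluid.layers.fc(input=x, size=10))

# Element-wise clipping: keep every gradient value inside [-1, 1].
value_clip = fluid.clip.GradientClipByValue(min=-1.0, max=1.0)

# Global-norm clipping: rescale all gradients so their joint l2 norm <= 1.0.
global_clip = fluid.clip.GradientClipByGlobalNorm(clip_norm=1.0)

# set_gradient_clip must be called after the network is built and
# before minimize, as the warning above states.
fluid.clip.set_gradient_clip(global_clip)
fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)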

python/paddle/fluid/compiler.py

Lines changed: 2 additions & 0 deletions

@@ -86,6 +86,8 @@ def _has_optimizer_in_control_flow(program):

 class CompiledProgram(object):
     """
+    :api_attr: Static Graph
+
     The CompiledProgram is used to transform a program or graph for
     various optimizations according to the configuration of build_strategy,
     for example, the operators' fusion in the computation graph, memory
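
A minimal sketch of wrapping a program in CompiledProgram; the network, feed data, and shapes are illustrative assumptions:

import numpy as np
import paddle.fluid as fluid

x = fluid.data(name='x', shape=[None, 10], dtype='float32')
loss = fluid.layers.mean(fluid.layers.fc(input=x, size=1))

exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())

# Compile the default main program so graph-level optimizations (e.g. op
# fusion) can be applied; with_data_parallel() would additionally enable
# multi-device execution.
compiled_prog = fluid.CompiledProgram(fluid.default_main_program())

out, = exe.run(compiled_prog,
               feed={'x': np.random.rand(4, 10).astype('float32')},
               fetch_list=[loss.name])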

python/paddle/fluid/data.py

Lines changed: 5 additions & 0 deletions

@@ -24,6 +24,11 @@

 def data(name, shape, dtype='float32', lod_level=0):
     """
+    :api_attr: Static Graph
+    :alias_main: paddle.nn.data
+    :alias: paddle.nn.data,paddle.nn.input.data
+    :old_api: paddle.fluid.data
+
     **Data Layer**

     This function creates a variable on the global block. The global variable
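
A minimal sketch of declaring and feeding such a variable; the name and shape are illustrative assumptions:

import numpy as np
import paddle.fluid as fluid

# None marks the batch dimension, whose size is unknown until runtime.
x = fluid.data(name='x', shape=[None, 2, 25], dtype='float32')
y = fluid.layers.reduce_sum(x)  # some downstream computation

exe = fluid.Executor(fluid.CPUPlace())
out, = exe.run(feed={'x': np.ones((8, 2, 25), dtype='float32')},
               fetch_list=[y])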

python/paddle/fluid/data_feed_desc.py

Lines changed: 2 additions & 0 deletions

@@ -20,6 +20,8 @@

 class DataFeedDesc(object):
     """
+    :api_attr: Static Graph
+
     Datafeed descriptor, describing input training data format. This class is
     currently only used for AsyncExecutor (See comments for class AsyncExecutor
     for a brief introduction)
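
A hedged sketch of constructing a descriptor; the proto file path and the single-slot layout are illustrative assumptions, following the MultiSlotDataFeed text format:

import paddle.fluid as fluid

# Write a minimal protobuf text description of one sparse slot
# (hypothetical file and slot name).
with open('data.proto', 'w') as f:
    f.write('name: "MultiSlotDataFeed"\n'
            'batch_size: 2\n'
            'multi_slot_desc {\n'
            '    slots {\n'
            '        name: "words"\n'
            '        type: "uint64"\n'
            '        is_dense: false\n'
            '        is_used: true\n'
            '    }\n'
            '}')

data_feed = fluid.DataFeedDesc('data.proto')
data_feed.set_batch_size(128)  # fields can be overridden after loading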

python/paddle/fluid/data_feeder.py

Lines changed: 2 additions & 0 deletions

@@ -211,6 +211,8 @@ def __call__(self):

 class DataFeeder(object):
     """
+    :api_attr: Static Graph
+
     DataFeeder converts the data that returned by a reader into a data
     structure that can feed into Executor. The reader is usually a
     python generator that returns a list of mini-batch data entries.
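
A minimal sketch of the conversion DataFeeder performs; the two-field sample layout and shapes are illustrative assumptions:

import numpy as np
import paddle.fluid as fluid

img = fluid.data(name='image', shape=[None, 784], dtype='float32')
label = fluid.data(name='label', shape=[None, 1], dtype='int64')

feeder = fluid.DataFeeder(feed_list=[img, label], place=fluid.CPUPlace())

# One mini-batch as a reader would yield it: a list of per-sample tuples.
batch = [(np.random.rand(784).astype('float32'),
          np.array([1], dtype='int64')) for _ in range(8)]

# feed() returns the {name: LoDTensor} dict that Executor.run accepts.
feed_dict = feeder.feed(batch)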

python/paddle/fluid/dataset.py

Lines changed: 2 additions & 0 deletions

@@ -728,6 +728,8 @@ def global_shuffle(self, fleet=None, thread_num=12):

     def release_memory(self):
         """
+        :api_attr: Static Graph
+
         Release InMemoryDataset memory data, when data will not be used again.

         Examples:
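
A hedged sketch of where release_memory fits in the InMemoryDataset life cycle; 'data.txt' and the elided training step are illustrative, and real use also needs the dataset's slot/variable configuration:

import paddle.fluid as fluid

dataset = fluid.DatasetFactory().create_dataset('InMemoryDataset')
dataset.set_filelist(['data.txt'])  # hypothetical input file
dataset.load_into_memory()
# ... run training, e.g. exe.train_from_dataset(...) ...
dataset.release_memory()  # free the in-memory copy once done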

python/paddle/fluid/dygraph/base.py

Lines changed: 14 additions & 0 deletions

@@ -111,6 +111,10 @@ def enabled():

 def enable_dygraph(place=None):
     """
+    :alias_main: paddle.enable_dygraph
+    :alias: paddle.enable_dygraph,paddle.enable_imperative.enable_dygraph
+    :old_api: paddle.fluid.dygraph.base.enable_dygraph
+
     This function enables dynamic graph mode.

     Parameters:
@@ -141,6 +145,10 @@ def enable_dygraph(place=None):

 def disable_dygraph():
     """
+    :alias_main: paddle.disable_dygraph
+    :alias: paddle.disable_dygraph,paddle.disable_imperative.disable_dygraph
+    :old_api: paddle.fluid.dygraph.base.disable_dygraph
+
     This function disables dynamic graph mode.

     return:
@@ -178,6 +186,8 @@ def _switch_tracer_mode_guard_(is_train=True):

 def no_grad(func=None):
     """
+    :api_attr: imperative
+
     Create a context which disables dygraph gradient calculation.
     In this mode, the result of every computation will have `stop_gradient=True`.

@@ -236,6 +246,8 @@ def __impl__(func, *args, **kwargs):
 @signature_safe_contextmanager
 def guard(place=None):
     """
+    :api_attr: imperative
+
     This context will create a dygraph context for dygraph to run, using python ``with`` statement.

     Parameters:
@@ -520,6 +532,8 @@ def check_in_out(in_out_list, name):
 @framework.dygraph_only
 def to_variable(value, name=None, zero_copy=None):
     """
+    :api_attr: imperative
+
     The API will create a ``Variable`` or ``ComplexVariable`` object from
     numpy\.ndarray, Variable or ComplexVariable object.
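
Taken together, a minimal sketch of the imperative-mode helpers documented above; the array and the arithmetic are illustrative assumptions:

import numpy as np
import paddle.fluid as fluid

data = np.ones([2, 3], dtype='float32')

# guard() opens a dygraph context for the enclosed code.
with fluid.dygraph.guard():
    x = fluid.dygraph.to_variable(data)  # numpy -> Variable
    y = x * 2

    # Inside no_grad, results carry stop_gradient=True.
    with fluid.dygraph.no_grad():
        z = x + 1
    print(z.stop_gradient)  # True

# The function pair toggles the mode without a with-block.
fluid.enable_dygraph()
v = fluid.dygraph.to_variable(data)
fluid.disable_dygraph()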

python/paddle/fluid/dygraph/checkpoint.py

Lines changed: 4 additions & 0 deletions

@@ -32,6 +32,8 @@
 @dygraph_only
 def save_dygraph(state_dict, model_path):
     '''
+    :api_attr: imperative
+
     Save Layer's state_dict to disk. This will generate a file with suffix ".pdparams"

     The state_dict is get from Layers.state_dict function
@@ -95,6 +97,8 @@ def save_dygraph(state_dict, model_path):
 @dygraph_only
 def load_dygraph(model_path, keep_name_table=False):
     '''
+    :api_attr: imperative
+
     Load parameter state_dict from disk.

     Args:
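
A minimal sketch of the save/load round trip; the Embedding layer and the 'paddle_dy' prefix are illustrative assumptions:

import paddle.fluid as fluid

with fluid.dygraph.guard():
    emb = fluid.dygraph.Embedding([10, 10])

    # save_dygraph adds the '.pdparams' suffix to the given prefix.
    fluid.save_dygraph(emb.state_dict(), 'paddle_dy')

    # load_dygraph returns (param_dict, optimizer_dict); the second item
    # is None when no optimizer state was saved under the same prefix.
    param_state, opt_state = fluid.load_dygraph('paddle_dy')
    emb.set_dict(param_state)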

python/paddle/fluid/dygraph/jit.py

Lines changed: 2 additions & 0 deletions

@@ -203,6 +203,8 @@ def _trace(layer,

 class TracedLayer(object):
     """
+    :api_attr: imperative
+
     TracedLayer is used to convert a forward dygraph model to a static
     graph model. This is mainly used to save the dygraph model for online
     inference using C++. Besides, users can also do inference in Python
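
A minimal sketch of tracing a dygraph layer to a static graph; the layer definition, input shape, and save path are illustrative assumptions:

import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import Linear, TracedLayer

class ExampleLayer(fluid.dygraph.Layer):  # illustrative model
    def __init__(self):
        super(ExampleLayer, self).__init__()
        self._fc = Linear(3, 10)

    def forward(self, input):
        return self._fc(input)

with fluid.dygraph.guard():
    layer = ExampleLayer()
    in_var = fluid.dygraph.to_variable(
        np.random.random([2, 3]).astype('float32'))

    # trace() runs the layer once and records the executed static graph.
    out, static_layer = TracedLayer.trace(layer, inputs=[in_var])

    # The traced program can then be saved for C++ inference.
    static_layer.save_inference_model(dirname='./saved_infer_model')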
