
Commit 94a0e98
[CodeStyle][Xdoctest][6,13,81,84,142,144,146] Fix example code(paddle.Tensor.(angle,flatten),paddle.incubate.nn.(FusedFeedForward,FusedTransformerEncoderLayer),paddle.nn.(Transformer,TransformerDecoderLayer,TransformerEncoderLayer)) (PaddlePaddle#76563)
--------- Co-authored-by: Nyakku Shigure <[email protected]>
1 parent 242fc4e commit 94a0e98

File tree: 8 files changed, +57 −42 lines
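The docstring hunks below share two fixes: in the public-API docstrings the example directive switches from `.. code-block:: python` to `.. code-block:: pycon`, and the expected output of `print(tensor.shape)` changes from a plain list to the `paddle.Size(...)` repr; the jit/sot hunks only reformat existing doctests. A minimal sketch of the target docstring shape (`some_layer` is an illustrative name, not taken from any file below):

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> out = some_layer(paddle.rand((2, 4, 128)))  # some_layer is hypothetical
            >>> print(out.shape)
            paddle.Size([2, 4, 128])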

python/paddle/incubate/nn/layer/fused_transformer.py

Lines changed: 4 additions & 4 deletions

@@ -577,7 +577,7 @@ class FusedFeedForward(Layer):
             this property. For more information, please refer to :ref:`api_guide_Name`.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> # doctest: +REQUIRES(env:GPU)
            >>> import paddle
@@ -588,7 +588,7 @@ class FusedFeedForward(Layer):
            >>> x = paddle.rand((1, 8, 8))
            >>> out = fused_feedforward_layer(x)
            >>> print(out.shape)
-            [1, 8, 8]
+            paddle.Size([1, 8, 8])
    """

    name: str | None
@@ -789,7 +789,7 @@ class FusedTransformerEncoderLayer(Layer):


    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> # doctest: +REQUIRES(env:GPU)
            >>> import paddle
@@ -803,7 +803,7 @@ class FusedTransformerEncoderLayer(Layer):
            >>> encoder_layer = FusedTransformerEncoderLayer(128, 2, 512)
            >>> enc_output = encoder_layer(enc_input, attn_mask)
            >>> print(enc_output.shape)
-            [2, 4, 128]
+            paddle.Size([2, 4, 128])

    """

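For context, the complete updated FusedFeedForward example reads roughly as sketched below; the import path follows the commit title, while the constructor call FusedFeedForward(8, 8) sits outside the hunk and is an assumption inferred from the (1, 8, 8) input:

    >>> # doctest: +REQUIRES(env:GPU)
    >>> import paddle
    >>> from paddle.incubate.nn import FusedFeedForward
    >>> fused_feedforward_layer = FusedFeedForward(8, 8)  # assumed: d_model=8, dim_feedforward=8
    >>> x = paddle.rand((1, 8, 8))
    >>> out = fused_feedforward_layer(x)
    >>> print(out.shape)
    paddle.Size([1, 8, 8])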
python/paddle/jit/dy2static/transformers/base.py

Lines changed: 2 additions & 1 deletion

@@ -102,7 +102,8 @@ class ForLoopTuplePreTransformer(BaseTransformer):

    will be changed into :

-        >>> UUID_iterator = _jst.Indexable(B)  # make iterator-only to indexable list.
+        >>> # make iterator-only to indexable list.
+        >>> UUID_iterator = _jst.Indexable(B)
        >>> for UUID_target in UUID_iterator:
        >>>     A = _jst.Unpack(UUID_target, structure)
        >>>     C

python/paddle/jit/dy2static/utils.py

Lines changed: 6 additions & 4 deletions

@@ -476,16 +476,18 @@ def wrap_as_closure(tree: gast.AST, closure_vars: list[str]) -> gast.AST:

    Before:

-        >>> def fn(x):
-        ...     ...
+        >>> def fn(x): ...

    After:

        >>> def create_fn():
        ...     closure_var_1 = None
-        ...     def fn(x):
-        ...         ...
+        ...
+        ...     def fn(x): ...
+        ...
        ...     return fn
+        ...
+        ...
        ... fn = create_fn()
    """


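As a plain-Python sketch of the closure pattern the After snippet describes (the names create_fn, closure_var_1, and fn come from the docstring; the actual AST rewrite is done by wrap_as_closure and is not reproduced here):

    def create_fn():
        closure_var_1 = None  # cell for a variable captured from the enclosing scope

        def fn(x):
            # the wrapped body reads the captured variable through the closure
            return (x, closure_var_1)

        return fn

    fn = create_fn()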
python/paddle/jit/sot/opcode_translator/executor/dispatcher.py

Lines changed: 2 additions & 7 deletions

@@ -200,9 +200,7 @@ class Dispatcher:

    Examples:

-        >>> def builtin_add(a: int, b: int) -> int:
-        ...     ...
-        ...
+        >>> def builtin_add(a: int, b: int) -> int: ...
        >>> Dispatcher.register(builtin_add, ("int", "int"), lambda a, b: a + b)
        >>> handler = Dispatcher.dispatch(builtin_add, 1, 2)
        >>> handler(1, 2)
@@ -250,13 +248,10 @@ def register_decorator(cls, fn: Callable[..., Any]):
            fn: The function to be registered.

        Examples:
-            >>> def builtin_add(a: int, b: int) -> int:
-            ...     ...
-            ...
+            >>> def builtin_add(a: int, b: int) -> int: ...
            >>> @Dispatcher.register_decorator(builtin_add)
            ... def builtin_add_dispatcher(a: int, b: int) -> int:
            ...     return a + b
-            ...
            >>> handler = Dispatcher.dispatch(builtin_add, 1, 2)
            >>> handler(1, 2)
            3

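Assuming the module path mirrors the file location (only the repository path is shown here), the shortened doctest corresponds to a standalone snippet along these lines:

    from paddle.jit.sot.opcode_translator.executor.dispatcher import Dispatcher

    def builtin_add(a: int, b: int) -> int: ...  # stub body: only the signature matters

    # register a handler for (int, int), then dispatch on concrete arguments
    Dispatcher.register(builtin_add, ("int", "int"), lambda a, b: a + b)
    handler = Dispatcher.dispatch(builtin_add, 1, 2)
    print(handler(1, 2))  # 3, per the doctest above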
python/paddle/jit/sot/opcode_translator/executor/tracker.py

Lines changed: 4 additions & 1 deletion

@@ -158,7 +158,10 @@ class DanglingTracker(Tracker):

    Examples:
        >>> import operator
-        >>> from sot.opcode_translator.executor.variables import BuiltinVariable, ConstantVariable
+        >>> from sot.opcode_translator.executor.variables import (
+        ...     BuiltinVariable,
+        ...     ConstantVariable,
+        ... )
        >>> a = ConstantVariable.wrap_literal(1, None)
        >>> b = ConstantVariable.wrap_literal(2, None)
        >>> c = BuiltinVariable(operator.add, None, DanglingTracker())(a, b)

python/paddle/nn/layer/transformer.py

Lines changed: 16 additions & 15 deletions

@@ -608,7 +608,7 @@ class TransformerEncoderLayer(Layer):

    Examples:

-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> from paddle.nn import TransformerEncoderLayer
@@ -620,7 +620,7 @@ class TransformerEncoderLayer(Layer):
            >>> encoder_layer = TransformerEncoderLayer(128, 2, 512)
            >>> enc_output = encoder_layer(enc_input, attn_mask)
            >>> print(enc_output.shape)
-            [2, 4, 128]
+            paddle.Size([2, 4, 128])
    """

    activation: Layer
@@ -972,7 +972,7 @@ class TransformerDecoderLayer(Layer):

    Examples:

-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> from paddle.nn import TransformerDecoderLayer
@@ -986,12 +986,11 @@ class TransformerDecoderLayer(Layer):
            >>> # cross attention mask: [batch_size, n_head, tgt_len, src_len]
            >>> cross_attn_mask = paddle.rand((2, 2, 4, 6))
            >>> decoder_layer = TransformerDecoderLayer(128, 2, 512)
-            >>> output = decoder_layer(dec_input,
-            ...                        enc_output,
-            ...                        self_attn_mask,
-            ...                        cross_attn_mask)
+            >>> output = decoder_layer(
+            ...     dec_input, enc_output, self_attn_mask, cross_attn_mask
+            ... )
            >>> print(output.shape)
-            [2, 4, 128]
+            paddle.Size([2, 4, 128])
    """

    normalize_before: bool
@@ -1498,7 +1497,7 @@ class Transformer(Layer):

    Examples:

-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> from paddle.nn import Transformer
@@ -1514,13 +1513,15 @@ class Transformer(Layer):
            >>> # memory_mask: [batch_size, n_head, tgt_len, src_len]
            >>> cross_attn_mask = paddle.rand((2, 2, 6, 4))
            >>> transformer = Transformer(128, 2, 4, 4, 512)
-            >>> output = transformer(enc_input,
-            ...                      dec_input,
-            ...                      enc_self_attn_mask,
-            ...                      dec_self_attn_mask,
-            ...                      cross_attn_mask)
+            >>> output = transformer(
+            ...     enc_input,
+            ...     dec_input,
+            ...     enc_self_attn_mask,
+            ...     dec_self_attn_mask,
+            ...     cross_attn_mask,
+            ... )
            >>> print(output.shape)
-            [2, 6, 128]
+            paddle.Size([2, 6, 128])
    """

    encoder: Layer

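The encoder-layer hunks omit the input setup; the full example presumably looks like the sketch below, where the input shapes (batch 2, sequence length 4, d_model 128, 2 heads) are inferred from the constructor arguments and the printed output rather than copied from the file:

    >>> import paddle
    >>> from paddle.nn import TransformerEncoderLayer

    >>> # encoder input: [batch_size, src_len, d_model] (assumed shape)
    >>> enc_input = paddle.rand((2, 4, 128))
    >>> # self-attention mask: [batch_size, n_head, src_len, src_len] (assumed shape)
    >>> attn_mask = paddle.rand((2, 2, 4, 4))
    >>> encoder_layer = TransformerEncoderLayer(128, 2, 512)
    >>> enc_output = encoder_layer(enc_input, attn_mask)
    >>> print(enc_output.shape)
    paddle.Size([2, 4, 128])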
python/paddle/tensor/manipulation.py

Lines changed: 9 additions & 4 deletions

@@ -2035,18 +2035,23 @@ def flatten(

    Examples:

-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle

-            >>> image_shape=(2, 3, 4, 4)
+            >>> image_shape = (2, 3, 4, 4)

-            >>> x = paddle.arange(end=image_shape[0] * image_shape[1] * image_shape[2] * image_shape[3])
+            >>> x = paddle.arange(
+            ...     end=image_shape[0]
+            ...     * image_shape[1]
+            ...     * image_shape[2]
+            ...     * image_shape[3]
+            ... )
            >>> img = paddle.reshape(x, image_shape)

            >>> out = paddle.flatten(img, start_axis=1, stop_axis=2)
            >>> print(out.shape)
-            [2, 12, 4]
+            paddle.Size([2, 12, 4])

            >>> # out shares data with img in dygraph mode
            >>> img[0, 0, 0, 0] = -1

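For intuition, flattening axes 1 through 2 of the (2, 3, 4, 4) image merges the 3 and the first 4 into a single axis of 12; a quick sketch of the equivalent reshape (out_equiv is an illustrative name, not part of the docstring):

    >>> out_equiv = paddle.reshape(img, (2, 3 * 4, 4))
    >>> print(out_equiv.shape)  # same shape as paddle.flatten(img, start_axis=1, stop_axis=2)
    paddle.Size([2, 12, 4])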
python/paddle/tensor/math.py

Lines changed: 14 additions & 6 deletions

@@ -5880,19 +5880,27 @@ def angle(x: Tensor, name: str | None = None) -> Tensor:
        Tensor: An N-D Tensor of real data type with the same precision as that of x's data type.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle

-            >>> x = paddle.to_tensor([-2, -1, 0, 1]).unsqueeze(-1).astype('float32')
+            >>> x = (
+            ...     paddle.to_tensor([-2, -1, 0, 1])
+            ...     .unsqueeze(-1)
+            ...     .astype('float32')
+            ... )
            >>> y = paddle.to_tensor([-2, -1, 0, 1]).astype('float32')
            >>> z = x + 1j * y
            >>> z
            Tensor(shape=[4, 4], dtype=complex64, place=Place(cpu), stop_gradient=True,
-            [[(-2-2j), (-2-1j), (-2+0j), (-2+1j)],
-             [(-1-2j), (-1-1j), (-1+0j), (-1+1j)],
-             [-2j    , -1j    ,  0j    ,  1j    ],
-             [ (1-2j),  (1-1j),  (1+0j),  (1+1j)]])
+            [[(-2.00000000-2.00000000j), (-2.00000000-1.00000000j),
+              (-2.00000000+0.00000000j), (-2.00000000+1.00000000j)],
+             [(-1.00000000-2.00000000j), (-1.00000000-1.00000000j),
+              (-1.00000000+0.00000000j), (-1.00000000+1.00000000j)],
+             [(0.00000000-2.00000000j) , (0.00000000-1.00000000j) ,
+              (0.00000000+0.00000000j), (0.00000000+1.00000000j)],
+             [ (1.00000000-2.00000000j), (1.00000000-1.00000000j),
+              (1.00000000+0.00000000j), (1.00000000+1.00000000j)]])

            >>> theta = paddle.angle(z)
            >>> theta

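Element-wise, paddle.angle returns atan2(imag(z), real(z)); a quick sanity check of one entry using Python's math module (plain math, not Paddle output):

    >>> import math
    >>> math.atan2(-2.0, -2.0)  # angle of (-2 - 2j), i.e. -3*pi/4
    -2.356194490192345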
0 commit comments