 )
 from paddle.utils.decorator_utils import (
     ParamAliasDecorator,
+    param_two_alias,
     param_two_alias_one_default,
 )

 from ..base.data_feeder import check_type, check_variable_and_dtype
 from ..common_ops_import import Variable
-from ..framework import (
-    LayerHelper,
-    core,
-)
+from ..framework import LayerHelper, convert_np_dtype_to_dtype_, core
+from .manipulation import cast
 from .math import _get_reduce_axis_with_tensor

 if TYPE_CHECKING:
     from collections.abc import Sequence

     from paddle import Tensor
+    from paddle._typing import DTypeLike

 _Interpolation: TypeAlias = Literal[
     'linear', 'higher', 'lower', 'midpoint', 'nearest'
 ]
 __all__ = []


+@param_two_alias(["x", "input"], ["axis", "dim"])
 def mean(
     x: Tensor,
     axis: int | Sequence[int] | None = None,
     keepdim: bool = False,
     name: str | None = None,
+    *,
+    dtype: DTypeLike | None = None,
+    out: Tensor | None = None,
 ) -> Tensor:
     """
     Computes the mean of the input tensor's elements along ``axis``.

     Args:
         x (Tensor): The input Tensor with data type bool, bfloat16, float16, float32,
             float64, int32, int64, complex64, complex128.
+            alias: ``input``
         axis (int|list|tuple|None, optional): The axis along which to perform mean
             calculations. ``axis`` should be int, list(int) or tuple(int). If
             ``axis`` is a list/tuple of dimension(s), mean is calculated along
@@ -69,13 +74,16 @@ def mean(
             ``axis`` or element(s) of ``axis`` is less than 0, it works the
             same way as :math:`axis + D` . If ``axis`` is None, mean is
             calculated over all elements of ``x``. Default is None.
+            alias: ``dim``
         keepdim (bool, optional): Whether to reserve the reduced dimension(s)
             in the output Tensor. If ``keepdim`` is True, the dimensions of
             the output Tensor is the same as ``x`` except in the reduced
             dimensions(it is of size 1 in this case). Otherwise, the shape of
             the output Tensor is squeezed in ``axis`` . Default is False.
         name (str|None, optional): Name for the operation (optional, default is None).
             For more information, please refer to :ref:`api_guide_Name`.
+        dtype (str|None, optional): The desired data type of the returned tensor. Default: None.
+        out (Tensor|None, optional): The output tensor. Default: None.

     Returns:
         Tensor, results of average along ``axis`` of ``x``, with the same data
@@ -110,9 +118,19 @@ def mean(
             >>> out4 = paddle.mean(x, axis=[0, 2])
             >>> print(out4.numpy())
             [ 8.5 12.5 16.5]
+            >>> out5 = paddle.mean(x, dtype='float64')
+            >>> out5
+            Tensor(shape=[], dtype=float64, place=Place(gpu:0), stop_gradient=True,
+            12.50000000)
     """
+    if dtype is not None:
+        if not isinstance(dtype, (core.VarDesc.VarType, core.DataType)):
+            dtype = convert_np_dtype_to_dtype_(dtype)
+        if x.dtype != dtype:
+            x = cast(x, dtype)
+
     if in_dynamic_or_pir_mode():
-        return _C_ops.mean(x, axis, keepdim)
+        return _C_ops.mean(x, axis, keepdim, out=out)
     else:
         reduce_all, axis = _get_reduce_axis_with_tensor(axis, x)
         check_variable_and_dtype(
@@ -146,14 +164,14 @@ def mean(
         helper = LayerHelper('mean', **locals())

         attrs = {'dim': axis, 'keep_dim': keepdim, 'reduce_all': reduce_all}
-        out = helper.create_variable_for_type_inference(x.dtype)
+        out_tensor = helper.create_variable_for_type_inference(x.dtype)
         helper.append_op(
             type='reduce_mean',
             inputs={'X': x},
-            outputs={'Out': out},
+            outputs={'Out': out_tensor},
             attrs=attrs,
         )
-        return out
+        return out_tensor


 @ParamAliasDecorator({"x": ["input"], "axis": ["dim"]})
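For reference, below is a minimal usage sketch of the ``paddle.mean`` signature introduced by this diff. It is illustrative only and not part of the patch: the ``input``/``dim`` keywords rely on the new ``@param_two_alias`` decorator, ``dtype`` casts the input before the reduction, and ``out`` is simply forwarded to ``_C_ops.mean`` in dynamic/PIR mode, so writing the result into a preallocated tensor is an assumption about that kernel rather than something the diff shows.

# Illustrative sketch (not part of the patch) exercising the new parameters.
import paddle

x = paddle.to_tensor([[1.0, 2.0, 3.0],
                      [4.0, 5.0, 6.0]])

m_all = paddle.mean(x)                    # mean over all elements
m_cols = paddle.mean(input=x, dim=0)      # alias keywords for x / axis
m_f64 = paddle.mean(x, dtype='float64')   # input is cast to float64 before reducing

# `out` is keyword-only and passed through to _C_ops.mean in dynamic/PIR mode;
# in-place use with a preallocated tensor is assumed here.
buf = paddle.empty([2])
paddle.mean(x, axis=1, out=buf)

print(m_all.item(), m_cols.numpy(), m_f64.dtype, buf.numpy())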