
Commit 7a6f244
[API Compatibility] support AdaptiveMaxPool*d, LPPool*d (PaddlePaddle#76285)
* update
* update
* fix bugs
* refine warning message and fix some tests
* coverage ci
1 parent 21c9e8b · commit 7a6f244

16 files changed (+385, -6 lines)
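
In short: this commit adds PyTorch-style lowercase aliases for the adaptive max pooling and LP pooling layers, plus parameter-name aliases on several existing layers. A minimal usage sketch, assuming a Paddle build that includes this commit (shapes in comments are illustrative):

    import paddle
    import paddle.nn as nn

    x = paddle.randn([1, 3, 32, 32])

    # Lowercase torch-style names now resolve to the existing Paddle layers.
    pool = nn.AdaptiveMaxPool2d(output_size=(8, 8))  # alias of AdaptiveMaxPool2D
    lp = nn.LPPool2d(norm_type=2, kernel_size=2)     # alias of LPPool2D

    print(pool(x).shape)  # [1, 3, 8, 8]
    print(lp(x).shape)    # [1, 3, 16, 16] if stride defaults to kernel_size, as in torch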

python/paddle/nn/__init__.py

Lines changed: 10 additions & 1 deletion
@@ -225,7 +225,11 @@
 Conv1d = Conv1D
 Conv2d = Conv2D
 Conv3d = Conv3D
-
+AdaptiveMaxPool1d = AdaptiveMaxPool1D
+AdaptiveMaxPool2d = AdaptiveMaxPool2D
+AdaptiveMaxPool3d = AdaptiveMaxPool3D
+LPPool2d = LPPool2D
+LPPool1d = LPPool1D
 
 __all__ = [
     'BatchNorm',

@@ -399,4 +403,9 @@
     'ZeroPad1D',
     'ZeroPad3D',
     'Parameter',
+    'AdaptiveMaxPool1d',
+    'AdaptiveMaxPool2d',
+    'AdaptiveMaxPool3d',
+    'LPPool2d',
+    'LPPool1d',
 ]
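
Since these are plain module-level assignments, each lowercase alias is the very same class object, not a wrapper. A quick sanity sketch:

    import paddle.nn as nn

    # Aliases are identical to the originals, so isinstance checks and
    # subclassing behave the same under either spelling.
    assert nn.AdaptiveMaxPool1d is nn.AdaptiveMaxPool1D
    assert nn.AdaptiveMaxPool3d is nn.AdaptiveMaxPool3D
    assert nn.LPPool2d is nn.LPPool2D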

python/paddle/nn/layer/activation.py

Lines changed: 31 additions & 0 deletions
@@ -18,6 +18,7 @@
 from typing import TYPE_CHECKING, Literal
 
 from paddle.framework import get_default_dtype
+from paddle.utils.decorator_utils import param_one_alias
 
 from .. import functional as F
 from ..initializer import Constant

@@ -159,18 +160,28 @@ class GLU(Layer):
             [-1.05778778, -0.46985325]])
     """
 
+    @param_one_alias(["axis", "dim"])
     def __init__(self, axis: int = -1, name: str | None = None) -> None:
         super().__init__()
         self._axis = axis
         self._name = name
 
+    @param_one_alias(["x", "input"])
     def forward(self, x: Tensor) -> Tensor:
         return F.glu(x, self._axis, self._name)
 
     def extra_repr(self) -> str:
         name_str = f', name={self._name}' if self._name else ''
         return f'axis={self._axis}{name_str}'
 
+    @property
+    def dim(self) -> int:
+        return self._axis
+
+    @dim.setter
+    def dim(self, value: int) -> None:
+        self._axis = value
+
 
 class GELU(Layer):
     r"""

@@ -291,18 +302,28 @@ class Hardshrink(Layer):
             [-1. , 0. , 2.50000000])
     """
 
+    @param_one_alias(["threshold", "lambd"])
     def __init__(self, threshold: float = 0.5, name: str | None = None) -> None:
         super().__init__()
         self._threshold = threshold
         self._name = name
 
+    @param_one_alias(["x", "input"])
     def forward(self, x: Tensor) -> Tensor:
         return F.hardshrink(x, self._threshold, self._name)
 
     def extra_repr(self) -> str:
         name_str = f', name={self._name}' if self._name else ''
         return f'threshold={self._threshold}{name_str}'
 
+    @property
+    def lambd(self) -> float:
+        return self._threshold
+
+    @lambd.setter
+    def lambd(self, value: float) -> None:
+        self._threshold = value
+
 
 class Hardswish(Layer):
     r"""

@@ -1011,18 +1032,28 @@ class Softshrink(Layer):
             [-0.39999998, 0. , 0. , 0.30000001])
     """
 
+    @param_one_alias(["threshold", "lambd"])
     def __init__(self, threshold: float = 0.5, name: str | None = None) -> None:
         super().__init__()
         self._threshold = threshold
         self._name = name
 
+    @param_one_alias(["x", "input"])
     def forward(self, x: Tensor) -> Tensor:
         return F.softshrink(x, self._threshold, self._name)
 
     def extra_repr(self) -> str:
         name_str = f', name={self._name}' if self._name else ''
         return f'threshold={self._threshold}{name_str}'
 
+    @property
+    def lambd(self) -> float:
+        return self._threshold
+
+    @lambd.setter
+    def lambd(self, value: float) -> None:
+        self._threshold = value
+
 
 class Softsign(Layer):
     r"""
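
The pattern in this file is uniform: a decorator accepts the torch spelling at the call site, and a property pair keeps the alias readable and writable after construction. A sketch (argument values are illustrative):

    import paddle
    import paddle.nn as nn

    # `lambd` (torch spelling) is accepted alongside `threshold` ...
    shrink = nn.Hardshrink(lambd=0.5)
    shrink.lambd = 0.3   # the property setter updates the underlying _threshold
    y = shrink(paddle.to_tensor([-1.0, 0.1, 2.5]))

    # ... and `dim` aliases `axis` on GLU; forward also accepts `input` for `x`.
    glu = nn.GLU(dim=-1)
    z = glu(input=paddle.randn([4, 6]))  # halves the last axis -> [4, 3]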

python/paddle/nn/layer/common.py

Lines changed: 59 additions & 1 deletion
@@ -18,7 +18,7 @@
 
 import paddle
 from paddle import in_dynamic_mode
-from paddle.utils.decorator_utils import param_one_alias
+from paddle.utils.decorator_utils import ParamAliasDecorator, param_one_alias
 
 from .. import functional as F
 from .layers import Layer

@@ -2250,6 +2250,7 @@ class CosineSimilarity(Layer):
             [0.65079135, 0.98058069, 1. ])
     """
 
+    @param_one_alias(["axis", "dim"])
     def __init__(self, axis: int = 1, eps: float = 1e-8) -> None:
         super().__init__()
         self._axis = axis

@@ -2261,6 +2262,14 @@ def forward(self, x1: Tensor, x2: Tensor) -> Tensor:
     def extra_repr(self) -> str:
         return 'axis={_axis}, eps={_eps}'.format(**self.__dict__)
 
+    @property
+    def dim(self) -> int:
+        return self._axis
+
+    @dim.setter
+    def dim(self, value: int) -> None:
+        self._axis = value
+
 
 class Embedding(Layer):
     r"""

@@ -2645,6 +2654,15 @@ class Fold(Layer):
     strides: Size2
     name: str | None
 
+    @ParamAliasDecorator(
+        {
+            "output_sizes": ["output_size"],
+            "kernel_sizes": ["kernel_size"],
+            "strides": ["stride"],
+            "paddings": ["padding"],
+            "dilations": ["dilation"],
+        }
+    )
     def __init__(
         self,
         output_sizes: Size2,

@@ -2678,6 +2696,46 @@ def extra_repr(self) -> str:
         name_str = f', name={self.name}' if self.name else ''
         return f'kernel_size={self.kernel_sizes}, dilation={self.dilations}, padding={self.paddings}, stride={self.strides}{name_str}'
 
+    @property
+    def output_size(self) -> Size2:
+        return self.output_sizes
+
+    @output_size.setter
+    def output_size(self, value: Size2) -> None:
+        self.output_sizes = value
+
+    @property
+    def kernel_size(self) -> Size2:
+        return self.kernel_sizes
+
+    @kernel_size.setter
+    def kernel_size(self, value: Size2) -> None:
+        self.kernel_sizes = value
+
+    @property
+    def stride(self) -> Size2:
+        return self.strides
+
+    @stride.setter
+    def stride(self, value: Size2) -> None:
+        self.strides = value
+
+    @property
+    def padding(self) -> Size2 | Size4:
+        return self.paddings
+
+    @padding.setter
+    def padding(self, value: Size2 | Size4) -> None:
+        self.paddings = value
+
+    @property
+    def dilation(self) -> Size2:
+        return self.dilations
+
+    @dilation.setter
+    def dilation(self, value: Size2) -> None:
+        self.dilations = value
+
 
 class Flatten(Layer):
     """

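ParamAliasDecorator maps each torch-style singular name onto Paddle's plural Fold parameter, and the property pairs expose the same state under both spellings. A sketch of the intended call sites (shapes chosen to be self-consistent):

    import paddle
    import paddle.nn as nn

    # `dim` aliases `axis` on CosineSimilarity.
    cos = nn.CosineSimilarity(dim=1)
    s = cos(paddle.randn([2, 8]), paddle.randn([2, 8]))

    # Singular torch names map onto Paddle's plural Fold parameters.
    fold = nn.Fold(output_size=(4, 4), kernel_size=(2, 2), stride=2)
    patches = paddle.randn([1, 3 * 2 * 2, 4])  # (N, C*kh*kw, L), L = 2*2 blocks
    img = fold(patches)                        # -> [1, 3, 4, 4]
    print(fold.kernel_size)                    # property view of `kernel_sizes`
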
python/paddle/nn/layer/distance.py

Lines changed: 20 additions & 0 deletions
@@ -16,6 +16,8 @@
 
 from typing import TYPE_CHECKING
 
+from paddle.utils.decorator_utils import param_one_alias, param_two_alias
+
 from .. import functional as F
 from .layers import Layer
 

@@ -68,6 +70,7 @@ class PairwiseDistance(Layer):
             [4.99999860, 4.99999860])
     """
 
+    @param_one_alias(["epsilon", "eps"])
     def __init__(
         self,
         p: float = 2.0,

@@ -81,6 +84,7 @@ def __init__(
         self.keepdim = keepdim
         self.name = name
 
+    @param_two_alias(["x", "x1"], ["y", "x2"])
     def forward(self, x: paddle.Tensor, y: paddle.Tensor) -> paddle.Tensor:
         return F.pairwise_distance(
             x, y, self.p, self.epsilon, self.keepdim, self.name

@@ -95,3 +99,19 @@ def extra_repr(self) -> str:
         if self.name is not None:
             main_str += ', name={name}'
         return main_str.format(**self.__dict__)
+
+    @property
+    def eps(self) -> float:
+        return self.epsilon
+
+    @eps.setter
+    def eps(self, value: float) -> None:
+        self.epsilon = value
+
+    @property
+    def norm(self) -> float:
+        return self.p
+
+    @norm.setter
+    def norm(self, value: float) -> None:
+        self.p = value
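
The net effect is a PairwiseDistance that can be driven end to end with torch's argument names; `norm` mirrors the attribute name torch uses for `p`. A sketch:

    import paddle
    import paddle.nn as nn

    # `eps` aliases `epsilon`; forward accepts torch-style `x1`/`x2`.
    dist = nn.PairwiseDistance(p=2.0, eps=1e-6)
    d = dist(x1=paddle.randn([3, 5]), x2=paddle.randn([3, 5]))  # shape [3]
    print(dist.norm)  # 2.0, property view of `p`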

python/paddle/nn/layer/pooling.py

Lines changed: 34 additions & 0 deletions
@@ -19,6 +19,7 @@
 from typing import TYPE_CHECKING
 
 from paddle.utils.decorator_utils import (
+    lp_pool_decorator,
     param_one_alias,
 )
 

@@ -444,6 +445,7 @@ class LPPool1D(Layer):
     data_format: DataLayout1D
     name: str | None
 
+    @lp_pool_decorator
     def __init__(
         self,
         norm_type: float,

@@ -463,6 +465,7 @@ def __init__(
         self.data_format = data_format
         self.name = name
 
+    @param_one_alias(["x", "input"])
     def forward(self, x: Tensor) -> Tensor:
         out = F.lp_pool1d(
             x,

@@ -559,6 +562,7 @@ class LPPool2D(Layer):
     data_format: DataLayout2D
     name: str | None
 
+    @lp_pool_decorator
     def __init__(
         self,
         norm_type: float,

@@ -578,6 +582,7 @@ def __init__(
         self.data_format = data_format
         self.name = name
 
+    @param_one_alias(["x", "input"])
     def forward(self, x: Tensor) -> Tensor:
         return F.lp_pool2d(
             x,

@@ -1276,6 +1281,7 @@ class AdaptiveMaxPool1D(Layer):
     return_mask: bool
     name: str | None
 
+    @param_one_alias(["return_mask", "return_indices"])
     def __init__(
         self,
         output_size: int,

@@ -1295,6 +1301,14 @@ def forward(self, input: Tensor) -> Tensor:
     def extra_repr(self) -> str:
         return f'output_size={self.output_size}, return_mask={self.return_mask}'
 
+    @property
+    def return_indices(self) -> bool:
+        return self.return_mask
+
+    @return_indices.setter
+    def return_indices(self, value: bool) -> None:
+        self.return_mask = value
+
 
 class AdaptiveMaxPool2D(Layer):
     """

@@ -1362,6 +1376,7 @@ class AdaptiveMaxPool2D(Layer):
             [2, 3, 3, 3]
     """
 
+    @param_one_alias(["return_mask", "return_indices"])
     def __init__(
         self,
         output_size: Size2,

@@ -1373,6 +1388,7 @@ def __init__(
         self._return_mask = return_mask
         self._name = name
 
+    @param_one_alias(["x", "input"])
     def forward(self, x: Tensor) -> Tensor:
         return F.adaptive_max_pool2d(
             x,

@@ -1386,6 +1402,14 @@ def extra_repr(self) -> str:
             f'output_size={self._output_size}, return_mask={self._return_mask}'
         )
 
+    @property
+    def return_indices(self) -> bool:
+        return self._return_mask
+
+    @return_indices.setter
+    def return_indices(self, value: bool) -> None:
+        self._return_mask = value
+
 
 class AdaptiveMaxPool3D(Layer):
     """

@@ -1464,6 +1488,7 @@ class AdaptiveMaxPool3D(Layer):
 
     """
 
+    @param_one_alias(["return_mask", "return_indices"])
     def __init__(
         self,
         output_size: Size3,

@@ -1475,6 +1500,7 @@ def __init__(
         self._return_mask = return_mask
         self._name = name
 
+    @param_one_alias(["x", "input"])
     def forward(self, x: Tensor) -> Tensor:
         return F.adaptive_max_pool3d(
             x,

@@ -1488,6 +1514,14 @@ def extra_repr(self) -> str:
             f'output_size={self._output_size}, return_mask={self._return_mask}'
         )
 
+    @property
+    def return_indices(self) -> bool:
+        return self._return_mask
+
+    @return_indices.setter
+    def return_indices(self, value: bool) -> None:
+        self._return_mask = value
+
 
 class MaxUnPool1D(Layer):
     r"""
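
A sketch of the pooling-side aliases. The exact keyword set normalized by lp_pool_decorator is not shown in this diff, so the LPPool1d call below assumes the plain Paddle signature:

    import paddle
    import paddle.nn as nn

    # `return_indices` aliases `return_mask`, at construction and as a property.
    pool = nn.AdaptiveMaxPool2D(output_size=3, return_indices=True)
    out, idx = pool(input=paddle.randn([2, 3, 9, 9]))  # forward accepts `input`
    print(out.shape)            # [2, 3, 3, 3]
    print(pool.return_indices)  # True, backed by the `_return_mask` flag

    lp = nn.LPPool1d(norm_type=2, kernel_size=3, stride=3)
    y = lp(paddle.randn([1, 4, 12]))  # NCL layout -> [1, 4, 4]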
