Commit 7bbeeb5

cherry-pick fix output padding conv (#33587)
* cherry-pick fix_output_padding_conv
* add repr unittest for conv
1 parent 63aeb02 commit 7bbeeb5
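
For context, a minimal dygraph snippet exercising the path this commit touches. It is adapted from the unit test added below; the tensor shape and layer arguments come from that test, and the expected behavior (repr includes output_padding, forward succeeds) is a sketch rather than a spec.

    import paddle
    import paddle.nn as nn

    # Run in dygraph mode so the layer can be called directly.
    paddle.disable_static()
    x = paddle.uniform((2, 4, 8, 8), dtype='float32', min=-1., max=1.)
    conv = nn.Conv2DTranspose(4, 6, (3, 3), output_padding=1, stride=2)
    print(conv)       # extra_repr should include output_padding=1
    y = conv(x)       # forward pass should honor the stored output_padding
    print(y.shape)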

File tree

2 files changed: +19 -6 lines changed

python/paddle/fluid/tests/unittests/test_conv2d_transpose_op.py

Lines changed: 13 additions & 0 deletions

@@ -18,6 +18,7 @@
 import numpy as np
 
 import paddle
+import paddle.nn as nn
 paddle.enable_static()
 import paddle.fluid.core as core
 import paddle.fluid as fluid
@@ -898,5 +899,17 @@ def attr_padding_with_data_format():
         self.assertRaises(ValueError, attr_padding_with_data_format)
 
 
+class TestConv2DTransposeRepr(unittest.TestCase):
+    def test_case(self):
+        paddle.disable_static()
+        x_var = paddle.uniform((2, 4, 8, 8), dtype='float32', min=-1., max=1.)
+        conv = nn.Conv2DTranspose(4, 6, (3, 3), output_padding=1, stride=2)
+        print(conv)
+        y_var = conv(x_var)
+        y_np = y_var.numpy()
+        self.assertIsNotNone(y_np)
+        paddle.enable_static()
+
+
 if __name__ == '__main__':
     unittest.main()
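
As a quick sanity check on the new test (an aside, not part of the commit): assuming the standard transposed-convolution output-size formula out = (in - 1) * stride - 2 * padding + kernel_size + output_padding, the 8x8 input above with stride=2, kernel (3, 3), padding=0 and output_padding=1 gives (8 - 1) * 2 + 3 + 1 = 18, so y_var should have shape [2, 6, 18, 18]; the test itself only asserts that the forward pass returns a value.

    # Hypothetical helper (not in the test) spelling out that arithmetic;
    # dilation=1 is assumed.
    def deconv_out_size(in_size, kernel, stride=1, padding=0, output_padding=0):
        return (in_size - 1) * stride - 2 * padding + kernel + output_padding

    print(deconv_out_size(8, 3, stride=2, output_padding=1))  # 18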

python/paddle/nn/layer/conv.py

Lines changed: 6 additions & 6 deletions

@@ -98,7 +98,7 @@ def __init__(self,
             'kernel_size')
         self._padding = padding
         self._padding_mode = padding_mode
-        self._output_padding = output_padding
+        self.output_padding = output_padding
         if dims != 1:
             self._updated_padding, self._padding_algorithm = _update_padding_nd(
                 padding, channel_last, dims)
@@ -163,8 +163,8 @@ def extra_repr(self):
         main_str += ', padding={_padding}'
         if self._padding_mode is not 'zeros':
             main_str += ', padding_mode={_padding_mode}'
-        if self._output_padding != 0:
-            main_str += ', output_padding={_output_padding}'
+        if self.output_padding != 0:
+            main_str += ', output_padding={output_padding}'
         if self._dilation != [1] * len(self._dilation):
             main_str += ', dilation={_dilation}'
         if self._groups != 1:
@@ -502,7 +502,7 @@ def forward(self, x, output_size=None):
             self.weight,
             bias=self.bias,
             output_size=output_size,
-            output_padding=self._output_padding,
+            output_padding=self.output_padding,
             padding=self._padding,
             stride=self._stride,
             dilation=self._dilation,
@@ -810,7 +810,7 @@ def __init__(self,
 
     def forward(self, x, output_size=None):
         if output_size is None:
-            output_padding = self._output_padding
+            output_padding = self.output_padding
         else:
             output_padding = 0
 
@@ -1139,7 +1139,7 @@ def __init__(self,
 
     def forward(self, x, output_size=None):
         if output_size is None:
-            output_padding = self._output_padding
+            output_padding = self.output_padding
         else:
             output_padding = 0
 
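
One note on why the attribute and the repr placeholder are renamed together: judging by the placeholder names in extra_repr (e.g. {_padding}, {_dilation}), the returned string appears to be formatted against the layer's attribute dictionary, so each placeholder must match the attribute name exactly. A minimal sketch of that coupling, using an illustrative class rather than Paddle's _ConvNd and assuming a format(**self.__dict__)-style resolution:

    class FakeConvNd:
        """Illustrative stand-in for the repr pattern assumed above."""

        def __init__(self, stride=1, output_padding=0):
            self._stride = stride
            self.output_padding = output_padding  # public name, as in this commit

        def extra_repr(self):
            main_str = 'stride={_stride}'
            if self.output_padding != 0:
                # The placeholder name must match the instance attribute name,
                # which is why the diff renames both sides in lockstep.
                main_str += ', output_padding={output_padding}'
            return main_str.format(**self.__dict__)

    print(FakeConvNd(stride=2, output_padding=1).extra_repr())
    # stride=2, output_padding=1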
