Skip to content

Commit e0bc942

Browse files
author
wangyang59
authored
Merge pull request #9224 from wangyang59/dilateconv
expose dilation to conv2d and bias/act to conv2d_trans
2 parents c1e9b1e + 26734cf commit e0bc942

File tree

1 file changed

+18
-3
lines changed
  • python/paddle/fluid/layers

1 file changed

+18
-3
lines changed

python/paddle/fluid/layers/nn.py

Lines changed: 18 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1117,12 +1117,14 @@ def conv2d(input,
11171117
filter_size,
11181118
stride=1,
11191119
padding=0,
1120+
dilation=1,
11201121
groups=None,
11211122
param_attr=None,
11221123
bias_attr=None,
11231124
use_cudnn=True,
11241125
use_mkldnn=False,
1125-
act=None):
1126+
act=None,
1127+
name=None):
11261128
"""
11271129
**Convolution2D Layer**
11281130
@@ -1183,6 +1185,9 @@ def conv2d(input,
11831185
padding(int|tuple): The padding size. If padding is a tuple, it must
11841186
contain two integers, (padding_H, padding_W). Otherwise, the
11851187
padding_H = padding_W = padding. Default: padding = 0.
1188+
dilation(int|tuple): The dilation size. If dilation is a tuple, it must
1189+
contain two integers, (dilation_H, dilation_W). Otherwise, the
1190+
dilation_H = dilation_W = dilation. Default: dilation = 1.
11861191
groups(int): The groups number of the Conv2d Layer. According to grouped
11871192
convolution in Alex Krizhevsky's Deep CNN paper: when group=2,
11881193
the first half of the filters is only connected to the first half
@@ -1193,6 +1198,8 @@ def conv2d(input,
11931198
use_cudnn(bool): Use cudnn kernel or not, it is valid only when the cudnn
11941199
library is installed. Default: True
11951200
act(str): Activation type. Default: None
1201+
name(str|None): A name for this layer(optional). If set None, the layer
1202+
will be named automatically.
11961203
11971204
Returns:
11981205
Variable: The tensor variable storing the convolution and \
@@ -1233,6 +1240,7 @@ def conv2d(input,
12331240
filter_size = utils.convert_to_list(filter_size, 2, 'filter_size')
12341241
stride = utils.convert_to_list(stride, 2, 'stride')
12351242
padding = utils.convert_to_list(padding, 2, 'padding')
1243+
dilation = utils.convert_to_list(dilation, 2, 'dilation')
12361244

12371245
if not isinstance(use_cudnn, bool):
12381246
raise ValueError("use_cudnn should be True or False")
@@ -1262,6 +1270,7 @@ def _get_default_param_initializer():
12621270
attrs={
12631271
'strides': stride,
12641272
'paddings': padding,
1273+
'dilations': dilation,
12651274
'groups': groups,
12661275
'use_cudnn': use_cudnn,
12671276
'use_mkldnn': use_mkldnn
@@ -1670,7 +1679,9 @@ def conv2d_transpose(input,
16701679
stride=1,
16711680
dilation=1,
16721681
param_attr=None,
1682+
bias_attr=None,
16731683
use_cudnn=True,
1684+
act=None,
16741685
name=None):
16751686
"""
16761687
**Convolution2D transpose layer**
@@ -1739,8 +1750,10 @@ def conv2d_transpose(input,
17391750
dilation_H = dilation_W = dilation. Default: dilation = 1.
17401751
param_attr(ParamAttr): The parameters to the Conv2d_transpose Layer.
17411752
Default: None
1753+
bias_attr(ParamAttr): Bias parameter for the Conv2d_transpose layer. Default: None
17421754
use_cudnn(bool): Use cudnn kernel or not, it is valid only when the cudnn
17431755
library is installed. Default: True
1756+
act(str): Activation type. Default: None
17441757
name(str|None): A name for this layer(optional). If set None, the layer
17451758
will be named automatically.
17461759
@@ -1793,19 +1806,21 @@ def conv2d_transpose(input,
17931806
img_filter = helper.create_parameter(
17941807
dtype=input.dtype, shape=filter_shape, attr=helper.param_attr)
17951808

1796-
out = helper.create_tmp_variable(dtype=input.dtype)
1809+
pre_bias = helper.create_tmp_variable(dtype=input.dtype)
17971810
helper.append_op(
17981811
type='conv2d_transpose',
17991812
inputs={'Input': [input],
18001813
'Filter': [img_filter]},
1801-
outputs={'Output': out},
1814+
outputs={'Output': pre_bias},
18021815
attrs={
18031816
'strides': stride,
18041817
'paddings': padding,
18051818
'dilations': dilation,
18061819
'use_cudnn': use_cudnn
18071820
})
18081821

1822+
pre_act = helper.append_bias_op(pre_bias, dim_start=1, dim_end=2)
1823+
out = helper.append_activation(pre_act)
18091824
return out
18101825

18111826

0 commit comments

Comments
 (0)