Commit 557be6f

Merge pull request #12902 from PaddlePaddle/revert-12736

Revert "Disable in_place in batch_norm API. (#12736)"

2 parents: fd1875b + b1fc238

File tree

4 files changed (+5, -13 lines)

paddle/fluid/operators/batch_norm_op.cc

Lines changed: 1 addition & 1 deletion

@@ -135,7 +135,7 @@ class BatchNormOpMaker : public framework::OpProtoAndCheckerMaker {
   AddInput("Variance",
            "The global variance (for training) "
            "or estimated Variance (for testing)");
-  AddOutput("Y", "result after normalization");
+  AddOutput("Y", "result after normalization").Reuse("X");
   AddOutput("MeanOut",
             "Share memory with Mean. "
             "Store the global mean when training")

python/paddle/fluid/layers/nn.py

Lines changed: 2 additions & 7 deletions

@@ -27,7 +27,6 @@
 import random
 from .. import unique_name
 from functools import reduce
-import warnings

 __all__ = [
     'fc',

@@ -2048,7 +2047,7 @@ def batch_norm(input,
         param_attr(ParamAttr): The parameter attribute for Parameter `scale`.
         bias_attr(ParamAttr): The parameter attribute for Parameter `bias`.
         data_layout(string, default NCHW): NCHW|NHWC
-        in_place(bool, Default False): This argument is deprecated since 0.15.0.
+        in_place(bool, Default False): Make the input and output of batch norm reuse memory.
         use_mkldnn(bool, Default false): ${use_mkldnn_comment}
         name(string, Default None): A name for this layer(optional). If set None, the layer
             will be named automatically.

@@ -2070,10 +2069,6 @@ def batch_norm(input,
     helper = LayerHelper('batch_norm', **locals())
     dtype = helper.input_dtype()

-    if in_place:
-        raise warnings.warn("The argument in_place is deprecated since 0.15.0, "
-                            "please do not set it True.")
-
     input_shape = input.shape
     if data_layout == 'NCHW':
         channel_num = input_shape[1]

@@ -2123,7 +2118,7 @@ def batch_norm(input,
     saved_mean = helper.create_tmp_variable(dtype=dtype, stop_gradient=True)
     saved_variance = helper.create_tmp_variable(dtype=dtype, stop_gradient=True)

-    batch_norm_out = helper.create_tmp_variable(dtype)
+    batch_norm_out = input if in_place else helper.create_tmp_variable(dtype)

     helper.append_op(
         type="batch_norm",

python/paddle/fluid/nets.py

Lines changed: 1 addition & 1 deletion

@@ -229,7 +229,7 @@ def __extend_list__(obj):
                 use_mkldnn=use_mkldnn)

         if conv_with_batchnorm[i]:
-            tmp = layers.batch_norm(input=tmp, act=conv_act)
+            tmp = layers.batch_norm(input=tmp, act=conv_act, in_place=True)
             drop_rate = conv_batchnorm_drop_rate[i]
             if abs(drop_rate) > 1e-5:
                 tmp = layers.dropout(x=tmp, dropout_prob=drop_rate)
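
Because this change sits inside img_conv_group, any model built with conv_with_batchnorm=True picks up the in-place batch norm automatically. A minimal sketch, assuming the fluid.nets.img_conv_group signature of this era; the input name, shapes, and filter counts are illustrative:

    import paddle.fluid as fluid

    img = fluid.layers.data(name='pixel', shape=[3, 48, 48], dtype='float32')
    # Each conv is followed by a batch_norm; after this commit that
    # batch_norm reuses the conv output's memory (in_place=True above).
    out = fluid.nets.img_conv_group(
        input=img,
        conv_num_filter=[64, 64],
        pool_size=2,
        conv_with_batchnorm=True)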

python/paddle/fluid/tests/book/test_image_classification.py

Lines changed: 1 addition & 4 deletions

@@ -256,10 +256,7 @@ def main(net_type, use_cuda, is_local=True):
     save_dirname = "image_classification_" + net_type + ".inference.model"

     train(net_type, use_cuda, save_dirname, is_local)
-
-    # There is bug in fluid.InferenceTranspiler for VGG.
-    if net_type == "resnet":
-        infer(use_cuda, save_dirname)
+    infer(use_cuda, save_dirname)


 class TestImageClassification(unittest.TestCase):
