
Commit bbf3b47

Merge pull request #966 from qingqing01/batch_norm
Fix bug in config_parser.py when batch_norm layer is used in RecurrentLayerGroup
2 parents 446e3c2 + 567871f commit bbf3b47

1 file changed
+12 −4 lines changed

python/paddle/trainer/config_parser.py

Lines changed: 12 additions & 4 deletions
@@ -498,9 +498,16 @@ def __init__(
             is_static=None,
             is_shared=None,
             update_hooks=None,
-            input_layer_argument=None, ):
+            input_layer_argument=None,
+            make_layer_name_in_submodel=True, ):
+        """
+        @param make_layer_name_in_submodel True by defalut, you might need to
+        set it carefully when adding Input in config_parser.py.
+        """
         self.add_keys(locals())
-        self.input_layer_name = MakeLayerNameInSubmodel(input_layer_name)
+        self.input_layer_name = MakeLayerNameInSubmodel(
+            input_layer_name
+        ) if make_layer_name_in_submodel else input_layer_name
 
 
 # Define a projection for iexed layer
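The hunk above adds an opt-out for the name rewriting that Input.__init__ performs: with make_layer_name_in_submodel left at its default of True, the input layer name is passed through MakeLayerNameInSubmodel as before; with False, it is kept verbatim. Below is a minimal, self-contained sketch of that conditional wrapping; the stub helper, the InputSketch class, and the "@submodel" naming scheme are illustrative stand-ins, not Paddle's actual implementation.

```python
# Illustrative sketch only: qualify_name_stub stands in for the real
# MakeLayerNameInSubmodel helper in config_parser.py.

def qualify_name_stub(name, submodel="rnn_group"):
    """Hypothetical helper: qualify a layer name with its enclosing submodel."""
    return "%s@%s" % (name, submodel)


class InputSketch(object):
    """Simplified stand-in for the Input config class in config_parser.py."""

    def __init__(self, input_layer_name, make_layer_name_in_submodel=True):
        # New behavior from this commit: only rewrite the name when the flag is True.
        self.input_layer_name = qualify_name_stub(
            input_layer_name) if make_layer_name_in_submodel else input_layer_name


print(InputSketch("data").input_layer_name)         # data@rnn_group
print(InputSketch("data", False).input_layer_name)  # data
```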
@@ -1848,7 +1855,8 @@ def __init__(self,
                     initial_std=0.0,
                     initial_mean=0.0,
                     is_static=True,
-                    is_shared=is_shared, ))
+                    is_shared=is_shared,
+                    make_layer_name_in_submodel=False, ))
 
         parallel_nn = bool(int(g_command_config_args.get("parallel_nn", 0)))
         cudnn_version = int(g_command_config_args.get("cudnn_version", 0))
@@ -1880,7 +1888,7 @@ def __init__(self,
         # when either of it is non-zero.
         if input_layer.width != 0 or input_layer.height != 0:
             self.set_cnn_layer(name, image_conf.img_size_y, image_conf.img_size,
-                               image_conf.channels, True)
+                               image_conf.channels, False)
         else:
             self.set_layer_size(input_layer.size)
 
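The second hunk is where the new flag is used: BatchNormLayer appends extra static Inputs (the is_static=True entries, apparently the moving mean/variance parameters) built from inputs[0].input_layer_name, a name that Input.__init__ has already qualified with the submodel name. Inside a RecurrentLayerGroup, qualifying it a second time would point those Inputs at a layer name that does not exist, which is presumably the bug this PR fixes. A standalone illustration of that failure mode, reusing the hypothetical stub naming from the sketch above (not Paddle's real naming code):

```python
# Same hypothetical naming stub as above; this only illustrates why
# re-qualifying an already-qualified name breaks the reference.

def qualify_name_stub(name, submodel="rnn_group"):
    return "%s@%s" % (name, submodel)


# Name of the batch_norm input after Input.__init__ already qualified it.
already_qualified = qualify_name_stub("bn_input")    # "bn_input@rnn_group"

# Old behavior: the extra static Inputs re-qualified the name.
old_name = qualify_name_stub(already_qualified)      # "bn_input@rnn_group@rnn_group"

# Fixed behavior: make_layer_name_in_submodel=False keeps the name as-is.
new_name = already_qualified                         # "bn_input@rnn_group"

print(old_name)  # dangling reference: no layer carries this name
print(new_name)  # matches the real layer name inside the group
```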