From f4f0f2daeb3bd0bffd8302a4388098e0ab1ffed6 Mon Sep 17 00:00:00 2001 From: dangqingqing Date: Tue, 20 Dec 2016 20:30:37 +0800 Subject: [PATCH 1/3] Fix bug in config_parser.py when batch_norm layer is used in RecurrentLayerGroup. --- python/paddle/trainer/config_parser.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py index 39892d0533aab4..0308d9df948398 100644 --- a/python/paddle/trainer/config_parser.py +++ b/python/paddle/trainer/config_parser.py @@ -498,9 +498,12 @@ def __init__( is_static=None, is_shared=None, update_hooks=None, - input_layer_argument=None, ): + input_layer_argument=None, + not_make_layer_name_in_submodel=None, ): self.add_keys(locals()) self.input_layer_name = MakeLayerNameInSubmodel(input_layer_name) + if not_make_layer_name_in_submodel: + self.input_layer_name = input_layer_name # Define a projection for iexed layer @@ -1848,7 +1851,8 @@ def __init__(self, initial_std=0.0, initial_mean=0.0, is_static=True, - is_shared=is_shared, )) + is_shared=is_shared, + not_make_layer_name_in_submodel=True, )) parallel_nn = bool(int(g_command_config_args.get("parallel_nn", 0))) cudnn_version = int(g_command_config_args.get("cudnn_version", 0)) From 5bb29ece7fd5352b93100a20b4bf904c5b5bc2f0 Mon Sep 17 00:00:00 2001 From: dangqingqing Date: Wed, 21 Dec 2016 09:55:09 +0800 Subject: [PATCH 2/3] close log info in BN. --- python/paddle/trainer/config_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py index 0308d9df948398..8389476e6a5a5d 100644 --- a/python/paddle/trainer/config_parser.py +++ b/python/paddle/trainer/config_parser.py @@ -1884,7 +1884,7 @@ def __init__(self, # when either of it is non-zero. 
if input_layer.width != 0 or input_layer.height != 0: self.set_cnn_layer(name, image_conf.img_size_y, image_conf.img_size, - image_conf.channels, True) + image_conf.channels, False) else: self.set_layer_size(input_layer.size) From e4c492d3b8d6dc7b700aca16db7c410cf1961f23 Mon Sep 17 00:00:00 2001 From: dangqingqing Date: Wed, 21 Dec 2016 11:21:45 +0800 Subject: [PATCH 3/3] change type to bool. --- python/paddle/trainer/config_parser.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py index 8389476e6a5a5d..29704391f2be6c 100644 --- a/python/paddle/trainer/config_parser.py +++ b/python/paddle/trainer/config_parser.py @@ -499,11 +499,15 @@ def __init__( is_shared=None, update_hooks=None, input_layer_argument=None, - not_make_layer_name_in_submodel=None, ): + make_layer_name_in_submodel=True, ): + """ + @param make_layer_name_in_submodel True by default, you might need to + set it carefully when adding Input in config_parser.py. + """ self.add_keys(locals()) - self.input_layer_name = MakeLayerNameInSubmodel(input_layer_name) + self.input_layer_name = MakeLayerNameInSubmodel( + input_layer_name + ) if make_layer_name_in_submodel else input_layer_name # Define a projection for iexed layer @@ -1852,7 +1856,7 @@ def __init__(self, initial_mean=0.0, is_static=True, is_shared=is_shared, - not_make_layer_name_in_submodel=True, )) + make_layer_name_in_submodel=False, )) parallel_nn = bool(int(g_command_config_args.get("parallel_nn", 0))) cudnn_version = int(g_command_config_args.get("cudnn_version", 0))