From f4f0f2daeb3bd0bffd8302a4388098e0ab1ffed6 Mon Sep 17 00:00:00 2001
From: dangqingqing
Date: Tue, 20 Dec 2016 20:30:37 +0800
Subject: [PATCH] Fix bug in config_parser.py when batch_norm layer is used in
 RecurrentLayerGroup.

---
 python/paddle/trainer/config_parser.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py
index 39892d0533..0308d9df94 100644
--- a/python/paddle/trainer/config_parser.py
+++ b/python/paddle/trainer/config_parser.py
@@ -498,9 +498,12 @@ class Input(Cfg):
             is_static=None,
             is_shared=None,
             update_hooks=None,
-            input_layer_argument=None, ):
+            input_layer_argument=None,
+            not_make_layer_name_in_submodel=None, ):
         self.add_keys(locals())
         self.input_layer_name = MakeLayerNameInSubmodel(input_layer_name)
+        if not_make_layer_name_in_submodel:
+            self.input_layer_name = input_layer_name
 
 
 # Define a projection for iexed layer
@@ -1848,7 +1851,8 @@ class BatchNormLayer(LayerBase):
                     initial_std=0.0,
                     initial_mean=0.0,
                     is_static=True,
-                    is_shared=is_shared, ))
+                    is_shared=is_shared,
+                    not_make_layer_name_in_submodel=True, ))
 
         parallel_nn = bool(int(g_command_config_args.get("parallel_nn", 0)))
         cudnn_version = int(g_command_config_args.get("cudnn_version", 0))
--
GitLab
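
A minimal sketch of the behaviour this patch changes, assuming that MakeLayerNameInSubmodel qualifies a layer name with an "@<submodel>" suffix inside a RecurrentLayerGroup; the stand-in helper, the suffix format, and the layer name "bn_stats" below are illustrative assumptions, not the real PaddlePaddle implementation. It shows how the new not_make_layer_name_in_submodel flag lets BatchNormLayer keep the original name for the static, shared inputs it appends, so they still refer to the same parameters as outside the recurrent group:

# sketch.py -- illustrative only; names below are hypothetical stand-ins.

def make_layer_name_in_submodel(name, submodel="rnn_group"):
    # Stand-in for config_parser.MakeLayerNameInSubmodel: inside a
    # RecurrentLayerGroup, layer names get qualified with the submodel name.
    return "%s@%s" % (name, submodel)

class Input(object):
    def __init__(self, input_layer_name, not_make_layer_name_in_submodel=None):
        self.input_layer_name = make_layer_name_in_submodel(input_layer_name)
        if not_make_layer_name_in_submodel:
            # The patched behaviour: keep the raw name so the static/shared
            # batch-norm inputs still match the name used outside the group.
            self.input_layer_name = input_layer_name

print(Input("bn_stats").input_layer_name)
# bn_stats@rnn_group  -> renamed, so sharing with the outer parameter breaks
print(Input("bn_stats", not_make_layer_name_in_submodel=True).input_layer_name)
# bn_stats            -> unchanged, as the static batch-norm inputs require

Because the flag defaults to None, every other Input keeps the existing submodel name mangling; only the extra inputs that BatchNormLayer appends with is_static=True opt out of it.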