diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py
index 12a629a031a9020bf3c4812a273e940b921c4cfd..9c765b32d34b353e1abb25c602f4523f11cc5464 100644
--- a/python/paddle/trainer/config_parser.py
+++ b/python/paddle/trainer/config_parser.py
@@ -2366,6 +2366,7 @@ class BatchNormLayer(LayerBase):
                  inputs,
                  bias=True,
                  use_global_stats=True,
+                 mean_var_names=None,
                  moving_average_fraction=0.9,
                  batch_norm_type=None,
                  **xargs):
@@ -2421,11 +2422,11 @@ class BatchNormLayer(LayerBase):
         psize = self.calc_parameter_size(image_conf)
         dims = [1, psize]
+        if mean_var_names is not None:
+            assert len(mean_var_names) == 2
+            self.inputs[1].parameter_name = mean_var_names[0]
+            self.inputs[2].parameter_name = mean_var_names[1]
-        self.inputs[1].parameter_name = self.inputs[0].parameter_name.split('.')[0] + '.' + \
-            self.inputs[1].parameter_name.split('.')[1]
-        self.inputs[2].parameter_name = self.inputs[0].parameter_name.split('.')[0] + '.' + \
-            self.inputs[2].parameter_name.split('.')[1]
         self.create_input_parameter(0, psize)
         self.create_input_parameter(1, psize, dims)
         self.create_input_parameter(2, psize, dims)
diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py
index cba45bd3afa178ab4dd3a50f0947b144e7466e53..4749f38c3800d579e6ad6383a383fc23dfcf6b71 100644
--- a/python/paddle/trainer_config_helpers/layers.py
+++ b/python/paddle/trainer_config_helpers/layers.py
@@ -2957,6 +2957,7 @@ def batch_norm_layer(input,
                      bias_attr=None,
                      param_attr=None,
                      layer_attr=None,
+                     mean_var_names=None,
                      batch_norm_type=None,
                      moving_average_fraction=0.9,
                      use_global_stats=None):
@@ -3014,6 +3015,8 @@ def batch_norm_layer(input,
     :type param_attr: ParameterAttribute
     :param layer_attr: Extra Layer Attribute.
     :type layer_attr: ExtraLayerAttribute
+    :param mean_var_names: [mean name, variance name]
+    :type mean_var_names: string list
     :param use_global_stats: whether use moving mean/variance statistics
                              during testing peroid. If None or True,
                              it will use moving mean/variance statistics during
@@ -3044,6 +3047,7 @@ def batch_norm_layer(input,
         active_type=act.name,
         type=LayerType.BATCH_NORM_LAYER,
         batch_norm_type=batch_norm_type,
+        mean_var_names=mean_var_names,
         bias=ParamAttr.to_bias(bias_attr),
         moving_average_fraction=moving_average_fraction,
         use_global_stats=use_global_stats,
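
For context, here is a minimal usage sketch of the new mean_var_names argument; it is not part of the patch. The network, layer sizes, and the parameter names 'shared_bn.w_mean' / 'shared_bn.w_var' are hypothetical, only the new batch_norm_layer keyword comes from the change above. Naming the moving mean and variance explicitly lets, for example, another tower or a fine-tuned model reuse exactly the same running-statistics parameters, instead of relying on names derived from the first input's parameter name as the removed code did.

# Hypothetical trainer config sketch; all names are made up for illustration.
from paddle.trainer_config_helpers import *

img = data_layer(name='image', size=3 * 28 * 28)

conv = img_conv_layer(
    input=img,
    num_channels=3,
    filter_size=3,
    num_filters=16,
    act=LinearActivation(),
    bias_attr=False)

# With this patch, the moving mean and variance parameters of the batch norm
# layer can be given explicit names (mean first, variance second), so other
# layers or checkpoints can refer to them directly.
bn = batch_norm_layer(
    input=conv,
    act=ReluActivation(),
    mean_var_names=['shared_bn.w_mean', 'shared_bn.w_var'])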