From 80a8e91fb8c267ef5b2b726fb7110e4d12c96c71 Mon Sep 17 00:00:00 2001
From: chengduoZH
Date: Sat, 9 Sep 2017 11:54:38 +0800
Subject: [PATCH] fix order

---
 python/paddle/trainer/config_parser.py         |  2 +-
 python/paddle/trainer_config_helpers/layers.py | 10 +++++-----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py
index 9c765b32d..2a6b6d5e2 100644
--- a/python/paddle/trainer/config_parser.py
+++ b/python/paddle/trainer/config_parser.py
@@ -2366,9 +2366,9 @@ class BatchNormLayer(LayerBase):
                  inputs,
                  bias=True,
                  use_global_stats=True,
-                 mean_var_names=None,
                  moving_average_fraction=0.9,
                  batch_norm_type=None,
+                 mean_var_names=None,
                  **xargs):
         if inputs is None:
             inputs = []
diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py
index 4749f38c3..e1703c158 100644
--- a/python/paddle/trainer_config_helpers/layers.py
+++ b/python/paddle/trainer_config_helpers/layers.py
@@ -2957,10 +2957,10 @@ def batch_norm_layer(input,
                      bias_attr=None,
                      param_attr=None,
                      layer_attr=None,
-                     mean_var_names=None,
                      batch_norm_type=None,
                      moving_average_fraction=0.9,
-                     use_global_stats=None):
+                     use_global_stats=None,
+                     mean_var_names=None):
     """
     Batch Normalization Layer. The notation of this layer as follow.

@@ -3015,8 +3015,6 @@ def batch_norm_layer(input,
     :type param_attr: ParameterAttribute
     :param layer_attr: Extra Layer Attribute.
     :type layer_attr: ExtraLayerAttribute
-    :param mean_var_names: [mean name, variance name]
-    :type mean_var_names: string list
     :param use_global_stats: whether use moving mean/variance
                              statistics during testing peroid. If None or True,
                              it will use moving mean/variance statistics during
@@ -3029,6 +3027,8 @@ def batch_norm_layer(input,
                                     :math:`runningMean = newMean*(1-factor)
                                     + runningMean*factor`
     :type moving_average_fraction: float.
+    :param mean_var_names: [mean name, variance name]
+    :type mean_var_names: string list
     :return: LayerOutput object.
     :rtype: LayerOutput
     """
@@ -3047,10 +3047,10 @@ def batch_norm_layer(input,
         active_type=act.name,
         type=LayerType.BATCH_NORM_LAYER,
         batch_norm_type=batch_norm_type,
-        mean_var_names=mean_var_names,
         bias=ParamAttr.to_bias(bias_attr),
         moving_average_fraction=moving_average_fraction,
         use_global_stats=use_global_stats,
+        mean_var_names=mean_var_names,
         **ExtraLayerAttribute.to_kwargs(layer_attr))

     return LayerOutput(
--
GitLab
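
For reference, a minimal usage sketch of batch_norm_layer with the keyword
arguments in their new order (an editor-added illustration, not part of the
patch; the input layer name, its size, and the mean/variance parameter names
are assumptions):

    # Sketch only: assumes the v2-era paddle.trainer_config_helpers API.
    from paddle.trainer_config_helpers import data_layer, batch_norm_layer

    # Hypothetical input layer; name and size are illustrative.
    img = data_layer(name="image", size=784)

    bn = batch_norm_layer(
        input=img,
        batch_norm_type="batch_norm",       # or "cudnn_batch_norm"
        moving_average_fraction=0.9,
        use_global_stats=None,               # None: moving stats used at test time
        mean_var_names=["image_mean", "image_var"])  # [mean name, variance name]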