diff --git a/python/paddle/v2/fluid/layers/nn.py b/python/paddle/v2/fluid/layers/nn.py
index e8b4cec6ee638b839e2a7c38e032f74b9cd738ef..3453dd945d558a93a854f99209a6ea8055875d84 100644
--- a/python/paddle/v2/fluid/layers/nn.py
+++ b/python/paddle/v2/fluid/layers/nn.py
@@ -1519,21 +1519,21 @@ def batch_norm(input,
     bias = helper.create_parameter(
         attr=helper.bias_attr, shape=param_shape, dtype=dtype, is_bias=True)
 
-    mean = helper.create_global_variable(
-        name=moving_mean_name,
-        dtype=input.dtype,
+    mean = helper.create_parameter(
+        attr=ParamAttr(
+            name=moving_mean_name, initializer=Constant(0.0), trainable=False),
         shape=param_shape,
-        persistable=True,
-        stop_gradient=True)
-    helper.set_variable_initializer(var=mean, initializer=Constant(0.0))
+        dtype=input.dtype)
+    mean.stop_gradient = True
 
-    variance = helper.create_global_variable(
-        name=moving_variance_name,
-        dtype=input.dtype,
+    variance = helper.create_parameter(
+        attr=ParamAttr(
+            name=moving_variance_name,
+            initializer=Constant(1.0),
+            trainable=False),
         shape=param_shape,
-        persistable=True,
-        stop_gradient=True)
-    helper.set_variable_initializer(var=variance, initializer=Constant(1.0))
+        dtype=input.dtype)
+    variance.stop_gradient = True
 
     # create output
     # mean and mean_out share the same memory
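For reference, a minimal usage sketch of the layer touched by this diff, assuming the `paddle.v2.fluid` layers API at this revision. The input shape and the names `'image'`, `'bn_moving_mean'`, and `'bn_moving_variance'` are illustrative, not taken from the diff:

```python
# Sketch only: exercises batch_norm with named moving statistics.
import paddle.v2.fluid as fluid

image = fluid.layers.data(name='image', shape=[3, 32, 32], dtype='float32')
conv = fluid.layers.conv2d(input=image, num_filters=16, filter_size=3)

# After this change, the moving mean/variance behind these names are
# created via helper.create_parameter(trainable=False) instead of raw
# global variables; the batch_norm call signature itself is unchanged.
bn = fluid.layers.batch_norm(
    input=conv,
    is_test=False,
    moving_mean_name='bn_moving_mean',
    moving_variance_name='bn_moving_variance')
```

Creating the moving statistics as non-trainable parameters keeps them persistable and excluded from gradient updates, while letting them be managed (saved, loaded, initialized) through the same parameter machinery as the scale and bias.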