From fe341bacde6e6a981ebf60561e228b612faaad43 Mon Sep 17 00:00:00 2001
From: QI JUN
Date: Tue, 9 Jan 2018 17:14:35 +0800
Subject: [PATCH] refine batch norm python layer (#7348)

---
 python/paddle/v2/fluid/layer_helper.py |  5 +++--
 python/paddle/v2/fluid/layers/nn.py    | 14 ++++++++++----
 2 files changed, 13 insertions(+), 6 deletions(-)

diff --git a/python/paddle/v2/fluid/layer_helper.py b/python/paddle/v2/fluid/layer_helper.py
index 4469f7285ef..325735e6793 100644
--- a/python/paddle/v2/fluid/layer_helper.py
+++ b/python/paddle/v2/fluid/layer_helper.py
@@ -120,11 +120,12 @@ class LayerHelper(object):
             raise ValueError("no Parameter name %s found" % name)
         return param
 
-    def create_tmp_variable(self, dtype):
+    def create_tmp_variable(self, dtype, stop_gradient=False):
         return self.main_program.current_block().create_var(
             name=unique_name(".".join([self.name, 'tmp'])),
             dtype=dtype,
-            persistable=False)
+            persistable=False,
+            stop_gradient=stop_gradient)
 
     def create_variable(self, *args, **kwargs):
         return self.main_program.current_block().create_var(*args, **kwargs)
diff --git a/python/paddle/v2/fluid/layers/nn.py b/python/paddle/v2/fluid/layers/nn.py
index 180a760150b..b1534c5a886 100644
--- a/python/paddle/v2/fluid/layers/nn.py
+++ b/python/paddle/v2/fluid/layers/nn.py
@@ -971,11 +971,17 @@ def batch_norm(input,
         attr=helper.param_attr, shape=param_shape, dtype=dtype, is_bias=True)
 
     mean = helper.create_global_variable(
-        dtype=input.dtype, shape=param_shape, persistable=True)
+        dtype=input.dtype,
+        shape=param_shape,
+        persistable=True,
+        stop_gradient=True)
     helper.set_variable_initializer(var=mean, initializer=Constant(0.0))
 
     variance = helper.create_global_variable(
-        dtype=input.dtype, shape=param_shape, persistable=True)
+        dtype=input.dtype,
+        shape=param_shape,
+        persistable=True,
+        stop_gradient=True)
     helper.set_variable_initializer(var=variance, initializer=Constant(1.0))
 
     # create output
@@ -983,8 +989,8 @@ def batch_norm(input,
     mean_out = mean
     # variance and variance out share the same memory
     variance_out = variance
-    saved_mean = helper.create_tmp_variable(dtype)
-    saved_variance = helper.create_tmp_variable(dtype)
+    saved_mean = helper.create_tmp_variable(dtype=dtype, stop_gradient=True)
+    saved_variance = helper.create_tmp_variable(dtype=dtype, stop_gradient=True)
 
     batch_norm_out = helper.create_tmp_variable(dtype)
-- 
GitLab
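
Note (not part of the patch): a minimal usage sketch, assuming the paddle.v2.fluid layer API of that era; layer names and shapes below are illustrative only. The point of the change is that the running mean/variance globals and the saved mean/variance temporaries created inside batch_norm are now marked stop_gradient=True, so the backward pass does not try to compute gradients for the running statistics.

    import paddle.v2.fluid as fluid

    # Hypothetical network snippet; only batch_norm's internal behavior changes.
    image = fluid.layers.data(name='image', shape=[3, 32, 32], dtype='float32')
    conv = fluid.layers.conv2d(input=image, num_filters=16, filter_size=3)
    # batch_norm internally creates persistable `mean` and `variance` variables;
    # after this patch they (and the saved mean/variance) carry stop_gradient=True.
    bn = fluid.layers.batch_norm(input=conv, act='relu')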