diff --git a/python/paddle/fluid/dygraph/nn.py b/python/paddle/fluid/dygraph/nn.py
index f65515aa35d4165a7d68dfac3f8d433e27bb8814..3b32fe591e4d67d29ad89fa05fcc99c185c95f99 100644
--- a/python/paddle/fluid/dygraph/nn.py
+++ b/python/paddle/fluid/dygraph/nn.py
@@ -1200,8 +1200,6 @@ class BatchNorm(layers.Layer):
         moving_variance_name(str, optional): The name of the moving_variance which store the global Variance. Default: None.
         do_model_average_for_mean_and_var(bool, optional): Whether parameter mean and variance
             should do model average when model average is enabled. Default: True.
-        fuse_with_relu (bool, optional): When setting fuse_with_relu True, this OP performs relu after batch norm.
-            Default: False.
         use_global_stats(bool, optional): Whether to use global mean and
             variance. In inference or test mode, set use_global_stats to true
             or is_test to true, and the behavior is equivalent.
@@ -1243,7 +1241,6 @@ class BatchNorm(layers.Layer):
                  moving_mean_name=None,
                  moving_variance_name=None,
                  do_model_average_for_mean_and_var=True,
-                 fuse_with_relu=False,
                  use_global_stats=False,
                  trainable_statistics=False):
         super(BatchNorm, self).__init__(name_scope, dtype)
@@ -1302,7 +1299,7 @@ class BatchNorm(layers.Layer):
         self._momentum = momentum
         self._epsilon = epsilon
         self._is_test = is_test
-        self._fuse_with_relu = fuse_with_relu
+        self._fuse_with_relu = False
         self._use_global_stats = use_global_stats
         self._trainable_statistics = trainable_statistics
 
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 08c5309cfe0fb3d6c8c427cbe69d1bc3b8fa1caa..40a60f78a371aa23a8f424d7212cf14a73a59012 100755
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -4126,7 +4126,6 @@ def batch_norm(input,
                moving_mean_name=None,
                moving_variance_name=None,
                do_model_average_for_mean_and_var=True,
-               fuse_with_relu=False,
                use_global_stats=False):
     """
     **Batch Normalization Layer**
@@ -4211,7 +4210,6 @@ def batch_norm(input,
            will save global variance with the string.
        do_model_average_for_mean_and_var(bool, Default True): Whether parameter
            mean and variance should do model average when model average is enabled.
-        fuse_with_relu (bool): if True, this OP performs relu after batch norm.
        use_global_stats(bool, Default False): Whether to use global mean and
            variance. In inference or test mode, set use_global_stats to true
            or is_test to true, and the behavior is equivalent.
@@ -4327,7 +4325,7 @@
             "is_test": is_test,
             "data_layout": data_layout,
             "use_mkldnn": False,
-            "fuse_with_relu": fuse_with_relu,
+            "fuse_with_relu": False,
             "use_global_stats": use_global_stats
         })
 
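
Migration note (not part of the patch itself): this change drops the `fuse_with_relu` keyword from both `fluid.layers.batch_norm` and `fluid.dygraph.BatchNorm`, hardwiring the underlying op attribute to False. A minimal sketch of what call sites look like after the change, assuming the pre-existing `act` parameter of `batch_norm` and `fluid.layers.relu` as the explicit alternative; the tensor name and shape below are illustrative, not taken from the patch:

```python
import paddle.fluid as fluid

# Illustrative graph input; name and shape are hypothetical.
x = fluid.data(name='conv_out', shape=[None, 64, 32, 32], dtype='float32')

# Before this patch a caller could write:
#   y = fluid.layers.batch_norm(input=x, fuse_with_relu=True)
# That keyword is now rejected, since the signature no longer accepts it.

# Post-patch equivalent: batch norm followed by an explicit relu.
y = fluid.layers.batch_norm(input=x)
y = fluid.layers.relu(y)

# The pre-existing `act` argument expresses the same composition in one call.
y2 = fluid.layers.batch_norm(input=x, act='relu')
```

The removed flag only requested a kernel-level fusion of the same computation, so expressing the relu explicitly (or via `act`) is numerically equivalent.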