From 4c56586a7af155deddca027e725d28bf763930d2 Mon Sep 17 00:00:00 2001
From: bingyanghuang <33643817+bingyanghuang@users.noreply.github.com>
Date: Wed, 6 Nov 2019 22:07:39 +0800
Subject: [PATCH] [Cherry-pick] 21028: Remove fuse_with_relu argument from batch_norm constructor (#21049)

---
 python/paddle/fluid/dygraph/nn.py | 5 +----
 python/paddle/fluid/layers/nn.py  | 4 +---
 2 files changed, 2 insertions(+), 7 deletions(-)

diff --git a/python/paddle/fluid/dygraph/nn.py b/python/paddle/fluid/dygraph/nn.py
index f65515aa35d..3b32fe591e4 100644
--- a/python/paddle/fluid/dygraph/nn.py
+++ b/python/paddle/fluid/dygraph/nn.py
@@ -1200,8 +1200,6 @@ class BatchNorm(layers.Layer):
         moving_variance_name(str, optional): The name of the moving_variance which store the global Variance. Default: None.
         do_model_average_for_mean_and_var(bool, optional): Whether parameter mean and variance should do model
             average when model average is enabled. Default: True.
-        fuse_with_relu (bool, optional): When setting fuse_with_relu True, this OP performs relu after batch norm.
-            Default: False.
         use_global_stats(bool, optional): Whether to use global mean and
             variance. In inference or test mode, set use_global_stats to true
             or is_test to true, and the behavior is equivalent.
@@ -1243,7 +1241,6 @@ class BatchNorm(layers.Layer):
                  moving_mean_name=None,
                  moving_variance_name=None,
                  do_model_average_for_mean_and_var=True,
-                 fuse_with_relu=False,
                  use_global_stats=False,
                  trainable_statistics=False):
         super(BatchNorm, self).__init__(name_scope, dtype)
@@ -1302,7 +1299,7 @@ class BatchNorm(layers.Layer):
         self._momentum = momentum
         self._epsilon = epsilon
         self._is_test = is_test
-        self._fuse_with_relu = fuse_with_relu
+        self._fuse_with_relu = False
         self._use_global_stats = use_global_stats
         self._trainable_statistics = trainable_statistics
 
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 08c5309cfe0..40a60f78a37 100755
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -4126,7 +4126,6 @@ def batch_norm(input,
                moving_mean_name=None,
                moving_variance_name=None,
                do_model_average_for_mean_and_var=True,
-               fuse_with_relu=False,
                use_global_stats=False):
     """
     **Batch Normalization Layer**
@@ -4211,7 +4210,6 @@ def batch_norm(input,
             will save global variance with the string.
         do_model_average_for_mean_and_var(bool, Default True): Whether parameter mean and variance
             should do model average when model average is enabled.
-        fuse_with_relu (bool): if True, this OP performs relu after batch norm.
         use_global_stats(bool, Default False): Whether to use global mean and
             variance. In inference or test mode, set use_global_stats to true
             or is_test to true, and the behavior is equivalent.
@@ -4327,7 +4325,7 @@ def batch_norm(input,
             "is_test": is_test,
             "data_layout": data_layout,
             "use_mkldnn": False,
-            "fuse_with_relu": fuse_with_relu,
+            "fuse_with_relu": False,
             "use_global_stats": use_global_stats
         })
 
-- 
GitLab
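
After this patch, the Python batch norm APIs no longer accept a fuse_with_relu argument, so call sites that still pass it will fail with a TypeError; the underlying attribute is now always set to False. Below is a minimal migration sketch, assuming the paddle.fluid 1.x static-graph API (the variable names and shape are illustrative, not taken from the patch):

import paddle.fluid as fluid

# Illustrative input variable; name and shape are assumptions for this sketch.
x = fluid.layers.data(name='x', shape=[64, 32, 32], dtype='float32')

# Before this change, a fused activation could be requested via the removed flag:
#   y = fluid.layers.batch_norm(input=x, fuse_with_relu=True)
# After this change, apply the activation explicitly after the batch norm:
y = fluid.layers.batch_norm(input=x)
y = fluid.layers.relu(y)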