Commit 226bc22a authored by Wojciech Uss, committed by Tao Luo

Remove fuse_with_relu argument from batch_norm constructor (#21028)

test=develop
Parent f0e95a60
@@ -1200,8 +1200,6 @@ class BatchNorm(layers.Layer):
moving_variance_name(str, optional): The name of the moving_variance which store the global Variance. Default: None.
do_model_average_for_mean_and_var(bool, optional): Whether parameter mean and variance should do model
average when model average is enabled. Default: True.
-fuse_with_relu (bool, optional): When setting fuse_with_relu True, this OP performs relu after batch norm.
-    Default: False.
use_global_stats(bool, optional): Whether to use global mean and
variance. In inference or test mode, set use_global_stats to true
or is_test to true, and the behavior is equivalent.
@@ -1243,7 +1241,6 @@ class BatchNorm(layers.Layer):
moving_mean_name=None,
moving_variance_name=None,
do_model_average_for_mean_and_var=True,
-fuse_with_relu=False,
use_global_stats=False,
trainable_statistics=False):
super(BatchNorm, self).__init__(name_scope, dtype)
@@ -1302,7 +1299,7 @@ class BatchNorm(layers.Layer):
self._momentum = momentum
self._epsilon = epsilon
self._is_test = is_test
-self._fuse_with_relu = fuse_with_relu
+self._fuse_with_relu = False
self._use_global_stats = use_global_stats
self._trainable_statistics = trainable_statistics
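For context, a minimal usage sketch (not part of the commit) of the dygraph API after this change, assuming the 1.6-era `paddle.fluid.dygraph.BatchNorm(name_scope, num_channels, ...)` signature shown in the diff above. The layer no longer accepts `fuse_with_relu`; the relu is applied as a separate op:

```python
# Minimal sketch (assumed 1.6-era dygraph API): BatchNorm no longer
# takes fuse_with_relu; apply relu explicitly after the layer instead.
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import BatchNorm, to_variable

with fluid.dygraph.guard():
    x = to_variable(np.random.rand(4, 16, 8, 8).astype('float32'))
    bn = BatchNorm('bn', num_channels=16)  # passing fuse_with_relu=... would now raise TypeError
    y = fluid.layers.relu(bn(x))           # relu applied as a separate op
```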
@@ -4148,7 +4148,6 @@ def batch_norm(input,
moving_mean_name=None,
moving_variance_name=None,
do_model_average_for_mean_and_var=True,
-fuse_with_relu=False,
use_global_stats=False):
"""
**Batch Normalization Layer**
@@ -4233,7 +4232,6 @@ def batch_norm(input,
will save global variance with the string.
do_model_average_for_mean_and_var(bool, Default True): Whether parameter mean and variance should do model
average when model average is enabled.
-fuse_with_relu (bool): if True, this OP performs relu after batch norm.
use_global_stats(bool, Default False): Whether to use global mean and
variance. In inference or test mode, set use_global_stats to true
or is_test to true, and the behavior is equivalent.
@@ -4349,7 +4347,7 @@ def batch_norm(input,
"is_test": is_test,
"data_layout": data_layout,
"use_mkldnn": False,
"fuse_with_relu": fuse_with_relu,
"fuse_with_relu": False,
"use_global_stats": use_global_stats
})
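Likewise, a hedged sketch (not part of the commit) of the static-graph call after this change: `fuse_with_relu` is removed from the `fluid.layers.batch_norm` keyword list, and the underlying op attribute is now always set to `False`, as the last hunk shows:

```python
# Minimal sketch (assumed 1.6-era static-graph API): fuse_with_relu is
# gone from fluid.layers.batch_norm; fuse the relu manually if needed.
import paddle.fluid as fluid

x = fluid.layers.data(name='x', shape=[16, 8, 8], dtype='float32')
out = fluid.layers.batch_norm(input=x)  # passing fuse_with_relu would now raise TypeError
out = fluid.layers.relu(out)            # explicit relu replaces the fused path
```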