Commit 4c56586a authored by bingyanghuang, committed by Tao Luo

[Cherry-pick] 21028: Remove fuse_with_relu argument from batch_norm constructor (#21049)

Parent f504d6f1
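Note for users of the static-graph API: with this change `fuse_with_relu` disappears from the `batch_norm` signature and the underlying operator attribute is hard-coded to `False`, so the fused kernel can no longer be requested from Python. A minimal sketch of the replacement pattern, assuming the Paddle 1.x `fluid` API (the input name and shape are illustrative only):

```python
import paddle.fluid as fluid

# Shape excludes the batch dimension (fluid.layers.data appends it).
x = fluid.layers.data(name='x', shape=[16, 32, 32], dtype='float32')

# fuse_with_relu is gone from the signature; apply the activation
# explicitly instead (batch_norm's existing `act` argument works too).
bn = fluid.layers.batch_norm(input=x)
out = fluid.layers.relu(bn)
```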
@@ -1200,8 +1200,6 @@ class BatchNorm(layers.Layer):
         moving_variance_name(str, optional): The name of the moving_variance which store the global Variance. Default: None.
         do_model_average_for_mean_and_var(bool, optional): Whether parameter mean and variance should do model
             average when model average is enabled. Default: True.
-        fuse_with_relu (bool, optional): When setting fuse_with_relu True, this OP performs relu after batch norm.
-            Default: False.
         use_global_stats(bool, optional): Whether to use global mean and
             variance. In inference or test mode, set use_global_stats to true
             or is_test to true, and the behavior is equivalent.
@@ -1243,7 +1241,6 @@ class BatchNorm(layers.Layer):
                  moving_mean_name=None,
                  moving_variance_name=None,
                  do_model_average_for_mean_and_var=True,
-                 fuse_with_relu=False,
                  use_global_stats=False,
                  trainable_statistics=False):
         super(BatchNorm, self).__init__(name_scope, dtype)
@@ -1302,7 +1299,7 @@ class BatchNorm(layers.Layer):
         self._momentum = momentum
         self._epsilon = epsilon
         self._is_test = is_test
-        self._fuse_with_relu = fuse_with_relu
+        self._fuse_with_relu = False
         self._use_global_stats = use_global_stats
         self._trainable_statistics = trainable_statistics
......
@@ -4126,7 +4126,6 @@ def batch_norm(input,
                moving_mean_name=None,
                moving_variance_name=None,
                do_model_average_for_mean_and_var=True,
-               fuse_with_relu=False,
                use_global_stats=False):
     """
     **Batch Normalization Layer**
@@ -4211,7 +4210,6 @@ def batch_norm(input,
            will save global variance with the string.
        do_model_average_for_mean_and_var(bool, Default True): Whether parameter mean and variance should do model
            average when model average is enabled.
-       fuse_with_relu (bool): if True, this OP performs relu after batch norm.
        use_global_stats(bool, Default False): Whether to use global mean and
            variance. In inference or test mode, set use_global_stats to true
            or is_test to true, and the behavior is equivalent.
@@ -4327,7 +4325,7 @@ def batch_norm(input,
             "is_test": is_test,
             "data_layout": data_layout,
             "use_mkldnn": False,
-            "fuse_with_relu": fuse_with_relu,
+            "fuse_with_relu": False,
             "use_global_stats": use_global_stats
         })
......
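The imperative (dygraph) `BatchNorm` layer loses the argument in the same way. A hedged sketch of post-change usage, assuming Paddle 1.x dygraph mode (the scope name `'bn'`, channel count, and input shape are made up for illustration):

```python
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import BatchNorm, to_variable

with fluid.dygraph.guard():
    # The constructor no longer accepts fuse_with_relu.
    bn = BatchNorm('bn', num_channels=16)
    x = to_variable(np.random.rand(4, 16, 32, 32).astype('float32'))
    # Perform relu after batch norm explicitly (or pass act='relu').
    y = fluid.layers.relu(bn(x))
```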