提交 80a8e91f 编写于 作者: C chengduoZH

fix order

上级 aea05b6e
......@@ -2366,9 +2366,9 @@ class BatchNormLayer(LayerBase):
inputs,
bias=True,
use_global_stats=True,
mean_var_names=None,
moving_average_fraction=0.9,
batch_norm_type=None,
mean_var_names=None,
**xargs):
if inputs is None:
inputs = []
......
......@@ -2957,10 +2957,10 @@ def batch_norm_layer(input,
bias_attr=None,
param_attr=None,
layer_attr=None,
mean_var_names=None,
batch_norm_type=None,
moving_average_fraction=0.9,
use_global_stats=None):
use_global_stats=None,
mean_var_names=None):
"""
Batch Normalization Layer. The notation of this layer as follow.
......@@ -3015,8 +3015,6 @@ def batch_norm_layer(input,
:type param_attr: ParameterAttribute
:param layer_attr: Extra Layer Attribute.
:type layer_attr: ExtraLayerAttribute
:param mean_var_names: [mean name, variance name]
:type mean_var_names: string list
:param use_global_stats: whether to use moving mean/variance statistics
during testing period. If None or True,
it will use moving mean/variance statistics during
......@@ -3029,6 +3027,8 @@ def batch_norm_layer(input,
:math:`runningMean = newMean*(1-factor)
+ runningMean*factor`
:type moving_average_fraction: float.
:param mean_var_names: [mean name, variance name]
:type mean_var_names: string list
:return: LayerOutput object.
:rtype: LayerOutput
"""
......@@ -3047,10 +3047,10 @@ def batch_norm_layer(input,
active_type=act.name,
type=LayerType.BATCH_NORM_LAYER,
batch_norm_type=batch_norm_type,
mean_var_names=mean_var_names,
bias=ParamAttr.to_bias(bias_attr),
moving_average_fraction=moving_average_fraction,
use_global_stats=use_global_stats,
mean_var_names=mean_var_names,
**ExtraLayerAttribute.to_kwargs(layer_attr))
return LayerOutput(
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册