diff --git a/paddle/operators/layer_norm_op.cc b/paddle/operators/layer_norm_op.cc
index 76d5d571c31c0cdec207cd171291da1f58d29b61..d9b774272cb7c9d87140bf30d2eabb44f49b2b7c 100644
--- a/paddle/operators/layer_norm_op.cc
+++ b/paddle/operators/layer_norm_op.cc
@@ -116,8 +116,6 @@ class LayerNormGradOp : public framework::OperatorWithKernel {
     // check input
     PADDLE_ENFORCE(ctx->HasInput("X"),
                    "Input(X) of LayerNormOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasInput("Scale"),
-                   "Input(Scale) of LayerNormOp should not be null.");
     PADDLE_ENFORCE(ctx->HasInput("Mean"),
                    "Input(Mean) of LayerNormOp should not be null.");
     PADDLE_ENFORCE(ctx->HasInput("Variance"),
diff --git a/python/paddle/v2/fluid/layers/nn.py b/python/paddle/v2/fluid/layers/nn.py
index e8455a8b41ed7497a4b28a67835218ce3fca74cf..0b64e09cd359fc89ddc868ae87c1afdbfface541 100644
--- a/python/paddle/v2/fluid/layers/nn.py
+++ b/python/paddle/v2/fluid/layers/nn.py
@@ -1637,7 +1637,7 @@ def layer_norm(input,
             dtype=dtype,
             default_initializer=Constant(1.0))
         inputs['Scale'] = scale
-    if center:
+    if shift:
         assert bias_attr is not False
         bias = helper.create_parameter(
             attr=helper.bias_attr, shape=param_shape, dtype=dtype, is_bias=True)
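
For context, a minimal sketch of what this fix enables, written against the `paddle.v2.fluid` API at this commit (the data shape below is illustrative, not taken from the patch). The bias branch in the Python wrapper previously tested `center`, a name `layer_norm` does not take, instead of the documented `shift` flag; and since the `Scale` parameter is only created when `scale=True`, the gradient op can no longer unconditionally enforce `Input(Scale)`:

```python
# Sketch only, not part of the patch. Assumes the paddle.v2.fluid API
# at this commit; the input shape is illustrative.
import paddle.v2.fluid as fluid

x = fluid.layers.data(name='x', shape=[128], dtype='float32')

# layer_norm's documented flags are `scale` and `shift`. Before the fix,
# the bias branch checked the wrong name (`center`), so toggling `shift`
# did not control Bias creation as documented. With `scale=False`, no
# Scale parameter is created at all, which is why the grad op's hard
# PADDLE_ENFORCE on Input(Scale) is removed in the same patch.
y = fluid.layers.layer_norm(x, scale=False, shift=True, begin_norm_axis=1)
```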