diff --git a/paddle/fluid/operators/activation_op.cc b/paddle/fluid/operators/activation_op.cc
index 4a12ceb13ab29f1220ae13f4990b85d396df2eca..7b98da04bc35f90a025a6a03d743aa3a26e0b7d2 100644
--- a/paddle/fluid/operators/activation_op.cc
+++ b/paddle/fluid/operators/activation_op.cc
@@ -49,11 +49,13 @@ static constexpr bool CanInplaceAct() {
                         " operator, a Tensor with shape same as input.");       \
       AddAttr<bool>("use_mkldnn",                                               \
                     "(bool, default false) Only used in mkldnn kernel")         \
-          .SetDefault(false);                                                   \
+          .SetDefault(false)                                                    \
+          .AsExtra();                                                           \
       AddAttr<bool>("use_cudnn",                                                \
                     "(bool, default false) Only used in cudnn kernel, need "    \
                     "install cudnn")                                            \
-          .SetDefault(false);                                                   \
+          .SetDefault(false)                                                    \
+          .AsExtra();                                                           \
       AddComment(OP_COMMENT);                                                   \
     }                                                                           \
   }
diff --git a/paddle/fluid/operators/gelu_op.cc b/paddle/fluid/operators/gelu_op.cc
index 3293800e1c6206a7a810781d204ca5779a9ce400..3d338f00d4fcbf4be35b2392a10c275526dc5d4b 100644
--- a/paddle/fluid/operators/gelu_op.cc
+++ b/paddle/fluid/operators/gelu_op.cc
@@ -108,16 +108,19 @@ class GeluOpMaker : public framework::OpProtoAndCheckerMaker {
         .SetDefault(false);
     AddAttr<bool>("use_mkldnn",
                   "(bool, default false) Only used in mkldnn kernel")
-        .SetDefault(false);
+        .SetDefault(false)
+        .AsExtra();
     AddAttr<std::string>(
         "mkldnn_data_type",
         "(string, default \"float32\"). Data type of mkldnn kernel")
         .SetDefault("float32")
-        .InEnum({"float32", "int8", "bfloat16"});
+        .InEnum({"float32", "int8", "bfloat16"})
+        .AsExtra();
     AddAttr<bool>("use_cudnn",
                   "(bool, default false) Only used in cudnn kernel, need "
                   "install cudnn")
-        .SetDefault(false);
+        .SetDefault(false)
+        .AsExtra();
     AddComment(R"DOC(
 Gelu Activation Operator.