Unverified commit 652da1f4, authored by huangjun12, committed by GitHub

Add As_extra to dropout op and lrn op (#35349)

* add as_extra to dropout op and lrn op

* refine details

* fix dropout op maker
Parent 9c9eba13
@@ -51,9 +51,12 @@ class DropoutOpMaker : public framework::OpProtoAndCheckerMaker {
     AddInput("Seed",
              "The seed of dropout op, it has higher priority than the attr "
              "fix_seed and seed")
-        .AsDispensable();
+        .AsDispensable()
+        .AsExtra();
     AddOutput("Out", "The output of dropout op.");
-    AddOutput("Mask", "The random sampled dropout mask.").AsIntermediate();
+    AddOutput("Mask", "The random sampled dropout mask.")
+        .AsIntermediate()
+        .AsExtra();
 
     AddAttr<float>("dropout_prob", "Probability of setting units to zero.")
         .SetDefault(.5f)
@@ -72,8 +75,9 @@ class DropoutOpMaker : public framework::OpProtoAndCheckerMaker {
                   "training. Setting this flag to true is only useful in "
                   "unittest or for debug that always the same output units "
                   "will be dropped.")
-        .SetDefault(false);
-    AddAttr<int>("seed", "Dropout random seed.").SetDefault(0);
+        .SetDefault(false)
+        .AsExtra();
+    AddAttr<int>("seed", "Dropout random seed.").SetDefault(0).AsExtra();
     AddAttr<std::string>(
         "dropout_implementation",
         "[\"downgrade_in_infer\"|\"upscale_in_train\"]"
...
@@ -274,7 +274,8 @@ class LRNOpMaker : public framework::OpProtoAndCheckerMaker {
         .GreaterThan(0.0);
     AddAttr<bool>("use_mkldnn",
                   "(bool, default false) Only used in mkldnn kernel")
-        .SetDefault(false);
+        .SetDefault(false)
+        .AsExtra();
     AddAttr<std::string>(
         "data_format",
         "(string, default NCHW) Only used in "
@@ -285,7 +286,8 @@ class LRNOpMaker : public framework::OpProtoAndCheckerMaker {
     AddAttr<bool>("is_test",
                   "(bool, default false) Set to true for inference only, false "
                   "for training. Some layers may run faster when this is true.")
-        .SetDefault(false);
+        .SetDefault(false)
+        .AsExtra();
     AddComment(R"DOC(
 Local Response Normalization Operator.
...
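For readers unfamiliar with the pattern, AsExtra() marks an input, output, or attribute as "extra" information that sits outside the operator's core definition, such as training-only state (the dropout Mask) or kernel-specific switches (use_mkldnn). Below is a minimal sketch of the same chaining style on a hypothetical op maker; the class name, attribute choices, and included header are illustrative assumptions, not code from this commit.

// Sketch only: illustrates the AsDispensable()/AsIntermediate()/AsExtra()
// chaining pattern shown in the diff above. The op maker, attribute names,
// and header path are assumed for illustration.
#include "paddle/fluid/framework/op_registry.h"

namespace paddle {
namespace operators {

class FakeOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input tensor of the op.");
    // Optional input that is not part of the op's core signature.
    AddInput("Seed", "Optional seed tensor.").AsDispensable().AsExtra();
    AddOutput("Out", "Output tensor of the op.");
    // Intermediate output that only matters during training.
    AddOutput("Mask", "Training-time mask.").AsIntermediate().AsExtra();
    // Kernel-specific switch, kept out of the core op definition.
    AddAttr<bool>("use_mkldnn",
                  "(bool, default false) Only used in mkldnn kernel")
        .SetDefault(false)
        .AsExtra();
    AddComment("Illustrative op maker; see the diff above for real usage.");
  }
};

}  // namespace operators
}  // namespace paddle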