Unverified commit 652da1f4, authored by huangjun12, committed by GitHub

Add As_extra to dropout op and lrn op (#35349)

* add as_extra to dropout op and lrn op

* refine details

* fix dropout op maker
Parent 9c9eba13
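Background (not part of the original commit message): in an op's `OpProtoAndCheckerMaker`, the builders returned by `AddInput`, `AddOutput`, and `AddAttr` allow chaining `.AsExtra()` to flag the item as an extra, non-essential part of the op definition, alongside existing modifiers such as `.AsDispensable()`, `.AsIntermediate()`, and `.SetDefault()`. The sketch below only illustrates that chaining pattern; `MyOpMaker`, `"X"`, and `"Out"` are hypothetical placeholders, not code from this PR — only the `use_mkldnn` attribute mirrors the real LRN op change in the diff that follows.

```cpp
// Minimal illustrative sketch of the AsExtra() chaining pattern applied by
// this commit. MyOpMaker and its input/output names are placeholders.
class MyOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "Input tensor of the op.");
    AddOutput("Out", "Output tensor of the op.");
    // Attributes that are not part of the op's core definition can be
    // marked as extra by chaining AsExtra() after SetDefault().
    AddAttr<bool>("use_mkldnn",
                  "(bool, default false) Only used in mkldnn kernel")
        .SetDefault(false)
        .AsExtra();
  }
};
```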
@@ -51,9 +51,12 @@ class DropoutOpMaker : public framework::OpProtoAndCheckerMaker {
     AddInput("Seed",
              "The seed of dropout op, it has higher priority than the attr "
              "fix_seed and seed")
-        .AsDispensable();
+        .AsDispensable()
+        .AsExtra();
     AddOutput("Out", "The output of dropout op.");
-    AddOutput("Mask", "The random sampled dropout mask.").AsIntermediate();
+    AddOutput("Mask", "The random sampled dropout mask.")
+        .AsIntermediate()
+        .AsExtra();
     AddAttr<float>("dropout_prob", "Probability of setting units to zero.")
         .SetDefault(.5f)
@@ -72,8 +75,9 @@ class DropoutOpMaker : public framework::OpProtoAndCheckerMaker {
              "training. Setting this flag to true is only useful in "
              "unittest or for debug that always the same output units "
              "will be dropped.")
-        .SetDefault(false);
-    AddAttr<int>("seed", "Dropout random seed.").SetDefault(0);
+        .SetDefault(false)
+        .AsExtra();
+    AddAttr<int>("seed", "Dropout random seed.").SetDefault(0).AsExtra();
     AddAttr<std::string>(
         "dropout_implementation",
         "[\"downgrade_in_infer\"|\"upscale_in_train\"]"
@@ -274,7 +274,8 @@ class LRNOpMaker : public framework::OpProtoAndCheckerMaker {
         .GreaterThan(0.0);
     AddAttr<bool>("use_mkldnn",
                   "(bool, default false) Only used in mkldnn kernel")
-        .SetDefault(false);
+        .SetDefault(false)
+        .AsExtra();
     AddAttr<std::string>(
         "data_format",
         "(string, default NCHW) Only used in "
@@ -285,7 +286,8 @@ class LRNOpMaker : public framework::OpProtoAndCheckerMaker {
     AddAttr<bool>("is_test",
                   "(bool, default false) Set to true for inference only, false "
                   "for training. Some layers may run faster when this is true.")
-        .SetDefault(false);
+        .SetDefault(false)
+        .AsExtra();
     AddComment(R"DOC(
 Local Response Normalization Operator.