Unverified commit 83424033, authored by 王明冬, committed by GitHub

fix some op extra error, test=develop (#35667)

Parent: adaa207b
@@ -441,7 +441,7 @@ void OperatorBase::CheckAllInputOutputSet() const {
   if (info_ == nullptr || info_->proto_ == nullptr) return;
   for (auto& in : info_->Proto().inputs()) {
-    if (!in.dispensable()) {
+    if (!in.dispensable() && !in.extra()) {
       PADDLE_ENFORCE_NE(
           inputs_.find(in.name()), inputs_.end(),
           platform::errors::NotFound("Operator %s's input (%s) is not set.",
@@ -450,7 +450,7 @@ void OperatorBase::CheckAllInputOutputSet() const {
   }
   for (auto& out : info_->Proto().outputs()) {
-    if (!out.dispensable()) {
+    if (!out.dispensable() && !out.extra()) {
       PADDLE_ENFORCE_NE(
           outputs_.find(out.name()), outputs_.end(),
           platform::errors::NotFound("Operator %s's output (%s) is not set.",
......
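Note on the hunk above: slots marked `dispensable` were already exempt from the completeness check, and this change extends the exemption to slots marked `extra`, which are implementation-specific and need not be wired by the caller. Below is a minimal standalone sketch of the skip logic; `VarProto` and `CheckAllInputsSet` are hypothetical stand-ins, not Paddle APIs (the real proto entries come from framework.pb.h and expose `dispensable()`/`extra()` accessors).

#include <iostream>
#include <map>
#include <string>
#include <vector>

// Hypothetical stand-in for a generated proto entry describing one slot.
struct VarProto {
  std::string name;
  bool dispensable;
  bool extra;
};

// Mirrors the patched loop: only a slot that is neither dispensable nor
// extra must actually be present in the operator's input map.
void CheckAllInputsSet(const std::vector<VarProto>& proto_inputs,
                       const std::map<std::string, int>& inputs) {
  for (const auto& in : proto_inputs) {
    if (!in.dispensable && !in.extra &&
        inputs.find(in.name) == inputs.end()) {
      std::cerr << "input (" << in.name << ") is not set.\n";
    }
  }
}

int main() {
  std::vector<VarProto> proto = {{"X", false, false},
                                 {"ResidualData", false, true}};
  std::map<std::string, int> inputs = {{"X", 0}};  // extra slot omitted
  CheckAllInputsSet(proto, inputs);  // prints nothing: extra slot is skipped
  return 0;
}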
@@ -686,12 +686,15 @@ class Conv2DGradMaker : public framework::SingleGradOpMaker<T> {
     op->SetType(this->ForwardOpType() + "_grad");
     op->SetInput("Input", this->Input("Input"));
     op->SetInput("Filter", this->Input("Filter"));
-    op->SetInput("Bias", this->Input("Bias"));
     op->SetInput(framework::GradVarName("Output"), this->OutputGrad("Output"));
     op->SetOutput(framework::GradVarName("Input"), this->InputGrad("Input"));
     op->SetOutput(framework::GradVarName("Filter"), this->InputGrad("Filter"));
-    op->SetOutput(framework::GradVarName("Bias"), this->InputGrad("Bias"));
+    if (this->HasInput("Bias")) {
+      op->SetInput("Bias", this->Input("Bias"));
+      op->SetOutput(framework::GradVarName("Bias"), this->InputGrad("Bias"));
+    }
     op->SetAttrMap(this->Attrs());
   }
 };
......
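The hunk above fixes the conv2d grad maker: `Bias` is an optional input, and the old code wired `Bias` and `Bias@GRAD` into the grad op unconditionally, which breaks when the forward op was built without a bias. Below is a standalone sketch of the guard pattern; `FakeMaker` and its members are hypothetical, not the real `SingleGradOpMaker` API.

#include <iostream>
#include <set>
#include <string>

// Hypothetical miniature of a grad-op maker: the forward op's inputs are a
// plain set of names, and "wiring" a slot just prints what would be set.
struct FakeMaker {
  std::set<std::string> forward_inputs;

  bool HasInput(const std::string& name) const {
    return forward_inputs.count(name) > 0;
  }

  void Apply() const {
    // Required slots are forwarded unconditionally.
    std::cout << "SetInput(Input)\n";
    std::cout << "SetInput(Filter)\n";
    // The optional slot is forwarded only when the forward op has it;
    // otherwise the grad op would reference a variable that does not exist.
    if (HasInput("Bias")) {
      std::cout << "SetInput(Bias)\n";
      std::cout << "SetOutput(Bias@GRAD)\n";
    }
  }
};

int main() {
  FakeMaker with_bias{{"Input", "Filter", "Bias"}};
  with_bias.Apply();  // wires Input, Filter, Bias, and Bias@GRAD

  FakeMaker without_bias{{"Input", "Filter"}};
  without_bias.Apply();  // the two Bias lines are skipped
  return 0;
}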
@@ -186,9 +186,7 @@ class LSTMOpMaker : public framework::OpProtoAndCheckerMaker {
              "(bool, default: False) "
              "whether to compute reversed LSTM.")
        .SetDefault(false);
-   AddAttr<bool>("is_test", "True if in test phase.")
-       .SetDefault(false)
-       .AsExtra();
+   AddAttr<bool>("is_test", "True if in test phase.").SetDefault(false);
    AddAttr<std::string>(
        "gate_activation",
        "(string, default: sigmoid)"
......
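In the hunk above, `is_test` loses its `.AsExtra()` tag, presumably because the attribute affects the op's observable behavior rather than tuning one backend, so it belongs in the op's core definition. A minimal sketch of the fluent builder chain is below; `BoolAttrBuilder` is a hypothetical miniature, not the real `AddAttr` machinery.

#include <iostream>
#include <string>
#include <utility>

// Hypothetical miniature of the AddAttr<bool> fluent builder: SetDefault
// records a fallback value and AsExtra tags the attribute as a
// backend-specific hint excluded from the op's core definition.
class BoolAttrBuilder {
 public:
  explicit BoolAttrBuilder(std::string name) : name_(std::move(name)) {}

  BoolAttrBuilder& SetDefault(bool value) {
    default_value_ = value;
    return *this;  // returning *this is what makes the chaining work
  }

  BoolAttrBuilder& AsExtra() {
    extra_ = true;
    return *this;
  }

  void Dump() const {
    std::cout << name_ << " default=" << default_value_
              << " extra=" << extra_ << "\n";
  }

 private:
  std::string name_;
  bool default_value_ = false;
  bool extra_ = false;
};

int main() {
  // After this patch is_test is a core attribute: no AsExtra() in the chain.
  BoolAttrBuilder is_test("is_test");
  is_test.SetDefault(false).Dump();  // prints: is_test default=0 extra=0
  return 0;
}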
@@ -223,7 +223,7 @@ class Transpose2Op : public TransposeOp {
   void InferShape(framework::InferShapeContext *ctx) const override {
     TransposeOp::InferShape(ctx);
-    OP_INOUT_CHECK(ctx->HasOutput("XShape"), "Output", "XShape", "Transpose2");
+    if (!ctx->HasOutput("XShape")) return;
     const auto &in_dims = ctx->GetInputDim("X");
     std::vector<int64_t> x_shape_dim(in_dims.size() + 1);
     x_shape_dim[0] = 0;
......
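The hunk above relaxes Transpose2's shape inference: the `XShape` output is now treated as optional, so a missing output leads to an early return instead of tripping `OP_INOUT_CHECK`. Below is a standalone sketch of the resulting control flow; `FakeCtx` and `Transpose2InferShapeSketch` are hypothetical names, not the framework's `InferShapeContext` API.

#include <cstdint>
#include <iostream>
#include <set>
#include <string>
#include <vector>

// Hypothetical shape-inference context: outputs are a set of names and the
// input dims are given directly instead of via GetInputDim().
struct FakeCtx {
  std::set<std::string> outputs;
  std::vector<int64_t> x_dims;

  bool HasOutput(const std::string& name) const {
    return outputs.count(name) > 0;
  }
};

void Transpose2InferShapeSketch(const FakeCtx& ctx) {
  // ... the base TransposeOp::InferShape(ctx) logic would run here ...
  if (!ctx.HasOutput("XShape")) return;  // optional output: just skip it

  // XShape keeps the input shape behind a leading placeholder 0, exactly
  // as in the hunk above.
  std::vector<int64_t> x_shape_dim(ctx.x_dims.size() + 1);
  x_shape_dim[0] = 0;
  for (size_t i = 0; i < ctx.x_dims.size(); ++i) {
    x_shape_dim[i + 1] = ctx.x_dims[i];
  }
  for (int64_t d : x_shape_dim) std::cout << d << " ";
  std::cout << "\n";
}

int main() {
  Transpose2InferShapeSketch({{"Out"}, {2, 3}});            // prints nothing
  Transpose2InferShapeSketch({{"Out", "XShape"}, {2, 3}});  // prints: 0 2 3
  return 0;
}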