Unverified commit a0452475, authored by 123malin, committed by GitHub

Enhance Op's Error Message (#27455)

* test=develop, update error message

Parent: 827ac36f
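Every hunk in this commit follows the same pattern: untyped PADDLE_ENFORCE(cond, "msg") / PADDLE_THROW("msg") calls become comparison macros (PADDLE_ENFORCE_EQ, PADDLE_ENFORCE_NE) carrying a typed error from platform::errors, and boilerplate input/output presence checks become OP_INOUT_CHECK. A minimal before/after sketch of the pattern; the operator name "MyOp" and input "X" are placeholders, not code from this commit:

// Before: untyped check; the exception carries only a raw message string.
PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) of MyOp should not be null.");

// After: the condition is an explicit comparison, and the message is
// wrapped in a typed error so the exception reports an error class
// (NotFound, InvalidArgument, ...) plus a printf-style formatted message.
PADDLE_ENFORCE_EQ(
    ctx->HasInput("X"), true,
    platform::errors::NotFound("Input(X) of MyOp should not be null."));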
paddle/fluid/operators/concat_op.cc

@@ -13,6 +13,7 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 #include "paddle/fluid/operators/concat_op.h"
+
 #include <memory>
 #include <string>
 #include <vector>
@@ -78,7 +79,8 @@ class ConcatOp : public framework::OperatorWithKernel {
       }
     }
     if (flag == 0) {
-      PADDLE_THROW("All Inputs of Concat OP are Empty!");
+      PADDLE_THROW(platform::errors::InvalidArgument(
+          "All Inputs of Concat OP are Empty!"));
     }
 #ifdef PADDLE_WITH_MKLDNN
     if (platform::CanMKLDNNBeUsed(ctx)) {
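platform::errors supplies one builder per error class; each takes a printf-style format string plus arguments, and the enforce macros attach file and line context when the check fails. InvalidArgument is the only class used in these hunks. A hedged sketch of usage (the axis/rank check below is illustrative, not from this commit):

// Same shape as the concat change above: a typed, formatted error.
PADDLE_THROW(platform::errors::InvalidArgument(
    "All Inputs of Concat OP are Empty!"));

// The other comparison macros take the same trailing error argument.
PADDLE_ENFORCE_LT(axis, rank,
                  platform::errors::OutOfRange(
                      "Attr(axis) should be less than the input's rank %d.",
                      rank));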
paddle/fluid/operators/optimizers/decayed_adagrad_op.cc

@@ -23,46 +23,54 @@ class DecayedAdagradOp : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;
 
   void InferShape(framework::InferShapeContext *ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput("Param"),
-                   "Input(Param) of DecayedAdagradOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasInput("Grad"),
-                   "Input(Grad) of DecayedAdagradOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasInput("Moment"),
-                   "Input(Moment) of DecayedAdagradOp should not be null.");
-    PADDLE_ENFORCE(
-        ctx->HasInput("LearningRate"),
-        "Input(LearningRate) of DecayedAdagradOp should not be null.");
-    PADDLE_ENFORCE(
-        ctx->GetInputsVarType("Param").front() ==
-            framework::proto::VarType::LOD_TENSOR,
-        "The input var's type should be LoDTensor, but the received is %s",
-        ctx->Inputs("Param").front(), ctx->GetInputsVarType("Param").front());
-    PADDLE_ENFORCE(
-        ctx->GetInputsVarType("Grad").front() ==
-            framework::proto::VarType::LOD_TENSOR,
-        "The input var's type should be LoDTensor, but the received is %s",
-        ctx->Inputs("Grad").front(), ctx->GetInputsVarType("Grad").front());
+    OP_INOUT_CHECK(ctx->HasInput("Param"), "Input", "Param",
+                   "DecayedAdagradOp");
+    OP_INOUT_CHECK(ctx->HasInput("Grad"), "Input", "Grad", "DecayedAdagradOp");
+    OP_INOUT_CHECK(ctx->HasInput("Moment"), "Input", "Moment",
+                   "DecayedAdagradOp");
+    OP_INOUT_CHECK(ctx->HasInput("LearningRate"), "Input", "LearningRate",
+                   "DecayedAdagradOp");
+    PADDLE_ENFORCE_EQ(
+        ctx->GetInputsVarType("Param").front(),
+        framework::proto::VarType::LOD_TENSOR,
+        platform::errors::InvalidArgument(
+            "The input var's type should be LoDTensor, but the received is %s",
+            ctx->Inputs("Param").front(),
+            ctx->GetInputsVarType("Param").front()));
+    PADDLE_ENFORCE_EQ(
+        ctx->GetInputsVarType("Grad").front(),
+        framework::proto::VarType::LOD_TENSOR,
+        platform::errors::InvalidArgument(
+            "The input var's type should be LoDTensor, but the received is %s",
+            ctx->Inputs("Grad").front(),
+            ctx->GetInputsVarType("Grad").front()));
 
-    PADDLE_ENFORCE(ctx->HasOutput("ParamOut"),
-                   "Output(ParamOut) of DecayedAdagradOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasOutput("MomentOut"),
-                   "Output(MomentOut) of DecayedAdagradOp should not be null.");
+    OP_INOUT_CHECK(ctx->HasOutput("ParamOut"), "Output", "ParamOut",
+                   "DecayedAdagradOp");
+    OP_INOUT_CHECK(ctx->HasOutput("MomentOut"), "Output", "MomentOut",
+                   "DecayedAdagradOp");
 
     auto lr_dims = ctx->GetInputDim("LearningRate");
     PADDLE_ENFORCE_NE(framework::product(lr_dims), 0,
-                      "Maybe the Input variable LearningRate has not "
-                      "been initialized. You may need to confirm "
-                      "if you put exe.run(startup_program) "
-                      "after optimizer.minimize function.");
+                      platform::errors::InvalidArgument(
+                          "Maybe the Input variable LearningRate has not "
+                          "been initialized. You may need to confirm "
+                          "if you put exe.run(startup_program) "
+                          "after optimizer.minimize function."));
     PADDLE_ENFORCE_EQ(framework::product(lr_dims), 1,
-                      "LearningRate should have one element");
+                      platform::errors::InvalidArgument(
+                          "LearningRate should have one element"));
     auto param_dims = ctx->GetInputDim("Param");
-    PADDLE_ENFORCE_EQ(param_dims, ctx->GetInputDim("Grad"),
-                      "Param and Grad input of DecayedAdagradOp should have "
-                      "the same dimension.");
-    PADDLE_ENFORCE_EQ(param_dims, ctx->GetInputDim("Moment"),
-                      "Param and Moment input of DecayedAdagradOp should have "
-                      "the same dimension.");
+    PADDLE_ENFORCE_EQ(
+        param_dims, ctx->GetInputDim("Grad"),
+        platform::errors::InvalidArgument(
+            "Param and Grad input of DecayedAdagradOp should have "
+            "the same dimension."));
+    PADDLE_ENFORCE_EQ(
+        param_dims, ctx->GetInputDim("Moment"),
+        platform::errors::InvalidArgument(
+            "Param and Moment input of DecayedAdagradOp should have "
+            "the same dimension."));
 
     ctx->SetOutputDim("ParamOut", param_dims);
     ctx->SetOutputDim("MomentOut", param_dims);
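OP_INOUT_CHECK collapses the hand-written "should not be null" assertions into one macro that takes the checked expression, the slot kind ("Input" or "Output"), the slot name, and the operator name. A rough behavioral equivalent, assuming the definition in Paddle's enforce header of this era (sketch only; the exact message text may differ):

// Sketch: OP_INOUT_CHECK(expr, type, name, op_name) behaves like a
// PADDLE_ENFORCE_EQ(expr, true, ...) that raises a typed NotFound error.
#define OP_INOUT_CHECK(expr, type, name, op_name)                       \
  PADDLE_ENFORCE_EQ(expr, true,                                         \
                    platform::errors::NotFound(                         \
                        "No %s(%s) found for %s operator.", type, name, \
                        op_name))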
paddle/fluid/operators/optimizers/decayed_adagrad_op.h

@@ -24,17 +24,19 @@ class DecayedAdagradOpKernel : public framework::OpKernel<T> {
  public:
   void Compute(const framework::ExecutionContext& ctx) const override {
     const auto* param_var = ctx.InputVar("Param");
-    PADDLE_ENFORCE(param_var->IsType<framework::LoDTensor>(),
-                   "The Var(%s)'s type should be LoDTensor, "
-                   "but the received is %s",
-                   ctx.InputNames("Param").front(),
-                   framework::ToTypeName(param_var->Type()));
+    PADDLE_ENFORCE_EQ(param_var->IsType<framework::LoDTensor>(), true,
+                      platform::errors::InvalidArgument(
+                          "The Var(%s)'s type should be LoDTensor, "
+                          "but the received is %s",
+                          ctx.InputNames("Param").front(),
+                          framework::ToTypeName(param_var->Type())));
     const auto* grad_var = ctx.InputVar("Grad");
-    PADDLE_ENFORCE(grad_var->IsType<framework::LoDTensor>(),
-                   "The Var(%s)'s type should be LoDTensor, "
-                   "but the received is %s",
-                   ctx.InputNames("Grad").front(),
-                   framework::ToTypeName(grad_var->Type()));
+    PADDLE_ENFORCE_EQ(grad_var->IsType<framework::LoDTensor>(), true,
+                      platform::errors::InvalidArgument(
+                          "The Var(%s)'s type should be LoDTensor, "
+                          "but the received is %s",
+                          ctx.InputNames("Grad").front(),
+                          framework::ToTypeName(grad_var->Type())));
 
     auto param_out_tensor = ctx.Output<framework::Tensor>("ParamOut");
     auto moment_out_tensor = ctx.Output<framework::Tensor>("MomentOut");
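For context, the kernel guarded above computes the decayed Adagrad update described in Paddle's documentation for this op. A standalone sketch of the per-element math (function and variable names chosen here for illustration):

#include <cmath>

// moment_out = decay * moment + (1 - decay) * grad * grad
// param_out  = param - lr * grad / (sqrt(moment_out) + epsilon)
inline float decayed_adagrad_step(float param, float grad, float moment,
                                  float lr, float decay, float epsilon,
                                  float* moment_out) {
  *moment_out = decay * moment + (1.0f - decay) * grad * grad;
  return param - lr * grad / (std::sqrt(*moment_out) + epsilon);
}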
paddle/fluid/operators/optimizers/lars_momentum_op.h

@@ -30,7 +30,12 @@ class LarsMomentumOpKernel : public framework::OpKernel<T> {
     auto learning_rate = ctx.Input<framework::LoDTensor>("LearningRate");
     auto* grad_var = ctx.InputVar("Grad");
     // only support dense for now.
-    PADDLE_ENFORCE_EQ(grad_var->IsType<framework::LoDTensor>(), true);
+    PADDLE_ENFORCE_EQ(grad_var->IsType<framework::LoDTensor>(), true,
+                      platform::errors::InvalidArgument(
+                          "The Var(%s)'s type should be LoDTensor, "
+                          "but the received is %s",
+                          ctx.InputNames("Grad").front(),
+                          framework::ToTypeName(grad_var->Type())));
     auto grad = ctx.Input<framework::LoDTensor>("Grad");
 
     param_out->mutable_data<T>(ctx.GetPlace());
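As the "only support dense for now" comment notes, this kernel handles dense gradients only, which is what the new typed error reports when a sparse Grad arrives. For context, the layer-wise update it performs follows the lars_momentum formula in Paddle's documentation; a standalone dense sketch (names chosen here for illustration):

#include <cmath>
#include <vector>

// local_lr = lr * lars_coeff * ||p|| / (||g|| + weight_decay * ||p||)
// v_out    = mu * v + local_lr * (g + weight_decay * p)
// p_out    = p - v_out
void lars_momentum_step(std::vector<float>* p, std::vector<float>* v,
                        const std::vector<float>& g, float lr, float mu,
                        float lars_coeff, float weight_decay) {
  float p_norm = 0.0f, g_norm = 0.0f;
  for (size_t i = 0; i < p->size(); ++i) {
    p_norm += (*p)[i] * (*p)[i];
    g_norm += g[i] * g[i];
  }
  p_norm = std::sqrt(p_norm);
  g_norm = std::sqrt(g_norm);
  const float local_lr =
      lr * lars_coeff * p_norm / (g_norm + weight_decay * p_norm);
  for (size_t i = 0; i < p->size(); ++i) {
    (*v)[i] = mu * (*v)[i] + local_lr * (g[i] + weight_decay * (*p)[i]);
    (*p)[i] -= (*v)[i];
  }
}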