From f1c57d648caf103de8e64aa8c2bded1820b50306 Mon Sep 17 00:00:00 2001 From: gongweibao Date: Wed, 13 May 2020 19:58:12 +0800 Subject: [PATCH] Enhance error message of prefetch_op, proximal_adagrad_op, proximal_gd_op (#24436) --- .../operators/distributed_ops/prefetch_op.cc | 8 ++- .../optimizers/proximal_adagrad_op.cc | 56 +++++++++++-------- .../operators/optimizers/proximal_gd_op.cc | 28 ++++++---- 3 files changed, 56 insertions(+), 36 deletions(-) diff --git a/paddle/fluid/operators/distributed_ops/prefetch_op.cc b/paddle/fluid/operators/distributed_ops/prefetch_op.cc index c6395289f8f..6037ab1523e 100644 --- a/paddle/fluid/operators/distributed_ops/prefetch_op.cc +++ b/paddle/fluid/operators/distributed_ops/prefetch_op.cc @@ -57,7 +57,13 @@ class PrefetchOp : public framework::OperatorBase { } } for (size_t i = 0; i < rets.size(); i++) { - PADDLE_ENFORCE(rets[i]->Wait(), "internal error in RPCClient"); + PADDLE_ENFORCE_EQ( + rets[i]->Wait(), true, + platform::errors::Fatal( + "It's a fatal error of RPCClient that RPCClient can't " + "get the wait result. 
It may happen when trainers or " + "parameter servers exit abnormally or a network " + "issue occurs!")); } } }; diff --git a/paddle/fluid/operators/optimizers/proximal_adagrad_op.cc b/paddle/fluid/operators/optimizers/proximal_adagrad_op.cc index 3e2f12137af..7fe2a9a94ac 100644 --- a/paddle/fluid/operators/optimizers/proximal_adagrad_op.cc +++ b/paddle/fluid/operators/optimizers/proximal_adagrad_op.cc @@ -24,34 +24,42 @@ class ProximalAdagradOp : public framework::OperatorWithKernel { protected: void InferShape(framework::InferShapeContext *ctx) const override { - PADDLE_ENFORCE(ctx->HasInput("Param"), - "Input(Param) of ProximalAdagradOp should not be null."); - PADDLE_ENFORCE(ctx->HasInput("Moment"), - "Input(Moment) of ProximalAdagradOp should not be null."); - PADDLE_ENFORCE(ctx->HasInput("Grad"), - "Input(Grad) of ProximalAdagradOp should not be null."); - PADDLE_ENFORCE( - ctx->HasInput("LearningRate"), - "Input(LearningRate) of ProximalAdagradOp should not be null."); - - PADDLE_ENFORCE(ctx->HasOutput("ParamOut"), - "Output(ParamOut) of ProximalAdagradOp should not be null."); - PADDLE_ENFORCE( - ctx->HasOutput("MomentOut"), - "Output(MomentOut) of ProximalAdagradOp should not be null."); + OP_INOUT_CHECK(ctx->HasInput("Param"), "Input", "Param", + "ProximalAdagradOp"); + OP_INOUT_CHECK(ctx->HasInput("Moment"), "Input", "Moment", + "ProximalAdagradOp"); + OP_INOUT_CHECK(ctx->HasInput("Grad"), "Input", "Grad", "ProximalAdagradOp"); + OP_INOUT_CHECK(ctx->HasInput("LearningRate"), "Input", "LearningRate", + "ProximalAdagradOp"); + + OP_INOUT_CHECK(ctx->HasOutput("ParamOut"), "Output", "ParamOut", + "ProximalAdagradOp"); + OP_INOUT_CHECK(ctx->HasOutput("MomentOut"), "Output", "MomentOut", + "ProximalAdagradOp"); auto param_dim = ctx->GetInputDim("Param"); - PADDLE_ENFORCE_EQ( - param_dim, ctx->GetInputDim("Grad"), - "Param and Grad of ProximalAdagrad Op must have same dimension."); - - PADDLE_ENFORCE_EQ( - param_dim, ctx->GetInputDim("Moment"), - "Param and Moment 
of ProximalAdagrad Op must have same dimension."); + PADDLE_ENFORCE_EQ(param_dim, ctx->GetInputDim("Grad"), + platform::errors::InvalidArgument( + "The shape of Input(Param) should be equal to the " + "Input(Grad) of ProximalAdagrad Op. But received " + "Input(Param).dimensions=[%s], " + "Input(Grad).dimensions=[%s]", + param_dim, ctx->GetInputDim("Grad"))); + + PADDLE_ENFORCE_EQ(param_dim, ctx->GetInputDim("Moment"), + platform::errors::InvalidArgument( + "The shape of Input(Param) should be equal to the " + "Input(Moment) of ProximalAdagrad Op. But received " + "Input(Param).dimensions=[%s], " + "Input(Moment).dimensions=[%s]", + param_dim, ctx->GetInputDim("Moment"))); auto lr_dim = ctx->GetInputDim("LearningRate"); - PADDLE_ENFORCE_EQ(framework::product(lr_dim), 1, - "Learning Rate should be a scalar."); + PADDLE_ENFORCE_EQ( + framework::product(lr_dim), 1, + platform::errors::InvalidArgument( + "Learning Rate should be a scalar. But received dimensions: [%s]", + lr_dim)); ctx->SetOutputDim("ParamOut", param_dim); ctx->SetOutputDim("MomentOut", param_dim); diff --git a/paddle/fluid/operators/optimizers/proximal_gd_op.cc b/paddle/fluid/operators/optimizers/proximal_gd_op.cc index cf3c3e2ccb9..edaf55129ad 100644 --- a/paddle/fluid/operators/optimizers/proximal_gd_op.cc +++ b/paddle/fluid/operators/optimizers/proximal_gd_op.cc @@ -24,23 +24,29 @@ class ProximalGDOp : public framework::OperatorWithKernel { protected: void InferShape(framework::InferShapeContext *ctx) const override { - PADDLE_ENFORCE(ctx->HasInput("Param"), - "Input(Param) of ProximalGDOp should not be null."); - PADDLE_ENFORCE(ctx->HasInput("Grad"), - "Input(Grad) of ProximalGDOp should not be null."); - PADDLE_ENFORCE(ctx->HasInput("LearningRate"), - "Input(LearningRate) of ProximalGDOp should not be null."); + OP_INOUT_CHECK(ctx->HasInput("Param"), "Input", "Param", "ProximalGDOp"); + OP_INOUT_CHECK(ctx->HasInput("Grad"), "Input", "Grad", "ProximalGDOp"); + 
OP_INOUT_CHECK(ctx->HasInput("LearningRate"), "Input", "LearningRate", + "ProximalGDOp"); - PADDLE_ENFORCE(ctx->HasOutput("ParamOut"), - "Output(ParamOut) of ProximalGDOp should not be null."); + OP_INOUT_CHECK(ctx->HasOutput("ParamOut"), "Output", "ParamOut", + "ProximalGDOp"); auto param_dim = ctx->GetInputDim("Param"); PADDLE_ENFORCE_EQ(param_dim, ctx->GetInputDim("Grad"), - "Two input of ProximalGD Op's dimension must be same."); + platform::errors::InvalidArgument( + "The shape of Input(Param) should be equal to the " + "Input(Grad) of ProximalGD Op. But received " + "Input(Param).dimensions=[%s], " + "Input(Grad).dimensions=[%s]", + param_dim, ctx->GetInputDim("Grad"))); auto lr_dim = ctx->GetInputDim("LearningRate"); - PADDLE_ENFORCE_EQ(framework::product(lr_dim), 1, - "Learning Rate should be a scalar."); + PADDLE_ENFORCE_EQ( + framework::product(lr_dim), 1, + platform::errors::InvalidArgument( + "Learning Rate should be a scalar. But received dimensions: [%s]", + lr_dim)); ctx->SetOutputDim("ParamOut", param_dim); } -- GitLab