Unverified commit a0452475, authored by 123malin, committed by GitHub

Enhance Op's Error Message (#27455)

* test=develop, update error message
Parent 827ac36f
@@ -13,6 +13,7 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 #include "paddle/fluid/operators/concat_op.h"
 #include <memory>
+#include <string>
 #include <vector>
@@ -78,7 +79,8 @@ class ConcatOp : public framework::OperatorWithKernel {
       }
     }
     if (flag == 0) {
-      PADDLE_THROW("All Inputs of Concat OP are Empty!");
+      PADDLE_THROW(platform::errors::InvalidArgument(
+          "All Inputs of Concat OP are Empty!"));
     }
 #ifdef PADDLE_WITH_MKLDNN
     if (platform::CanMKLDNNBeUsed(ctx)) {
...
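A note on the pattern this hunk establishes: the old code threw a bare string, so every failure surfaced as the same generic error. Wrapping the message in platform::errors::InvalidArgument attaches an error category to the exception. A minimal standalone sketch of the idea, using hypothetical ErrorCode, EnforceNotMet, and InvalidArgument stand-ins rather than Paddle's real types:

#include <cstdio>
#include <stdexcept>
#include <string>

// Hypothetical stand-ins for Paddle's error machinery, for illustration only.
enum class ErrorCode { kInvalidArgument };

class EnforceNotMet : public std::runtime_error {
 public:
  EnforceNotMet(ErrorCode code, const std::string& msg)
      : std::runtime_error(msg), code_(code) {}
  ErrorCode code() const { return code_; }

 private:
  ErrorCode code_;
};

// Builds a typed error from a printf-style format string, the role that
// platform::errors::InvalidArgument(...) plays in the diff above.
template <typename... Args>
EnforceNotMet InvalidArgument(const char* fmt, Args... args) {
  char buf[512];
  std::snprintf(buf, sizeof(buf), fmt, args...);
  return EnforceNotMet(ErrorCode::kInvalidArgument, std::string(buf));
}

int main() {
  try {
    // Spiritually the same as:
    //   PADDLE_THROW(platform::errors::InvalidArgument(
    //       "All Inputs of Concat OP are Empty!"));
    throw InvalidArgument("All Inputs of %s OP are Empty!", "Concat");
  } catch (const EnforceNotMet& e) {
    // The catch site can now branch on e.code(), not just the text.
    std::printf("InvalidArgument: %s\n", e.what());
  }
  return 0;
}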
@@ -23,46 +23,54 @@ class DecayedAdagradOp : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;

   void InferShape(framework::InferShapeContext *ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput("Param"),
-                   "Input(Param) of DecayedAdagradOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasInput("Grad"),
-                   "Input(Grad) of DecayedAdagradOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasInput("Moment"),
-                   "Input(Moment) of DecayedAdagradOp should not be null.");
-    PADDLE_ENFORCE(
-        ctx->HasInput("LearningRate"),
-        "Input(LearningRate) of DecayedAdagradOp should not be null.");
-    PADDLE_ENFORCE(
-        ctx->GetInputsVarType("Param").front() ==
-            framework::proto::VarType::LOD_TENSOR,
-        "The input var's type should be LoDTensor, but the received is %s",
-        ctx->Inputs("Param").front(), ctx->GetInputsVarType("Param").front());
-    PADDLE_ENFORCE(
-        ctx->GetInputsVarType("Grad").front() ==
-            framework::proto::VarType::LOD_TENSOR,
-        "The input var's type should be LoDTensor, but the received is %s",
-        ctx->Inputs("Grad").front(), ctx->GetInputsVarType("Grad").front());
-
-    PADDLE_ENFORCE(ctx->HasOutput("ParamOut"),
-                   "Output(ParamOut) of DecayedAdagradOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasOutput("MomentOut"),
-                   "Output(MomentOut) of DecayedAdagradOp should not be null.");
+    OP_INOUT_CHECK(ctx->HasInput("Param"), "Input", "Param",
+                   "DecayedAdagradOp");
+    OP_INOUT_CHECK(ctx->HasInput("Grad"), "Input", "Grad", "DecayedAdagradOp");
+    OP_INOUT_CHECK(ctx->HasInput("Moment"), "Input", "Moment",
+                   "DecayedAdagradOp");
+    OP_INOUT_CHECK(ctx->HasInput("LearningRate"), "Input", "LearningRate",
+                   "DecayedAdagradOp");
+    PADDLE_ENFORCE_EQ(
+        ctx->GetInputsVarType("Param").front(),
+        framework::proto::VarType::LOD_TENSOR,
+        platform::errors::InvalidArgument(
+            "The input var's type should be LoDTensor, but the received is %s",
+            ctx->Inputs("Param").front(),
+            ctx->GetInputsVarType("Param").front()));
+    PADDLE_ENFORCE_EQ(
+        ctx->GetInputsVarType("Grad").front(),
+        framework::proto::VarType::LOD_TENSOR,
+        platform::errors::InvalidArgument(
+            "The input var's type should be LoDTensor, but the received is %s",
+            ctx->Inputs("Grad").front(),
+            ctx->GetInputsVarType("Grad").front()));
+
+    OP_INOUT_CHECK(ctx->HasOutput("ParamOut"), "Output", "ParamOut",
+                   "DecayedAdagradOp");
+    OP_INOUT_CHECK(ctx->HasOutput("MomentOut"), "Output", "MomentOut",
+                   "DecayedAdagradOp");

     auto lr_dims = ctx->GetInputDim("LearningRate");
     PADDLE_ENFORCE_NE(framework::product(lr_dims), 0,
-                      "Maybe the Input variable LearningRate has not "
-                      "been initialized. You may need to confirm "
-                      "if you put exe.run(startup_program) "
-                      "after optimizer.minimize function.");
+                      platform::errors::InvalidArgument(
+                          "Maybe the Input variable LearningRate has not "
+                          "been initialized. You may need to confirm "
+                          "if you put exe.run(startup_program) "
+                          "after optimizer.minimize function."));
     PADDLE_ENFORCE_EQ(framework::product(lr_dims), 1,
-                      "LearningRate should have one element");
+                      platform::errors::InvalidArgument(
+                          "LearningRate should have one element"));
     auto param_dims = ctx->GetInputDim("Param");
-    PADDLE_ENFORCE_EQ(param_dims, ctx->GetInputDim("Grad"),
-                      "Param and Grad input of DecayedAdagradOp should have "
-                      "the same dimension.");
-    PADDLE_ENFORCE_EQ(param_dims, ctx->GetInputDim("Moment"),
-                      "Param and Moment input of DecayedAdagradOp should have "
-                      "the same dimension.");
+    PADDLE_ENFORCE_EQ(
+        param_dims, ctx->GetInputDim("Grad"),
+        platform::errors::InvalidArgument(
+            "Param and Grad input of DecayedAdagradOp should have "
+            "the same dimension."));
+    PADDLE_ENFORCE_EQ(
+        param_dims, ctx->GetInputDim("Moment"),
+        platform::errors::InvalidArgument(
+            "Param and Moment input of DecayedAdagradOp should have "
+            "the same dimension."));

     ctx->SetOutputDim("ParamOut", param_dims);
     ctx->SetOutputDim("MomentOut", param_dims);
...
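The OP_INOUT_CHECK conversions above replace hand-written "should not be null" sentences with a single macro call that builds a uniform message from the role ("Input"/"Output"), the variable name, and the op type, so the wording cannot drift out of sync with the code. A standalone sketch of that shape, assuming a mocked-up FakeCtx and simplified message text rather than Paddle's actual macro:

#include <iostream>
#include <set>
#include <stdexcept>
#include <string>

// Simplified imitation of OP_INOUT_CHECK: one line per tensor, and every
// failure message carries role, name, and op type automatically.
#define OP_INOUT_CHECK(cond, role, name, op_type)                      \
  do {                                                                 \
    if (!(cond)) {                                                     \
      throw std::invalid_argument(std::string("The ") + (role) + "(" + \
                                  (name) + ") of " + (op_type) +       \
                                  " is not found.");                   \
    }                                                                  \
  } while (0)

// Minimal stand-in for InferShapeContext::HasInput, for the demo only.
struct FakeCtx {
  std::set<std::string> inputs;
  bool HasInput(const std::string& name) const {
    return inputs.count(name) > 0;
  }
};

int main() {
  FakeCtx ctx{{"Param", "Grad", "Moment"}};  // "LearningRate" is missing.
  try {
    OP_INOUT_CHECK(ctx.HasInput("Param"), "Input", "Param",
                   "DecayedAdagradOp");
    OP_INOUT_CHECK(ctx.HasInput("LearningRate"), "Input", "LearningRate",
                   "DecayedAdagradOp");
  } catch (const std::invalid_argument& e) {
    // Prints: The Input(LearningRate) of DecayedAdagradOp is not found.
    std::cout << e.what() << "\n";
  }
  return 0;
}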
@@ -24,17 +24,19 @@ class DecayedAdagradOpKernel : public framework::OpKernel<T> {
  public:
   void Compute(const framework::ExecutionContext& ctx) const override {
     const auto* param_var = ctx.InputVar("Param");
-    PADDLE_ENFORCE(param_var->IsType<framework::LoDTensor>(),
-                   "The Var(%s)'s type should be LoDTensor, "
-                   "but the received is %s",
-                   ctx.InputNames("Param").front(),
-                   framework::ToTypeName(param_var->Type()));
+    PADDLE_ENFORCE_EQ(param_var->IsType<framework::LoDTensor>(), true,
+                      platform::errors::InvalidArgument(
+                          "The Var(%s)'s type should be LoDTensor, "
+                          "but the received is %s",
+                          ctx.InputNames("Param").front(),
+                          framework::ToTypeName(param_var->Type())));
     const auto* grad_var = ctx.InputVar("Grad");
-    PADDLE_ENFORCE(grad_var->IsType<framework::LoDTensor>(),
-                   "The Var(%s)'s type should be LoDTensor, "
-                   "but the received is %s",
-                   ctx.InputNames("Grad").front(),
-                   framework::ToTypeName(grad_var->Type()));
+    PADDLE_ENFORCE_EQ(grad_var->IsType<framework::LoDTensor>(), true,
+                      platform::errors::InvalidArgument(
+                          "The Var(%s)'s type should be LoDTensor, "
+                          "but the received is %s",
+                          ctx.InputNames("Grad").front(),
+                          framework::ToTypeName(grad_var->Type())));

     auto param_out_tensor = ctx.Output<framework::Tensor>("ParamOut");
     auto moment_out_tensor = ctx.Output<framework::Tensor>("MomentOut");
...
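Both kernel hunks rewrite PADDLE_ENFORCE(cond, msg, ...) as PADDLE_ENFORCE_EQ(value, expected, typed_error). Beyond attaching the error type, the _EQ form hands the macro both operands separately, so a failure can report the expected and actual values even when the message text does not mention them. A simplified sketch of that mechanic (ENFORCE_EQ below is a mock, not Paddle's macro):

#include <iostream>
#include <sstream>
#include <stdexcept>

// Mock of a binary-comparison enforce: because the macro sees lhs and rhs
// as separate expressions, it can append both values to the message.
#define ENFORCE_EQ(lhs, rhs, msg)                                \
  do {                                                           \
    if (!((lhs) == (rhs))) {                                     \
      std::ostringstream oss;                                    \
      oss << (msg) << " Expected " << (rhs) << ", but received " \
          << (lhs) << ".";                                       \
      throw std::invalid_argument(oss.str());                    \
    }                                                            \
  } while (0)

int main() {
  // Stand-in for framework::product(lr_dims) in the InferShape check above.
  int lr_numel = 4;
  try {
    ENFORCE_EQ(lr_numel, 1, "LearningRate should have one element.");
  } catch (const std::invalid_argument& e) {
    // Prints: LearningRate should have one element. Expected 1, but received 4.
    std::cout << e.what() << "\n";
  }
  return 0;
}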
@@ -30,7 +30,12 @@ class LarsMomentumOpKernel : public framework::OpKernel<T> {
     auto learning_rate = ctx.Input<framework::LoDTensor>("LearningRate");
     auto* grad_var = ctx.InputVar("Grad");
     // only support dense for now.
-    PADDLE_ENFORCE_EQ(grad_var->IsType<framework::LoDTensor>(), true);
+    PADDLE_ENFORCE_EQ(grad_var->IsType<framework::LoDTensor>(), true,
+                      platform::errors::InvalidArgument(
+                          "The Var(%s)'s type should be LoDTensor, "
+                          "but the received is %s",
+                          ctx.InputNames("Grad").front(),
+                          framework::ToTypeName(grad_var->Type())));
     auto grad = ctx.Input<framework::LoDTensor>("Grad");
     param_out->mutable_data<T>(ctx.GetPlace());
...