diff --git a/paddle/fluid/operators/cvm_op.cc b/paddle/fluid/operators/cvm_op.cc
index a1a8744c323ca1cd783e0adb83cc260ffe8ce978..be7d4780f83ae5f3dbc1442353e95e85666d77b9 100644
--- a/paddle/fluid/operators/cvm_op.cc
+++ b/paddle/fluid/operators/cvm_op.cc
@@ -30,8 +30,10 @@ class CVMOp : public framework::OperatorWithKernel {
     OP_INOUT_CHECK(ctx->HasOutput("Y"), "Output", "Y", "CVM");

     auto x_dims = ctx->GetInputDim("X");
-    PADDLE_ENFORCE_EQ(x_dims.size(), 2UL, platform::errors::InvalidArgument(
-                                              "Input(X)'s rank should be 2."));
+    PADDLE_ENFORCE_EQ(
+        x_dims.size(), 2UL,
+        platform::errors::InvalidArgument(
+            "Input(X)'s rank should be 2, but got %d", x_dims.size()));

     if (ctx->Attrs().Get<bool>("use_cvm")) {
       ctx->SetOutputDim("Y", {x_dims[0], x_dims[1]});
@@ -68,26 +70,31 @@ class CVMGradientOp : public framework::OperatorWithKernel {
     auto x_dims = ctx->GetInputDim("X");
     auto cvm_dims = ctx->GetInputDim("CVM");
     auto dy_dims = ctx->GetInputDim(framework::GradVarName("Y"));
-    PADDLE_ENFORCE_EQ(x_dims.size(), 2, platform::errors::InvalidArgument(
-                                            "Input(X)'s rank should be 2."));
+    PADDLE_ENFORCE_EQ(
+        x_dims.size(), 2,
+        platform::errors::InvalidArgument(
+            "Expect Input(X)'s rank == 2, but got %d", x_dims.size()));
     PADDLE_ENFORCE_EQ(
         dy_dims.size(), 2,
-        platform::errors::InvalidArgument("Input(Y@Grad)'s rank should be 2."));
+        platform::errors::InvalidArgument(
+            "Expect Input(Y@Grad)'s rank == 2, but got %d", dy_dims.size()));
     PADDLE_ENFORCE_EQ(
         cvm_dims.size(), 2,
-        platform::errors::InvalidArgument("Input(CVM)'s rank should be 2."));
+        platform::errors::InvalidArgument(
+            "Expect Input(CVM)'s rank == 2, but got %d", cvm_dims.size()));

     PADDLE_ENFORCE_EQ(
         x_dims[0], dy_dims[0],
         platform::errors::InvalidArgument(
             "The 1st dimension of Input(X) and Input(Y@Grad) should "
-            "be equal."));
+            "be equal, but X's is %d and Y@Grad's is %d",
+            x_dims[0], dy_dims[0]));

     PADDLE_ENFORCE_EQ(
         cvm_dims[1], 2,
         platform::errors::InvalidArgument(
             "When Attr(soft_label) == false, the 2nd dimension of "
-            "Input(CVM) should be 2."));
+            "Input(CVM) should be 2, but got %d", cvm_dims[1]));
     ctx->SetOutputDim(framework::GradVarName("X"), x_dims);
     ctx->ShareLoD("X", framework::GradVarName("X"));
   }
diff --git a/paddle/fluid/operators/optimizers/ftrl_op.cc b/paddle/fluid/operators/optimizers/ftrl_op.cc
index 3bdafbb96d5c40be651d6cad68806e14a214a28d..a75be6e580dcd7b1f39c313382a9759986e3a1da 100644
--- a/paddle/fluid/operators/optimizers/ftrl_op.cc
+++ b/paddle/fluid/operators/optimizers/ftrl_op.cc
@@ -42,7 +42,9 @@ class FTRLOp : public framework::OperatorWithKernel {
     auto param_dim = ctx->GetInputDim("Param");
     PADDLE_ENFORCE_EQ(param_dim, ctx->GetInputDim("Grad"),
                       platform::errors::InvalidArgument(
-                          "Two input of FTRL Op's dimension must be same."));
+                          "Two inputs of FTRL Op must have the same "
+                          "dimension, but Param's is %s and Grad's is %s",
+                          param_dim, ctx->GetInputDim("Grad")));

     auto lr_dim = ctx->GetInputDim("LearningRate");
     PADDLE_ENFORCE_NE(framework::product(lr_dim), 0,
@@ -49,11 +51,12 @@ class FTRLOp : public framework::OperatorWithKernel {
                       platform::errors::InvalidArgument(
                           "Maybe the Input variable LearningRate has not "
                           "been initialized. You may need to confirm "
                           "if you put exe.run(startup_program) "
                           "after optimizer.minimize function."));
-    PADDLE_ENFORCE_EQ(
-        framework::product(lr_dim), 1,
-        platform::errors::InvalidArgument("Learning Rate should be a scalar."));
+    PADDLE_ENFORCE_EQ(framework::product(lr_dim), 1,
+                      platform::errors::InvalidArgument(
+                          "Learning Rate should be a scalar, but got %d",
+                          framework::product(lr_dim)));

     ctx->SetOutputDim("ParamOut", param_dim);
     ctx->SetOutputDim("SquaredAccumOut", param_dim);
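
Note (reviewer sketch, not part of the patch): PADDLE_ENFORCE_EQ messages are printf-style, so the enriched texts above only substitute a value when it is passed as a separate argument after the format string, never when the variable name is written inside the literal. A minimal standalone C++ sketch of the difference, using plain printf in place of Paddle's own formatting:

    // Hypothetical standalone illustration; std::printf stands in for
    // Paddle's printf-style error-message formatting.
    #include <cstdio>

    int main() {
      long cvm_dim1 = 3;  // stand-in value for cvm_dims[1]
      // Broken pattern: the argument name sits inside the literal, so no
      // value is substituted and the user sees "%d cvm_dims[1]" verbatim.
      std::printf("Input(CVM) should be 2, but got %%d cvm_dims[1]\n");
      // Correct pattern: the value is passed as a separate argument and
      // substituted for the conversion specifier.
      std::printf("Input(CVM) should be 2, but got %ld\n", cvm_dim1);
      return 0;
    }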