未验证 提交 39075b3d 编写于 作者: G GaoWei8 提交者: GitHub

[Cherry-Pick] [2.0-beta] error enhancement of Print, fused_embedding_fc_lstm and fusion_gru

[Cherry-Pick] [2.0-beta] error enhancement of Print, fused_embedding_fc_lstm and fusion_gru (#24097)
上级 7dd68aec
......@@ -24,68 +24,94 @@ namespace operators {
void FusedEmbeddingFCLSTMOp::InferShape(
framework::InferShapeContext* ctx) const {
PADDLE_ENFORCE(ctx->HasInput("Embeddings"),
"Assert only one Input(Embeddings) of LSTM.");
PADDLE_ENFORCE(ctx->HasInput("WeightH"),
"Assert only one Input(WeightH) of LSTM.");
PADDLE_ENFORCE(ctx->HasInput("Bias"), "Assert only one Input(Bias) of LSTM.");
PADDLE_ENFORCE(ctx->HasOutput("XX"), "Assert only one Output(XX) of LSTM.");
PADDLE_ENFORCE(ctx->HasOutput("Hidden"),
"Assert only one Output(Hidden) of LSTM.");
PADDLE_ENFORCE(ctx->HasOutput("Cell"),
"Assert only one Output(Cell) of LSTM.");
PADDLE_ENFORCE(ctx->HasInput("Ids"),
"Input(Ids) of LookupTableOp should not be null.");
OP_INOUT_CHECK(ctx->HasInput("Embeddings"), "Input", "Embeddings",
"fused_embedding_fc_lstm");
OP_INOUT_CHECK(ctx->HasInput("WeightH"), "Input", "WeightH",
"fused_embedding_fc_lstm");
OP_INOUT_CHECK(ctx->HasInput("Bias"), "Input", "Bias",
"fused_embedding_fc_lstm");
OP_INOUT_CHECK(ctx->HasOutput("XX"), "Output", "XX",
"fused_embedding_fc_lstm");
OP_INOUT_CHECK(ctx->HasOutput("Hidden"), "Output", "Hidden",
"fused_embedding_fc_lstm");
OP_INOUT_CHECK(ctx->HasOutput("Cell"), "Output", "Cell",
"fused_embedding_fc_lstm");
OP_INOUT_CHECK(ctx->HasInput("Ids"), "Input", "Ids",
"fused_embedding_fc_lstm");
auto table_dims = ctx->GetInputDim("Embeddings");
auto ids_dims = ctx->GetInputDim("Ids");
int ids_rank = ids_dims.size();
PADDLE_ENFORCE_EQ(table_dims.size(), 2);
PADDLE_ENFORCE_EQ(
table_dims.size(), 2,
platform::errors::InvalidArgument(
"The Embeddings's rank should be 2, but received value is:%d.",
table_dims.size()));
PADDLE_ENFORCE_EQ(ids_dims[ids_rank - 1], 1,
"The last dimension of the 'Ids' tensor must be 1.");
platform::errors::InvalidArgument(
"The last dimension of the 'Ids' tensor must be 1, but "
"received value is:%d.",
ids_dims[ids_rank - 1]));
auto x_dims = ctx->GetInputDim("Ids");
PADDLE_ENFORCE_EQ(x_dims.size(), 2, "Input(Ids)'s rank must be 2.");
PADDLE_ENFORCE_EQ(
x_dims.size(), 2,
platform::errors::InvalidArgument(
"Input(Ids)'s rank must be 2, but received value is:%d.",
x_dims.size()));
if (ctx->HasInput("H0")) {
PADDLE_ENFORCE(ctx->HasInput("C0"),
"Input(Cell) and Input(Hidden) of LSTM should not "
"be null at the same time.");
PADDLE_ENFORCE_EQ(ctx->HasInput("C0"), true,
platform::errors::InvalidArgument(
"Input(Cell) and Input(Hidden) of LSTM should exist "
"at the same time."));
auto h_dims = ctx->GetInputDim("H0");
auto c_dims = ctx->GetInputDim("C0");
PADDLE_ENFORCE(h_dims == c_dims,
PADDLE_ENFORCE_EQ(
h_dims, c_dims,
platform::errors::InvalidArgument(
"The dimension of Input(H0) and Input(C0) "
"should be the same.");
"should be the same, but received H0 dim is:[%s], C0 dim is[%s]",
h_dims, c_dims));
}
auto embeddings_dims = ctx->GetInputDim("Embeddings");
PADDLE_ENFORCE_EQ(embeddings_dims.size(), 2,
"The rank of Input(Embeddings) should be 2.");
auto wh_dims = ctx->GetInputDim("WeightH");
int frame_size = wh_dims[1] / 4;
PADDLE_ENFORCE_EQ(wh_dims.size(), 2,
"The rank of Input(WeightH) should be 2.");
PADDLE_ENFORCE_EQ(
wh_dims.size(), 2,
platform::errors::InvalidArgument(
"The rank of Input(WeightH) should be 2, but received value is:%d.",
wh_dims.size()));
PADDLE_ENFORCE_EQ(wh_dims[0], frame_size,
"The first dimension of Input(WeightH) "
"should be %d.",
frame_size);
platform::errors::InvalidArgument(
"The first dimension of Input(WeightH) should equal to "
"frame size:%d, but received value is:%d.",
frame_size, wh_dims[0]));
PADDLE_ENFORCE_EQ(wh_dims[1], 4 * frame_size,
"The second dimension of Input(WeightH) "
"should be 4 * %d.",
frame_size);
platform::errors::InvalidArgument(
"The second dimension of Input(WeightH) should equal "
"to 4 * %d, but received value is:%d.",
frame_size, wh_dims[1]));
auto b_dims = ctx->GetInputDim("Bias");
PADDLE_ENFORCE_EQ(b_dims.size(), 2, "The rank of Input(Bias) should be 2.");
PADDLE_ENFORCE_EQ(b_dims[0], 1,
"The first dimension of Input(Bias) should be 1.");
PADDLE_ENFORCE_EQ(
b_dims.size(), 2,
platform::errors::InvalidArgument(
"The rank of Input(Bias) should be 2, but received value is:%d.",
b_dims.size()));
PADDLE_ENFORCE_EQ(b_dims[0], 1, platform::errors::InvalidArgument(
"The first dimension of Input(Bias) "
"should be 1, but received value is:%d.",
b_dims[0]));
PADDLE_ENFORCE_EQ(
b_dims[1], (ctx->Attrs().Get<bool>("use_peepholes") ? 7 : 4) * frame_size,
platform::errors::InvalidArgument(
"The second dimension of Input(Bias) should be "
"7 * %d if enable peepholes connection or"
"4 * %d if disable peepholes",
frame_size, frame_size);
"4 * %d if disable peepholes, bias dim is:%d, use_peepholes:%d",
frame_size, frame_size, b_dims[1],
ctx->Attrs().Get<bool>("use_peepholes")));
framework::DDim out_dims({x_dims[0], frame_size});
ctx->SetOutputDim("Hidden", out_dims);
......@@ -93,16 +119,17 @@ void FusedEmbeddingFCLSTMOp::InferShape(
ctx->ShareLoD("Ids", "Hidden");
ctx->ShareLoD("Ids", "Cell");
if (!ctx->Attrs().Get<bool>("use_seq")) {
PADDLE_ENFORCE(ctx->HasOutput("BatchedInput"),
"Assert only one Output(BatchedInput) of LSTM.");
PADDLE_ENFORCE(ctx->HasOutput("BatchedHidden"),
"Assert only one Output(BatchedHidden) of LSTM.");
PADDLE_ENFORCE(ctx->HasOutput("BatchedCell"),
"Assert only one Output(BatchedCell) of LSTM.");
PADDLE_ENFORCE(ctx->HasOutput("ReorderedH0"),
"Assert only one Output(ReorderedH0) of LSTM");
PADDLE_ENFORCE(ctx->HasOutput("ReorderedC0"),
"Assert only one Output(ReorderedC0) of LSTM.");
OP_INOUT_CHECK(ctx->HasOutput("BatchedInput"), "Output", "BatchedInput",
"fused_embedding_fc_lstm");
OP_INOUT_CHECK(ctx->HasOutput("BatchedHidden"), "Output", "BatchedHidden",
"fused_embedding_fc_lstm");
OP_INOUT_CHECK(ctx->HasOutput("BatchedCell"), "Output", "BatchedCell",
"fused_embedding_fc_lstm");
OP_INOUT_CHECK(ctx->HasOutput("ReorderedH0"), "Output", "ReorderedH0",
"fused_embedding_fc_lstm");
OP_INOUT_CHECK(ctx->HasOutput("ReorderedC0"), "Output", "ReorderedC0",
"fused_embedding_fc_lstm");
ctx->SetOutputDim("BatchedInput", {x_dims[0], wh_dims[1]});
ctx->SetOutputDim("BatchedHidden", out_dims);
ctx->SetOutputDim("BatchedCell", out_dims);
......
......@@ -24,51 +24,80 @@ namespace paddle {
namespace operators {
void FusionGRUOp::InferShape(framework::InferShapeContext* ctx) const {
PADDLE_ENFORCE(ctx->HasInput("X"), "Assert only one Input(X) of GRU.");
PADDLE_ENFORCE(ctx->HasInput("WeightX"),
"Assert only one Input(WeightX) of GRU.");
PADDLE_ENFORCE(ctx->HasInput("WeightH"),
"Assert only one Input(WeightH) of GRU.");
PADDLE_ENFORCE(ctx->HasOutput("XX"), "Assert only one Output(XX) of GRU.");
PADDLE_ENFORCE(ctx->HasOutput("Hidden"),
"Assert only one Output(Hidden) of GRU.");
OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "fusion_gru");
OP_INOUT_CHECK(ctx->HasInput("WeightX"), "Input", "WeightX", "fusion_gru");
OP_INOUT_CHECK(ctx->HasInput("WeightH"), "Input", "WeightH", "fusion_gru");
OP_INOUT_CHECK(ctx->HasOutput("XX"), "Output", "XX", "fusion_gru");
OP_INOUT_CHECK(ctx->HasOutput("Hidden"), "Output", "Hidden", "fusion_gru");
auto x_dims = ctx->GetInputDim("X");
PADDLE_ENFORCE_EQ(x_dims.size(), 2, "Input(X)'s rank must be 2.");
PADDLE_ENFORCE_EQ(x_dims.size(), 2,
platform::errors::InvalidArgument(
"Input(X)'s rank must be 2, but received input dim "
"size is:%d, input dim is:[%s]",
x_dims.size(), x_dims));
auto wx_dims = ctx->GetInputDim("WeightX");
PADDLE_ENFORCE_EQ(wx_dims.size(), 2,
"The rank of Input(WeightX) should be 2.");
platform::errors::InvalidArgument(
"The rank of Input(WeightX) should be 2, but received "
"WeightX dim size is:%d, WeightX dim is:[%s] ",
wx_dims.size(), wx_dims));
PADDLE_ENFORCE_EQ(wx_dims[0], x_dims[1],
platform::errors::InvalidArgument(
"The first dimension of Input(WeightX) "
"should be %d.",
x_dims[1]);
"should equal to second dimension of input x, but "
"received WeightX dimension is:%d, x dimension is:%d",
wx_dims[0], x_dims[1]));
int frame_size = wx_dims[1] / 3;
auto wh_dims = ctx->GetInputDim("WeightH");
PADDLE_ENFORCE_EQ(wh_dims.size(), 2,
"The rank of Input(WeightH) should be 2.");
platform::errors::InvalidArgument(
"The rank of Input(WeightH) should be 2, but received "
"WeightH dim size is:%d, WeightH dim is:[%s]",
wh_dims.size(), wh_dims));
PADDLE_ENFORCE_EQ(wh_dims[0], frame_size,
"The first dimension of Input(WeightH) "
"should be %d.",
frame_size);
platform::errors::InvalidArgument(
"The first dimension of WeightH "
"should equal to frame_size, but received WeightH's "
"first dimension is: "
"%d, frame size is:%d",
wh_dims[0], frame_size));
PADDLE_ENFORCE_EQ(wh_dims[1], 3 * frame_size,
platform::errors::InvalidArgument(
"The second dimension of Input(WeightH) "
"should be 3 * %d.",
frame_size);
"should equal to 3 * frame_size, but received WeightH "
"is:%d, frame size is:%d",
wh_dims[1], frame_size));
if (ctx->HasInput("H0")) {
auto h0_dims = ctx->GetInputDim("H0");
PADDLE_ENFORCE_EQ(h0_dims[1], frame_size,
"The width of H0 must be equal to frame_size.");
platform::errors::InvalidArgument(
"The width of H0 must be equal to frame_size, but "
"receiced the width of H0 is:%d, frame size is:%d",
h0_dims[1], frame_size));
}
if (ctx->HasInput("Bias")) {
auto b_dims = ctx->GetInputDim("Bias");
PADDLE_ENFORCE_EQ(b_dims.size(), 2, "The rank of Input(Bias) should be 2.");
PADDLE_ENFORCE_EQ(b_dims.size(), 2,
platform::errors::InvalidArgument(
"The rank of Input(Bias) should be 2, but received "
"Bias rank is:%d, Bias dim is:[%s]",
b_dims.size(), b_dims));
PADDLE_ENFORCE_EQ(b_dims[0], 1,
"The first dimension of Input(Bias) should be 1.");
platform::errors::InvalidArgument(
"The first dimension of Input(Bias) should be 1, but "
"received Bias first dim is:%d, Bias dim is:[%s]",
b_dims[0], b_dims));
PADDLE_ENFORCE_EQ(b_dims[1], frame_size * 3,
"The shape of Bias must be [1, frame_size * 3].");
platform::errors::InvalidArgument(
"The shape of Bias must be [1, frame_size * 3], but "
"received bias dim is:[%s], frame size is:%d",
b_dims, frame_size));
}
framework::DDim out_dims({x_dims[0], frame_size});
ctx->SetOutputDim("Hidden", out_dims);
......@@ -78,12 +107,12 @@ void FusionGRUOp::InferShape(framework::InferShapeContext* ctx) const {
xx_width = wx_dims[1];
} else {
xx_width = x_dims[1] > wx_dims[1] ? wx_dims[1] : x_dims[1];
PADDLE_ENFORCE(ctx->HasOutput("ReorderedH0"),
"Assert only one Output(ReorderedH0) of GRU.");
PADDLE_ENFORCE(ctx->HasOutput("BatchedInput"),
"Assert only one Output(BatchedInput) of GRU.");
PADDLE_ENFORCE(ctx->HasOutput("BatchedOut"),
"Assert only one Output(BatchedOut) of GRU.");
OP_INOUT_CHECK(ctx->HasOutput("ReorderedH0"), "Output", "ReorderedH0",
"fusion_gru");
OP_INOUT_CHECK(ctx->HasOutput("BatchedInput"), "Output", "BatchedInput",
"fusion_gru");
OP_INOUT_CHECK(ctx->HasOutput("BatchedOut"), "Output", "BatchedOut",
"fusion_gru");
ctx->SetOutputDim("BatchedInput", {x_dims[0], wx_dims[1]});
ctx->SetOutputDim("BatchedOut", out_dims);
}
......
......@@ -148,10 +148,14 @@ class PrintOp : public framework::OperatorBase {
const platform::Place &place) const override {
const auto in_var = scope.FindVar(Input("In"));
auto out_var = scope.FindVar(Output("Out"));
PADDLE_ENFORCE_NOT_NULL(in_var, "The input should not be found in scope",
Input("In"));
PADDLE_ENFORCE_NOT_NULL(out_var, "The output should not be found in scope",
Output("Out"));
PADDLE_ENFORCE_NOT_NULL(
in_var, platform::errors::NotFound("The input:%s not found in scope",
Input("In")));
PADDLE_ENFORCE_NOT_NULL(
out_var, platform::errors::NotFound("The output:%s not found in scope",
Output("Out")));
auto &in_tensor = in_var->Get<framework::LoDTensor>();
framework::LoDTensor *out_tensor =
out_var->GetMutable<framework::LoDTensor>();
......@@ -246,8 +250,8 @@ class PrintOpInferShape : public framework::InferShapeBase {
public:
void operator()(framework::InferShapeContext *ctx) const override {
VLOG(10) << "PrintOpInferShape";
PADDLE_ENFORCE(ctx->HasInput("In"), "Input(In) should not be null.");
PADDLE_ENFORCE(ctx->HasOutput("Out"), "Output(Out) should not be null.");
OP_INOUT_CHECK(ctx->HasInput("In"), "Input", "In", "Print");
OP_INOUT_CHECK(ctx->HasOutput("Out"), "Output", "Out", "Print");
ctx->ShareDim("In", /*->*/ "Out");
ctx->ShareLoD("In", /*->*/ "Out");
}
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册