diff --git a/paddle/fluid/operators/attention_lstm_op.cc b/paddle/fluid/operators/attention_lstm_op.cc
index 912ec79910301b67bc520b1aa78d3fa1fd165d1f..aecd3d430231855fa29cf7716eb636cdb28182ce 100644
--- a/paddle/fluid/operators/attention_lstm_op.cc
+++ b/paddle/fluid/operators/attention_lstm_op.cc
@@ -64,12 +64,19 @@ void AttentionLSTMOp::InferShape(framework::InferShapeContext* ctx) const {
   auto c_dims = ctx->GetInputDim("C0");
   PADDLE_ENFORCE_EQ(c_dims.size(), 2, "Input(C0)'s rank must be 2.");
-  PADDLE_ENFORCE_EQ(c_dims[1], D, "C0 dims should be N x %d.", D);
+  if (ctx->IsRuntime()) {
+    PADDLE_ENFORCE_EQ(c_dims[1], D, "C0 dims should be N x %d.", D);
+  }
+
   if (ctx->HasInput("H0")) {
     auto h_dims = ctx->GetInputDim("H0");
-    PADDLE_ENFORCE(h_dims == c_dims,
-                   "The dimension of Input(H0) and Input(C0) "
-                   "should be the same.");
+    PADDLE_ENFORCE_EQ(h_dims.size(), 2UL, "Input(H0)'s rank must be 2.");
+    if (ctx->IsRuntime() ||
+        (framework::product(c_dims) > 0 && framework::product(h_dims) > 0)) {
+      PADDLE_ENFORCE(h_dims == c_dims,
+                     "The dimension of Input(H0) and Input(C0) "
+                     "should be the same.");
+    }
   }
 
   auto atten_w_dims = ctx->GetInputDim("AttentionWeight");
@@ -79,6 +86,7 @@ void AttentionLSTMOp::InferShape(framework::InferShapeContext* ctx) const {
                     "AttentionWeight shapes must be (%d + %d) * 1.", M, D);
   PADDLE_ENFORCE_EQ(atten_w_dims[1], 1,
                     "AttentionWeight shapes must be (%d + %d) * 1.", M, D);
+
   if (ctx->HasInput("AttentionBias")) {
     auto atten_b_dims = ctx->GetInputDim("AttentionBias");
     PADDLE_ENFORCE_EQ(atten_b_dims.size(), 2,
diff --git a/paddle/fluid/operators/bpr_loss_op.cc b/paddle/fluid/operators/bpr_loss_op.cc
index b2dbaecfcfd67cc679d02e22d4e89cfedeeba80c..51c4d878142dcd93a170c9ea4211b9c6ec8e4422 100644
--- a/paddle/fluid/operators/bpr_loss_op.cc
+++ b/paddle/fluid/operators/bpr_loss_op.cc
@@ -32,10 +32,14 @@ class BprLossOp : public framework::OperatorWithKernel {
     int rank = x_dims.size();
     PADDLE_ENFORCE_EQ(rank, label_dims.size(),
                       "Input(X) and Input(Label) shall have the same rank.");
-    PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank - 1),
-                      framework::slice_ddim(label_dims, 0, rank - 1),
-                      "Input(X) and Input(Label) shall have the same shape "
-                      "except the last dimension.");
+
+    if (ctx->IsRuntime() || (framework::product(x_dims) > 0 &&
+                             framework::product(label_dims) > 0)) {
+      PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank - 1),
+                        framework::slice_ddim(label_dims, 0, rank - 1),
+                        "Input(X) and Input(Label) shall have the same shape "
+                        "except the last dimension.");
+    }
 
     auto y_dims = x_dims;
     y_dims[rank - 1] = 1;
diff --git a/paddle/fluid/operators/conv_shift_op.cc b/paddle/fluid/operators/conv_shift_op.cc
index 08506ddd18ed35831702814e70962cb36ec958b1..fa4edb70b48e529102f11a1b0b9cac2110a33966 100644
--- a/paddle/fluid/operators/conv_shift_op.cc
+++ b/paddle/fluid/operators/conv_shift_op.cc
@@ -36,14 +36,17 @@ class ConvShiftOp : public framework::OperatorWithKernel {
     auto y_dims = ctx->GetInputDim("Y");
     PADDLE_ENFORCE_EQ(x_dims.size(), 2, "Input(X)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(y_dims.size(), 2, "Input(Y)'s rank should be 2.");
-    PADDLE_ENFORCE_EQ(x_dims[0], y_dims[0],
-                      "The 1st dimension of Input(X) and Input(Y) should "
-                      "be equal.");
-    PADDLE_ENFORCE_EQ(y_dims[1] % 2, 1,
-                      "The 2nd dimension of Input(Y) should be odd.");
-    PADDLE_ENFORCE_LE(y_dims[1], x_dims[1],
-                      "The 2nd dimension of Input(Y) should be less than or "
-                      "equal to the 2nd dimension of Input(X).");
+    if (ctx->IsRuntime() || (x_dims[0] > 0 && y_dims[0] > 0))
+      PADDLE_ENFORCE_EQ(x_dims[0], y_dims[0],
+                        "The 1st dimension of Input(X) and Input(Y) should "
+                        "be equal.");
+    if (ctx->IsRuntime() || y_dims[1] > 0)
+      PADDLE_ENFORCE_EQ(y_dims[1] % 2, 1,
+                        "The 2nd dimension of Input(Y) should be odd.");
+    if (ctx->IsRuntime() || (x_dims[1] > 0 && y_dims[1] > 0))
+      PADDLE_ENFORCE_LE(y_dims[1], x_dims[1],
+                        "The 2nd dimension of Input(Y) should be less than or "
+                        "equal to the 2nd dimension of Input(X).");
     ctx->ShareDim("X", /*->*/ "Out");
     ctx->ShareLoD("X", /*->*/ "Out");
   }
diff --git a/paddle/fluid/operators/merge_lod_tensor_op.cc b/paddle/fluid/operators/merge_lod_tensor_op.cc
index da7fa1b81d601f4dd03d6716de601a4b1abc7fa0..5edc233f6f73262c3d1b803aae0089f5b15d403d 100644
--- a/paddle/fluid/operators/merge_lod_tensor_op.cc
+++ b/paddle/fluid/operators/merge_lod_tensor_op.cc
@@ -164,7 +164,9 @@ class MergeLoDTensorInferShape : public framework::InferShapeBase {
     auto mask_dim = context->GetInputDim("Mask");
     PADDLE_ENFORCE_EQ(mask_dim.size(), 2);
-    PADDLE_ENFORCE_EQ(mask_dim[1], 1);
+    if (context->IsRuntime() || mask_dim[1] > 0) {
+      PADDLE_ENFORCE_EQ(mask_dim[1], 1);
+    }
 
     context->SetOutputDim("Out", context->GetInputDim("InTrue"));
   }
diff --git a/paddle/fluid/operators/positive_negative_pair_op.cc b/paddle/fluid/operators/positive_negative_pair_op.cc
index 99256e408d44802418728c0970cc2efeaa682587..e917e778e41ff8994f248e905635da702b428fc2 100644
--- a/paddle/fluid/operators/positive_negative_pair_op.cc
+++ b/paddle/fluid/operators/positive_negative_pair_op.cc
@@ -61,23 +61,31 @@ class PositiveNegativePairOp : public framework::OperatorWithKernel {
     auto query_dim = ctx->GetInputDim("QueryID");
     PADDLE_ENFORCE_EQ(score_dim.size(), 2, "Score should be a 2-D tensor.");
     PADDLE_ENFORCE_EQ(label_dim.size(), 2, "Label should be a 2-D tensor.");
-    PADDLE_ENFORCE_EQ(
-        label_dim[0], score_dim[0],
-        "Tensor Score and Label should have the same height (batch size).");
-    PADDLE_ENFORCE_EQ(label_dim[1], 1,
-                      "The width of Label should be 1, i.e. each item should "
-                      "have a scalar label.");
-    PADDLE_ENFORCE(query_dim == label_dim,
-                   "QueryID should have the same shape as Label.");
-    if (ctx->HasInput("Weight")) {
-      PADDLE_ENFORCE(ctx->GetInputDim("Weight") == label_dim,
-                     "Weight should have the same shape as Label.");
+
+    if (ctx->IsRuntime() ||
+        (score_dim[0] > 0 && label_dim[0] > 0 && query_dim[0] > 0)) {
+      PADDLE_ENFORCE_EQ(
+          label_dim[0], score_dim[0],
+          "Tensor Score and Label should have the same height (batch size).");
+
+      PADDLE_ENFORCE_EQ(label_dim[1], 1,
+                        "The width of Label should be 1, i.e. each item should "
+                        "have a scalar label.");
+
+      PADDLE_ENFORCE(query_dim == label_dim,
+                     "QueryID should have the same shape as Label.");
+
+      if (ctx->HasInput("Weight")) {
+        PADDLE_ENFORCE(ctx->GetInputDim("Weight") == label_dim,
+                       "Weight should have the same shape as Label.");
+      }
+
+      int column = ctx->Attrs().Get<int>("column");
+      auto depth = score_dim[1];
+      PADDLE_ENFORCE(column < depth && column >= -depth,
+                     "Attribute column should be in the range of [-%l, %l)",
+                     depth, depth);
     }
-    int column = ctx->Attrs().Get<int>("column");
-    auto depth = score_dim[1];
-    PADDLE_ENFORCE(column < depth && column >= -depth,
-                   "Attribute column should be in the range of [-%l, %l)",
-                   depth, depth);
 
     ctx->SetOutputDim("PositivePair", scalar_dim);
     ctx->SetOutputDim("NegativePair", scalar_dim);
diff --git a/paddle/fluid/operators/scatter_op.cc b/paddle/fluid/operators/scatter_op.cc
index 8e0e3bd6054018852b242d1dba5c250394ed81ce..68ad223b3c311bec5968eb18b50f15e9da84e6d3 100644
--- a/paddle/fluid/operators/scatter_op.cc
+++ b/paddle/fluid/operators/scatter_op.cc
@@ -42,10 +42,6 @@ class ScatterOp : public framework::OperatorWithKernel {
     PADDLE_ENFORCE_EQ(ctx->GetInputDim("Updates")[0],
                       ctx->GetInputDim("Ids")[0],
                       "Updates and Ids should have same batch-size.");
-    framework::DDim data_dim(updates_dims);
-    for (int i = 1; i < data_dim.size(); ++i) {
-      PADDLE_ENFORCE_EQ(data_dim[i], updates_dims[i]);
-    }
     ctx->SetOutputDim("Out", ref_dims);
   }
diff --git a/paddle/fluid/operators/split_lod_tensor_op.cc b/paddle/fluid/operators/split_lod_tensor_op.cc
index 5ede972c71ff3ef8ff00756b97662aabb54d6349..c89e683d766580113d160eecf4f04d4ead4594f6 100644
--- a/paddle/fluid/operators/split_lod_tensor_op.cc
+++ b/paddle/fluid/operators/split_lod_tensor_op.cc
@@ -157,7 +157,9 @@ class SplitLoDTensorInferShape : public framework::InferShapeBase {
     auto mask_dim = context->GetInputDim("Mask");
     PADDLE_ENFORCE_EQ(mask_dim.size(), 2);
-    PADDLE_ENFORCE_EQ(mask_dim[1], 1);
+    if (context->IsRuntime()) {
+      PADDLE_ENFORCE_EQ(mask_dim[1], 1);
+    }
 
     context->SetOutputDim("OutTrue", context->GetInputDim("X"));
     context->SetOutputDim("OutFalse", context->GetInputDim("X"));
diff --git a/paddle/fluid/operators/sum_op.cc b/paddle/fluid/operators/sum_op.cc
index 1391148ccf5d13082cb31ef2e143249e8ef95bfc..67f7510e874d4b3dcb857510e42cbfa7081becfe 100644
--- a/paddle/fluid/operators/sum_op.cc
+++ b/paddle/fluid/operators/sum_op.cc
@@ -65,7 +65,21 @@ class SumOp : public framework::OperatorWithKernel {
       if (framework::product(in_dim) == 0) {
         in_dim = x_dim;
       } else {
-        PADDLE_ENFORCE_EQ(in_dim, x_dim, "Input tensors must have same shape");
+        if (ctx->IsRuntime()) {
+          PADDLE_ENFORCE_EQ(in_dim, x_dim,
+                            "Input tensors must have same shape");
+        } else {
+          PADDLE_ENFORCE_EQ(in_dim.size(), x_dim.size(),
+                            "Input tensors must have same shape size");
+          // if in_dim or x_dim has -1, not check equal
+          for (int i = 0; i < x_dim.size(); ++i) {
+            if (x_dim[i] == -1 || in_dim[i] == -1) {
+              continue;
+            }
+            PADDLE_ENFORCE_EQ(in_dim[i], x_dim[i],
+                              "Input tensors must have same shape if not -1");
+          }
+        }
       }
     }
     ctx->SetOutputDim("Out", in_dim);
diff --git a/paddle/fluid/operators/teacher_student_sigmoid_loss_op.cc b/paddle/fluid/operators/teacher_student_sigmoid_loss_op.cc
index 640644a94690d9682a5e6b1aa788a9ebdc5d2a54..56379603de6800342c9fc740a403678da083e03f 100644
--- a/paddle/fluid/operators/teacher_student_sigmoid_loss_op.cc
+++ b/paddle/fluid/operators/teacher_student_sigmoid_loss_op.cc
@@ -34,12 +34,14 @@ class TeacherStudentSigmoidLossOp : public framework::OperatorWithKernel {
     PADDLE_ENFORCE_EQ(x_dims.size(), 2UL, "Input(X)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(label_dims.size(), 2UL,
                       "Input(Label)'s rank should be 2.");
-    PADDLE_ENFORCE_EQ(x_dims[0], label_dims[0],
-                      "The 1st dimension of Input(X) and Input(Label) should "
-                      "be equal.");
-    PADDLE_ENFORCE_EQ(label_dims[1], 1UL,
-                      "The 2nd dimension of "
-                      "Input(Label) should be 1.");
+    if (ctx->IsRuntime()) {
+      PADDLE_ENFORCE_EQ(x_dims[0], label_dims[0],
+                        "The 1st dimension of Input(X) and Input(Label) should "
+                        "be equal.");
+      PADDLE_ENFORCE_EQ(label_dims[1], 1UL,
+                        "The 2nd dimension of "
+                        "Input(Label) should be 1.");
+    }
     ctx->SetOutputDim("Y", {x_dims[0], 1});
     ctx->ShareLoD("X", /*->*/ "Y");
   }
@@ -74,17 +76,20 @@ class TeacherStudentSigmoidLossGradientOp
     PADDLE_ENFORCE_EQ(x_dims.size(), 2, "Input(X)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(dy_dims.size(), 2, "Input(Y@Grad)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(label_dims.size(), 2, "Input(Label)'s rank should be 2.");
-    PADDLE_ENFORCE_EQ(x_dims[0], label_dims[0],
-                      "The 1st dimension of Input(X) and Input(Label) should "
-                      "be equal.");
-    PADDLE_ENFORCE_EQ(x_dims[0], dy_dims[0],
-                      "The 1st dimension of Input(X) and Input(Y@Grad) should "
-                      "be equal.");
-    PADDLE_ENFORCE_EQ(dy_dims[1], 1,
-                      "The 2nd dimension of Input(Y@Grad) should be 1.");
-    PADDLE_ENFORCE_EQ(label_dims[1], 1,
-                      "When Attr(soft_label) == false, the 2nd dimension of "
-                      "Input(Label) should be 1.");
+    if (ctx->IsRuntime()) {
+      PADDLE_ENFORCE_EQ(x_dims[0], label_dims[0],
+                        "The 1st dimension of Input(X) and Input(Label) should "
+                        "be equal.");
+      PADDLE_ENFORCE_EQ(
+          x_dims[0], dy_dims[0],
+          "The 1st dimension of Input(X) and Input(Y@Grad) should "
+          "be equal.");
+      PADDLE_ENFORCE_EQ(dy_dims[1], 1,
+                        "The 2nd dimension of Input(Y@Grad) should be 1.");
+      PADDLE_ENFORCE_EQ(label_dims[1], 1,
+                        "When Attr(soft_label) == false, the 2nd dimension of "
+                        "Input(Label) should be 1.");
+    }
     ctx->SetOutputDim(framework::GradVarName("X"), x_dims);
     ctx->ShareLoD("X", framework::GradVarName("X"));
   }
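
Note on the pattern applied throughout this patch: at compile time (graph construction), a tensor dimension may still hold the -1 placeholder (typically the batch size), so equality checks against concrete values can spuriously fail before the real shapes are known. Each hunk therefore keeps rank checks unconditional and guards value checks behind ctx->IsRuntime() or an explicit positive-dimension test. Below is a minimal sketch of the idiom; ExampleOp is hypothetical, while PADDLE_ENFORCE_EQ, InferShapeContext, GetInputDim, IsRuntime, and SetOutputDim are the APIs used in the patch above.

void ExampleOp::InferShape(framework::InferShapeContext* ctx) const {
  auto x_dims = ctx->GetInputDim("X");
  auto label_dims = ctx->GetInputDim("Label");

  // Ranks are known even at compile time, so check them unconditionally.
  PADDLE_ENFORCE_EQ(x_dims.size(), 2, "Input(X)'s rank should be 2.");
  PADDLE_ENFORCE_EQ(label_dims.size(), 2, "Input(Label)'s rank should be 2.");

  // A dimension of -1 means "unknown until runtime"; only compare concrete
  // values at runtime, or at compile time when both dimensions are positive.
  if (ctx->IsRuntime() || (x_dims[0] > 0 && label_dims[0] > 0)) {
    PADDLE_ENFORCE_EQ(x_dims[0], label_dims[0],
                      "The 1st dimension of Input(X) and Input(Label) "
                      "should be equal.");
  }

  ctx->SetOutputDim("Out", {x_dims[0], 1});
}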