diff --git a/paddle/fluid/framework/tensor_impl.h b/paddle/fluid/framework/tensor_impl.h
index 7f678f869aac4616c8bca440d0431f765da41dd6..b7b62eef23ec351686378c913d18fc72308fd7b2 100644
--- a/paddle/fluid/framework/tensor_impl.h
+++ b/paddle/fluid/framework/tensor_impl.h
@@ -59,6 +59,14 @@ inline T* Tensor::mutable_data(platform::Place place) {
 }
 
 inline Tensor ReshapeToMatrix(const Tensor& src, int num_col_dims) {
+  int rank = src.dims().size();
+  PADDLE_ENFORCE_GE(
+      rank, 2,
+      "'ReshapeToMatrix()' is only used for flattening high rank "
+      "tensors to matrices. Can not be used in reshaping vectors.");
+  if (rank == 2) {
+    return src;
+  }
   Tensor res;
   res.ShareDataWith(src);
   res.Resize(flatten_to_2d(src.dims(), num_col_dims));
diff --git a/paddle/fluid/operators/cross_entropy_op.cc b/paddle/fluid/operators/cross_entropy_op.cc
index 97d6a19311ae75488097cecf8e9bbfaad574afd0..578ab63bc380ee62d76e34b7cf3cbd590bfa2eda 100644
--- a/paddle/fluid/operators/cross_entropy_op.cc
+++ b/paddle/fluid/operators/cross_entropy_op.cc
@@ -45,11 +45,9 @@ class CrossEntropyOp : public framework::OperatorWithKernel {
                         "Input(Label) should be 1.");
     }
 
-    auto out_dim_vec =
-        framework::vectorize(framework::slice_ddim(x_dims, 0, rank - 1));
-    out_dim_vec.push_back(1);
-
-    ctx->SetOutputDim("Y", framework::make_ddim(out_dim_vec));
+    auto y_dims = x_dims;
+    y_dims[rank - 1] = 1;
+    ctx->SetOutputDim("Y", y_dims);
     ctx->ShareLoD("X", /*->*/ "Y");
   }
 
diff --git a/paddle/fluid/operators/cross_entropy_op.h b/paddle/fluid/operators/cross_entropy_op.h
index e26c85ad9e128d82242d3c167081838301861d11..36b58d80144d242277f6fc970a3a61a6721d4b50 100644
--- a/paddle/fluid/operators/cross_entropy_op.h
+++ b/paddle/fluid/operators/cross_entropy_op.h
@@ -34,10 +34,9 @@ class CrossEntropyOpKernel : public framework::OpKernel<T> {
     y->mutable_data<T>(ctx.GetPlace());
 
     int rank = x->dims().size();
-    Tensor x_2d = rank > 2 ? framework::ReshapeToMatrix(*x, rank - 1) : *x;
-    Tensor labels_2d =
-        rank > 2 ? framework::ReshapeToMatrix(*labels, rank - 1) : *labels;
-    Tensor y_2d = rank > 2 ? framework::ReshapeToMatrix(*y, rank - 1) : *y;
+    Tensor x_2d = framework::ReshapeToMatrix(*x, rank - 1);
+    Tensor labels_2d = framework::ReshapeToMatrix(*labels, rank - 1);
+    Tensor y_2d = framework::ReshapeToMatrix(*y, rank - 1);
 
     math::CrossEntropyFunctor<DeviceContext, T>()(
         ctx.template device_context<DeviceContext>(), &y_2d, &x_2d, &labels_2d,
diff --git a/paddle/fluid/operators/softmax_op.h b/paddle/fluid/operators/softmax_op.h
index febd9380226d9c20b7189e97cb5e17e3fb545f2f..cf1eeb017d666f605a431aa54637d8cbc99c7c46 100644
--- a/paddle/fluid/operators/softmax_op.h
+++ b/paddle/fluid/operators/softmax_op.h
@@ -32,9 +32,8 @@ class SoftmaxKernel : public framework::OpKernel<T> {
     Out->mutable_data<T>(context.GetPlace());
 
     int rank = X->dims().size();
-    Tensor X_2d = rank > 2 ? framework::ReshapeToMatrix(*X, rank - 1) : *X;
-    Tensor Out_2d =
-        rank > 2 ? framework::ReshapeToMatrix(*Out, rank - 1) : *Out;
+    Tensor X_2d = framework::ReshapeToMatrix(*X, rank - 1);
+    Tensor Out_2d = framework::ReshapeToMatrix(*Out, rank - 1);
 
     math::SoftmaxFunctor<DeviceContext, T>()(
         context.template device_context<DeviceContext>(), &X_2d, &Out_2d);
@@ -53,11 +52,9 @@
     dX->mutable_data<T>(context.GetPlace());
 
     int rank = Out->dims().size();
-    Tensor Out_2d =
-        rank > 2 ? framework::ReshapeToMatrix(*Out, rank - 1) : *Out;
-    Tensor dOut_2d =
-        rank > 2 ? framework::ReshapeToMatrix(*dOut, rank - 1) : *dOut;
-    Tensor dX_2d = rank > 2 ? framework::ReshapeToMatrix(*dX, rank - 1) : *dX;
+    Tensor Out_2d = framework::ReshapeToMatrix(*Out, rank - 1);
+    Tensor dOut_2d = framework::ReshapeToMatrix(*dOut, rank - 1);
+    Tensor dX_2d = framework::ReshapeToMatrix(*dX, rank - 1);
 
     math::SoftmaxGradFunctor<DeviceContext, T>()(
         context.template device_context<DeviceContext>(), &Out_2d, &dOut_2d,