From 6bef079660f689a1b9c061e31c8273de353f98da Mon Sep 17 00:00:00 2001
From: yangyaming
Date: Thu, 31 Aug 2017 22:31:34 +0800
Subject: [PATCH] Follow coding style and move reshaping operation to paddle
 tensor.

---
 paddle/operators/squared_l2_distance_op.cc    |  47 ++---
 paddle/operators/squared_l2_distance_op.h     | 170 ++++++------------
 .../tests/test_squared_l2_distance_op.py      |  10 ++
 3 files changed, 92 insertions(+), 135 deletions(-)

diff --git a/paddle/operators/squared_l2_distance_op.cc b/paddle/operators/squared_l2_distance_op.cc
index 3049f0f8ba0..b19c274dcc0 100644
--- a/paddle/operators/squared_l2_distance_op.cc
+++ b/paddle/operators/squared_l2_distance_op.cc
@@ -30,22 +30,27 @@ class SquaredL2DistanceOp : public framework::OperatorWithKernel {
                             "Target of SquaredL2DistanceOp "
                             "must be initialized.");
 
-    auto* X = ctx.Input<Tensor>("X");
-    auto xDims = X->dims();
-    auto* Y = ctx.Input<Tensor>("Y");
-    auto yDims = Y->dims();
+    auto* x = ctx.Input<Tensor>("X");
+    auto x_dims = x->dims();
+    auto* y = ctx.Input<Tensor>("Y");
+    auto y_dims = y->dims();
 
-    PADDLE_ENFORCE_EQ(framework::arity(xDims), framework::arity(yDims),
+    PADDLE_ENFORCE_EQ(framework::arity(x_dims), framework::arity(y_dims),
                       "Tensor rank of both SquaredL2DistanceOp's "
                       "inputs must be same.");
-    int rank = framework::arity(xDims);
-    PADDLE_ENFORCE(rank >= 2 || rank <= 6, "Tensor rank should be in [2, 6].");
-    PADDLE_ENFORCE(yDims[0] == 1 || yDims[0] == xDims[0],
+
+    int rank = framework::arity(x_dims);
+    PADDLE_ENFORCE(rank >= 2, "Tensor rank should be at least equal to 2.");
+    PADDLE_ENFORCE_EQ(framework::product(x_dims) / x_dims[0],
+                      framework::product(y_dims) / y_dims[0],
+                      "Product of dimensions except the first dimension of "
+                      "input and target must be equal.");
+    PADDLE_ENFORCE(y_dims[0] == 1 || y_dims[0] == x_dims[0],
                    "First dimension of target must be equal to input "
                    "or to 1.");
-    ctx.Output<Tensor>("sub_result")->Resize(xDims);
-    ctx.Output<Tensor>("Out")->Resize({xDims[0], 1});
+    ctx.Output<Tensor>("sub_result")->Resize(x_dims);
+    ctx.Output<Tensor>("Out")->Resize({x_dims[0], 1});
   }
 };
 
@@ -66,8 +71,8 @@ class SquaredL2DistanceOpMaker : public framework::OpProtoAndCheckerMaker {
     input and target. Number of distance value equals to the
     first dimension of input. First dimension of target could be equal to
     input or to 1. If the first dimension of target is 1, SquaredL2DistanceOp
-    will broadcast the first dimension to the first dimension of input.
-    You can decide whether calculate the gradient of target.
+    will broadcast target's first dimension to input's first dimension.
+    You can decide whether to calculate the gradient of input and target.
)DOC"); } }; @@ -81,19 +86,19 @@ class SquaredL2DistanceGradOp : public framework::OperatorWithKernel { PADDLE_ENFORCE_NOT_NULL(ctx.InputVar(framework::GradVarName("Out")), "Gradient of Out should not be null"); // check out grad dimensions - auto outDims = ctx.Input(framework::GradVarName("Out"))->dims(); - auto xDims = ctx.Input("X")->dims(); - auto yDims = ctx.Input("Y")->dims(); - PADDLE_ENFORCE_EQ(outDims[0], xDims[0], + auto out_dims = ctx.Input(framework::GradVarName("Out"))->dims(); + auto x_dims = ctx.Input("X")->dims(); + auto y_dims = ctx.Input("Y")->dims(); + PADDLE_ENFORCE_EQ(out_dims[0], x_dims[0], "First dimension of output gradient and " "input value must be equal."); - PADDLE_ENFORCE_EQ(outDims[1], 1, + PADDLE_ENFORCE_EQ(out_dims[1], 1, "Second dimension of output gradient " "must be 1."); - auto* xGrad = ctx.Output(framework::GradVarName("X")); - auto* yGrad = ctx.Output(framework::GradVarName("Y")); - if (xGrad != nullptr) xGrad->Resize(xDims); - if (yGrad != nullptr) yGrad->Resize(yDims); + auto* x_grad = ctx.Output(framework::GradVarName("X")); + auto* y_grad = ctx.Output(framework::GradVarName("Y")); + if (x_grad != nullptr) x_grad->Resize(x_dims); + if (y_grad != nullptr) y_grad->Resize(y_dims); } }; diff --git a/paddle/operators/squared_l2_distance_op.h b/paddle/operators/squared_l2_distance_op.h index e95364c7069..ec8c34ddf8d 100644 --- a/paddle/operators/squared_l2_distance_op.h +++ b/paddle/operators/squared_l2_distance_op.h @@ -20,9 +20,6 @@ namespace paddle { namespace operators { using Tensor = framework::Tensor; -template -using EigenTensor = framework::EigenTensor; template using EigenMatrix = framework::EigenMatrix; @@ -31,64 +28,39 @@ template class SquaredL2DistanceKernel : public framework::OpKernel { public: void Compute(const framework::ExecutionContext& context) const override { - auto* input0 = context.Input("X"); - const int rank = framework::arity(input0->dims()); - switch (rank) { - case 2: - Operate<2>(context); - break; - case 3: - Operate<3>(context); - break; - case 4: - Operate<4>(context); - break; - case 5: - Operate<5>(context); - break; - case 6: - Operate<6>(context); - break; - default: - // already asserted in SquaredL2DistanceOpMaker - break; - } - } - - private: - template - void Operate(const framework::ExecutionContext& context) const { - auto* input0 = context.Input("X"); - auto* input1 = context.Input("Y"); - auto* output0 = context.Output("sub_result"); - auto* output1 = context.Output("Out"); - - output0->mutable_data(context.GetPlace()); - output1->mutable_data(context.GetPlace()); - - auto X = EigenTensor::From(*input0); - auto Y = EigenTensor::From(*input1); - auto subResult = EigenTensor::From(*output0); - auto Z = EigenMatrix::From(*output1); - - auto xDims = X.dimensions(); - auto yDims = Y.dimensions(); + auto* in0 = context.Input("X"); + auto* in1 = context.Input("Y"); + auto* out0 = context.Output("sub_result"); + auto* out1 = context.Output("Out"); + + auto in0_dims = in0->dims(); + auto in1_dims = in1->dims(); + + int cols = framework::product(in0_dims) / in0_dims[0]; + // reduce dimensions except the first + auto x = + EigenMatrix::From(*in0, framework::make_ddim({in0_dims[0], cols})); + auto y = + EigenMatrix::From(*in1, framework::make_ddim({in1_dims[0], cols})); + + out0->mutable_data(context.GetPlace()); + out1->mutable_data(context.GetPlace()); + auto sub_result = EigenMatrix::From(*out0); + auto z = EigenMatrix::From(*out1); auto place = context.GetEigenDevice(); - + auto x_dims = x.dimensions(); + auto y_dims 
+    auto y_dims = y.dimensions();
     // buffer the subtraction result
-    if (yDims[0] == 1 && xDims[0] != yDims[0]) {
-      auto yBroadcastDims = yDims;
-      yBroadcastDims[0] = xDims[0];
-      subResult.device(place) = X - Y.broadcast(yBroadcastDims);
+    if (y_dims[0] == 1 && x_dims[0] > y_dims[0]) {
+      auto y_broadcast_dims = y_dims;
+      y_broadcast_dims[0] = x_dims[0];
+      sub_result.device(place) = x - y.broadcast(y_broadcast_dims);
     } else {
-      subResult.device(place) = X - Y;
+      sub_result.device(place) = x - y;
     }
-    // create matrix view for subtraction result
-    const auto& subResMat = subResult.reshape(Eigen::array<int, 2>(
-        {static_cast<int>(xDims[0]), static_cast<int>(X.size() / xDims[0])}));
-    Z.device(place) = subResMat.pow(2).sum(Eigen::array<int, 1>({1}));
+
+    z.device(place) = sub_result.pow(2).sum(Eigen::array<int, 1>({1}));
   }
 };
 
@@ -96,77 +68,47 @@ template <typename Place, typename T>
 class SquaredL2DistanceGradKernel : public framework::OpKernel {
  public:
   void Compute(const framework::ExecutionContext& context) const override {
-    auto* input0 = context.Input<Tensor>("sub_result");
-    const int rank = framework::arity(input0->dims());
-    switch (rank) {
-      case 2:
-        Operate<2>(context);
-        break;
-      case 3:
-        Operate<3>(context);
-        break;
-      case 4:
-        Operate<4>(context);
-        break;
-      case 5:
-        Operate<5>(context);
-        break;
-      case 6:
-        Operate<6>(context);
-        break;
-      default:
-        // already asserted in SquaredL2DistanceOpMaker
-        break;
-    }
-  }
+    auto* in0 = context.Input<Tensor>("sub_result");
+    auto* in1 = context.Input<Tensor>(framework::GradVarName("Out"));
+    auto* x_g = context.Output<Tensor>(framework::GradVarName("X"));
+    auto* y_g = context.Output<Tensor>(framework::GradVarName("Y"));
 
- private:
-  template <int Dims>
-  void Operate(const framework::ExecutionContext& context) const {
-    auto* input0 = context.Input<Tensor>("sub_result");
-    auto* OG = context.Input<Tensor>(framework::GradVarName("Out"));
-    auto* XG = context.Output<Tensor>(framework::GradVarName("X"));
-    auto* YG = context.Output<Tensor>(framework::GradVarName("Y"));
+    auto sub_result = EigenMatrix<T>::From(*in0);
+    auto out_grad = EigenMatrix<T>::From(*in1);
 
-    auto subResult = EigenTensor<T, Dims>::From(*input0);
-    auto outGrad = EigenMatrix<T>::From(*OG);
-
-    auto subResDims = subResult.dimensions();
-    int firstDim = static_cast<int>(subResDims[0]);
-    int cols = subResult.size() / firstDim;
-    const auto subResMat =
-        subResult.reshape(Eigen::array<int, 2>({firstDim, cols}));
+    auto x_dims = x_g->dims();
+    auto y_dims = y_g->dims();
+    int cols = framework::product(x_dims) / x_dims[0];
     // calculate gradient
-    auto gradMat =
-        2 * (outGrad.broadcast(Eigen::array<int, 2>({1, cols}))) * subResMat;
+    auto grad_mat =
+        2 * (out_grad.broadcast(Eigen::array<int, 2>({1, cols}))) * sub_result;
 
     // propagate back to input
-    auto eigenPlace = context.GetEigenDevice<Place>();
-    if (XG != nullptr) {
-      XG->mutable_data<T>(context.GetPlace());
-      auto xGrad = EigenTensor<T, Dims>::From(*XG);
+    auto eigen_place = context.GetEigenDevice<Place>();
+    if (x_g != nullptr) {
+      x_g->mutable_data<T>(context.GetPlace());
+      // eigen matrix
+      auto x_grad =
+          EigenMatrix<T>::From(*x_g, framework::make_ddim({x_dims[0], cols}));
       // dimensions are same with subResult
-      auto xGradMat = xGrad.reshape(Eigen::array<int, 2>({firstDim, cols}));
-      xGradMat.device(eigenPlace) = gradMat;
+      x_grad.device(eigen_place) = grad_mat;
     }
-    if (YG != nullptr) {
-      YG->mutable_data<T>(context.GetPlace());
-      auto yGrad = EigenTensor<T, Dims>::From(*YG);
-      auto dimsYGrad = yGrad.dimensions();
-      auto yGradMat = yGrad.reshape(Eigen::array<int, 2>(
-          {static_cast<int>(dimsYGrad[0]),
-           static_cast<int>(yGrad.size() / dimsYGrad[0])}));
-
-      PADDLE_ENFORCE(dimsYGrad[0] <= firstDim,
+
+    if (y_g != nullptr) {
+      y_g->mutable_data<T>(context.GetPlace());
+      auto y_grad =
+          EigenMatrix<T>::From(*y_g, framework::make_ddim({y_dims[0], cols}));
+
+      PADDLE_ENFORCE(sub_result.dimensions()[0] >= y_dims[0],
                      "First dimension of gradient must be greater than or "
                      "equal to first dimension of target.");
-      if (dimsYGrad[0] == firstDim) {
-        yGradMat.device(eigenPlace) = -1 * gradMat;
+      if (sub_result.dimensions()[0] == y_dims[0]) {
+        y_grad.device(eigen_place) = -1 * grad_mat;
       } else {
-        yGradMat.device(eigenPlace) =
-            -1 * (gradMat.sum(Eigen::array<int, 1>({0})));
+        y_grad.device(eigen_place) =
+            -1 * (grad_mat.sum(Eigen::array<int, 1>({0})));
       }
     }
   }
diff --git a/python/paddle/v2/framework/tests/test_squared_l2_distance_op.py b/python/paddle/v2/framework/tests/test_squared_l2_distance_op.py
index eeddb5a3bf6..51c95b286a8 100644
--- a/python/paddle/v2/framework/tests/test_squared_l2_distance_op.py
+++ b/python/paddle/v2/framework/tests/test_squared_l2_distance_op.py
@@ -21,5 +21,15 @@ class TestSquaredL2DistanceOp(unittest.TestCase):
         }
 
 
+class TestSquaredL2DistanceGradOp(GradientChecker):
+    def test_squared_l2_distance(self):
+        op = create_op("squared_l2_distance")
+        inputs = {
+            'X': np.random.uniform(0.1, 1., (2, 3)).astype('float32'),
+            'Y': np.random.uniform(0.1, 1., (2, 3)).astype('float32')
+        }
+        self.check_grad(op, inputs, set(["X", "Y"]), "Out")
+
+
 if __name__ == '__main__':
     unittest.main()
-- 
GitLab
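
For reference, the forward computation this patch settles on — flatten every dimension after the first into `cols`, subtract with target's first dimension optionally broadcast, then square and reduce per row — can be sketched in NumPy as follows. This is a minimal illustration, not part of the patch; the helper name `squared_l2_distance_ref` is invented.

    import numpy as np

    def squared_l2_distance_ref(x, y):
        # Hypothetical reference mirroring the forward kernel: collapse all
        # trailing dimensions to (rows, cols); y may have a single row that
        # NumPy broadcasts against x's rows, just like the Eigen broadcast.
        x2 = x.reshape(x.shape[0], -1)                       # (n, cols)
        y2 = y.reshape(y.shape[0], -1)                       # (n, cols) or (1, cols)
        sub_result = x2 - y2                                 # broadcasts the 1-row case
        out = (sub_result ** 2).sum(axis=1, keepdims=True)   # (n, 1)
        return sub_result, out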
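
Likewise, the backward kernel is the chain rule applied to out_i = sum_j (x_ij - y_ij)^2: x_grad = 2 * out_grad * sub_result, and y_grad = -x_grad, additionally summed over the batch dimension when target's single row was broadcast. A sketch under the same assumptions, with `squared_l2_distance_grad_ref` likewise an invented name:

    def squared_l2_distance_grad_ref(sub_result, out_grad, y_rows):
        # Hypothetical reference for the backward pass; sub_result is (n, cols),
        # out_grad is (n, 1), y_rows is target's original first dimension.
        grad_mat = 2.0 * out_grad * sub_result       # out_grad broadcast over cols
        x_grad = grad_mat
        if y_rows == sub_result.shape[0]:
            y_grad = -grad_mat
        else:                                        # target was broadcast: reduce rows
            y_grad = -grad_mat.sum(axis=0, keepdims=True)
        return x_grad, y_grad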