/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/squared_l2_distance_op.h"

#include <memory>

#include "paddle/fluid/framework/no_need_buffer_vars_inference.h"

namespace paddle {
namespace operators {

class SquaredL2DistanceOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "SquaredL2DistanceOp");
    OP_INOUT_CHECK(ctx->HasInput("Y"), "Input", "Y", "SquaredL2DistanceOp");
    OP_INOUT_CHECK(ctx->HasOutput("sub_result"), "Output", "sub_result",
                   "SquaredL2DistanceOp");
    OP_INOUT_CHECK(ctx->HasOutput("Out"), "Output", "Out",
                   "SquaredL2DistanceOp");

    auto x_dims = ctx->GetInputDim("X");
    auto y_dims = ctx->GetInputDim("Y");

    PADDLE_ENFORCE_EQ(framework::arity(x_dims), framework::arity(y_dims),
                      platform::errors::InvalidArgument(
                          "Input(X) and Input(Y) of SquaredL2DistanceOp should "
                          "have the same number of dimensions. "
                          "But received X's shape = [%s] and Y's shape = [%s], "
                          "the dimensions are %d and %d respectively",
                          x_dims, y_dims, framework::arity(x_dims),
                          framework::arity(y_dims)));

    int rank = framework::arity(x_dims);
    PADDLE_ENFORCE_GE(
        rank, 2,
        platform::errors::InvalidArgument(
            "Input dimensions of SquaredL2DistanceOp should be at least 2. "
            "But received shape = [%s] and dimension is %d.",
            x_dims, rank));
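    // When a shape is not fully known at compile time (an unknown dimension
    // makes the element count non-positive), the consistency check below is
    // skipped and deferred to runtime.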
    bool check = true;
    if ((!ctx->IsRuntime()) &&
        (framework::product(x_dims) <= 0 || framework::product(y_dims) <= 0)) {
      check = false;
    }
    if (check) {
      PADDLE_ENFORCE_EQ(
          product(x_dims) / x_dims[0], product(y_dims) / y_dims[0],
          platform::errors::InvalidArgument(
              "Input(X) and Input(Y) of SquaredL2DistanceOp should have the "
              "same number of elements, excluding the first dimension. "
              "But received X's shape = [%s] and Y's shape = [%s]"
              ", the products are %d and %d respectively",
              x_dims, y_dims, product(x_dims) / x_dims[0],
              product(y_dims) / y_dims[0]));
    }
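    // Likewise, the batch-dimension check below only runs once both first
    // dimensions are known.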
    check = true;
    if ((!ctx->IsRuntime()) && (y_dims[0] <= 0 || x_dims[0] <= 0)) {
      check = false;
    }
    if (check) {
      PADDLE_ENFORCE_EQ(
          y_dims[0] == 1 || y_dims[0] == x_dims[0], true,
          platform::errors::InvalidArgument(
              "First dimension of Input(Y) of SquaredL2DistanceOp "
              "must be equal to 1 or to the first dimension of Input(X). "
              "But received X's shape = [%s] and Y's shape = [%s], "
              "the first dimensions are %d and %d respectively",
              x_dims, y_dims, x_dims[0], y_dims[0]));
    }
    ctx->SetOutputDim("sub_result", {x_dims[0], product(x_dims) / x_dims[0]});
    ctx->SetOutputDim("Out", {x_dims[0], 1});
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};

DECLARE_NO_NEED_BUFFER_VARS_INFERER(SquaredL2DistanceGradOpNoBuffer, "X", "Y");

template <typename T>
class SquaredL2DistanceGradOpMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> op) const override {
    op->SetType("squared_l2_distance_grad");

    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    op->SetInput("sub_result", this->Output("sub_result"));
    op->SetInput("X", this->Input("X"));
    op->SetInput("Y", this->Input("Y"));

    op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
    op->SetOutput(framework::GradVarName("Y"), this->InputGrad("Y"));

    op->SetAttrMap(this->Attrs());
  }
};

class SquaredL2DistanceOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "(Tensor) Input of SquaredL2DistanceOp.");
    AddInput("Y", "(Tensor) Target of SquaredL2DistanceOp.");
    AddOutput("sub_result",
              "(Tensor) Buffered subtraction result that is "
              "reused in the backward pass.")
        .AsIntermediate();
    AddOutput("Out", "(Tensor) Squared L2 distance between input and target.");
    AddComment(R"DOC(
SquaredL2Distance operator

This operator calculates the squared L2 distance between the input and
the target. The number of distance values equals the first dimension of
the input. The first dimension of the target must equal that of the input
or be 1. If the first dimension of the target is 1, the operator broadcasts
the target's first dimension to match the input's first dimension. During
backward propagation, the user can decide whether to compute the gradient
of the input, the target, or both.

Both of the inputs X and Y can carry LoD (Level of Detail) information.
However, the output only shares the LoD information with input X.
    )DOC");
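    // A minimal sketch of the forward computation described above (the
    // registered kernels are authoritative; b(i) is an illustrative helper
    // that is 0 when Y's first dimension is 1 and i otherwise):
    //   sub_result[i][j] = X[i][j] - Y[b(i)][j]
    //   Out[i][0]        = sum_j sub_result[i][j]^2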
  }
};
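
// A sketch of the backward computation, assuming the forward definition
// sketched above (the registered grad kernel is authoritative):
//   dX[i][j] = 2 * dOut[i][0] * sub_result[i][j]
//   dY[b(i)][j] accumulates -2 * dOut[i][0] * sub_result[i][j] over i
// Since X and Y are declared as no-need-buffer inputs above, only their
// shapes are used here; the gradient data comes from sub_result and Out@GRAD.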

class SquaredL2DistanceGradOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("sub_result"), "Input", "sub_result",
                   "SquaredL2DistanceGradOp");
    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Out")), "Input",
                   "Out@GRAD", "SquaredL2DistanceGradOp");
    auto out_dims = ctx->GetInputDim(framework::GradVarName("Out"));
    auto x_dims = ctx->GetInputDim("X");
    auto y_dims = ctx->GetInputDim("Y");
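    // The consistency checks below need concrete dimension values, so they
    // are only performed at runtime.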
    if (ctx->IsRuntime()) {
      PADDLE_ENFORCE_EQ(
          out_dims[0], x_dims[0],
          platform::errors::InvalidArgument(
              "First dimension of output gradient and Input(X) "
              "of SquaredL2DistanceGradOp must be equal. "
              "But received X's shape = [%s] and grad's shape = [%s], "
              "the first dimensions are %d and %d respectively",
              x_dims, out_dims, x_dims[0], out_dims[0]));
      PADDLE_ENFORCE_EQ(out_dims[1], 1,
                        platform::errors::InvalidArgument(
                            "Second dimension of output gradient of "
                            "SquaredL2DistanceGradOp must be 1. "
                            "But received grad's shape = [%s], "
                            "with second dimension %d",
                            out_dims, out_dims[1]));
    }
    auto x_grad_name = framework::GradVarName("X");
    auto y_grad_name = framework::GradVarName("Y");
    if (ctx->HasOutput(x_grad_name)) ctx->SetOutputDim(x_grad_name, x_dims);
    if (ctx->HasOutput(y_grad_name)) ctx->SetOutputDim(y_grad_name, y_dims);
  }

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return framework::OpKernelType(
        OperatorWithKernel::IndicateVarDataType(ctx, "sub_result"),
        ctx.GetPlace());
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
REGISTER_OPERATOR(
    squared_l2_distance, ops::SquaredL2DistanceOp,
    ops::SquaredL2DistanceOpMaker,
    ops::SquaredL2DistanceGradOpMaker<paddle::framework::OpDesc>,
    ops::SquaredL2DistanceGradOpMaker<paddle::imperative::OpBase>);
REGISTER_OPERATOR(squared_l2_distance_grad, ops::SquaredL2DistanceGradOp,
                  ops::SquaredL2DistanceGradOpNoBuffer);
REGISTER_OP_CPU_KERNEL(
    squared_l2_distance,
    ops::SquaredL2DistanceKernel<paddle::platform::CPUDeviceContext, float>);
REGISTER_OP_CPU_KERNEL(squared_l2_distance_grad,
                       ops::SquaredL2DistanceGradKernel<
                           paddle::platform::CPUDeviceContext, float>);