/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/rank_loss_op.h"

#include <string>

// Forward declarations only — keeps this translation unit's header
// dependencies minimal; full definitions live in the framework/imperative/
// platform headers pulled in by rank_loss_op.h where actually needed.
namespace paddle {
namespace framework {
class InferShapeContext;
class OpDesc;
}  // namespace framework
namespace imperative {
class OpBase;
}  // namespace imperative
namespace platform {
class CPUDeviceContext;
struct CPUPlace;
}  // namespace platform
}  // namespace paddle

namespace paddle {
namespace operators {

class RankLossOp : public framework::OperatorWithKernel {
 public:
  RankLossOp(const std::string &type, const framework::VariableNameMap &inputs,
             const framework::VariableNameMap &outputs,
             const framework::AttributeMap &attrs)
      : OperatorWithKernel(type, inputs, outputs, attrs) {}

43
  void InferShape(framework::InferShapeContext *ctx) const override {
44 45 46
    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "RankLoss");
    OP_INOUT_CHECK(ctx->HasInput("Left"), "Input", "Left", "RankLoss");
    OP_INOUT_CHECK(ctx->HasInput("Right"), "Input", "Right", "RankLoss");
Q
Qiao Longfei 已提交
47 48 49 50

    auto label_dims = ctx->GetInputDim("Label");
    auto left_dims = ctx->GetInputDim("Left");
    auto right_dims = ctx->GetInputDim("Right");
51
    // check label_dims valid
52 53 54 55 56 57
    PADDLE_ENFORCE_GE(
        label_dims.size(), 1,
        platform::errors::InvalidArgument(
            "The dimension size of Input(Label) must be greater than "
            "or equal to 1, but received %d.",
            label_dims.size()));
58 59
    PADDLE_ENFORCE_LE(
        label_dims.size(), 2,
60 61 62 63
        platform::errors::InvalidArgument("The dimension size of Input(Label) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          label_dims.size()));
64
    if (label_dims.size() == 2U) {
65 66 67 68 69
      PADDLE_ENFORCE_EQ(
          label_dims[1], 1,
          platform::errors::InvalidArgument(
              "The last dimension of Input(Label) must be 1, but received %d.",
              label_dims[1]));
70 71
    }
    // check left_dims valid
72 73 74 75 76 77
    PADDLE_ENFORCE_GE(
        left_dims.size(), 1,
        platform::errors::InvalidArgument(
            "The dimension size of Input(Left) must be greater than "
            "or equal to 1, but received %d.",
            left_dims.size()));
78 79
    PADDLE_ENFORCE_LE(
        left_dims.size(), 2,
80 81 82 83
        platform::errors::InvalidArgument("The dimension size of Input(Left) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          left_dims.size()));
84
    if (left_dims.size() == 2U) {
85 86 87 88 89
      PADDLE_ENFORCE_EQ(
          left_dims[1], 1,
          platform::errors::InvalidArgument(
              "The last dimension of Input(Left) must be 1, but received %d.",
              left_dims[1]));
90 91
    }
    // check right_dims valid
92 93 94 95 96 97
    PADDLE_ENFORCE_GE(
        right_dims.size(), 1,
        platform::errors::InvalidArgument(
            "The dimension size of Input(Right) must be greater than "
            "or equal to 1, but received %d.",
            right_dims.size()));
98 99
    PADDLE_ENFORCE_LE(
        right_dims.size(), 2,
100 101 102 103
        platform::errors::InvalidArgument("The dimension size of Input(Right) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          right_dims.size()));
104
    if (right_dims.size() == 2U) {
105 106 107 108 109
      PADDLE_ENFORCE_EQ(
          right_dims[1], 1,
          platform::errors::InvalidArgument(
              "The last dimension of Input(Right) must be 1, but received %d.",
              right_dims[1]));
110
    }
111 112 113 114 115 116 117 118 119 120 121 122 123 124
    PADDLE_ENFORCE_EQ(
        label_dims[0], left_dims[0],
        platform::errors::InvalidArgument(
            "The first dimension of Input(Label) and Input(Left) "
            "must have the same value. But received Label.dims[0]=%d, "
            "Left.dims[0]=%d.",
            label_dims[0], left_dims[0]));
    PADDLE_ENFORCE_EQ(
        label_dims[0], right_dims[0],
        platform::errors::InvalidArgument(
            "The first dimension of Input(Label) and Input(Right) "
            "must have the same value. But received Label.dims[0]=%d, "
            "Right.dims[0]=%d.",
            label_dims[0], right_dims[0]));
Q
Qiao Longfei 已提交
125
    ctx->SetOutputDim("Out", label_dims);
Y
Yibing Liu 已提交
126 127 128 129 130
  }
};

// Declares the proto (inputs, output, documentation) of the rank_loss op.
class RankLossOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    // Ground-truth pairwise label P_{i,j} for each doc pair.
    AddInput("Label",
             "(2-D Tensor with shape [batch_size x 1]) The "
             "label indicating A ranked higher than B or not.");
    // RankNet scores o_i and o_j for the two documents of each pair.
    AddInput("Left",
             "(2-D Tensor with shape [batch_size x 1]) The output "
             "of RankNet for doc A.");
    AddInput("Right",
             "(2-D Tensor with shape [batch_size x 1]) The output "
             "of RankNet for doc B.");
    AddOutput("Out",
              "(2-D Tensor with shape [batch_size x 1]) The "
              "output loss of RankLoss operator.");
    AddComment(R"DOC(
RankLoss Operator.

RankLoss operator for RankNet
(http://icml.cc/2015/wp-content/uploads/2015/06/icml_ranking.pdf). 
RankNet is a pairwise ranking model with
one training sample consisting of a pair of doc A and B, and the label P
indicating that A is ranked higher than B or not:

P = {0, 1} or {0, 0.5, 1}, where 0.5 means no information about the rank of
the input pair.

The RankLoss operator takes three inputs: Left (o_i), Right (o_j) and Label
(P_{i,j}), which represent the output score of RankNet for the two docs and 
the label respectively, and yields the rank loss C_{i,j} using the following 
equation:

$$
  C_{i,j} = -\tilde{P_{ij}} * o_{i,j} + \log(1 + e^{o_{i,j}}) \\
  o_{i,j} =  o_i - o_j  \\
  \tilde{P_{i,j}} = \left \{0, 0.5, 1 \right \} \ or \ \left \{0, 1 \right \}
$$

The operator can take batch inputs with size batch_size (batch_size >= 1).

)DOC");
  }
};

class RankLossGradOp : public framework::OperatorWithKernel {
 public:
  RankLossGradOp(const std::string &type,
                 const framework::VariableNameMap &inputs,
                 const framework::VariableNameMap &outputs,
                 const framework::AttributeMap &attrs)
      : OperatorWithKernel(type, inputs, outputs, attrs) {}

181
  void InferShape(framework::InferShapeContext *ctx) const override {
182 183 184 185 186 187
    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput("Left"), "Input", "Left", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput("Right"), "Input", "Right", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Out")), "Input",
                   framework::GradVarName("Out"), "RankLossGrad");

188 189
    auto left_dims = ctx->GetInputDim("Left");
    auto right_dims = ctx->GetInputDim("Right");
Q
Qiao Longfei 已提交
190 191 192 193
    auto left_grad_name = framework::GradVarName("Left");
    auto right_grad_name = framework::GradVarName("Right");

    if (ctx->HasOutput(left_grad_name)) {
194
      ctx->SetOutputDim(left_grad_name, left_dims);
Y
Yibing Liu 已提交
195
    }
Q
Qiao Longfei 已提交
196 197

    if (ctx->HasOutput(right_grad_name)) {
198
      ctx->SetOutputDim(right_grad_name, right_dims);
Y
Yibing Liu 已提交
199
    }
Y
Yibing Liu 已提交
200 201 202
  }
};

// Builds the rank_loss_grad op description from the forward op: forwards
// Label/Left/Right plus dOut, and routes the produced gradients back to
// Left and Right.
template <typename T>
class RankLossGradMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> op) const override {
    op->SetType("rank_loss_grad");
    // The backward kernel needs all three forward inputs.
    const char *forward_inputs[] = {"Label", "Left", "Right"};
    for (const char *name : forward_inputs) {
      op->SetInput(name, this->Input(name));
    }
    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    op->SetOutput(framework::GradVarName("Left"), this->InputGrad("Left"));
    op->SetOutput(framework::GradVarName("Right"), this->InputGrad("Right"));
    op->SetAttrMap(this->Attrs());
  }
};

}  // namespace operators
}  // namespace paddle
namespace ops = paddle::operators;

// Register forward op with both static-graph (OpDesc) and imperative
// (OpBase) grad makers, then the backward op and the CPU kernels
// (float only, matching the original registration).
REGISTER_OPERATOR(rank_loss, ops::RankLossOp, ops::RankLossOpMaker,
                  ops::RankLossGradMaker<paddle::framework::OpDesc>,
                  ops::RankLossGradMaker<paddle::imperative::OpBase>);
REGISTER_OPERATOR(rank_loss_grad, ops::RankLossGradOp);
REGISTER_OP_CPU_KERNEL(
    rank_loss, ops::RankLossKernel<paddle::platform::CPUDeviceContext, float>);
REGISTER_OP_CPU_KERNEL(
    rank_loss_grad,
    ops::RankLossGradKernel<paddle::platform::CPUDeviceContext, float>);