/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/rank_loss_op.h"

#include <string>

// Forward declarations: these framework/imperative/platform types are only
// referenced by pointer or as template arguments in this translation unit,
// so declaring them here avoids including the corresponding full headers.
namespace paddle {
namespace framework {
class InferShapeContext;
class OpDesc;
}  // namespace framework
namespace imperative {
class OpBase;
}  // namespace imperative
namespace platform {
class CPUDeviceContext;
}  // namespace platform
}  // namespace paddle

namespace paddle {
namespace operators {

class RankLossOp : public framework::OperatorWithKernel {
 public:
37 38
  RankLossOp(const std::string &type,
             const framework::VariableNameMap &inputs,
Y
Yibing Liu 已提交
39 40 41 42
             const framework::VariableNameMap &outputs,
             const framework::AttributeMap &attrs)
      : OperatorWithKernel(type, inputs, outputs, attrs) {}

43
  void InferShape(framework::InferShapeContext *ctx) const override {
44 45 46
    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "RankLoss");
    OP_INOUT_CHECK(ctx->HasInput("Left"), "Input", "Left", "RankLoss");
    OP_INOUT_CHECK(ctx->HasInput("Right"), "Input", "Right", "RankLoss");
Q
Qiao Longfei 已提交
47 48 49 50

    auto label_dims = ctx->GetInputDim("Label");
    auto left_dims = ctx->GetInputDim("Left");
    auto right_dims = ctx->GetInputDim("Right");
51
    // check label_dims valid
52
    PADDLE_ENFORCE_GE(
53 54
        label_dims.size(),
        1,
55 56 57 58
        platform::errors::InvalidArgument(
            "The dimension size of Input(Label) must be greater than "
            "or equal to 1, but received %d.",
            label_dims.size()));
59
    PADDLE_ENFORCE_LE(
60 61
        label_dims.size(),
        2,
62 63 64 65
        platform::errors::InvalidArgument("The dimension size of Input(Label) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          label_dims.size()));
66
    if (label_dims.size() == 2U) {
67
      PADDLE_ENFORCE_EQ(
68 69
          label_dims[1],
          1,
70 71 72
          platform::errors::InvalidArgument(
              "The last dimension of Input(Label) must be 1, but received %d.",
              label_dims[1]));
73 74
    }
    // check left_dims valid
75
    PADDLE_ENFORCE_GE(
76 77
        left_dims.size(),
        1,
78 79 80 81
        platform::errors::InvalidArgument(
            "The dimension size of Input(Left) must be greater than "
            "or equal to 1, but received %d.",
            left_dims.size()));
82
    PADDLE_ENFORCE_LE(
83 84
        left_dims.size(),
        2,
85 86 87 88
        platform::errors::InvalidArgument("The dimension size of Input(Left) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          left_dims.size()));
89
    if (left_dims.size() == 2U) {
90
      PADDLE_ENFORCE_EQ(
91 92
          left_dims[1],
          1,
93 94 95
          platform::errors::InvalidArgument(
              "The last dimension of Input(Left) must be 1, but received %d.",
              left_dims[1]));
96 97
    }
    // check right_dims valid
98
    PADDLE_ENFORCE_GE(
99 100
        right_dims.size(),
        1,
101 102 103 104
        platform::errors::InvalidArgument(
            "The dimension size of Input(Right) must be greater than "
            "or equal to 1, but received %d.",
            right_dims.size()));
105
    PADDLE_ENFORCE_LE(
106 107
        right_dims.size(),
        2,
108 109 110 111
        platform::errors::InvalidArgument("The dimension size of Input(Right) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          right_dims.size()));
112
    if (right_dims.size() == 2U) {
113
      PADDLE_ENFORCE_EQ(
114 115
          right_dims[1],
          1,
116 117 118
          platform::errors::InvalidArgument(
              "The last dimension of Input(Right) must be 1, but received %d.",
              right_dims[1]));
119
    }
120
    PADDLE_ENFORCE_EQ(
121 122
        label_dims[0],
        left_dims[0],
123 124 125 126
        platform::errors::InvalidArgument(
            "The first dimension of Input(Label) and Input(Left) "
            "must have the same value. But received Label.dims[0]=%d, "
            "Left.dims[0]=%d.",
127 128
            label_dims[0],
            left_dims[0]));
129
    PADDLE_ENFORCE_EQ(
130 131
        label_dims[0],
        right_dims[0],
132 133 134 135
        platform::errors::InvalidArgument(
            "The first dimension of Input(Label) and Input(Right) "
            "must have the same value. But received Label.dims[0]=%d, "
            "Right.dims[0]=%d.",
136 137
            label_dims[0],
            right_dims[0]));
Q
Qiao Longfei 已提交
138
    ctx->SetOutputDim("Out", label_dims);
Y
Yibing Liu 已提交
139 140 141 142 143
  }
};

class RankLossOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  // Declares the operator's interface: three 2-D column-tensor inputs
  // (Label, Left, Right), one loss output (Out), and the user-facing
  // documentation for the RankNet pairwise rank loss.
  // NOTE: the AddInput/AddOutput call order defines the proto field order;
  // do not reorder.
  void Make() override {
    AddInput("Label",
             "(2-D Tensor with shape [batch_size x 1]) "
             "The label indicating A ranked higher than B or not.");
    AddInput("Left",
             "(2-D Tensor with shape [batch_size x 1]) "
             "The output of RankNet for doc A.");
    AddInput("Right",
             "(2-D Tensor with shape [batch_size x 1]) "
             "The output of RankNet for doc B.");
    AddOutput("Out",
              "(2-D Tensor with shape [batch_size x 1]) "
              "The output loss of RankLoss operator.");
    AddComment(R"DOC(
RankLoss Operator.

RankLoss operator for RankNet
(http://icml.cc/2015/wp-content/uploads/2015/06/icml_ranking.pdf). 
RankNet is a pairwise ranking model with
one training sample consisting of a pair of doc A and B, and the label P
indicating that A is ranked higher than B or not:

P = {0, 1} or {0, 0.5, 1}, where 0.5 means no information about the rank of
the input pair.

The RankLoss operator takes three inputs: Left (o_i), Right (o_j) and Label
(P_{i,j}), which represent the output score of RankNet for the two docs and 
the label respectively, and yields the rank loss C_{i,j} using the following 
equation:

$$
  C_{i,j} = -\tilde{P_{ij}} * o_{i,j} + \log(1 + e^{o_{i,j}}) \\
  o_{i,j} =  o_i - o_j  \\
  \tilde{P_{i,j}} = \left \{0, 0.5, 1 \right \} \ or \ \left \{0, 1 \right \}
$$

The operator can take batch inputs with size batch_size (batch_size >= 1).

)DOC");
  }
};

class RankLossGradOp : public framework::OperatorWithKernel {
 public:
  RankLossGradOp(const std::string &type,
                 const framework::VariableNameMap &inputs,
                 const framework::VariableNameMap &outputs,
                 const framework::AttributeMap &attrs)
      : OperatorWithKernel(type, inputs, outputs, attrs) {}

194
  void InferShape(framework::InferShapeContext *ctx) const override {
195 196 197
    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput("Left"), "Input", "Left", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput("Right"), "Input", "Right", "RankLossGrad");
198 199 200 201
    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Out")),
                   "Input",
                   framework::GradVarName("Out"),
                   "RankLossGrad");
202

203 204
    auto left_dims = ctx->GetInputDim("Left");
    auto right_dims = ctx->GetInputDim("Right");
Q
Qiao Longfei 已提交
205 206 207 208
    auto left_grad_name = framework::GradVarName("Left");
    auto right_grad_name = framework::GradVarName("Right");

    if (ctx->HasOutput(left_grad_name)) {
209
      ctx->SetOutputDim(left_grad_name, left_dims);
Y
Yibing Liu 已提交
210
    }
Q
Qiao Longfei 已提交
211 212

    if (ctx->HasOutput(right_grad_name)) {
213
      ctx->SetOutputDim(right_grad_name, right_dims);
Y
Yibing Liu 已提交
214
    }
Y
Yibing Liu 已提交
215 216 217
  }
};

// Generates the description of rank_loss_grad from the forward op; T is
// framework::OpDesc (static graph) or imperative::OpBase (dygraph).
template <typename T>
class RankLossGradMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  // Builds the backward op: forwards the three forward-op inputs plus the
  // gradient of Out, and requests the gradients of Left and Right.
  void Apply(GradOpPtr<T> op) const override {
    op->SetType("rank_loss_grad");
    op->SetInput("Label", this->Input("Label"));
    op->SetInput("Left", this->Input("Left"));
    op->SetInput("Right", this->Input("Right"));
    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    op->SetOutput(framework::GradVarName("Left"), this->InputGrad("Left"));
    op->SetOutput(framework::GradVarName("Right"), this->InputGrad("Right"));
    op->SetAttrMap(this->Attrs());
  }
};

}  // namespace operators
}  // namespace paddle
namespace ops = paddle::operators;

// Forward op with its maker and the grad-op makers for both the static
// graph (OpDesc) and dygraph (OpBase) modes.
REGISTER_OPERATOR(rank_loss,
                  ops::RankLossOp,
                  ops::RankLossOpMaker,
                  ops::RankLossGradMaker<paddle::framework::OpDesc>,
                  ops::RankLossGradMaker<paddle::imperative::OpBase>);
REGISTER_OPERATOR(rank_loss_grad, ops::RankLossGradOp);

// CPU kernels (float only).
REGISTER_OP_CPU_KERNEL(
    rank_loss, ops::RankLossKernel<paddle::platform::CPUDeviceContext, float>);
REGISTER_OP_CPU_KERNEL(
    rank_loss_grad,
    ops::RankLossGradKernel<paddle::platform::CPUDeviceContext, float>);

// CUDA kernels (float only); uses the `ops::` alias for consistency with
// the CPU registrations above (previously spelled paddle::operators::).
REGISTER_OP_CUDA_KERNEL(
    rank_loss,
    ops::RankLossKernel<paddle::platform::CUDADeviceContext, float>);
REGISTER_OP_CUDA_KERNEL(
    rank_loss_grad,
    ops::RankLossGradKernel<paddle::platform::CUDADeviceContext, float>);