/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/rank_loss_op.h"

#include <string>

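// Forward declarations of framework types used in this file.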
namespace paddle {
namespace framework {
class InferShapeContext;
class OpDesc;
}  // namespace framework
namespace imperative {
class OpBase;
}  // namespace imperative
}  // namespace paddle

namespace paddle {
namespace operators {

class RankLossOp : public framework::OperatorWithKernel {
 public:
  RankLossOp(const std::string &type,
             const framework::VariableNameMap &inputs,
             const framework::VariableNameMap &outputs,
             const framework::AttributeMap &attrs)
      : OperatorWithKernel(type, inputs, outputs, attrs) {}

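  // Checks that Label, Left and Right are each 1-D or 2-D (with the second
  // dimension equal to 1 when 2-D) and share the same batch size, then sets
  // Out to the shape of Label.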
  void InferShape(framework::InferShapeContext *ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "RankLoss");
    OP_INOUT_CHECK(ctx->HasInput("Left"), "Input", "Left", "RankLoss");
    OP_INOUT_CHECK(ctx->HasInput("Right"), "Input", "Right", "RankLoss");

    auto label_dims = ctx->GetInputDim("Label");
    auto left_dims = ctx->GetInputDim("Left");
    auto right_dims = ctx->GetInputDim("Right");
    // check label_dims valid
    PADDLE_ENFORCE_GE(
        label_dims.size(),
        1,
        platform::errors::InvalidArgument(
            "The dimension size of Input(Label) must be greater than "
            "or equal to 1, but received %d.",
            label_dims.size()));
    PADDLE_ENFORCE_LE(
        label_dims.size(),
        2,
        platform::errors::InvalidArgument("The dimension size of Input(Label) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          label_dims.size()));
    if (label_dims.size() == 2U) {
      PADDLE_ENFORCE_EQ(
          label_dims[1],
          1,
          platform::errors::InvalidArgument(
              "The last dimension of Input(Label) must be 1, but received %d.",
              label_dims[1]));
    }
    // check left_dims valid
    PADDLE_ENFORCE_GE(
        left_dims.size(),
        1,
        platform::errors::InvalidArgument(
            "The dimension size of Input(Left) must be greater than "
            "or equal to 1, but received %d.",
            left_dims.size()));
    PADDLE_ENFORCE_LE(
        left_dims.size(),
        2,
        platform::errors::InvalidArgument("The dimension size of Input(Left) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          left_dims.size()));
    if (left_dims.size() == 2U) {
      PADDLE_ENFORCE_EQ(
          left_dims[1],
          1,
          platform::errors::InvalidArgument(
              "The last dimension of Input(Left) must be 1, but received %d.",
              left_dims[1]));
    }
    // check right_dims valid
    PADDLE_ENFORCE_GE(
        right_dims.size(),
        1,
        platform::errors::InvalidArgument(
            "The dimension size of Input(Right) must be greater than "
            "or equal to 1, but received %d.",
            right_dims.size()));
    PADDLE_ENFORCE_LE(
        right_dims.size(),
        2,
        platform::errors::InvalidArgument("The dimension size of Input(Right) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          right_dims.size()));
    if (right_dims.size() == 2U) {
      PADDLE_ENFORCE_EQ(
          right_dims[1],
          1,
          platform::errors::InvalidArgument(
              "The last dimension of Input(Right) must be 1, but received %d.",
              right_dims[1]));
    }
    PADDLE_ENFORCE_EQ(
        label_dims[0],
        left_dims[0],
        platform::errors::InvalidArgument(
            "The first dimension of Input(Label) and Input(Left) "
            "must have the same value. But received Label.dims[0]=%d, "
            "Left.dims[0]=%d.",
            label_dims[0],
            left_dims[0]));
    PADDLE_ENFORCE_EQ(
        label_dims[0],
        right_dims[0],
        platform::errors::InvalidArgument(
            "The first dimension of Input(Label) and Input(Right) "
            "must have the same value. But received Label.dims[0]=%d, "
            "Right.dims[0]=%d.",
            label_dims[0],
            right_dims[0]));
    ctx->SetOutputDim("Out", label_dims);
  }
};

class RankLossOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("Label",
             "(2-D Tensor with shape [batch_size x 1]) "
             "The label indicating A ranked higher than B or not.");
    AddInput("Left",
             "(2-D Tensor with shape [batch_size x 1]) "
             "The output of RankNet for doc A.");
    AddInput("Right",
             "(2-D Tensor with shape [batch_size x 1]) "
             "The output of RankNet for doc B.");
    AddOutput("Out",
              "(2-D Tensor with shape [batch_size x 1]) "
              "The output loss of RankLoss operator.");
    AddComment(R"DOC(
RankLoss Operator.

RankLoss operator for RankNet
(http://icml.cc/2015/wp-content/uploads/2015/06/icml_ranking.pdf).
RankNet is a pairwise ranking model with
one training sample consisting of a pair of docs A and B, and a label P
indicating whether A is ranked higher than B:

P = {0, 1} or {0, 0.5, 1}, where 0.5 means no information about the rank of
the input pair.

The RankLoss operator takes three inputs: Left (o_i), Right (o_j) and Label
(P_{i,j}), which represent the RankNet output scores for the two docs and the
label, respectively, and yields the rank loss C_{i,j} using the following
equation:

$$
  C_{i,j} = -\tilde{P_{i,j}} * o_{i,j} + \log(1 + e^{o_{i,j}}) \\
  o_{i,j} = o_i - o_j \\
  \tilde{P_{i,j}} \in \left \{0, 0.5, 1 \right \} \ or \ \left \{0, 1 \right \}
$$
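For example, given o_i = 2.0, o_j = 1.0 and P_{i,j} = 1, then o_{i,j} = 1.0 and
C_{i,j} = -1.0 + log(1 + e^{1.0}), which is approximately 0.3133.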

The operator can take batch inputs with size batch_size (batch_size >= 1).

)DOC");
  }
};

class RankLossGradOp : public framework::OperatorWithKernel {
 public:
  RankLossGradOp(const std::string &type,
                 const framework::VariableNameMap &inputs,
                 const framework::VariableNameMap &outputs,
                 const framework::AttributeMap &attrs)
      : OperatorWithKernel(type, inputs, outputs, attrs) {}

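  // The gradients of Left and Right take the same shapes as Left and Right;
  // each is set only when the corresponding output variable is present.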
  void InferShape(framework::InferShapeContext *ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput("Left"), "Input", "Left", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput("Right"), "Input", "Right", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Out")),
                   "Input",
                   framework::GradVarName("Out"),
                   "RankLossGrad");

    auto left_dims = ctx->GetInputDim("Left");
    auto right_dims = ctx->GetInputDim("Right");
    auto left_grad_name = framework::GradVarName("Left");
    auto right_grad_name = framework::GradVarName("Right");

    if (ctx->HasOutput(left_grad_name)) {
      ctx->SetOutputDim(left_grad_name, left_dims);
    }

    if (ctx->HasOutput(right_grad_name)) {
      ctx->SetOutputDim(right_grad_name, right_dims);
    }
  }
};

template <typename T>
class RankLossGradMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
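  // Builds the rank_loss_grad op: it reuses Label, Left, Right and the
  // gradient of Out, and produces the gradients of Left and Right.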
  void Apply(GradOpPtr<T> op) const override {
    op->SetType("rank_loss_grad");
    op->SetInput("Label", this->Input("Label"));
    op->SetInput("Left", this->Input("Left"));
    op->SetInput("Right", this->Input("Right"));
    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    op->SetOutput(framework::GradVarName("Left"), this->InputGrad("Left"));
    op->SetOutput(framework::GradVarName("Right"), this->InputGrad("Right"));
    op->SetAttrMap(this->Attrs());
  }
};

}  // namespace operators
}  // namespace paddle
namespace ops = paddle::operators;

REGISTER_OPERATOR(rank_loss,
                  ops::RankLossOp,
                  ops::RankLossOpMaker,
                  ops::RankLossGradMaker<paddle::framework::OpDesc>,
                  ops::RankLossGradMaker<paddle::imperative::OpBase>);
REGISTER_OPERATOR(rank_loss_grad, ops::RankLossGradOp);

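// Only float kernels are registered; the GPU kernels below are compiled only
// when Paddle is built with CUDA or HIP support.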
PD_REGISTER_STRUCT_KERNEL(
    rank_loss, CPU, ALL_LAYOUT, ops::RankLossKernel, float) {}
PD_REGISTER_STRUCT_KERNEL(
    rank_loss_grad, CPU, ALL_LAYOUT, ops::RankLossGradKernel, float) {}

#if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
PD_REGISTER_STRUCT_KERNEL(
    rank_loss, GPU, ALL_LAYOUT, ops::RankLossKernel, float) {}
PD_REGISTER_STRUCT_KERNEL(
    rank_loss_grad, GPU, ALL_LAYOUT, ops::RankLossGradKernel, float) {}
#endif