/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/rank_loss_op.h"
#include <string>
// Forward declarations of the framework/imperative/platform types used by
// the registration templates below, so this translation unit does not need
// to pull in their full headers.
namespace paddle {
namespace framework {
class InferShapeContext;
class OpDesc;
}  // namespace framework
namespace imperative {
class OpBase;
}  // namespace imperative
namespace platform {
class CPUDeviceContext;
}  // namespace platform
}  // namespace paddle

namespace paddle {
namespace operators {

class RankLossOp : public framework::OperatorWithKernel {
 public:
  RankLossOp(const std::string &type, const framework::VariableNameMap &inputs,
             const framework::VariableNameMap &outputs,
             const framework::AttributeMap &attrs)
      : OperatorWithKernel(type, inputs, outputs, attrs) {}

42
  void InferShape(framework::InferShapeContext *ctx) const override {
43 44 45
    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "RankLoss");
    OP_INOUT_CHECK(ctx->HasInput("Left"), "Input", "Left", "RankLoss");
    OP_INOUT_CHECK(ctx->HasInput("Right"), "Input", "Right", "RankLoss");
Q
Qiao Longfei 已提交
46 47 48 49

    auto label_dims = ctx->GetInputDim("Label");
    auto left_dims = ctx->GetInputDim("Left");
    auto right_dims = ctx->GetInputDim("Right");
50
    // check label_dims valid
51 52 53 54 55 56
    PADDLE_ENFORCE_GE(
        label_dims.size(), 1,
        platform::errors::InvalidArgument(
            "The dimension size of Input(Label) must be greater than "
            "or equal to 1, but received %d.",
            label_dims.size()));
57 58
    PADDLE_ENFORCE_LE(
        label_dims.size(), 2,
59 60 61 62
        platform::errors::InvalidArgument("The dimension size of Input(Label) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          label_dims.size()));
63
    if (label_dims.size() == 2U) {
64 65 66 67 68
      PADDLE_ENFORCE_EQ(
          label_dims[1], 1,
          platform::errors::InvalidArgument(
              "The last dimension of Input(Label) must be 1, but received %d.",
              label_dims[1]));
69 70
    }
    // check left_dims valid
71 72 73 74 75 76
    PADDLE_ENFORCE_GE(
        left_dims.size(), 1,
        platform::errors::InvalidArgument(
            "The dimension size of Input(Left) must be greater than "
            "or equal to 1, but received %d.",
            left_dims.size()));
77 78
    PADDLE_ENFORCE_LE(
        left_dims.size(), 2,
79 80 81 82
        platform::errors::InvalidArgument("The dimension size of Input(Left) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          left_dims.size()));
83
    if (left_dims.size() == 2U) {
84 85 86 87 88
      PADDLE_ENFORCE_EQ(
          left_dims[1], 1,
          platform::errors::InvalidArgument(
              "The last dimension of Input(Left) must be 1, but received %d.",
              left_dims[1]));
89 90
    }
    // check right_dims valid
91 92 93 94 95 96
    PADDLE_ENFORCE_GE(
        right_dims.size(), 1,
        platform::errors::InvalidArgument(
            "The dimension size of Input(Right) must be greater than "
            "or equal to 1, but received %d.",
            right_dims.size()));
97 98
    PADDLE_ENFORCE_LE(
        right_dims.size(), 2,
99 100 101 102
        platform::errors::InvalidArgument("The dimension size of Input(Right) "
                                          "must be less than or equal to 2, "
                                          "but received %d.",
                                          right_dims.size()));
103
    if (right_dims.size() == 2U) {
104 105 106 107 108
      PADDLE_ENFORCE_EQ(
          right_dims[1], 1,
          platform::errors::InvalidArgument(
              "The last dimension of Input(Right) must be 1, but received %d.",
              right_dims[1]));
109
    }
110 111 112 113 114 115 116 117 118 119 120 121 122 123
    PADDLE_ENFORCE_EQ(
        label_dims[0], left_dims[0],
        platform::errors::InvalidArgument(
            "The first dimension of Input(Label) and Input(Left) "
            "must have the same value. But received Label.dims[0]=%d, "
            "Left.dims[0]=%d.",
            label_dims[0], left_dims[0]));
    PADDLE_ENFORCE_EQ(
        label_dims[0], right_dims[0],
        platform::errors::InvalidArgument(
            "The first dimension of Input(Label) and Input(Right) "
            "must have the same value. But received Label.dims[0]=%d, "
            "Right.dims[0]=%d.",
            label_dims[0], right_dims[0]));
Q
Qiao Longfei 已提交
124
    ctx->SetOutputDim("Out", label_dims);
Y
Yibing Liu 已提交
125 126 127 128 129
  }
};

// Declares the inputs, output and documentation of the rank_loss op.
// All AddInput/AddOutput/AddComment strings below are user-facing proto
// documentation and must be kept verbatim.
class RankLossOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("Label",
             "(2-D Tensor with shape [batch_size x 1]) "
             "The label indicating A ranked higher than B or not.");
    AddInput("Left",
             "(2-D Tensor with shape [batch_size x 1]) "
             "The output of RankNet for doc A.");
    AddInput("Right",
             "(2-D Tensor with shape [batch_size x 1]) "
             "The output of RankNet for doc B.");
    AddOutput("Out",
              "(2-D Tensor with shape [batch_size x 1]) "
              "The output loss of RankLoss operator.");
    AddComment(R"DOC(
RankLoss Operator.

RankLoss operator for RankNet
(http://icml.cc/2015/wp-content/uploads/2015/06/icml_ranking.pdf). 
RankNet is a pairwise ranking model with
one training sample consisting of a pair of doc A and B, and the label P
indicating that A is ranked higher than B or not:

P = {0, 1} or {0, 0.5, 1}, where 0.5 means no information about the rank of
the input pair.

The RankLoss operator takes three inputs: Left (o_i), Right (o_j) and Label
(P_{i,j}), which represent the output score of RankNet for the two docs and 
the label respectively, and yields the rank loss C_{i,j} using the following 
equation:

$$
  C_{i,j} = -\tilde{P_{ij}} * o_{i,j} + \log(1 + e^{o_{i,j}}) \\
  o_{i,j} =  o_i - o_j  \\
  \tilde{P_{i,j}} = \left \{0, 0.5, 1 \right \} \ or \ \left \{0, 1 \right \}
$$

The operator can take batch inputs with size batch_size (batch_size >= 1).

)DOC");
  }
};

class RankLossGradOp : public framework::OperatorWithKernel {
 public:
  RankLossGradOp(const std::string &type,
                 const framework::VariableNameMap &inputs,
                 const framework::VariableNameMap &outputs,
                 const framework::AttributeMap &attrs)
      : OperatorWithKernel(type, inputs, outputs, attrs) {}

180
  void InferShape(framework::InferShapeContext *ctx) const override {
181 182 183 184 185 186
    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput("Left"), "Input", "Left", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput("Right"), "Input", "Right", "RankLossGrad");
    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Out")), "Input",
                   framework::GradVarName("Out"), "RankLossGrad");

187 188
    auto left_dims = ctx->GetInputDim("Left");
    auto right_dims = ctx->GetInputDim("Right");
Q
Qiao Longfei 已提交
189 190 191 192
    auto left_grad_name = framework::GradVarName("Left");
    auto right_grad_name = framework::GradVarName("Right");

    if (ctx->HasOutput(left_grad_name)) {
193
      ctx->SetOutputDim(left_grad_name, left_dims);
Y
Yibing Liu 已提交
194
    }
Q
Qiao Longfei 已提交
195 196

    if (ctx->HasOutput(right_grad_name)) {
197
      ctx->SetOutputDim(right_grad_name, right_dims);
Y
Yibing Liu 已提交
198
    }
Y
Yibing Liu 已提交
199 200 201
  }
};

// Describes how to build the rank_loss_grad op from the forward rank_loss
// op: it consumes the three forward inputs plus dOut, and produces dLeft
// and dRight (Label is not differentiable).
template <typename T>
class RankLossGradMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> op) const override {
    op->SetType("rank_loss_grad");
    // The gradient kernel re-reads all three forward-op inputs.
    for (const char *name : {"Label", "Left", "Right"}) {
      op->SetInput(name, this->Input(name));
    }
    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    // Gradients are emitted for the two score inputs only.
    for (const char *name : {"Left", "Right"}) {
      op->SetOutput(framework::GradVarName(name), this->InputGrad(name));
    }
    op->SetAttrMap(this->Attrs());
  }
};

}  // namespace operators
}  // namespace paddle
namespace ops = paddle::operators;

// Register the forward op with its shape-inference class, proto maker, and
// gradient-op makers for both static-graph (OpDesc) and imperative (OpBase)
// modes; the grad op is registered separately below.
REGISTER_OPERATOR(rank_loss, ops::RankLossOp, ops::RankLossOpMaker,
                  ops::RankLossGradMaker<paddle::framework::OpDesc>,
                  ops::RankLossGradMaker<paddle::imperative::OpBase>);
REGISTER_OPERATOR(rank_loss_grad, ops::RankLossGradOp);
REGISTER_OP_CPU_KERNEL(
    rank_loss, ops::RankLossKernel<paddle::platform::CPUDeviceContext, float>);
REGISTER_OP_CPU_KERNEL(
    rank_loss_grad,
    ops::RankLossGradKernel<paddle::platform::CPUDeviceContext, float>);

// NOTE(review): CUDA kernels are registered from this .cc file as well,
// presumably because the kernels are header-only device-agnostic templates —
// confirm the build treats this TU as a CUDA source when GPU is enabled.
REGISTER_OP_CUDA_KERNEL(rank_loss,
                        paddle::operators::RankLossKernel<
                            paddle::platform::CUDADeviceContext, float>);
REGISTER_OP_CUDA_KERNEL(rank_loss_grad,
                        paddle::operators::RankLossGradKernel<
                            paddle::platform::CUDADeviceContext, float>);