/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/cross_entropy_op.h"
#include <memory>
#include <string>
#include <unordered_map>

namespace paddle {
namespace operators {

class CrossEntropyOpBase : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true, "Input(X) should not be null.");
    PADDLE_ENFORCE_EQ(ctx->HasInput("Label"), true,
                      "Input(Label) should not be null.");

    PADDLE_ENFORCE_EQ(ctx->HasOutput("Y"), true,
                      "Output(Y) should not be null.");

    auto x_dims = ctx->GetInputDim("X");
    auto label_dims = ctx->GetInputDim("Label");
    int rank = x_dims.size();

    bool contain_unknown_dim = framework::contain_unknown_dim(x_dims) ||
                               framework::contain_unknown_dim(label_dims);
    bool check = ctx->IsRuntime() || !contain_unknown_dim;

    if (check) {
      PADDLE_ENFORCE_EQ(
          framework::slice_ddim(x_dims, 0, rank - 1),
          framework::slice_ddim(label_dims, 0, rank - 1),
          "ShapeError: Input(X) and Input(Label) shall have the same shape "
          "except the last dimension. But received: the shape of Input(X) is "
          "[%s], the shape of Input(Label) is [%s].",
          x_dims, label_dims);
    }

    if (IsSoftLabel(ctx)) {
      PADDLE_ENFORCE_EQ(
          rank, label_dims.size(),
          "ShapeError: If Attr(soft_label) == true, Input(X) and Input(Label) "
          "shall have the same dimensions. But received: the dimensions of "
          "Input(X) is [%d], the shape of Input(X) is [%s], the dimensions of "
          "Input(Label) is [%d], the shape of Input(Label) is [%s].",
          rank, x_dims, label_dims.size(), label_dims);

      if (check) {
        PADDLE_ENFORCE_EQ(
            x_dims[rank - 1], label_dims[rank - 1],
            "ShapeError: If Attr(soft_label) == true, the last dimension of "
            "Input(X) and Input(Label) should be equal. But received: the "
            "last dimension of Input(X) is [%d], the shape of Input(X) is "
            "[%s], the last dimension of Input(Label) is [%d], the shape of "
            "Input(Label) is [%s], the last dimension is [%d].",
            x_dims[rank - 1], x_dims, label_dims[rank - 1], label_dims,
            rank - 1);
      }
    } else {
      if (rank == label_dims.size()) {
        PADDLE_ENFORCE_EQ(
            label_dims[rank - 1], 1UL,
            "ShapeError: the last dimension of Input(Label) should be 1. "
            "But received: the last dimension of Input(Label) is [%d], "
            "the last dimension is [%d].",
            label_dims[rank - 1], rank - 1);
      } else {
        PADDLE_ENFORCE_EQ(rank, label_dims.size() + 1,
                          "ShapeError: The rank of Input(X) should be equal to "
                          "the rank of Input(Label) plus 1. But received: the "
                          "dimension of Input(X) is [%d], the shape of "
                          "Input(X) is [%s], the dimension of Input(Label) is "
                          "[%d], the shape of Input(Label) is [%s].",
                          rank, x_dims, label_dims.size(), label_dims);
      }
    }

    auto y_dims = label_dims;
    if (rank == label_dims.size()) {
      y_dims[rank - 1] = 1;
    }
    ctx->SetOutputDim("Y", y_dims);
    ctx->ShareLoD("X", /*->*/ "Y");
  }

 protected:
  // Explicitly set that the data type of computation kernel of cross_entropy
  // is determined by its input "X".
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return framework::OpKernelType(ctx.Input<Tensor>("X")->type(),
                                   ctx.device_context());
  }

  virtual bool IsSoftLabel(framework::InferShapeContext* ctx) const {
    return ctx->Attrs().Get<bool>("soft_label");
  }
};
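
// A minimal shape sketch of the contract enforced above (illustrative values,
// not taken from any test): with soft_label == false,
//   X: [N, D], Label: [N, 1] (or [N]) -> Y: [N, 1];
// with soft_label == true,
//   X: [N, D], Label: [N, D] -> Y: [N, 1].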

class CrossEntropyGradientOpBase : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE_EQ(ctx->HasInput("Label"), true,
                      "Input(Label) should not be null.");
    PADDLE_ENFORCE_EQ(ctx->HasInput(framework::GradVarName("Y")), true,
                      "Input(Y@GRAD) should not be null.");
    PADDLE_ENFORCE_EQ(ctx->HasOutput(framework::GradVarName("X")), true,
                      "Output(X@GRAD) should not be null.");

    auto x_dims = GetXDim(ctx);
    auto label_dims = ctx->GetInputDim("Label");
    auto dy_dims = ctx->GetInputDim(framework::GradVarName("Y"));
    int rank = x_dims.size();
    PADDLE_ENFORCE_EQ(
        dy_dims.size(), label_dims.size(),
        "Input(Y@Grad) and Input(Label) should have the same rank.");

    bool check = true;
    if ((!ctx->IsRuntime()) && (framework::product(x_dims) <= 0 ||
                                framework::product(label_dims) <= 0)) {
      check = false;
    }

    if (check) {
      PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank - 1),
                        framework::slice_ddim(dy_dims, 0, rank - 1),
                        "The Input(X) and Input(Y@Grad) should have the same "
                        "shape except the last dimension.");
    }

    ctx->SetOutputDim(framework::GradVarName("X"), x_dims);
    ctx->ShareLoD(VarNameWithXLoD(), framework::GradVarName("X"));
  }

 protected:
  // Explicitly set that the data type of computation kernel of cross_entropy
  // is determined by its input "X".
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return framework::OpKernelType(
        ctx.Input<Tensor>(framework::GradVarName("Y"))->type(),
        ctx.device_context());
  }

  virtual framework::DDim GetXDim(framework::InferShapeContext* ctx) const {
    return ctx->GetInputDim("X");
  }

  virtual const char* VarNameWithXLoD() const { return "X"; }

  virtual bool IsSoftLabel(framework::InferShapeContext* ctx) const {
    return ctx->Attrs().Get<bool>("soft_label");
  }
};

class CrossEntropyOpInferVarType
    : public framework::PassInDtypeAndVarTypeToOutput {
 protected:
  std::unordered_map<std::string, std::string> GetInputOutputWithSameType()
      const override {
    return std::unordered_map<std::string, std::string>{{"X", /*->*/ "Y"}};
  }
};

class CrossEntropyOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X",
             "(Tensor, default Tensor<float>), a tensor whose last dimension "
             "size is equal to the number of classes. This input is a "
             "probability computed by the previous operator, which is almost "
             "always the result of a softmax operator.");
    AddInput(
        "Label",
        "(Tensor), the tensor which represents the ground truth. It has the "
        "same shape with 'X' except the last dimension. When soft_label is set "
        "to false, the last dimension size is 1; when soft_label is set to "
        "true, the last dimension size is equal to the number of classes.");
    AddOutput("Y",
              "(Tensor, default Tensor<float>), a tensor whose shape is same "
              "with 'X' except that the last dimension size is 1. It "
              "represents the cross entropy loss.");
    AddAttr<bool>("soft_label",
                  "(bool, default false), a flag indicating whether to "
                  "interpretate the given labels as soft labels.")
        .SetDefault(false);
    AddAttr<int>("ignore_index",
                 "(int, default -100), specifies a target value that is "
                 "ignored and does not contribute to the input gradient. "
                 "Only valid if soft_label is set to false.")
        .SetDefault(-100);
    AddComment(R"DOC(
CrossEntropy Operator.

The input 'X' and 'Label' will first be logically flattened to 2-D matrices.
The matrix's second dimension (row length) is the same as the original last
dimension, and the first dimension (column length) is the product of all other
original dimensions. The cross entropy computation then takes place on each row
of the flattened matrices.

It supports both standard cross-entropy and soft-label cross-entropy loss
computation.
1) One-hot cross-entropy:
    soft_label = false, Label[i, 0] indicates the class index for sample i:

                $Y[i] = -\log(X[i, Label[i]])$

2) Soft-label cross-entropy:
    soft_label = true, Label[i, j] indicates the soft label of class j
    for sample i:

                $Y[i] = \sum_j{-Label[i, j] * \log(X[i, j])}$

   Please make sure that in this case the summation of each row of Label
   equals one.

3) One-hot cross-entropy with vectorized Input(Label):
     As a special case of 2), when each row of Input(Label) has only one
     non-zero element (equals 1), soft-label cross-entropy degenerates to a
     one-hot cross-entropy with one-hot label representation.

Both the input X and Label can carry the LoD (Level of Detail) information,
or not. But the output only shares the LoD information with input X.

)DOC");
  }
};
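
// A worked instance of the two modes documented above (hypothetical values):
// with X = [[0.2, 0.7, 0.1]] and hard label Label = [[1]],
//   Y[0] = -log(X[0, 1]) = -log(0.7);
// with soft_label == true and Label = [[0.0, 1.0, 0.0]],
//   Y[0] = -(0.0 * log(0.2) + 1.0 * log(0.7) + 0.0 * log(0.1)) = -log(0.7),
// which illustrates how case 3) degenerates to case 1).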

class CrossEntropyGradientOp : public CrossEntropyGradientOpBase {
 public:
  using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;

  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true, "Input(X) should not be null.");
    CrossEntropyGradientOpBase::InferShape(ctx);
  }
};
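
// For reference, the hard-label gradient computed by
// CrossEntropyGradientOpKernel (declared in cross_entropy_op.h) has the
// standard form
//   dX[i, j] = -dY[i] / X[i, j]  if j == Label[i], and 0 otherwise.
// This is a sketch only; it ignores the ignore_index and soft_label paths.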

class CrossEntropyGradOpDescMaker : public framework::SingleGradOpDescMaker {
 public:
  using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;

 protected:
  std::unique_ptr<framework::OpDesc> Apply() const override {
    std::unique_ptr<framework::OpDesc> op(new framework::OpDesc());
    op->SetType("cross_entropy_grad");
    op->SetInput("X", Input("X"));
    op->SetInput("Label", Input("Label"));
    op->SetInput(framework::GradVarName("Y"), OutputGrad("Y"));
    op->SetOutput(framework::GradVarName("X"), InputGrad("X"));
    op->SetAttrMap(Attrs());
    return op;
  }
};
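
// The generated cross_entropy_grad op therefore consumes {X, Label, Y@GRAD}
// and produces {X@GRAD}, mirroring the wiring in Apply() above.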

class CrossEntropyOp2 : public CrossEntropyOpBase {
 public:
  using CrossEntropyOpBase::CrossEntropyOpBase;

  void InferShape(framework::InferShapeContext* ctx) const override {
    CrossEntropyOpBase::InferShape(ctx);

    PADDLE_ENFORCE_EQ(ctx->HasOutput("XShape"), true,
                      "Output(XShape) should not be null.");

    PADDLE_ENFORCE_EQ(ctx->HasOutput("MatchX"), true,
                      "Output(MatchX) should not be null.");
    auto x_dims = ctx->GetInputDim("X");
    auto x_dims_vec = framework::vectorize(x_dims);
    x_dims_vec.push_back(0);
    ctx->SetOutputDim("XShape", framework::make_ddim(x_dims_vec));
    x_dims[x_dims.size() - 1] = 1;
    ctx->SetOutputDim("MatchX", x_dims);
    ctx->ShareLoD("X", /*->*/ "XShape");
  }

 protected:
  bool IsSoftLabel(framework::InferShapeContext* ctx) const override {
    return false;
  }
};
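
// Illustrative shapes for cross_entropy2 (hypothetical example): given
// X: [N, D], the InferShape above yields XShape: [N, D, 0] (a data-free
// carrier of X's shape and LoD), MatchX: [N, 1] (the X entries picked by the
// hard labels, cached for the backward pass), and Y: [N, 1].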

class CrossEntropyGradientOp2 : public CrossEntropyGradientOpBase {
 public:
  using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;
  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE_EQ(ctx->HasInput("MatchX"), true,
                      "Input(MatchX) must exist");
    CrossEntropyGradientOpBase::InferShape(ctx);
  }

 protected:
  framework::DDim GetXDim(framework::InferShapeContext* ctx) const override {
    auto x_shape = ctx->GetInputDim("XShape");
    return framework::DDim(x_shape.Get(), x_shape.size() - 1);
  }

  const char* VarNameWithXLoD() const override { return "XShape"; }

  bool IsSoftLabel(framework::InferShapeContext* ctx) const override {
    return false;
  }
};

class CrossEntropyOpMaker2 : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X",
             "(Tensor, default Tensor<float>), a tensor whose last dimension "
             "size is equal to the number of classes. This input is a "
             "probability computed by the previous operator, which is almost "
             "always the result of a softmax operator.");
    AddInput(
        "Label",
        "(Tensor), the tensor which represents the ground truth. It has the "
        "same shape with 'X' except the last dimension. One hot Tensor.");
    AddOutput("Y",
              "(Tensor, default Tensor<float>), a tensor whose shape is same "
              "with 'X' except that the last dimension size is 1. It "
              "represents the cross entropy loss.");
    AddOutput("XShape", "Temporaily variable to save shape and LoD of X.");
    AddOutput("MatchX",
              "X value that matches label, used for gradient computation.");
    AddAttr<int>("ignore_index",
                 "(int, default -100), Specifies a target value that is"
                 "ignored and does not contribute to the input gradient."
                 "Only valid if soft_label is set to False")
        .SetDefault(-100);
    AddComment(R"DOC(
Hard-label CrossEntropy Operator.

The input 'X' and 'Label' will first be logically flattened to 2-D matrices.
The matrix's second dimension (row length) is the same as the original last
dimension, and the first dimension (column length) is the product of all other
original dimensions. The cross entropy computation then takes place on each row
of the flattened matrices.

Only hard labels are supported.

Both the input X and Label can carry the LoD (Level of Detail) information,
or not. But the output only shares the LoD information with input X.

)DOC");
  }
};

class CrossEntropyGradOpDescMaker2 : public framework::SingleGradOpDescMaker {
 public:
  using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;

 protected:
  std::unique_ptr<framework::OpDesc> Apply() const override {
    std::unique_ptr<framework::OpDesc> op(new framework::OpDesc());
    op->SetType("cross_entropy_grad2");
    op->SetInput("Label", Input("Label"));
    op->SetInput("MatchX", Output("MatchX"));
    op->SetInput("XShape", Output("XShape"));
    op->SetInput(framework::GradVarName("Y"), OutputGrad("Y"));
    op->SetOutput(framework::GradVarName("X"), InputGrad("X"));
    op->SetAttrMap(Attrs());
    return op;
  }
};
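
// Note that the generated cross_entropy_grad2 op consumes
// {Label, MatchX, XShape, Y@GRAD} and produces {X@GRAD}; X itself is not
// needed in the backward pass because MatchX already caches the matched
// probabilities.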

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
using CPUCtx = paddle::platform::CPUDeviceContext;

REGISTER_OPERATOR(cross_entropy, ops::CrossEntropyOpBase,
                  ops::CrossEntropyOpMaker, ops::CrossEntropyOpInferVarType,
                  ops::CrossEntropyGradOpDescMaker);
REGISTER_OPERATOR(cross_entropy_grad, ops::CrossEntropyGradientOp);
REGISTER_OP_CPU_KERNEL(cross_entropy, ops::CrossEntropyOpKernel<CPUCtx, float>,
                       ops::CrossEntropyOpKernel<CPUCtx, double>);
REGISTER_OP_CPU_KERNEL(cross_entropy_grad,
                       ops::CrossEntropyGradientOpKernel<CPUCtx, float>,
                       ops::CrossEntropyGradientOpKernel<CPUCtx, double>);

REGISTER_OPERATOR(cross_entropy2, ops::CrossEntropyOp2,
                  ops::CrossEntropyOpMaker2, ops::CrossEntropyOpInferVarType,
                  ops::CrossEntropyGradOpDescMaker2);
REGISTER_OPERATOR(cross_entropy_grad2, ops::CrossEntropyGradientOp2);
REGISTER_OP_CPU_KERNEL(cross_entropy2,
                       ops::CrossEntropyOpKernel2<CPUCtx, float>,
                       ops::CrossEntropyOpKernel2<CPUCtx, double>);
REGISTER_OP_CPU_KERNEL(cross_entropy_grad2,
                       ops::CrossEntropyGradientOpKernel2<CPUCtx, float>,
                       ops::CrossEntropyGradientOpKernel2<CPUCtx, double>);