/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/cross_entropy_op.h"
#include <string>
#include "paddle/fluid/operators/cross_entropy_op_base.h"

namespace paddle {
namespace operators {

class CrossEntropyOpMaker : public framework::OpProtoAndCheckerMaker {
23
 public:
Y
Yu Yang 已提交
24
  void Make() override {
C
caoying03 已提交
25
    AddInput("X",
F
stash  
fengjiayi 已提交
26 27 28 29 30 31 32 33 34 35
             "(Tensor, default Tensor<float>), a tensor whose last dimension "
             "size is equal to the number of classes. This input is a "
             "probability computed by the previous operator, which is almost "
             "always the result of a softmax operator.");
    AddInput(
        "Label",
        "(Tensor), the tensor which represents the ground truth. It has the "
        "same shape with 'X' except the last dimension. When soft_label is set "
        "to false, the last dimension size is 1; when soft_label is set to "
        "true, the last dimension size is equal to the number of classes.");
C
caoying03 已提交
36
    AddOutput("Y",
F
stash  
fengjiayi 已提交
37 38 39
              "(Tensor, default Tensor<float>), a tensor whose shape is same "
              "with 'X' except that the last dimension size is 1. It "
              "represents the cross entropy loss.");
C
caoying03 已提交
40 41 42
    AddAttr<bool>("soft_label",
                  "(bool, default false), a flag indicating whether to "
                  "interpretate the given labels as soft labels.")
43
        .SetDefault(false);
44 45 46 47 48
    AddAttr<int>("ignore_index",
                 "(int, default -100), Specifies a target value that is"
                 "ignored and does not contribute to the input gradient."
                 "Only valid if soft_label is set to False")
        .SetDefault(-100);
Q
Qiao Longfei 已提交
49
    AddComment(R"DOC(
50
CrossEntropy Operator.
Q
Qiao Longfei 已提交
51

F
stash  
fengjiayi 已提交
52 53 54 55 56 57
The input 'X' and 'Label' will first be logically flattened to 2-D matrixs. 
The matrix's second dimension(row length) is as same as the original last 
dimension, and the first dimension(column length) is the product of all other 
original dimensions. Then the softmax computation will take palce on each raw 
of flattened matrixs.

58 59 60
It supports both standard cross-entropy and soft-label cross-entropy loss
computation.
1) One-hot cross-entropy:
61
    soft_label = false, Label[i, 0] indicates the class index for sample i:
62

K
Kexin Zhao 已提交
63
                $Y[i] = -\log(X[i, Label[i]])$
Q
Qiao Longfei 已提交
64

65
2) Soft-label cross-entropy:
66
    soft_label = true, Label[i, j] indicates the soft label of class j
67
    for sample i:
68

K
Kexin Zhao 已提交
69
                $Y[i] = \sum_j{-Label[i, j] * log(X[i, j])}$
70

71
   Please make sure that in this case the summuation of each row of Label
72 73 74 75 76 77
   equals one.

3) One-hot cross-entropy with vecterized Input(Label):
     As a special case of 2), when each row of Input(Label) has only one
     non-zero element (equals 1), soft-label cross-entropy degenerates to a
     one-hot cross-entropy with one-hot label representation.
D
dangqingqing 已提交
78

K
Kexin Zhao 已提交
79 80 81
Both the input X and Label can carry the LoD (Level of Details) information,
or not. But the output only shares the LoD information with input X.

Q
Qiao Longfei 已提交
82 83 84
)DOC");
  }
};
// Gradient operator for cross_entropy. Shape inference is shared with other
// cross-entropy variants through CrossEntropyGradientOpBase (declared in
// cross_entropy_op_base.h); this subclass only adds a presence check on the
// forward input X before delegating.
class CrossEntropyGradientOp : public CrossEntropyGradientOpBase {
 public:
  using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;

  void InferShape(framework::InferShapeContext *ctx) const override {
    // Require the forward input X here — presumably because this op's
    // gradient kernel reads X, unlike variants covered by the base class
    // alone. NOTE(review): confirm against CrossEntropyGradientOpKernel.
    PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
    CrossEntropyGradientOpBase::InferShape(ctx);
  }
};
}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
using CPUCtx = paddle::platform::CPUDeviceContext;

// Register the forward op: shape inference (CrossEntropyOpBase), proto maker,
// output var-type inference, and an auto-generated grad op descriptor.
// NOTE(review): the <true> template argument's exact meaning is defined in
// framework/grad_op_desc_maker.h — confirm before relying on it.
REGISTER_OPERATOR(cross_entropy, ops::CrossEntropyOpBase,
                  ops::CrossEntropyOpMaker, ops::CrossEntropyOpInferVarType,
                  paddle::framework::DefaultGradOpDescMaker<true>);
REGISTER_OPERATOR(cross_entropy_grad, ops::CrossEntropyGradientOp);
// CPU kernels for both supported element types (float and double).
REGISTER_OP_CPU_KERNEL(cross_entropy, ops::CrossEntropyOpKernel<CPUCtx, float>,
                       ops::CrossEntropyOpKernel<CPUCtx, double>);
REGISTER_OP_CPU_KERNEL(cross_entropy_grad,
                       ops::CrossEntropyGradientOpKernel<CPUCtx, float>,
                       ops::CrossEntropyGradientOpKernel<CPUCtx, double>);