/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/cross_entropy_op.h"
#include <memory>
#include <string>
#include <unordered_map>

namespace paddle {
namespace operators {

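// Shape inference and kernel-type selection shared by the cross_entropy
// ops. As an illustrative example (shapes assumed here, not taken from the
// code): with soft_label == false, X = [N, D] probabilities and
// Label = [N, 1] int64 class indices infer Y = [N, 1]; with
// soft_label == true, Label must have exactly the same shape as X.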
class CrossEntropyOpBase : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "CrossEntropy");
    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "CrossEntropy");
    OP_INOUT_CHECK(ctx->HasOutput("Y"), "Output", "Y", "CrossEntropy");

    auto x_dims = ctx->GetInputDim("X");
    auto label_dims = ctx->GetInputDim("Label");
    int rank = x_dims.size();

    bool contain_unknown_dim = framework::contain_unknown_dim(x_dims) ||
                               framework::contain_unknown_dim(label_dims);
    bool check = ctx->IsRuntime() || !contain_unknown_dim;

    if (check) {
      PADDLE_ENFORCE_EQ(
          framework::slice_ddim(x_dims, 0, rank - 1),
          framework::slice_ddim(label_dims, 0, rank - 1),
          platform::errors::InvalidArgument(
              "Input(X) and Input(Label) shall have the same shape "
              "except the last dimension. But received: the shape of Input(X) "
              "is "
              "[%s], the shape of Input(Label) is [%s].",
              x_dims, label_dims));
    }

    if (IsSoftLabel(ctx)) {
      PADDLE_ENFORCE_EQ(
          rank, label_dims.size(),
          platform::errors::InvalidArgument(
              "If Attr(soft_label) == true, Input(X) and Input(Label) "
              "shall have the same dimensions. But received: the rank of "
              "Input(X) is [%d], the shape of Input(X) is [%s], the rank of "
              "Input(Label) is [%d], the shape of Input(Label) is [%s].",
              rank, x_dims, label_dims.size(), label_dims));

      if (check) {
        PADDLE_ENFORCE_EQ(
            x_dims[rank - 1], label_dims[rank - 1],
            platform::errors::InvalidArgument(
                "If Attr(soft_label) == true, the last dimension of "
                "Input(X) and Input(Label) should be equal. But received: "
                "the last dimension of Input(X) is [%d], the shape of "
                "Input(X) is [%s], the last dimension of Input(Label) is "
                "[%d], the shape of Input(Label) is [%s], the last dimension "
                "index is [%d].",
                x_dims[rank - 1], x_dims, label_dims[rank - 1], label_dims,
                rank - 1));
      }
    } else {
      if (rank == label_dims.size()) {
        PADDLE_ENFORCE_EQ(
            label_dims[rank - 1], 1UL,
            platform::errors::InvalidArgument(
                "The last dimension of Input(Label) should be 1. "
                "But received: the last dimension of Input(Label) is [%d], "
                "the last dimension index is [%d].",
                label_dims[rank - 1], rank - 1));
      } else {
        PADDLE_ENFORCE_EQ(
            rank, label_dims.size() + 1,
            platform::errors::InvalidArgument(
                "ShapeError: the rank of Input(X) should be equal to "
                "the rank of Input(Label) plus 1. But received: the rank of "
                "Input(X) is [%d], the shape of Input(X) is [%s], the rank "
                "of Input(Label) is [%d], the shape of Input(Label) is "
                "[%s].",
                rank, x_dims, label_dims.size(), label_dims));
      }
    }

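    // Y takes Label's shape; when Label has the same rank as X, its last
    // dimension collapses to 1.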
    auto y_dims = label_dims;
    if (rank == label_dims.size()) {
      y_dims[rank - 1] = 1;
    }
    ctx->SetOutputDim("Y", y_dims);
    ctx->ShareLoD("X", /*->*/ "Y");
  }

 protected:
  // Explicitly set that the data type of the computation kernel of
  // cross_entropy is determined by its input "X".
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return framework::OpKernelType(
        OperatorWithKernel::IndicateVarDataType(ctx, "X"),
        ctx.device_context());
  }

  virtual bool IsSoftLabel(framework::InferShapeContext* ctx) const {
    return ctx->Attrs().Get<bool>("soft_label");
  }
};

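// Common backward shape inference: checks that Y@GRAD matches Label in rank
// and X in all but the last dimension, then gives X@GRAD the shape of X (as
// reported by GetXDim, which subclasses may redirect to a saved shape).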
class CrossEntropyGradientOpBase : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label",
                   "CrossEntropyGradientOpBase");
    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Y")), "Input",
                   framework::GradVarName("Y"), "CrossEntropyGradientOpBase");
    OP_INOUT_CHECK(ctx->HasOutput(framework::GradVarName("X")), "Output",
                   framework::GradVarName("X"), "CrossEntropyGradientOpBase");

    auto x_dims = GetXDim(ctx);
    auto label_dims = ctx->GetInputDim("Label");
    auto dy_dims = ctx->GetInputDim(framework::GradVarName("Y"));
    int rank = x_dims.size();
    PADDLE_ENFORCE_EQ(
        dy_dims.size(), label_dims.size(),
        platform::errors::InvalidArgument(
            "Input(Y@Grad) and Input(Y) should have the same rank."
            "But received: Y@Grad's rank is [%d], Y's rank is [%d]",
            dy_dims.size(), label_dims.size()));

    bool check = true;
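    // At compile time, skip the shape comparison while any dimension is
    // still unknown (an unknown dim makes the element product non-positive).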
    if ((!ctx->IsRuntime()) &&
        (framework::product(x_dims) <= 0 || framework::product(dy_dims) <= 0)) {
      check = false;
    }

    if (check) {
      PADDLE_ENFORCE_EQ(
          framework::slice_ddim(x_dims, 0, rank - 1),
          framework::slice_ddim(dy_dims, 0, rank - 1),
          platform::errors::InvalidArgument(
              "The Input(X) and Input(Y@Grad) should have the same "
              "shape except the last dimension. but received: "
              "the shape of Input(X) is [%s], "
              "the shape of Input(Y@Grad) is [%s].",
              x_dims, dy_dims));
    }

    ctx->SetOutputDim(framework::GradVarName("X"), x_dims);
    ctx->ShareLoD(VarNameWithXLoD(), framework::GradVarName("X"));
  }

 protected:
  // Explicitly set that the data type of the computation kernel of
  // cross_entropy_grad is determined by its input "Y@GRAD".
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return framework::OpKernelType(OperatorWithKernel::IndicateVarDataType(
                                       ctx, framework::GradVarName("Y")),
                                   ctx.device_context());
  }

  virtual framework::DDim GetXDim(framework::InferShapeContext* ctx) const {
    return ctx->GetInputDim("X");
  }

  virtual const char* VarNameWithXLoD() const { return "X"; }

  virtual bool IsSoftLabel(framework::InferShapeContext* ctx) const {
    return ctx->Attrs().Get<bool>("soft_label");
  }
};

class CrossEntropyOpInferVarType
    : public framework::PassInDtypeAndVarTypeToOutput {
 protected:
  std::unordered_map<std::string, std::string>& GetInputOutputWithSameType()
      const override {
    static std::unordered_map<std::string, std::string> m{{"X", /*->*/ "Y"}};
    return m;
  }
};

class CrossEntropyOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X",
             "(Tensor, default Tensor<float>), a tensor whose last dimension "
             "size is equal to the number of classes. This input is a "
             "probability computed by the previous operator, which is almost "
             "always the result of a softmax operator.");
    AddInput(
        "Label",
        "(Tensor), the tensor which represents the ground truth. It has the "
        "same shape with 'X' except the last dimension. When soft_label is set "
        "to false, the last dimension size is 1; when soft_label is set to "
        "true, the last dimension size is equal to the number of classes.");
    AddOutput("Y",
              "(Tensor, default Tensor<float>), a tensor whose shape is same "
              "with 'X' except that the last dimension size is 1. It "
              "represents the cross entropy loss.");
    AddAttr<bool>("soft_label",
                  "(bool, default false), a flag indicating whether to "
                  "interpretant the given labels as soft labels.")
        .SetDefault(false);
    AddAttr<int>("ignore_index",
                 "(int, default -100), Specifies a target value that is"
                 "ignored and does not contribute to the input gradient."
                 "Only valid if soft_label is set to False")
        .SetDefault(-100);
    AddComment(R"DOC(
CrossEntropy Operator.

The input 'X' and 'Label' are first logically flattened to 2-D matrices.
The matrix's second dimension (row length) is the same as the original last
dimension, and the first dimension (column length) is the product of all the
other original dimensions. The cross entropy computation then takes place on
each row of the flattened matrices.

It supports both standard cross-entropy and soft-label cross-entropy loss
computation.
1) One-hot cross-entropy:
    soft_label = false, Label[i, 0] indicates the class index for sample i:

                $Y[i] = -\log(X[i, Label[i]])$

2) Soft-label cross-entropy:
    soft_label = true, Label[i, j] indicates the soft label of class j
    for sample i:

                $Y[i] = \sum_j{-Label[i, j] * \log(X[i, j])}$

   Please make sure that in this case the summation of each row of Label
   equals one.

3) One-hot cross-entropy with vectorized Input(Label):
     As a special case of 2), when each row of Input(Label) has only one
     non-zero element (equals 1), soft-label cross-entropy degenerates to a
     one-hot cross-entropy with one-hot label representation.
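
Example (values chosen purely for illustration): given X = [[0.2, 0.7, 0.1]],
  1) one-hot with Label = [[1]] gives Y = [[-log(0.7)]] ~= [[0.357]];
  2) soft-label with Label = [[0.0, 0.8, 0.2]] gives
     Y = [[-(0.8 * log(0.7) + 0.2 * log(0.1))]] ~= [[0.746]].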

Both the input X and Label can carry the LoD (Level of Details) information,
or not. But the output only shares the LoD information with input X.

)DOC");
  }
};

class CrossEntropyGradientOp : public CrossEntropyGradientOpBase {
 public:
  using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;

  void InferShape(framework::InferShapeContext* ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "CrossEntropyGradientOp");
    CrossEntropyGradientOpBase::InferShape(ctx);
  }
};

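// Grad maker for the standard op: the backward op re-reads X and Label and
// consumes Y@GRAD to produce X@GRAD.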
template <typename T>
class CrossEntropyGradOpMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> op) const override {
    op->SetType("cross_entropy_grad");
    op->SetInput("X", this->Input("X"));
    op->SetInput("Label", this->Input("Label"));
    op->SetInput(framework::GradVarName("Y"), this->OutputGrad("Y"));
    op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
    op->SetAttrMap(this->Attrs());
  }
};

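// Hard-label-only variant. Besides Y, it emits two helper outputs so that
// the backward pass can run without X itself: XShape saves X's shape and
// LoD, and MatchX saves the probability picked out by each label (the shape
// of X with its last dimension set to 1).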
class CrossEntropyOp2 : public CrossEntropyOpBase {
 public:
  using CrossEntropyOpBase::CrossEntropyOpBase;

  void InferShape(framework::InferShapeContext* ctx) const override {
    CrossEntropyOpBase::InferShape(ctx);

    OP_INOUT_CHECK(ctx->HasOutput("XShape"), "Output", "XShape",
                   "CrossEntropyOp2");
    OP_INOUT_CHECK(ctx->HasOutput("MatchX"), "Output", "MatchX",
                   "CrossEntropyOp2");
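    // XShape stores X's dims plus a trailing 0; the backward op recovers the
    // real shape by dropping that sentinel entry (see GetXDim in
    // CrossEntropyGradientOp2).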
    auto x_dims = ctx->GetInputDim("X");
    auto x_dims_vec = framework::vectorize(x_dims);
    x_dims_vec.push_back(0);
    ctx->SetOutputDim("XShape", framework::make_ddim(x_dims_vec));
    x_dims[x_dims.size() - 1] = 1;
    ctx->SetOutputDim("MatchX", x_dims);
    ctx->ShareLoD("X", /*->*/ "XShape");
  }

 protected:
  bool IsSoftLabel(framework::InferShapeContext* ctx) const override {
    return false;
  }
};

class CrossEntropyGradientOp2 : public CrossEntropyGradientOpBase {
 public:
  using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;
  void InferShape(framework::InferShapeContext* ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("MatchX"), "Input", "MatchX",
                   "CrossEntropyGradientOp2");
    CrossEntropyGradientOpBase::InferShape(ctx);
  }

 protected:
  framework::DDim GetXDim(framework::InferShapeContext* ctx) const override {
    auto x_shape = ctx->GetInputDim("XShape");
    return framework::DDim(x_shape.Get(), x_shape.size() - 1);
  }

  const char* VarNameWithXLoD() const override { return "XShape"; }

  bool IsSoftLabel(framework::InferShapeContext* ctx) const override {
    return false;
  }
};

class CrossEntropyOpMaker2 : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X",
             "(Tensor, default Tensor<float>), a tensor whose last dimension "
             "size is equal to the number of classes. This input is a "
             "probability computed by the previous operator, which is almost "
             "always the result of a softmax operator.");
    AddInput(
        "Label",
        "(Tensor), the tensor which represents the ground truth. It has the "
        "same shape with 'X' except the last dimension. One hot Tensor.");
    AddOutput("Y",
              "(Tensor, default Tensor<float>), a tensor whose shape is same "
              "with 'X' except that the last dimension size is 1. It "
              "represents the cross entropy loss.");
    AddOutput("XShape", "Temporaily variable to save shape and LoD of X.");
    AddOutput("MatchX",
              "X value that matches label, used for gradient computation.");
    AddAttr<int>("ignore_index",
                 "(int, default -100), Specifies a target value that is"
                 "ignored and does not contribute to the input gradient."
                 "Only valid if soft_label is set to False")
        .SetDefault(-100);
    AddComment(R"DOC(
Hard-label CrossEntropy Operator.

The input 'X' and 'Label' are first logically flattened to 2-D matrices.
The matrix's second dimension (row length) is the same as the original last
dimension, and the first dimension (column length) is the product of all the
other original dimensions. The cross entropy computation then takes place on
each row of the flattened matrices.

Only hard labels are supported.

Both the input X and Label can carry the LoD (Level of Details) information,
or not. But the output only shares the LoD information with input X.

)DOC");
  }
};

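// Grad maker for the fused variant: the backward op consumes the saved
// XShape and MatchX instead of X itself.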
template <typename T>
class CrossEntropyGradOpMaker2 : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> op) const override {
    op->SetType("cross_entropy_grad2");
    op->SetInput("Label", this->Input("Label"));
    op->SetInput("MatchX", this->Output("MatchX"));
    op->SetInput("XShape", this->Output("XShape"));
    op->SetInput(framework::GradVarName("Y"), this->OutputGrad("Y"));
    op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
    op->SetAttrMap(this->Attrs());
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
using CPUCtx = paddle::platform::CPUDeviceContext;

REGISTER_OPERATOR(cross_entropy, ops::CrossEntropyOpBase,
                  ops::CrossEntropyOpMaker, ops::CrossEntropyOpInferVarType,
                  ops::CrossEntropyGradOpMaker<paddle::framework::OpDesc>,
                  ops::CrossEntropyGradOpMaker<paddle::imperative::OpBase>);
REGISTER_OPERATOR(cross_entropy_grad, ops::CrossEntropyGradientOp);
REGISTER_OP_CPU_KERNEL(cross_entropy, ops::CrossEntropyOpKernel<CPUCtx, float>,
                       ops::CrossEntropyOpKernel<CPUCtx, double>);
REGISTER_OP_CPU_KERNEL(cross_entropy_grad,
                       ops::CrossEntropyGradientOpKernel<CPUCtx, float>,
                       ops::CrossEntropyGradientOpKernel<CPUCtx, double>);

REGISTER_OPERATOR(cross_entropy2, ops::CrossEntropyOp2,
                  ops::CrossEntropyOpMaker2, ops::CrossEntropyOpInferVarType,
                  ops::CrossEntropyGradOpMaker2<paddle::framework::OpDesc>,
                  ops::CrossEntropyGradOpMaker2<paddle::imperative::OpBase>);
REGISTER_OPERATOR(cross_entropy_grad2, ops::CrossEntropyGradientOp2);
REGISTER_OP_CPU_KERNEL(cross_entropy2,
                       ops::CrossEntropyOpKernel2<CPUCtx, float>,
                       ops::CrossEntropyOpKernel2<CPUCtx, double>);
REGISTER_OP_CPU_KERNEL(cross_entropy_grad2,
                       ops::CrossEntropyGradientOpKernel2<CPUCtx, float>,
                       ops::CrossEntropyGradientOpKernel2<CPUCtx, double>);