/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/cross_entropy_op.h"
#include <memory>
#include <string>
#include <unordered_map>

namespace paddle {
namespace operators {

class CrossEntropyOpBase : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
    PADDLE_ENFORCE(ctx->HasInput("Label"), "Input(Label) should be not null.");

    PADDLE_ENFORCE(ctx->HasOutput("Y"), "Output(Y) should be not null.");

    auto x_dims = ctx->GetInputDim("X");
    auto label_dims = ctx->GetInputDim("Label");
    int rank = x_dims.size();
    PADDLE_ENFORCE_EQ(rank, label_dims.size(),
                      "Input(X) and Input(Label) shall have the same rank.");
    bool contain_unknown_dim = framework::contain_unknown_dim(x_dims) ||
                               framework::contain_unknown_dim(label_dims);
    bool check = ctx->IsRuntime() || !contain_unknown_dim;
    if (check) {
      PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank - 1),
                        framework::slice_ddim(label_dims, 0, rank - 1),
                        "Input(X) and Input(Label) shall have the same shape "
                        "except the last dimension.");
    }

    if (IsSoftLabel(ctx)) {
      if (check) {
        PADDLE_ENFORCE_EQ(x_dims[rank - 1], label_dims[rank - 1],
                          "If Attr(soft_label) == true, the last dimension of "
                          "Input(X) and Input(Label) should be equal.");
      }
    } else {
      PADDLE_ENFORCE_EQ(label_dims[rank - 1], 1UL,
                        "If Attr(softLabel) == false, the last dimension of "
                        "Input(Label) should be 1.");
    }

    auto y_dims = x_dims;
    y_dims[rank - 1] = 1;
    ctx->SetOutputDim("Y", y_dims);
    ctx->ShareLoD("X", /*->*/ "Y");
  }

 protected:
  // Explicitly set that the data type of computation kernel of cross_entropy
  // is determined by its input "X".
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return framework::OpKernelType(ctx.Input<Tensor>("X")->type(),
                                   ctx.device_context());
  }

  virtual bool IsSoftLabel(framework::InferShapeContext* ctx) const {
    return ctx->Attrs().Get<bool>("soft_label");
  }
};
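
// Illustrative shape example for the checks above (not part of the op
// definition): given Input(X) with shape [N, D, C], where C is the number of
// classes,
//   soft_label == false: Label is [N, D, 1] (class indices), Y is [N, D, 1];
//   soft_label == true:  Label is [N, D, C] (distributions), Y is [N, D, 1].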

class CrossEntropyGradientOpBase : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("Label"), "Input(Label) should be not null.");
    PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Y")),
                   "Input(Y@GRAD) shoudl be not null.");
    PADDLE_ENFORCE(ctx->HasOutput(framework::GradVarName("X")),
                   "Output(X@GRAD) should be not null.");

    auto x_dims = GetXDim(ctx);
    auto label_dims = ctx->GetInputDim("Label");
    auto dy_dims = ctx->GetInputDim(framework::GradVarName("Y"));
    int rank = x_dims.size();
    PADDLE_ENFORCE_EQ(dy_dims.size(), rank,
                      "Input(Y@Grad) and Input(X) should have the same rank.");
    PADDLE_ENFORCE_EQ(label_dims.size(), rank,
                      "Input(Label) and Input(X) should have the same rank.");

    bool check = true;
    if ((!ctx->IsRuntime()) && (framework::product(x_dims) <= 0 ||
                                framework::product(label_dims) <= 0)) {
      check = false;
    }

    if (check) {
      PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank - 1),
                        framework::slice_ddim(label_dims, 0, rank - 1),
                        "The Input(X) and Input(Label) should have the same "
                        "shape except the last dimension.");
      PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank - 1),
                        framework::slice_ddim(dy_dims, 0, rank - 1),
                        "The Input(X) and Input(Y@Grad) should have the same "
                        "shape except the last dimension.");
    }
    if (IsSoftLabel(ctx)) {
      if (check) {
        PADDLE_ENFORCE_EQ(
            x_dims[rank - 1], label_dims[rank - 1],
            "When Attr(soft_label) == true, the last dimension of "
            "Input(X) and Input(Label) should be equal.");
      }
    } else {
      PADDLE_ENFORCE_EQ(label_dims[rank - 1], 1,
                        "When Attr(soft_label) == false, the last dimension of "
                        "Input(Label) should be 1.");
    }
    PADDLE_ENFORCE_EQ(dy_dims[rank - 1], 1,
                      "The last dimension of Input(Y@Grad) should be 1.");
    ctx->SetOutputDim(framework::GradVarName("X"), x_dims);
    ctx->ShareLoD(VarNameWithXLoD(), framework::GradVarName("X"));
  }

 protected:
  // Explicitly set that the data type of computation kernel of cross_entropy
  // is determined by its input "X".
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext& ctx) const override {
    return framework::OpKernelType(
        ctx.Input<Tensor>(framework::GradVarName("Y"))->type(),
        ctx.device_context());
  }

  virtual framework::DDim GetXDim(framework::InferShapeContext* ctx) const {
    return ctx->GetInputDim("X");
  }

  virtual const char* VarNameWithXLoD() const { return "X"; }

  virtual bool IsSoftLabel(framework::InferShapeContext* ctx) const {
    return ctx->Attrs().Get<bool>("soft_label");
  }
};
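
// For reference, the gradient the registered kernels compute (a sketch of
// the standard cross-entropy math, not a quote of the kernel code):
//   soft_label == true:  dX[i, j] = -dY[i] * Label[i, j] / X[i, j]
//   soft_label == false: dX[i, Label[i]] = -dY[i] / X[i, Label[i]],
//                        and dX[i, j] = 0 for all other j.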

class CrossEntropyOpInferVarType
    : public framework::PassInDtypeAndVarTypeToOutput {
 protected:
  std::unordered_map<std::string, std::string> GetInputOutputWithSameType()
      const override {
    return std::unordered_map<std::string, std::string>{{"X", /*->*/ "Y"}};
  }
};

class CrossEntropyOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X",
             "(Tensor, default Tensor<float>), a tensor whose last dimension "
             "size is equal to the number of classes. This input is a "
             "probability computed by the previous operator, which is almost "
             "always the result of a softmax operator.");
    AddInput(
        "Label",
        "(Tensor), the tensor which represents the ground truth. It has the "
        "same shape with 'X' except the last dimension. When soft_label is set "
        "to false, the last dimension size is 1; when soft_label is set to "
        "true, the last dimension size is equal to the number of classes.");
    AddOutput("Y",
              "(Tensor, default Tensor<float>), a tensor whose shape is same "
              "with 'X' except that the last dimension size is 1. It "
              "represents the cross entropy loss.");
    AddAttr<bool>("soft_label",
                  "(bool, default false), a flag indicating whether to "
                  "interpretate the given labels as soft labels.")
        .SetDefault(false);
    AddAttr<int>("ignore_index",
                 "(int, default -100), Specifies a target value that is"
                 "ignored and does not contribute to the input gradient."
                 "Only valid if soft_label is set to False")
        .SetDefault(-100);
    AddComment(R"DOC(
CrossEntropy Operator.

The input 'X' and 'Label' will first be logically flattened to 2-D matrices.
The matrix's second dimension (row length) is the same as the original last
dimension, and the first dimension (column length) is the product of all other
original dimensions. The cross entropy computation then takes place on each
row of the flattened matrices.

It supports both standard cross-entropy and soft-label cross-entropy loss
computation.
1) One-hot cross-entropy:
    soft_label = false, Label[i, 0] indicates the class index for sample i:

                $Y[i] = -\log(X[i, Label[i]])$

2) Soft-label cross-entropy:
    soft_label = true, Label[i, j] indicates the soft label of class j
    for sample i:

                $Y[i] = \sum_j{-Label[i, j] * \log(X[i, j])}$

   Please make sure that in this case the summation of each row of Label
   equals one.

3) One-hot cross-entropy with vectorized Input(Label):
     As a special case of 2), when each row of Input(Label) has only one
     non-zero element (equals 1), soft-label cross-entropy degenerates to a
     one-hot cross-entropy with one-hot label representation.

Both the input X and Label can carry the LoD (Level of Details) information,
or not. But the output only shares the LoD information with input X.

)DOC");
  }
};
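
// A small worked example of the two modes documented above (illustrative
// numbers only): let X = [[0.2, 0.7, 0.1]].
//   soft_label == false, Label = [[1]]:
//     Y[0] = -log(X[0, 1]) = -log(0.7) ~= 0.357
//   soft_label == true, Label = [[0.1, 0.8, 0.1]]:
//     Y[0] = -(0.1*log(0.2) + 0.8*log(0.7) + 0.1*log(0.1)) ~= 0.677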

class CrossEntropyGradientOp : public CrossEntropyGradientOpBase {
 public:
  using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;

  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
    CrossEntropyGradientOpBase::InferShape(ctx);
  }
};

class CrossEntropyGradOpDescMaker : public framework::SingleGradOpDescMaker {
 public:
  using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;

 protected:
  std::unique_ptr<framework::OpDesc> Apply() const override {
    std::unique_ptr<framework::OpDesc> op(new framework::OpDesc());
    op->SetType("cross_entropy_grad");
    op->SetInput("X", Input("X"));
    op->SetInput("Label", Input("Label"));
    op->SetInput(framework::GradVarName("Y"), OutputGrad("Y"));
    op->SetOutput(framework::GradVarName("X"), InputGrad("X"));
    op->SetAttrMap(Attrs());
    return op;
  }
};

class CrossEntropyOp2 : public CrossEntropyOpBase {
 public:
  using CrossEntropyOpBase::CrossEntropyOpBase;

  void InferShape(framework::InferShapeContext* ctx) const override {
    CrossEntropyOpBase::InferShape(ctx);

    PADDLE_ENFORCE(ctx->HasOutput("XShape"),
                   "Output(XShape) should be not null.");

    PADDLE_ENFORCE(ctx->HasOutput("MatchX"),
                   "Output(MatchX) should be not null.");
    auto x_dims = ctx->GetInputDim("X");
    auto x_dims_vec = framework::vectorize(x_dims);
    x_dims_vec.push_back(0);
    ctx->SetOutputDim("XShape", framework::make_ddim(x_dims_vec));
    x_dims[x_dims.size() - 1] = 1;
    ctx->SetOutputDim("MatchX", x_dims);
    ctx->ShareLoD("X", /*->*/ "XShape");
  }

 protected:
  bool IsSoftLabel(framework::InferShapeContext* ctx) const override {
    return false;
  }
};
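
// Sketch of the role of MatchX (assuming the hard-label kernels registered
// below): MatchX[i] = X[i, Label[i]], the predicted probability of the ground
// truth class. The backward pass can then form
//   dX[i, Label[i]] = -dY[i] / MatchX[i]
// without keeping the full X tensor alive; only XShape (shape and LoD) and
// MatchX are needed.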

class CrossEntropyGradientOp2 : public CrossEntropyGradientOpBase {
 public:
  using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;
  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("MatchX"), "Input(MatchX) must exist");
    CrossEntropyGradientOpBase::InferShape(ctx);
  }

 protected:
  framework::DDim GetXDim(framework::InferShapeContext* ctx) const override {
    auto x_shape = ctx->GetInputDim("XShape");
    return framework::DDim(x_shape.Get(), x_shape.size() - 1);
  }

  const char* VarNameWithXLoD() const override { return "XShape"; }

  bool IsSoftLabel(framework::InferShapeContext* ctx) const override {
    return false;
  }
};

class CrossEntropyOpMaker2 : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X",
             "(Tensor, default Tensor<float>), a tensor whose last dimension "
             "size is equal to the number of classes. This input is a "
             "probability computed by the previous operator, which is almost "
             "always the result of a softmax operator.");
    AddInput(
        "Label",
        "(Tensor), the tensor which represents the ground truth. It has the "
        "same shape with 'X' except the last dimension. One hot Tensor.");
    AddOutput("Y",
              "(Tensor, default Tensor<float>), a tensor whose shape is same "
              "with 'X' except that the last dimension size is 1. It "
              "represents the cross entropy loss.");
    AddOutput("XShape", "Temporaily variable to save shape and LoD of X.");
    AddOutput("MatchX",
              "X value that matches label, used for gradient computation.");
    AddAttr<int>("ignore_index",
                 "(int, default -100), Specifies a target value that is"
                 "ignored and does not contribute to the input gradient."
                 "Only valid if soft_label is set to False")
        .SetDefault(-100);
    AddComment(R"DOC(
Hard-label CrossEntropy Operator.

The input 'X' and 'Label' will first be logically flattened to 2-D matrices.
The matrix's second dimension (row length) is the same as the original last
dimension, and the first dimension (column length) is the product of all other
original dimensions. The cross entropy computation then takes place on each
row of the flattened matrices.

Only hard labels are supported.

Both the input X and Label can carry the LoD (Level of Details) information,
or not. But the output only shares the LoD information with input X.

)DOC");
  }
};

class CrossEntropyGradOpDescMaker2 : public framework::SingleGradOpDescMaker {
 public:
  using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;

 protected:
  std::unique_ptr<framework::OpDesc> Apply() const override {
    std::unique_ptr<framework::OpDesc> op(new framework::OpDesc());
    op->SetType("cross_entropy_grad2");
    op->SetInput("Label", Input("Label"));
    op->SetInput("MatchX", Output("MatchX"));
    op->SetInput("XShape", Output("XShape"));
    op->SetInput(framework::GradVarName("Y"), OutputGrad("Y"));
    op->SetOutput(framework::GradVarName("X"), InputGrad("X"));
    op->SetAttrMap(Attrs());
    return op;
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
using CPUCtx = paddle::platform::CPUDeviceContext;

REGISTER_OPERATOR(cross_entropy, ops::CrossEntropyOpBase,
                  ops::CrossEntropyOpMaker, ops::CrossEntropyOpInferVarType,
                  ops::CrossEntropyGradOpDescMaker);
REGISTER_OPERATOR(cross_entropy_grad, ops::CrossEntropyGradientOp);
REGISTER_OP_CPU_KERNEL(cross_entropy, ops::CrossEntropyOpKernel<CPUCtx, float>,
                       ops::CrossEntropyOpKernel<CPUCtx, double>);
REGISTER_OP_CPU_KERNEL(cross_entropy_grad,
                       ops::CrossEntropyGradientOpKernel<CPUCtx, float>,
                       ops::CrossEntropyGradientOpKernel<CPUCtx, double>);

REGISTER_OPERATOR(cross_entropy2, ops::CrossEntropyOp2,
                  ops::CrossEntropyOpMaker2, ops::CrossEntropyOpInferVarType,
                  ops::CrossEntropyGradOpDescMaker2);
REGISTER_OPERATOR(cross_entropy_grad2, ops::CrossEntropyGradientOp2);
REGISTER_OP_CPU_KERNEL(cross_entropy2,
                       ops::CrossEntropyOpKernel2<CPUCtx, float>,
                       ops::CrossEntropyOpKernel2<CPUCtx, double>);
REGISTER_OP_CPU_KERNEL(cross_entropy_grad2,
                       ops::CrossEntropyGradientOpKernel2<CPUCtx, float>,
                       ops::CrossEntropyGradientOpKernel2<CPUCtx, double>);
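
// Typical front-end usage of the ops registered above (a hedged sketch based
// on the Paddle 1.x Python API; exact names may differ across versions):
//   import paddle.fluid as fluid
//   prob = fluid.layers.softmax(logits)
//   loss = fluid.layers.cross_entropy(input=prob, label=label,
//                                     soft_label=False, ignore_index=-100)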