/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/operators/cross_entropy_op.h"

namespace paddle {
namespace operators {

D
dongzhihong 已提交
20
class OnehotCrossEntropyOp : public framework::OperatorWithKernel {
21 22 23
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

24
 protected:
D
dongzhihong 已提交
25
  void InferShape(const framework::InferShapeContext &ctx) const override {
Y
Yu Yang 已提交
26 27 28 29 30 31 32
    auto *X = ctx.Input<Tensor>("X");
    auto *label = ctx.Input<Tensor>("label");

    PADDLE_ENFORCE_EQ(X->dims().size(), 2, "X's dimension must be 2.");
    PADDLE_ENFORCE_EQ(label->dims().size(), 1, "label's dimension must be 1.");
    PADDLE_ENFORCE_EQ(X->dims()[0], label->dims()[0]);
    ctx.Output<Tensor>("Y")->Resize({X->dims()[0]});
33 34 35
  }
};

D
dongzhihong 已提交
36
class OnehotCrossEntropyGradientOp : public framework::OperatorWithKernel {
37 38 39
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

40
 protected:
D
dongzhihong 已提交
41
  void InferShape(const framework::InferShapeContext &ctx) const override {
42
    auto dX = ctx.Output<Tensor>(framework::GradVarName("X"));
43 44
    auto X = ctx.Input<Tensor>("X");

45
    dX->Resize(X->dims());
46 47 48
  }
};

D
dongzhihong 已提交
49
class OnehotCrossEntropyOpMaker : public framework::OpProtoAndCheckerMaker {
50
 public:
D
dongzhihong 已提交
51 52
  OnehotCrossEntropyOpMaker(framework::OpProto *proto,
                            framework::OpAttrChecker *op_checker)
53
      : OpProtoAndCheckerMaker(proto, op_checker) {
54 55 56 57 58 59 60 61 62 63 64 65 66 67
    AddInput("X", "The first input of OnehotCrossEntropyOp");
    AddInput("label", "The second input of OnehotCrossEntropyOp");
    AddOutput("Y", "The output of OnehotCrossEntropyOp");
    AddComment(R"DOC(
OnehotCrossEntropy Operator.

                Y[i] = -log(X[i][j])

)DOC");
  }
};
}  // namespace operators
}  // namespace paddle

D
dongzhihong 已提交
68
namespace ops = paddle::operators;
69
REGISTER_OP(onehot_cross_entropy, ops::OnehotCrossEntropyOp,
70 71
            ops::OnehotCrossEntropyOpMaker, onehot_cross_entropy_grad,
            ops::OnehotCrossEntropyGradientOp);
72 73 74 75
REGISTER_OP_CPU_KERNEL(onehot_cross_entropy,
                       ops::OnehotCrossEntropyOpKernel<float>);
REGISTER_OP_CPU_KERNEL(onehot_cross_entropy_grad,
                       ops::OnehotCrossEntropyGradientOpKernel<float>);