/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/operators/cross_entropy_op.h"

namespace paddle {
namespace operators {

class OnehotCrossEntropyOp : public OperatorWithKernel {
 protected:
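  // Checks that both inputs and the output are set, that X is a 2-D tensor
  // and label is 1-D, then resizes Y to hold one loss value per row of X.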
  void InferShape(const InferShapeContext &ctx) const override {
    PADDLE_ENFORCE(ctx.InputSize() == 2,
                   "Input size of OnehotCrossEntropyOp must be two");
    PADDLE_ENFORCE(ctx.OutputSize() == 1,
                   "Output size of OnehotCrossEntropyOp must be one");
    PADDLE_ENFORCE(ctx.InputVar(0) != nullptr && ctx.InputVar(1) != nullptr,
                   "Inputs of OnehotCrossEntropyOp must all be set");
    PADDLE_ENFORCE(ctx.OutputVar(0) != nullptr,
                   "Outputs of OnehotCrossEntropyOp must all be set");
    PADDLE_ENFORCE(ctx.Input<Tensor>(0)->dims().size() == 2,
                   "X's dimension must be 2.");
    PADDLE_ENFORCE(ctx.Input<Tensor>(1)->dims().size() == 1,
                   "label's dimension must be 1.");
    ctx.Output<Tensor>(0)->Resize({ctx.Input<Tensor>(0)->dims()[0]});
  }
};

class OnehotCrossEntropyGradientOp : public OperatorWithKernel {
 protected:
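  // The gradient with respect to X has the same shape as X; this op only
  // produces that gradient output, so nothing is inferred for the label.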
  void InferShape(const InferShapeContext &ctx) const override {
    auto X_grad = ctx.Output<Tensor>(framework::GradVarName("X"));
    auto X = ctx.Input<Tensor>("X");

    // TODO(superjom) add enforce here after helper functions ready
    X_grad->Resize(X->dims());
  }
};

class OnehotCrossEntropyOpMaker : public OpProtoAndCheckerMaker {
 public:
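  // Declares the operator's inputs (X, label), its output (Y), and the
  // documentation string shown to users.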
  OnehotCrossEntropyOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "The first input of OnehotCrossEntropyOp");
    AddInput("label", "The second input of OnehotCrossEntropyOp");
    AddOutput("Y", "The output of OnehotCrossEntropyOp");
    AddComment(R"DOC(
OnehotCrossEntropy Operator.

                Y[i] = -log(X[i][label[i]])
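
For example, if row i of X holds the class probabilities [0.1, 0.7, 0.2]
and label[i] = 1, then Y[i] = -log(0.7).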

)DOC");
  }
};
}  // namespace operators
}  // namespace paddle

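// Register the forward operator together with its proto maker, plus CPU
// kernels for both the forward and gradient computations.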
REGISTER_OP(onehot_cross_entropy, ops::OnehotCrossEntropyOp,
            ops::OnehotCrossEntropyOpMaker);
REGISTER_OP_CPU_KERNEL(onehot_cross_entropy,
                       ops::OnehotCrossEntropyOpKernel<ops::CPUPlace, float>);

REGISTER_OP_CPU_KERNEL(
    onehot_cross_entropy_grad,
    ops::OnehotCrossEntropyGradientOpKernel<ops::CPUPlace, float>);