From 54f0d2608a0e01d9c633a9237d6433372b61a5b8 Mon Sep 17 00:00:00 2001
From: dangqingqing
Date: Wed, 2 Aug 2017 20:35:17 +0800
Subject: [PATCH] fix input size.

---
 paddle/operators/sigmoid_op.cc | 3 ++-
 paddle/operators/sigmoid_op.h  | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/paddle/operators/sigmoid_op.cc b/paddle/operators/sigmoid_op.cc
index 9e565bb23..484a6dfd6 100644
--- a/paddle/operators/sigmoid_op.cc
+++ b/paddle/operators/sigmoid_op.cc
@@ -38,7 +38,8 @@ public:
 class SigmoidOpGrad : public OperatorWithKernel {
 protected:
   void InferShape(const InferShapeContext &ctx) const override {
-    PADDLE_ENFORCE(ctx.InputSize() == 1,
+    // need to check input size 2 or 3, (dY, Y) or (dY, Y, X)
+    PADDLE_ENFORCE(ctx.InputSize() == 2,
                    "Sigmoid Gradient Op only have one input");
     PADDLE_ENFORCE(ctx.OutputSize() == 1,
                    "Sigmoid Gradient Op only have one output");
diff --git a/paddle/operators/sigmoid_op.h b/paddle/operators/sigmoid_op.h
index 2ea75b488..8cae2d30e 100644
--- a/paddle/operators/sigmoid_op.h
+++ b/paddle/operators/sigmoid_op.h
@@ -27,6 +27,7 @@ public:
     auto output = context.Output(0);
     output->mutable_data(context.GetPlace());

+    // The clipping is used in Paddle's raw implementation
     EigenVector::Flatten(*output).device(
         *(context.GetEigenDevice())) =
         1.0 / (1.0 + (-1.0 * EigenVector::Flatten(*input)).exp());
@@ -37,7 +38,7 @@ template
 class SigmoidGradKernel : public OpKernel {
 public:
   void Compute(const ExecutionContext& context) const override {
-    // TODO(qingqing) maybe a helper funciton is needed fo the name x@GRAD
+    // maybe a helper function is needed for the name x@GRAD
     auto y_t = context.Input("Y");
     auto dy_t = context.Input("Y@GRAD");
     auto dx_t = context.Output("X@GRAD");
--
GitLab
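
For context on why the gradient op takes (dY, Y) rather than only one input: the sigmoid derivative can be written entirely in terms of the forward output, since y = 1 / (1 + exp(-x)) gives dL/dx = dL/dy * y * (1 - y), so the original input X is optional. Below is a minimal standalone C++ sketch of that math; the function name SigmoidGrad is hypothetical and it does not use any PaddlePaddle types or headers.

#include <cstddef>
#include <vector>

// Illustration only, not Paddle's API: the sigmoid derivative is
// expressed through the forward output, dL/dx = dL/dy * y * (1 - y),
// which is why the gradient op can work from (dY, Y) without X.
std::vector<float> SigmoidGrad(const std::vector<float>& y,
                               const std::vector<float>& dy) {
  std::vector<float> dx(y.size());
  for (std::size_t i = 0; i < y.size(); ++i) {
    dx[i] = dy[i] * y[i] * (1.0f - y[i]);
  }
  return dx;
}

As an illustrative check: with y = {0.5f, 0.88f} and dy = {1.0f, 1.0f}, SigmoidGrad returns {0.25f, 0.1056f}, matching dy * y * (1 - y) elementwise.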