From 1fe5acb25a2cedd765da28642510b2ce497dc659 Mon Sep 17 00:00:00 2001
From: Yu Yang
Date: Fri, 1 Dec 2017 14:47:15 +0800
Subject: [PATCH] Expose sigmoid_cross_entropy_with_logits (#6147)

Also, change `labels` to `label` for API consistency
---
 .../sigmoid_cross_entropy_with_logits_op.cc   | 24 +++++++--------
 .../sigmoid_cross_entropy_with_logits_op.h    |  6 ++--
 python/paddle/v2/fluid/layers.py              |  1 +
 python/paddle/v2/fluid/tests/test_layers.py   | 10 +++++++
 ...st_sigmoid_cross_entropy_with_logits_op.py | 29 +++++++++++--------
 5 files changed, 41 insertions(+), 29 deletions(-)

diff --git a/paddle/operators/sigmoid_cross_entropy_with_logits_op.cc b/paddle/operators/sigmoid_cross_entropy_with_logits_op.cc
index d9e405465..782f4c793 100644
--- a/paddle/operators/sigmoid_cross_entropy_with_logits_op.cc
+++ b/paddle/operators/sigmoid_cross_entropy_with_logits_op.cc
@@ -25,20 +25,19 @@ class SigmoidCrossEntropyWithLogitsOp : public framework::OperatorWithKernel {
   void InferShape(framework::InferShapeContext* ctx) const override {
     PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
-    PADDLE_ENFORCE(ctx->HasInput("Labels"),
-                   "Input(Labels) should be not null.");
+    PADDLE_ENFORCE(ctx->HasInput("Label"), "Input(Label) should be not null.");
     PADDLE_ENFORCE(ctx->HasOutput("Out"), "Output(Out) should be not null.");
 
     auto x_dims = ctx->GetInputDim("X");
-    auto labels_dims = ctx->GetInputDim("Labels");
+    auto labels_dims = ctx->GetInputDim("Label");
     PADDLE_ENFORCE_EQ(x_dims.size(), 2, "Input(X)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(labels_dims.size(), 2,
-                      "Input(Labels)'s rank should be 2.");
+                      "Input(Label)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(x_dims[0], labels_dims[0],
-                      "The 1st dimension of Input(X) and Input(Labels) should "
+                      "The 1st dimension of Input(X) and Input(Label) should "
                       "be equal.");
     PADDLE_ENFORCE_EQ(x_dims[1], labels_dims[1],
-                      "The 2nd dimension of Input(X) and Input(Labels) should "
+                      "The 2nd dimension of Input(X) and Input(Label) should "
                       "be equal.");
 
     ctx->SetOutputDim("Out", x_dims);
@@ -53,26 +52,25 @@ class SigmoidCrossEntropyWithLogitsGradOp
   void InferShape(framework::InferShapeContext* ctx) const override {
     PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
-    PADDLE_ENFORCE(ctx->HasInput("Labels"),
-                   "Input(Labels) should be not null.");
+    PADDLE_ENFORCE(ctx->HasInput("Label"), "Input(Label) should be not null.");
     PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Out")),
                    "Input(Out@GRAD) should be not null.");
     PADDLE_ENFORCE(ctx->HasOutput(framework::GradVarName("X")),
                    "Output(X@GRAD) should be not null.");
 
     auto x_dims = ctx->GetInputDim("X");
-    auto labels_dims = ctx->GetInputDim("Labels");
+    auto labels_dims = ctx->GetInputDim("Label");
     auto dout_dims = ctx->GetInputDim(framework::GradVarName("Out"));
     PADDLE_ENFORCE_EQ(x_dims.size(), 2, "Input(X)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(labels_dims.size(), 2,
-                      "Input(Labels)'s rank should be 2.");
+                      "Input(Label)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(dout_dims.size(), 2,
                       "Input(Out@Grad)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(x_dims[0], labels_dims[0],
-                      "The 1st dimension of Input(X) and Input(Labels) should "
+                      "The 1st dimension of Input(X) and Input(Label) should "
                       "be equal.");
     PADDLE_ENFORCE_EQ(x_dims[1], labels_dims[1],
-                      "The 2nd dimension of Input(X) and Input(Labels) should "
+                      "The 2nd dimension of Input(X) and Input(Label) should "
                       "be equal.");
     PADDLE_ENFORCE_EQ(x_dims[0], dout_dims[0],
                       "The 1st dimension of Input(X) and Input(Out@Grad) "
@@ -97,7 +95,7 @@
class SigmoidCrossEntropyWithLogitsOpMaker
             "This input is a tensor of logits computed by the previous "
             " operator. Logits are unscaled log probabilities given as "
             "log(p/(1-p)).");
-    AddInput("Labels",
+    AddInput("Label",
             "(Tensor, default Tensor), a 2-D tensor of the same type "
             "and shape as X. This input is a tensor of probabilistic labels "
             "for each logit");
diff --git a/paddle/operators/sigmoid_cross_entropy_with_logits_op.h b/paddle/operators/sigmoid_cross_entropy_with_logits_op.h
index 41c619f18..2a9d9bbc7 100644
--- a/paddle/operators/sigmoid_cross_entropy_with_logits_op.h
+++ b/paddle/operators/sigmoid_cross_entropy_with_logits_op.h
@@ -25,8 +25,7 @@ class SigmoidCrossEntropyWithLogitsKernel : public framework::OpKernel<T> {
  public:
   void Compute(const framework::ExecutionContext &context) const override {
     const framework::Tensor *X = context.Input<framework::Tensor>("X");
-    const framework::Tensor *Labels =
-        context.Input<framework::Tensor>("Labels");
+    const framework::Tensor *Labels = context.Input<framework::Tensor>("Label");
     framework::Tensor *Out = context.Output<framework::Tensor>("Out");
     Out->mutable_data<T>(context.GetPlace());
 
@@ -52,8 +51,7 @@ class SigmoidCrossEntropyWithLogitsGradKernel : public framework::OpKernel<T> {
  public:
   void Compute(const framework::ExecutionContext &context) const override {
     const framework::Tensor *X = context.Input<framework::Tensor>("X");
-    const framework::Tensor *Labels =
-        context.Input<framework::Tensor>("Labels");
+    const framework::Tensor *Labels = context.Input<framework::Tensor>("Label");
     const framework::Tensor *dOut =
         context.Input<framework::Tensor>(framework::GradVarName("Out"));
     framework::Tensor *dX =
diff --git a/python/paddle/v2/fluid/layers.py b/python/paddle/v2/fluid/layers.py
index 5a977978b..e41bfae28 100644
--- a/python/paddle/v2/fluid/layers.py
+++ b/python/paddle/v2/fluid/layers.py
@@ -403,6 +403,7 @@ _create_op_func_('sigmoid')
 _create_op_func_('scale')
 _create_op_func_('reshape')
 _create_op_func_('transpose')
+_create_op_func_('sigmoid_cross_entropy_with_logits')
 
 
 def cast(x, dtype, main_program=None):
diff --git a/python/paddle/v2/fluid/tests/test_layers.py b/python/paddle/v2/fluid/tests/test_layers.py
index 33b0e54f4..a9d9d369c 100644
--- a/python/paddle/v2/fluid/tests/test_layers.py
+++ b/python/paddle/v2/fluid/tests/test_layers.py
@@ -137,6 +137,16 @@ class TestBook(unittest.TestCase):
 
         print(str(program))
 
+    def test_sigmoid_cross_entropy(self):
+        program = Program()
+        with program_guard(program):
+            dat = layers.data(name='data', shape=[10], dtype='float32')
+            lbl = layers.data(name='label', shape=[10], dtype='float32')
+            self.assertIsNotNone(
+                layers.sigmoid_cross_entropy_with_logits(
+                    x=dat, label=lbl))
+        print(str(program))
+
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/v2/fluid/tests/test_sigmoid_cross_entropy_with_logits_op.py b/python/paddle/v2/fluid/tests/test_sigmoid_cross_entropy_with_logits_op.py
index e53856b38..c42f578f7 100644
--- a/python/paddle/v2/fluid/tests/test_sigmoid_cross_entropy_with_logits_op.py
+++ b/python/paddle/v2/fluid/tests/test_sigmoid_cross_entropy_with_logits_op.py
@@ -2,11 +2,12 @@ import numpy as np
 from op_test import OpTest
 from scipy.special import logit
 from scipy.special import expit
+import unittest
 
 
 class TestSigmoidCrossEntropyWithLogitsOp1(OpTest):
-    '''Test sigmoid_cross_entropy_with_logit_op with binary labels
-    '''
+    """Test sigmoid_cross_entropy_with_logits_op with binary label
+    """
 
     def setUp(self):
         self.op_type = "sigmoid_cross_entropy_with_logits"
@@ -16,16 +17,16 @@ class TestSigmoidCrossEntropyWithLogitsOp1(OpTest):
             'X': logit(
                 np.random.uniform(0, 1, (batch_size, num_classes))
                .astype("float32")),
-            'Labels': np.random.randint(0, 2, (batch_size, num_classes))
+            'Label': np.random.randint(0, 2, (batch_size, num_classes))
             .astype("float32")
         }
 
         # Fw Pass is implemented as elementwise sigmoid followed by
         # elementwise logistic loss
-        # Labels * -log(sigmoid(X)) + (1 - labels) * -log(1 - sigmoid(X))
+        # Label * -log(sigmoid(X)) + (1 - label) * -log(1 - sigmoid(X))
         sigmoid_X = expit(self.inputs['X'])
-        term1 = self.inputs['Labels'] * np.log(sigmoid_X)
-        term2 = (1 - self.inputs['Labels']) * np.log(1 - sigmoid_X)
+        term1 = self.inputs['Label'] * np.log(sigmoid_X)
+        term2 = (1 - self.inputs['Label']) * np.log(1 - sigmoid_X)
         self.outputs = {'Out': -term1 - term2}
 
     def test_check_output(self):
@@ -36,8 +37,8 @@
 
 
 class TestSigmoidCrossEntropyWithLogitsOp2(OpTest):
-    '''Test sigmoid_cross_entropy_with_logit_op with probabalistic labels
-    '''
+    """Test sigmoid_cross_entropy_with_logits_op with probabilistic label
+    """
 
     def setUp(self):
         self.op_type = "sigmoid_cross_entropy_with_logits"
@@ -47,16 +48,16 @@
             'X': logit(
                 np.random.uniform(0, 1, (batch_size, num_classes))
                 .astype("float32")),
-            'Labels': np.random.uniform(0, 1, (batch_size, num_classes))
+            'Label': np.random.uniform(0, 1, (batch_size, num_classes))
             .astype("float32")
         }
 
         # Fw Pass is implemented as elementwise sigmoid followed by
         # elementwise logistic loss
-        # Labels * -log(sigmoid(X)) + (1 - labels) * -log(1 - sigmoid(X))
+        # Label * -log(sigmoid(X)) + (1 - label) * -log(1 - sigmoid(X))
         sigmoid_X = expit(self.inputs['X'])
-        term1 = self.inputs['Labels'] * np.log(sigmoid_X)
-        term2 = (1 - self.inputs['Labels']) * np.log(1 - sigmoid_X)
+        term1 = self.inputs['Label'] * np.log(sigmoid_X)
+        term2 = (1 - self.inputs['Label']) * np.log(1 - sigmoid_X)
         self.outputs = {'Out': -term1 - term2}
 
     def test_check_output(self):
@@ -64,3 +65,7 @@ class TestSigmoidCrossEntropyWithLogitsOp2(OpTest):
 
     def test_check_grad(self):
         self.check_grad(['X'], 'Out')
+
+
+if __name__ == '__main__':
+    unittest.main()
--
GitLab
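Note for reviewers: the sketch below restates, in plain NumPy, the reference loss the tests above compute, so the op's output can be sanity-checked outside of Paddle. The helper name sigmoid_cross_entropy_with_logits_ref and the sample shapes are illustrative only (not part of Paddle's API); it needs just numpy and scipy.

import numpy as np
from scipy.special import expit, logit

def sigmoid_cross_entropy_with_logits_ref(x, label):
    # Elementwise logistic loss, matching the comment in the tests:
    # Out = Label * -log(sigmoid(X)) + (1 - Label) * -log(1 - sigmoid(X))
    sigmoid_x = expit(x)
    return -label * np.log(sigmoid_x) - (1 - label) * np.log(1 - sigmoid_x)

batch_size, num_classes = 64, 20
# Draw probabilities strictly inside (0, 1) so logit() stays finite.
x = logit(np.random.uniform(0.01, 0.99,
                            (batch_size, num_classes)).astype("float32"))
label = np.random.randint(0, 2, (batch_size, num_classes)).astype("float32")

out = sigmoid_cross_entropy_with_logits_ref(x, label)
assert out.shape == x.shape  # the loss is elementwise: one value per logit

At the Python layer level, the newly exposed function is called exactly as in the added unit test, layers.sigmoid_cross_entropy_with_logits(x=dat, label=lbl), where x and label are 2-D tensors of the same shape and dtype.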