From 27df3a9f2b5a7597faa528453f19cd095a7440d6 Mon Sep 17 00:00:00 2001
From: fengjiayi
Date: Tue, 7 Aug 2018 17:02:31 +0800
Subject: [PATCH] make cross_entropy_op support tensors

---
 paddle/fluid/framework/tensor.cc              |   1 +
 paddle/fluid/operators/cross_entropy_op.h     |  15 ++-
 .../tests/unittests/test_cross_entropy_op.py  | 102 ++++++++++++++++++
 3 files changed, 115 insertions(+), 3 deletions(-)

diff --git a/paddle/fluid/framework/tensor.cc b/paddle/fluid/framework/tensor.cc
index c7286dacf..56bb9142d 100644
--- a/paddle/fluid/framework/tensor.cc
+++ b/paddle/fluid/framework/tensor.cc
@@ -112,5 +112,6 @@ Tensor& Tensor::Resize(const DDim& dims) {
 const DDim& Tensor::dims() const { return dims_; }
 
 int64_t Tensor::numel() const { return product(dims_); }
+
 }  // namespace framework
 }  // namespace paddle
diff --git a/paddle/fluid/operators/cross_entropy_op.h b/paddle/fluid/operators/cross_entropy_op.h
index 19a2aec92..e26c85ad9 100644
--- a/paddle/fluid/operators/cross_entropy_op.h
+++ b/paddle/fluid/operators/cross_entropy_op.h
@@ -33,8 +33,14 @@ class CrossEntropyOpKernel : public framework::OpKernel<T> {
     auto* y = ctx.Output<Tensor>("Y");
     y->mutable_data<T>(ctx.GetPlace());
 
+    int rank = x->dims().size();
+    Tensor x_2d = rank > 2 ? framework::ReshapeToMatrix(*x, rank - 1) : *x;
+    Tensor labels_2d =
+        rank > 2 ? framework::ReshapeToMatrix(*labels, rank - 1) : *labels;
+    Tensor y_2d = rank > 2 ? framework::ReshapeToMatrix(*y, rank - 1) : *y;
+
     math::CrossEntropyFunctor<DeviceContext, T>()(
-        ctx.template device_context<DeviceContext>(), y, x, labels,
+        ctx.template device_context<DeviceContext>(), &y_2d, &x_2d, &labels_2d,
         ctx.Attr<bool>("soft_label"));
   }
 };
@@ -98,9 +104,12 @@ class CrossEntropyGradientOpKernel : public framework::OpKernel<T> {
     auto* dy = ctx.Input<Tensor>(framework::GradVarName("Y"));
     auto* label = ctx.Input<Tensor>("Label");
     auto* dx = ctx.Output<Tensor>(framework::GradVarName("X"));
-    auto* dx_data = dx->mutable_data<T>(ctx.GetPlace());
+    T* dx_data = dx->mutable_data<T>(ctx.GetPlace());
 
-    int64_t class_num = x->dims()[1];
+    // The following computation depends only on the size of the last
+    // dimension, so there is no need to convert the tensors to 2-D views.
+    int rank = x->dims().size();
+    int64_t class_num = x->dims()[rank - 1];
     if (ctx.Attr<bool>("soft_label")) {
       XeSoftlabelGradFunctor<T> functor(dx_data, dy->data<T>(), x->data<T>(),
                                         label->data<T>(),
diff --git a/python/paddle/fluid/tests/unittests/test_cross_entropy_op.py b/python/paddle/fluid/tests/unittests/test_cross_entropy_op.py
index c5b9e92d6..86ac15932 100644
--- a/python/paddle/fluid/tests/unittests/test_cross_entropy_op.py
+++ b/python/paddle/fluid/tests/unittests/test_cross_entropy_op.py
@@ -105,5 +105,107 @@ class TestCrossEntropyOp3(OpTest):
             ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)
 
 
+class TestCrossEntropyOp4(OpTest):
+    """Test high rank tensor cross-entropy with discrete index labels.
+    """
+
+    def setUp(self):
+        self.op_type = "cross_entropy"
+        shape = [10, 2, 4]
+        ins_num = np.prod(np.array(shape))
+        class_num = 10
+
+        X_2d = randomize_probability(ins_num, class_num, dtype="float64")
+
+        label_2d = np.random.randint(0, class_num, (ins_num, 1), dtype="int64")
+        cross_entropy_2d = np.asmatrix(
+            [[-np.log(X_2d[i][label_2d[i][0]])] for i in range(X_2d.shape[0])],
+            dtype="float64")
+
+        X = X_2d.reshape(shape + [class_num])
+        label = label_2d.reshape(shape + [1])
+        cross_entropy = np.array(cross_entropy_2d).reshape(shape + [1])
+
+        self.inputs = {"X": X, "Label": label}
+        self.outputs = {"Y": cross_entropy}
+        self.attrs = {"soft_label": False}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(["X"], "Y", numeric_grad_delta=0.001)
+
+
+class TestCrossEntropyOp5(OpTest):
+    """Test high rank tensor cross-entropy with vectorized soft labels.
+    """
+
+    def setUp(self):
+        self.op_type = "cross_entropy"
+        shape = [4, 3]
+        ins_num = np.prod(np.array(shape))
+        class_num = 37
+
+        X_2d = randomize_probability(ins_num, class_num)
+        label_2d = np.random.uniform(0.1, 1.0,
+                                     [ins_num, class_num]).astype("float32")
+        label_2d /= label_2d.sum(axis=1, keepdims=True)
+        cross_entropy_2d = (-label_2d * np.log(X_2d)).sum(
+            axis=1, keepdims=True).astype("float32")
+
+        X = X_2d.reshape(shape + [class_num])
+        label = label_2d.reshape(shape + [class_num])
+        cross_entropy = np.array(cross_entropy_2d).reshape(shape + [1])
+
+        self.inputs = {"X": X, "Label": label}
+        self.outputs = {"Y": cross_entropy}
+        self.attrs = {"soft_label": True}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(
+            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)
+
+
+class TestCrossEntropyOp6(OpTest):
+    """Test high rank tensor cross-entropy with vectorized one-hot
+    representation of labels.
+    """
+
+    def setUp(self):
+        self.op_type = "cross_entropy"
+        shape = [4, 3, 2]
+        ins_num = np.prod(np.array(shape))
+        class_num = 17
+
+        X_2d = randomize_probability(ins_num, class_num)
+        label_index_2d = np.random.randint(
+            0, class_num, (ins_num, ), dtype="int32")
+        label_2d = np.zeros(X_2d.shape)
+        label_2d[np.arange(ins_num), label_index_2d] = 1
+
+        cross_entropy_2d = np.asmatrix(
+            [[-np.log(X_2d[i][label_index_2d[i]])]
+             for i in range(X_2d.shape[0])],
+            dtype="float32")
+
+        X = X_2d.reshape(shape + [class_num])
+        label = label_2d.reshape(shape + [class_num])
+        cross_entropy = np.array(cross_entropy_2d).reshape(shape + [1])
+
+        self.inputs = {"X": X, "Label": label.astype(np.float32)}
+        self.outputs = {"Y": cross_entropy}
+        self.attrs = {"soft_label": True}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(
+            ["X"], "Y", max_relative_error=0.05, numeric_grad_delta=0.001)
+
+
 if __name__ == "__main__":
     unittest.main()
--
GitLab
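
For reference, here is a minimal NumPy sketch of the semantics the reshaped
kernel implements: all leading dimensions of a rank-N input are folded into a
single instance axis, the existing 2-D cross entropy is computed, and the
result is restored to the original leading dimensions with a trailing
dimension of 1. It is not part of the patch, and the helper name
high_rank_cross_entropy is illustrative only.

    import numpy as np

    def high_rank_cross_entropy(x, label, soft_label=False):
        # x: probabilities, shape [d0, ..., dk, class_num].
        # label: indices of shape [d0, ..., dk, 1] when soft_label is False,
        # or a distribution of shape [d0, ..., dk, class_num] otherwise.
        class_num = x.shape[-1]
        # Fold leading dims, like framework::ReshapeToMatrix(x, rank - 1).
        x_2d = x.reshape(-1, class_num)
        if soft_label:
            label_2d = label.reshape(-1, class_num)
            y_2d = -(label_2d * np.log(x_2d)).sum(axis=1, keepdims=True)
        else:
            label_1d = label.reshape(-1)
            y_2d = -np.log(x_2d[np.arange(x_2d.shape[0]), label_1d])
            y_2d = y_2d.reshape(-1, 1)
        # Y keeps the leading dimensions, with a trailing dimension of 1.
        return y_2d.reshape(x.shape[:-1] + (1, ))

For example, with X of shape [10, 2, 4, 10] and an int64 label of shape
[10, 2, 4, 1], this returns Y of shape [10, 2, 4, 1], which is the shape
TestCrossEntropyOp4 checks against the operator output.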