From 14f6fa346bfd57205dd465f8d2a602bd107af1c4 Mon Sep 17 00:00:00 2001
From: fengjiayi
Date: Wed, 17 Jan 2018 15:27:16 +0800
Subject: [PATCH] make elementwise op support scalar as input Y

---
 paddle/operators/elementwise_div_op.h         |  7 ++++-
 paddle/operators/elementwise_mul_op.h         |  7 ++++-
 paddle/operators/elementwise_op_function.h    | 14 ++++++++++
 paddle/operators/elementwise_sub_op.h         |  7 ++++-
 .../v2/fluid/tests/test_elementwise_add_op.py | 26 +++++++++++++------
 .../v2/fluid/tests/test_elementwise_div_op.py | 26 +++++++++++++------
 .../v2/fluid/tests/test_elementwise_max_op.py | 22 ++++++++++++++++
 .../v2/fluid/tests/test_elementwise_min_op.py | 22 ++++++++++++++++
 .../v2/fluid/tests/test_elementwise_mul_op.py | 26 +++++++++++++------
 .../v2/fluid/tests/test_elementwise_sub_op.py | 26 +++++++++++++------
 10 files changed, 148 insertions(+), 35 deletions(-)

diff --git a/paddle/operators/elementwise_div_op.h b/paddle/operators/elementwise_div_op.h
index 7783875e24e..ef26cb6c914 100644
--- a/paddle/operators/elementwise_div_op.h
+++ b/paddle/operators/elementwise_div_op.h
@@ -19,11 +19,16 @@ limitations under the License. */
 namespace paddle {
 namespace operators {
 
+template <typename T>
+struct DivFunctor {
+  inline HOSTDEVICE T operator()(T a, T b) const { return a / b; }
+};
+
 template <typename DeviceContext, typename T>
 class ElementwiseDivKernel : public framework::OpKernel<T> {
  public:
   void Compute(const framework::ExecutionContext& ctx) const override {
-    ElementwiseCompute(ctx);
+    ElementwiseComputeEx<DivFunctor<T>, DeviceContext, T>(ctx);
   }
 };
 
diff --git a/paddle/operators/elementwise_mul_op.h b/paddle/operators/elementwise_mul_op.h
index 0e6559eacc0..4b86b00b5a0 100644
--- a/paddle/operators/elementwise_mul_op.h
+++ b/paddle/operators/elementwise_mul_op.h
@@ -18,11 +18,16 @@ limitations under the License. */
 namespace paddle {
 namespace operators {
 
+template <typename T>
+struct MulFunctor {
+  inline HOSTDEVICE T operator()(T a, T b) const { return a * b; }
+};
+
 template <typename DeviceContext, typename T>
 class ElementwiseMulKernel : public framework::OpKernel<T> {
  public:
   void Compute(const framework::ExecutionContext& ctx) const override {
-    ElementwiseCompute(ctx);
+    ElementwiseComputeEx<MulFunctor<T>, DeviceContext, T>(ctx);
   }
 };
 
diff --git a/paddle/operators/elementwise_op_function.h b/paddle/operators/elementwise_op_function.h
index be11d5cc9de..db5d30c1af2 100644
--- a/paddle/operators/elementwise_op_function.h
+++ b/paddle/operators/elementwise_op_function.h
@@ -340,6 +340,13 @@ void ElementwiseGradCompute(const framework::ExecutionContext& ctx) {
     return;
   }
 
+  if (y_dims.size() == 1 && y_dims[0] == 1) {
+    // y is a scalar
+    auto extended_dims = framework::vectorize(x_dims);
+    extended_dims.push_back(1);
+    x_dims = framework::make_ddim(extended_dims);
+  }
+
   int axis = ctx.Attr<int>("axis");
   axis = (axis == -1 ? x_dims.size() - y_dims.size() : axis);
 
@@ -378,6 +385,13 @@ void ElementwiseComputeEx(const framework::ExecutionContext& ctx) {
     return;
   }
 
+  if (y_dims.size() == 1 && y_dims[0] == 1) {
+    // y is a scalar
+    auto extended_dims = framework::vectorize(x_dims);
+    extended_dims.push_back(1);
+    x_dims = framework::make_ddim(extended_dims);
+  }
+
   int axis = ctx.Attr<int>("axis");
   axis = (axis == -1 ? x_dims.size() - y_dims.size() : axis);
   PADDLE_ENFORCE(axis >= 0 && axis < x_dims.size(),
diff --git a/paddle/operators/elementwise_sub_op.h b/paddle/operators/elementwise_sub_op.h
index 347e92f87c7..a2aca793026 100644
--- a/paddle/operators/elementwise_sub_op.h
+++ b/paddle/operators/elementwise_sub_op.h
@@ -18,11 +18,16 @@ limitations under the License. */
 namespace paddle {
 namespace operators {
 
+template <typename T>
+struct SubFunctor {
+  inline HOSTDEVICE T operator()(T a, T b) const { return a - b; }
+};
+
 template <typename DeviceContext, typename T>
 class ElementwiseSubKernel : public framework::OpKernel<T> {
  public:
   void Compute(const framework::ExecutionContext& ctx) const override {
-    ElementwiseCompute(ctx);
+    ElementwiseComputeEx<SubFunctor<T>, DeviceContext, T>(ctx);
   }
 };
 
diff --git a/python/paddle/v2/fluid/tests/test_elementwise_add_op.py b/python/paddle/v2/fluid/tests/test_elementwise_add_op.py
index 1e88231877f..3564772fb52 100644
--- a/python/paddle/v2/fluid/tests/test_elementwise_add_op.py
+++ b/python/paddle/v2/fluid/tests/test_elementwise_add_op.py
@@ -1,16 +1,16 @@
 # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserve.
 #
-#Licensed under the Apache License, Version 2.0 (the "License");
-#you may not use this file except in compliance with the License.
-#You may obtain a copy of the License at
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
 #
 #    http://www.apache.org/licenses/LICENSE-2.0
 #
-#Unless required by applicable law or agreed to in writing, software
-#distributed under the License is distributed on an "AS IS" BASIS,
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#See the License for the specific language governing permissions and
-#limitations under the License.
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 import unittest
 import numpy as np
 from op_test import OpTest
@@ -40,6 +40,16 @@ class TestElementwiseOp(OpTest):
             ['X'], 'Out', max_relative_error=0.005, no_grad_set=set('Y'))
 
 
+class TestElementwiseAddOp_scalar(TestElementwiseOp):
+    def setUp(self):
+        self.op_type = "elementwise_add"
+        self.inputs = {
+            'X': np.random.rand(2, 3, 4).astype(np.float32),
+            'Y': np.random.rand(1).astype(np.float32)
+        }
+        self.outputs = {'Out': self.inputs['X'] + self.inputs['Y']}
+
+
 class TestElementwiseAddOp_Vector(TestElementwiseOp):
     def setUp(self):
         self.op_type = "elementwise_add"
diff --git a/python/paddle/v2/fluid/tests/test_elementwise_div_op.py b/python/paddle/v2/fluid/tests/test_elementwise_div_op.py
index fbabc79be24..77b113af769 100644
--- a/python/paddle/v2/fluid/tests/test_elementwise_div_op.py
+++ b/python/paddle/v2/fluid/tests/test_elementwise_div_op.py
@@ -1,16 +1,16 @@
 # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserve.
 #
-#Licensed under the Apache License, Version 2.0 (the "License");
-#you may not use this file except in compliance with the License.
-#You may obtain a copy of the License at
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
 #
 #    http://www.apache.org/licenses/LICENSE-2.0
 #
-#Unless required by applicable law or agreed to in writing, software
-#distributed under the License is distributed on an "AS IS" BASIS,
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#See the License for the specific language governing permissions and
-#limitations under the License.
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 import unittest
 import numpy as np
 from op_test import OpTest
@@ -45,6 +45,16 @@ class ElementwiseDivOp(OpTest):
             ['X'], 'Out', max_relative_error=0.05, no_grad_set=set('Y'))
 
 
+class TestElementwiseDivOp_scalar(ElementwiseDivOp):
+    def setUp(self):
+        self.op_type = "elementwise_div"
+        self.inputs = {
+            'X': np.random.uniform(0.1, 1, [2, 3, 4]).astype(np.float32),
+            'Y': np.random.uniform(0.1, 1, [1]).astype(np.float32)
+        }
+        self.outputs = {'Out': self.inputs['X'] / self.inputs['Y']}
+
+
 class TestElementwiseDivOp_Vector(ElementwiseDivOp):
     def setUp(self):
         self.op_type = "elementwise_div"
diff --git a/python/paddle/v2/fluid/tests/test_elementwise_max_op.py b/python/paddle/v2/fluid/tests/test_elementwise_max_op.py
index 52bd123d802..9526f0199bf 100644
--- a/python/paddle/v2/fluid/tests/test_elementwise_max_op.py
+++ b/python/paddle/v2/fluid/tests/test_elementwise_max_op.py
@@ -1,3 +1,16 @@
+# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserve.
+#
+#Licensed under the Apache License, Version 2.0 (the "License");
+#you may not use this file except in compliance with the License.
+#You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
 import unittest
 import numpy as np
 from op_test import OpTest
@@ -30,6 +43,15 @@ class TestElementwiseOp(OpTest):
             ['X'], 'Out', max_relative_error=0.005, no_grad_set=set('Y'))
 
 
+class TestElementwiseMaxOp_scalar(TestElementwiseOp):
+    def setUp(self):
+        self.op_type = "elementwise_max"
+        x = np.random.random_integers(-5, 5, [2, 3, 4]).astype("float32")
+        y = np.array([0.5]).astype("float32")
+        self.inputs = {'X': x, 'Y': y}
+        self.outputs = {'Out': np.maximum(self.inputs['X'], self.inputs['Y'])}
+
+
 class TestElementwiseMaxOp_Vector(TestElementwiseOp):
     def setUp(self):
         self.op_type = "elementwise_max"
diff --git a/python/paddle/v2/fluid/tests/test_elementwise_min_op.py b/python/paddle/v2/fluid/tests/test_elementwise_min_op.py
index eeafab5b188..b9007282335 100644
--- a/python/paddle/v2/fluid/tests/test_elementwise_min_op.py
+++ b/python/paddle/v2/fluid/tests/test_elementwise_min_op.py
@@ -1,3 +1,16 @@
+# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserve.
+#
+#Licensed under the Apache License, Version 2.0 (the "License");
+#you may not use this file except in compliance with the License.
+#You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
 import unittest
 import numpy as np
 from op_test import OpTest
@@ -30,6 +43,15 @@ class TestElementwiseOp(OpTest):
             ['X'], 'Out', max_relative_error=0.005, no_grad_set=set('Y'))
 
 
+class TestElementwiseMinOp_scalar(TestElementwiseOp):
+    def setUp(self):
+        self.op_type = "elementwise_min"
+        x = np.random.random_integers(-5, 5, [2, 3, 4]).astype("float32")
+        y = np.array([0.5]).astype("float32")
+        self.inputs = {'X': x, 'Y': y}
+        self.outputs = {'Out': np.minimum(self.inputs['X'], self.inputs['Y'])}
+
+
 class TestElementwiseMaxOp_Vector(TestElementwiseOp):
     def setUp(self):
         self.op_type = "elementwise_min"
diff --git a/python/paddle/v2/fluid/tests/test_elementwise_mul_op.py b/python/paddle/v2/fluid/tests/test_elementwise_mul_op.py
index ef3a829abc6..12dfa6599cd 100644
--- a/python/paddle/v2/fluid/tests/test_elementwise_mul_op.py
+++ b/python/paddle/v2/fluid/tests/test_elementwise_mul_op.py
@@ -1,16 +1,16 @@
 # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserve.
 #
-#Licensed under the Apache License, Version 2.0 (the "License");
-#you may not use this file except in compliance with the License.
-#You may obtain a copy of the License at
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
 #
 #    http://www.apache.org/licenses/LICENSE-2.0
 #
-#Unless required by applicable law or agreed to in writing, software
-#distributed under the License is distributed on an "AS IS" BASIS,
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#See the License for the specific language governing permissions and
-#limitations under the License.
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 import unittest
 import numpy as np
 from op_test import OpTest
@@ -38,6 +38,16 @@ class ElementwiseMulOp(OpTest):
         self.check_grad(['X'], 'Out', no_grad_set=set('Y'))
 
 
+class TestElementwiseMulOp_scalar(ElementwiseMulOp):
+    def setUp(self):
+        self.op_type = "elementwise_mul"
+        self.inputs = {
+            'X': np.random.rand(2, 3, 4).astype(np.float32),
+            'Y': np.random.rand(1).astype(np.float32)
+        }
+        self.outputs = {'Out': self.inputs['X'] * self.inputs['Y']}
+
+
 class TestElementwiseMulOp_Vector(ElementwiseMulOp):
     def setUp(self):
         self.op_type = "elementwise_mul"
diff --git a/python/paddle/v2/fluid/tests/test_elementwise_sub_op.py b/python/paddle/v2/fluid/tests/test_elementwise_sub_op.py
index db24db7b304..cf53d85bbad 100644
--- a/python/paddle/v2/fluid/tests/test_elementwise_sub_op.py
+++ b/python/paddle/v2/fluid/tests/test_elementwise_sub_op.py
@@ -1,16 +1,16 @@
 # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserve.
 #
-#Licensed under the Apache License, Version 2.0 (the "License");
-#you may not use this file except in compliance with the License.
-#You may obtain a copy of the License at
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
 #
 #    http://www.apache.org/licenses/LICENSE-2.0
 #
-#Unless required by applicable law or agreed to in writing, software
-#distributed under the License is distributed on an "AS IS" BASIS,
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#See the License for the specific language governing permissions and
-#limitations under the License.
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 import unittest
 import numpy as np
 from op_test import OpTest
@@ -40,6 +40,16 @@ class TestElementwiseOp(OpTest):
             ['X'], 'Out', max_relative_error=0.005, no_grad_set=set('Y'))
 
 
+class TestElementwiseSubOp_scalar(TestElementwiseOp):
+    def setUp(self):
+        self.op_type = "elementwise_sub"
+        self.inputs = {
+            'X': np.random.rand(2, 3, 4).astype(np.float32),
+            'Y': np.random.rand(1).astype(np.float32)
+        }
+        self.outputs = {'Out': self.inputs['X'] - self.inputs['Y']}
+
+
 class TestElementwiseSubOp_Vector(TestElementwiseOp):
     def setUp(self):
         self.op_type = "elementwise_sub"
--
GitLab
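
For reference, here is a minimal numpy sketch (illustration only, not part of the diff above) of the scalar-broadcast semantics the new TestElementwise*Op_scalar cases check: a 1-element Y is treated as a scalar and applied to every element of X, so the reference outputs are ordinary numpy broadcasting.

    import numpy as np

    # X is a full tensor; Y is a 1-element tensor, the new "scalar" case.
    x = np.random.rand(2, 3, 4).astype(np.float32)
    y = np.random.rand(1).astype(np.float32)

    # The op outputs are expected to match plain numpy broadcasting of the scalar.
    out_add = x + y
    out_mul = x * y

    assert out_add.shape == x.shape
    assert np.allclose(out_mul, x * y[0])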