未验证 提交 449903de 编写于 作者: L lilong12 提交者: GitHub

add paddle.broadcast_to api which is an alias of paddle.expand (#28706)

* update, test=develop
上级 da71173b
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid as fluid
from paddle.fluid import compiler, Program, program_guard
import paddle
# Force static-graph mode: the fluid Program/Executor APIs used below require it.
paddle.enable_static()
class TestBroadcastToError(unittest.TestCase):
    """Static-graph error checks for paddle.tensor.broadcast_to."""

    def test_errors(self):
        with program_guard(Program(), Program()):
            target_shape = [2, 2]

            # A LoDTensor is not a Variable, so it must be rejected.
            lod_input = fluid.create_lod_tensor(
                np.array([[-1]]), [[1]], fluid.CPUPlace())
            self.assertRaises(TypeError, paddle.tensor.broadcast_to,
                              lod_input, target_shape)

            # uint8 is not among the supported dtypes.
            uint8_var = fluid.layers.data(name='x2', shape=[4], dtype="uint8")
            self.assertRaises(TypeError, paddle.tensor.broadcast_to,
                              uint8_var, target_shape)

            # A bool input that still requires a gradient is invalid.
            bool_var = fluid.layers.data(name='x3', shape=[4], dtype="bool")
            bool_var.stop_gradient = False
            self.assertRaises(ValueError, paddle.tensor.broadcast_to,
                              bool_var, target_shape)
# Test python API
class TestBroadcastToAPI(unittest.TestCase):
    """Static-graph functional checks for paddle.broadcast_to."""

    def test_api(self):
        x_np = np.random.random([12, 14]).astype("float32")
        x = fluid.layers.data(
            name='x', shape=[12, 14], append_batch_size=False, dtype="float32")

        # Target shape expressed three ways: a plain list, a list mixing a
        # 1-D Tensor element with an int, and a full 1-D shape Tensor.
        dim_tensor = fluid.layers.fill_constant([1], "int32", 12)
        shape_tensor = fluid.layers.data(
            name="expand_shape",
            shape=[2],
            append_batch_size=False,
            dtype="int32")

        out_1 = paddle.broadcast_to(x, shape=[12, 14])
        out_2 = paddle.broadcast_to(x, shape=[dim_tensor, 14])
        out_3 = paddle.broadcast_to(x, shape=shape_tensor)

        # Ensure a gradient can be constructed through the mixed-shape variant.
        g0 = fluid.backward.calc_gradient(out_2, x)

        exe = fluid.Executor(place=fluid.CPUPlace())
        res_1, res_2, res_3 = exe.run(
            fluid.default_main_program(),
            feed={
                "x": x_np,
                "expand_shape": np.array([12, 14]).astype("int32"),
            },
            fetch_list=[out_1, out_2, out_3])

        # Broadcasting [12, 14] -> [12, 14] is the identity mapping.
        assert np.array_equal(res_1, np.tile(x_np, (1, 1)))
        assert np.array_equal(res_2, np.tile(x_np, (1, 1)))
        assert np.array_equal(res_3, np.tile(x_np, (1, 1)))
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
...@@ -1204,6 +1204,96 @@ def expand_as(x, y, name=None): ...@@ -1204,6 +1204,96 @@ def expand_as(x, y, name=None):
return out return out
def broadcast_to(x, shape, name=None):
    """
    Broadcast the input tensor to a given shape.

    Both the number of dimensions of ``x`` and the number of elements in
    ``shape`` should be less than or equal to 6. A dimension of ``x`` that is
    broadcast must either already match the target size or have size 1.

    Args:
        x (Tensor): The input tensor, its data type is bool, float32, float64,
            int32 or int64.
        shape (list|tuple|Tensor): The result shape after broadcasting. The data
            type is int32. If shape is a list or tuple, all its elements should
            be integers or 1-D Tensors with the data type int32. If shape is a
            Tensor, it should be an 1-D Tensor with the data type int32. The
            value -1 in shape means keeping the corresponding dimension
            unchanged.
        name (str, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name` .

    Returns:
        N-D Tensor: A Tensor with the given shape. The data type is the same as ``x``.

    Raises:
        TypeError: If ``x`` or ``shape`` has an unsupported type.
        ValueError: If ``x`` is bool and requires a gradient, or if an element
            of ``shape`` is neither positive nor -1.

    Examples:
        .. code-block:: python

            import paddle

            data = paddle.to_tensor([1, 2, 3], dtype='int32')
            out = paddle.broadcast_to(data, shape=[2, 3])
            print(out)
            # [[1, 2, 3], [1, 2, 3]]
    """
    # Fast path: in dynamic-graph mode call the C++ op directly.
    if in_dygraph_mode():
        return core.ops.expand_v2(x, 'shape', shape)

    # Validate the shape argument before building the op.
    if isinstance(shape, Variable):
        assert len(shape.shape) == 1, ('shape must be an 1-D Tensor.')
    else:
        for elem in shape:
            if isinstance(elem, Variable):
                assert len(elem.shape) == 1, (
                    'Elements in shape must be 1-D Tensors or integers.')
            else:
                if six.PY3:
                    type_tuple = (int, np.int32, np.int64)
                elif six.PY2:
                    type_tuple = (int, long, np.int32, np.int64)
                assert isinstance(elem, type_tuple), (
                    'Elements in shape must be 1-D Tensors or integers.')

    check_variable_and_dtype(x, 'x',
                             ['bool', 'float32', 'float64', 'int32', 'int64'],
                             'broadcast_to')
    check_type(shape, 'shape', (list, tuple, Variable), 'broadcast_to')
    if convert_dtype(x.dtype) == 'bool' and x.stop_gradient == False:
        # Gradients cannot flow through a bool tensor, so a bool input is only
        # accepted when it does not require a gradient.
        # NOTE: the message previously told users to set stop_gradient to
        # "False", which is exactly the state this branch rejects.
        raise ValueError(
            "When the data type of input 'x' for broadcast_to is bool, "
            "you must set its stop_gradient to be True by "
            "some_var.stop_gradient = True, supporting "
            "some_var as the input.")

    inputs = {"X": [x]}
    attrs = {}
    helper = LayerHelper('expand', **locals())

    def get_attr_expand_shape(list_expand_shape):
        # Convert the user-facing shape list into the 'shape' attribute of the
        # expand_v2 op: Variable elements become the placeholder -1 and are
        # fed at runtime through 'expand_shapes_tensor'.
        attrs_expand_shape = []
        for dim in list_expand_shape:
            if isinstance(dim, Variable):
                attrs_expand_shape.append(-1)
            else:
                # Validate before appending; ints must be positive or -1.
                assert dim > 0 or dim == -1, (
                    "All elements in shape of broadcast_to must be positive or -1."
                )
                attrs_expand_shape.append(dim)
        return attrs_expand_shape

    if isinstance(shape, Variable):
        # A whole-shape tensor is fed through the 'Shape' input of expand_v2.
        shape.stop_gradient = True
        inputs['Shape'] = shape
    elif isinstance(shape, (list, tuple)):
        attrs['shape'] = get_attr_expand_shape(shape)
        if utils._contain_var(shape):
            inputs['expand_shapes_tensor'] = utils._convert_to_tensor_list(
                shape)

    dtype = helper.input_dtype(input_param_name='x')
    out = helper.create_variable_for_type_inference(dtype)
    helper.append_op(
        type='expand_v2', inputs=inputs, outputs={'Out': out}, attrs=attrs)
    return out
def expand(x, shape, name=None): def expand(x, shape, name=None):
""" """
...@@ -1227,10 +1317,9 @@ def expand(x, shape, name=None): ...@@ -1227,10 +1317,9 @@ def expand(x, shape, name=None):
import paddle import paddle
paddle.disable_static()
data = paddle.to_tensor([1, 2, 3], dtype='int32') data = paddle.to_tensor([1, 2, 3], dtype='int32')
out = paddle.expand(data, shape=[2, 3]) out = paddle.expand(data, shape=[2, 3])
out = out.numpy() print(out)
# [[1, 2, 3], [1, 2, 3]] # [[1, 2, 3], [1, 2, 3]]
""" """
if in_dygraph_mode(): if in_dygraph_mode():
...@@ -1292,9 +1381,6 @@ def expand(x, shape, name=None): ...@@ -1292,9 +1381,6 @@ def expand(x, shape, name=None):
return out return out
broadcast_to = expand
def reshape(x, shape, name=None): def reshape(x, shape, name=None):
""" """
This operator changes the shape of ``x`` without changing its data. This operator changes the shape of ``x`` without changing its data.
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册