From 6e7f0bb284e72cfe7a9ebefe0a96422ad3a734e8 Mon Sep 17 00:00:00 2001 From: Leo Chen Date: Thu, 6 Aug 2020 23:09:54 +0800 Subject: [PATCH] refine paddle.stack (#25305) * refine stack in dygraph mode, test=develop * update stack, test=develop * rename Variable to Tensor, test=develop * add name args to fluid.layers.stack, test=develop * follow comments * update doc * update doc * follow comments --- python/paddle/fluid/layers/nn.py | 13 ++-- python/paddle/tensor/manipulation.py | 89 +++++++++++----------------- 2 files changed, 41 insertions(+), 61 deletions(-) diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py index a46391452b..1b8df4a098 100644 --- a/python/paddle/fluid/layers/nn.py +++ b/python/paddle/fluid/layers/nn.py @@ -9911,7 +9911,7 @@ def flatten(x, axis=1, name=None): return out -def stack(x, axis=0): +def stack(x, axis=0, name=None): """ This OP stacks all the inputs :code:`x` along axis. @@ -9991,15 +9991,16 @@ def stack(x, axis=0): data = layers.stack(x1) # stack according to axis 0, data.shape=[1, None, 1, 2] """ - - helper = LayerHelper('stack', **locals()) axis = 0 if axis is None else axis - if not isinstance(x, list) and not isinstance(x, tuple): x = [x] + + if in_dygraph_mode(): + return core.ops.stack(x, 'axis', axis) + + helper = LayerHelper('stack', **locals()) out = helper.create_variable_for_type_inference(x[0].dtype) - if not in_dygraph_mode() and \ - x[0].desc.type() == core.VarDesc.VarType.LOD_TENSOR_ARRAY: + if x[0].desc.type() == core.VarDesc.VarType.LOD_TENSOR_ARRAY: assert len(x) == 1, "If the elements of 'x' in stack are Variable(LoDTensorArray), " \ "number of the elements must be 1, but received %s." 
% len(x) out_index = helper.create_variable_for_type_inference(dtype="int32") diff --git a/python/paddle/tensor/manipulation.py b/python/paddle/tensor/manipulation.py index c23859b03a..07d327a21e 100644 --- a/python/paddle/tensor/manipulation.py +++ b/python/paddle/tensor/manipulation.py @@ -359,13 +359,18 @@ def roll(x, shifts, axis=None, name=None): return out -def stack(x, axis=0, out=None, name=None): +def stack(x, axis=0, name=None): """ :alias_main: paddle.stack - :alias: paddle.stack,paddle.tensor.stack,paddle.tensor.manipulation.stack + :alias: paddle.stack, paddle.tensor.stack, paddle.tensor.manipulation.stack - - This OP stacks all the inputs :code:`x` along axis. + This OP stacks all the input tensors ``x`` along ``axis`` dimension. + All tensors must be of the same shape and same dtype. + + For example, given N tensors of shape [A, B], if ``axis == 0``, the shape of stacked + tensor is [N, A, B]; if ``axis == 1``, the shape of stacked + tensor is [A, N, B], etc. + .. code-block:: text @@ -391,7 +396,6 @@ def stack(x, axis=0, out=None, name=None): Case 2: - Input: x[0].shape = [1, 2] x[0].data = [ [1.0 , 2.0 ] ] @@ -402,7 +406,7 @@ def stack(x, axis=0, out=None, name=None): Attrs: - axis = 1 or axis = -2 + axis = 1 or axis = -2 # If axis = -2, axis = axis+ndim(x[0])+1 = -2+2+1 = 1. Output: Out.shape = [1, 3, 2] [ [ [1.0, 2.0] [3.0, 4.0] [5.0, 6.0] ] ] Args: - x (Variable|list(Variable)): Input :code:`x` can be a single Tensor, a :code:`list` of Tensors. - If :code:`x` is a :code:`list`, the shapes of all these Tensors - must be the same. Supposing input is N dims - Tensors :math:`[d_0, d_1, ..., d_{n-1}]`, the output is N+1 dims - Tensor :math:`[d_0, d_1, d_{axis-1}, len(x), d_{axis}, ..., d_{n-1}]`. - Support data types: float32, float64, int32, int64. - axis (int, optional): The axis along which all inputs are stacked. ``axis`` range is :math:`[-(R+1), R+1)`. - R is the first tensor of inputs. 
If ``axis`` < 0, :math:`axis=axis+rank(x[0])+1`. - The default value of axis is 0. - + x (Tensor|list[Tensor]): Input ``x`` can be a single tensor, or a ``list`` of tensors. + If ``x`` is a ``list``, the Tensors in ``x`` + must be of the same shape and dtype. Support data types: float32, float64, int32, int64. + axis (int, optional): The axis along which all inputs are stacked. ``axis`` range is ``[-(R+1), R+1)``, + where ``R`` is the number of dimensions of the first input tensor ``x[0]``. + If ``axis < 0``, ``axis = axis+R+1``. The default value of axis is 0. + name (str, optional): Please refer to :ref:`api_guide_Name`, Default None. + Returns: - Variable: The stacked Tensor, has same data type with input Tensors. Output dim is :math:`rank(x[0])+1`. + Tensor: The stacked tensor with same data type as input. Example: .. code-block:: python - import numpy as np + import paddle - import paddle.fluid as fluid + import numpy as np data1 = np.array([[1.0, 2.0]]) data2 = np.array([[3.0, 4.0]]) data3 = np.array([[5.0, 6.0]]) - with fluid.dygraph.guard(): - x1 = fluid.dygraph.to_variable(data1) - x2 = fluid.dygraph.to_variable(data2) - x3 = fluid.dygraph.to_variable(data3) - result = paddle.stack([x1, x2, x3], axis=0) - # result shape: [3, 1, 2] - # result value: [[[1.0, 2.0]], - # [[3.0, 4.0]], - # [[5.0, 6.0]]] - """ - helper = LayerHelper('stack', **locals()) - axis = 0 if axis is None else axis - - if not isinstance(x, list) and not isinstance(x, tuple): - x = [x] - out = helper.create_variable_for_type_inference(x[0].dtype) - if not in_dygraph_mode() and \ - x[0].desc.type() == core.VarDesc.VarType.LOD_TENSOR_ARRAY: - assert len(x) == 1, "If the elements of 'x' in stack are Variable(LoDTensorArray), " \ - "number of the elements must be 1, but received %s." 
% len(x) - out_index = helper.create_variable_for_type_inference(dtype="int32") - helper.append_op( - type='tensor_array_to_tensor', - inputs={'X': x[0]}, - outputs={'Out': [out], - 'OutIndex': [out_index]}, - attrs={'axis': axis, - 'use_stack': True}) - else: - helper.append_op( - type='stack', - inputs={'X': x}, - outputs={'Y': out}, - attrs={'axis': axis}) - - return out + paddle.enable_imperative() + x1 = paddle.imperative.to_variable(data1) + x2 = paddle.imperative.to_variable(data2) + x3 = paddle.imperative.to_variable(data3) + + out = paddle.stack([x1, x2, x3], axis=0) + print(out.shape) # [3, 1, 2] + print(out.numpy()) + # [[[1., 2.]], + # [[3., 4.]], + # [[5., 6.]]] + """ + return layers.stack(x, axis, name) def split(x, num_or_sections, axis=0, name=None): -- GitLab