From dfb841ad5a8bda7f9e8968a2bfba1d43f51420ef Mon Sep 17 00:00:00 2001 From: guosheng Date: Fri, 19 Oct 2018 18:01:16 +0800 Subject: [PATCH] Make reshape_op reuse input. test=develop --- paddle/fluid/API.spec | 2 +- python/paddle/fluid/layers/nn.py | 23 +++++++++++++---------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/paddle/fluid/API.spec b/paddle/fluid/API.spec index 850ccbfb397..e3776762f94 100644 --- a/paddle/fluid/API.spec +++ b/paddle/fluid/API.spec @@ -107,7 +107,7 @@ paddle.fluid.layers.softmax_with_cross_entropy ArgSpec(args=['logits', 'label', paddle.fluid.layers.smooth_l1 ArgSpec(args=['x', 'y', 'inside_weight', 'outside_weight', 'sigma'], varargs=None, keywords=None, defaults=(None, None, None)) paddle.fluid.layers.one_hot ArgSpec(args=['input', 'depth'], varargs=None, keywords=None, defaults=None) paddle.fluid.layers.autoincreased_step_counter ArgSpec(args=['counter_name', 'begin', 'step'], varargs=None, keywords=None, defaults=(None, 1, 1)) -paddle.fluid.layers.reshape ArgSpec(args=['x', 'shape', 'actual_shape', 'act', 'inplace', 'name'], varargs=None, keywords=None, defaults=(None, None, True, None)) +paddle.fluid.layers.reshape ArgSpec(args=['x', 'shape', 'actual_shape', 'inplace', 'name'], varargs=None, keywords=None, defaults=(None, False, None)) paddle.fluid.layers.squeeze ArgSpec(args=['input', 'axes', 'name'], varargs=None, keywords=None, defaults=(None,)) paddle.fluid.layers.unsqueeze ArgSpec(args=['input', 'axes', 'name'], varargs=None, keywords=None, defaults=(None,)) paddle.fluid.layers.lod_reset ArgSpec(args=['x', 'y', 'target_lod'], varargs=None, keywords=None, defaults=(None, None)) diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py index 58c9ce56bf6..3ce0126c838 100644 --- a/python/paddle/fluid/layers/nn.py +++ b/python/paddle/fluid/layers/nn.py @@ -4830,7 +4830,7 @@ def autoincreased_step_counter(counter_name=None, begin=1, step=1): return counter -def reshape(x, shape, 
actual_shape=None, act=None, inplace=True, name=None): +def reshape(x, shape, actual_shape=None, inplace=False, name=None): """ Gives a new shape to the input Tensor without changing its data. @@ -4878,15 +4878,18 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None): :attr:`shape` specifying shape. That is to say :attr:`actual_shape` has a higher priority than :attr:`shape`. - act (str): The non-linear activation to be applied to output variable. - inplace(bool): If this flag is set true, the output - shares data with input without copying, otherwise - a new output tensor is created - whose data is copied from input x. + inplace(bool): If this flag is set true, reuse the input :attr:`x` as + output, which will change the shape of variable :attr:`x`. + Otherwise, preserve the shape of :attr:`x` and return a new + output tensor variable whose data is copied from input x + but reshaped. Though setting to :attr:`True` will be more + efficient, :attr:`False` is suggested when :attr:`x` is + used in multiple operators. name (str): The name of this layer. It is optional. Returns: - Variable: The output tensor. + Variable: The reshaped tensor variable. It is a new tensor variable if \ :attr:`inplace` is :attr:`False`, otherwise it is :attr:`x`. Raises: TypeError: if actual_shape is neither Variable nor None. 
@@ -4897,7 +4900,7 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None): data = fluid.layers.data( name='data', shape=[2, 4, 6], dtype='float32') reshaped = fluid.layers.reshape( - x=data, shape=[-1, 0, 3, 2], act='tanh', inplace=True) + x=data, shape=[-1, 0, 3, 2], inplace=True) """ if not (isinstance(shape, list) or isinstance(shape, tuple)): @@ -4924,8 +4927,8 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None): "except one unknown dimension.") helper = LayerHelper("reshape2", **locals()) - out = helper.create_tmp_variable(dtype=x.dtype) x_shape = helper.create_tmp_variable(dtype=x.dtype) + out = x if inplace else helper.create_tmp_variable(dtype=x.dtype) helper.append_op( type="reshape2", inputs=inputs, @@ -4933,7 +4936,7 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None): outputs={"Out": out, "XShape": x_shape}) - return helper.append_activation(out) + return out def squeeze(input, axes, name=None): -- GitLab