Unverified commit c7e0a8be authored by Leo Chen, committed by GitHub

Remove inplace argument when calling nn.reshape() (#27376)

* remove inplace argument

* fix sample code

* fix sample code
Parent 26b61691
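This commit drops the `inplace` argument at every internal call site of `nn.reshape` / `layers.reshape` and fixes the docstring sample accordingly. As a minimal sketch of the updated usage (assuming the static-graph fluid API shown in the hunks below, and mirroring the docstring example), a caller simply omits the argument:

    import paddle
    import paddle.fluid as fluid

    paddle.enable_static()

    data = fluid.data(name='data', shape=[2, 4, 6], dtype='float32')
    # Before this change callers passed inplace=True; now the argument is left out
    # and reshape relies on its default behavior.
    reshaped = fluid.layers.reshape(x=data, shape=[-1, 0, 3, 2])
    # reshaped has shape [2, 4, 3, 2]: -1 is inferred, 0 keeps the original dim.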
@@ -416,7 +416,7 @@ class DataParallel(layers.Layer):
g_var_shapes.append(g_var.shape)
flattened_vars.append(
nn.reshape(
-   x=g_var, shape=[np.prod(g_var.shape)], inplace=True))
+   x=g_var, shape=[np.prod(g_var.shape)]))
coalesced_grad = nn.concat(flattened_vars)
coalesced_grads_and_grad_vars.append(
[coalesced_grad, grad_vars, g_var_shapes])
@@ -1755,7 +1755,7 @@ def npair_loss(anchor, positive, labels, l2_reg=0.002):
Beta = 0.25
batch_size = labels.shape[0]
-   labels = nn.reshape(labels, shape=[batch_size, 1], inplace=True)
+   labels = nn.reshape(labels, shape=[batch_size, 1])
labels = nn.expand(labels, expand_times=[1, batch_size])
labels = equal(labels, nn.transpose(labels, perm=[1, 0])).astype('float32')
@@ -6102,14 +6102,16 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=False, name=None):
Examples:
.. code-block:: python
+   import paddle
import paddle.fluid as fluid
+   paddle.enable_static()
# example 1:
# attr shape is a list which doesn't contain Tensors.
data_1 = fluid.data(
name='data_1', shape=[2, 4, 6], dtype='float32')
reshaped_1 = fluid.layers.reshape(
-   x=data_1, shape=[-1, 0, 3, 2], inplace=True)
+   x=data_1, shape=[-1, 0, 3, 2])
# the shape of reshaped_1 is [2,4,3,2].
# example 2:
@@ -363,7 +363,7 @@ def roll(x, shifts, axis=None, name=None):
outputs={'Out': out},
attrs={'axis': axis,
'shifts': shifts})
-   out = layers.reshape(out, shape=origin_shape, inplace=True)
+   out = layers.reshape(out, shape=origin_shape)
return out