Commit 16240f85 authored by: Z zhiqiu

remove inplace argument

Parent d4b4357b
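This commit drops the `inplace=True` keyword from internal `reshape` call sites; the target shapes are unchanged, only the in-place flag is removed. A minimal sketch of the call-site change, assuming an existing fluid tensor `x` (illustrative, not part of the diff):

    # before: ask reshape to share the input's data (inplace)
    y = fluid.layers.reshape(x=x, shape=[-1, 0, 3, 2], inplace=True)
    # after: plain reshape; the inplace argument is no longer passed
    y = fluid.layers.reshape(x=x, shape=[-1, 0, 3, 2])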
@@ -416,7 +416,7 @@ class DataParallel(layers.Layer):
                 g_var_shapes.append(g_var.shape)
                 flattened_vars.append(
                     nn.reshape(
-                        x=g_var, shape=[np.prod(g_var.shape)], inplace=True))
+                        x=g_var, shape=[np.prod(g_var.shape)]))
             coalesced_grad = nn.concat(flattened_vars)
             coalesced_grads_and_grad_vars.append(
                 [coalesced_grad, grad_vars, g_var_shapes])
......
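The hunk above is part of a gradient-coalescing step: each gradient is flattened to 1-D with `nn.reshape(x=g_var, shape=[np.prod(g_var.shape)])`, the flattened pieces are concatenated into one buffer, and the original shapes are kept so the buffer can be split back later. A rough numpy sketch of the same idea (illustrative only; the helper names are hypothetical):

    import numpy as np

    def coalesce(grads):
        # flatten every gradient and remember its original shape
        shapes = [g.shape for g in grads]
        flat = [g.reshape(np.prod(g.shape)) for g in grads]
        coalesced = np.concatenate(flat)  # one contiguous buffer
        return coalesced, shapes

    def split_back(coalesced, shapes):
        # restore per-gradient views from the coalesced buffer
        sizes = [int(np.prod(s)) for s in shapes]
        pieces = np.split(coalesced, np.cumsum(sizes)[:-1])
        return [p.reshape(s) for p, s in zip(pieces, shapes)]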
@@ -1736,7 +1736,7 @@ def npair_loss(anchor, positive, labels, l2_reg=0.002):
     Beta = 0.25
     batch_size = labels.shape[0]
-    labels = nn.reshape(labels, shape=[batch_size, 1], inplace=True)
+    labels = nn.reshape(labels, shape=[batch_size, 1])
     labels = nn.expand(labels, expand_times=[1, batch_size])
     labels = equal(labels, nn.transpose(labels, perm=[1, 0])).astype('float32')
......
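In the hunk above, `labels` of shape `[batch_size]` is reshaped to `[batch_size, 1]`, tiled to `[batch_size, batch_size]`, and compared against its transpose, producing a float matrix whose (i, j) entry is 1.0 when samples i and j share a label. A rough numpy equivalent of that mask computation (illustrative only):

    import numpy as np

    labels = np.array([0, 1, 0, 2])                    # [batch_size]
    col = labels.reshape(labels.shape[0], 1)           # [batch_size, 1]
    tiled = np.tile(col, (1, labels.shape[0]))         # [batch_size, batch_size]
    same_label = (tiled == tiled.T).astype('float32')  # 1.0 where labels match
    # same_label[0, 2] == 1.0 because samples 0 and 2 share label 0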
@@ -6109,7 +6109,7 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=False, name=None):
             data_1 = fluid.data(
                 name='data_1', shape=[2, 4, 6], dtype='float32')
             reshaped_1 = fluid.layers.reshape(
-                x=data_1, shape=[-1, 0, 3, 2], inplace=True)
+                x=data_1, shape=[-1, 0, 3, 2])
             # the shape of reshaped_1 is [2,4,3,2].
             # example 2:
......
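In the docstring example above, the target shape `[-1, 0, 3, 2]` for an input of shape `[2, 4, 6]` works out as follows: `0` copies the matching input dimension (4), and `-1` is inferred so the element count stays 2*4*6 = 48, giving 48 / (4*3*2) = 2, hence `[2, 4, 3, 2]`. A small check of that arithmetic (illustrative; numpy has no `0` placeholder, so the copied dimension is filled in by hand):

    import numpy as np

    x = np.zeros((2, 4, 6))
    target = [-1, x.shape[1], 3, 2]            # 0 in fluid means "copy input dim", here 4
    inferred = x.size // (x.shape[1] * 3 * 2)  # 48 // 24 == 2, what -1 resolves to
    print(inferred)                            # 2
    print(x.reshape(target).shape)             # (2, 4, 3, 2)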
@@ -363,7 +363,7 @@ def roll(x, shifts, axis=None, name=None):
         outputs={'Out': out},
         attrs={'axis': axis,
                'shifts': shifts})
-    out = layers.reshape(out, shape=origin_shape, inplace=True)
+    out = layers.reshape(out, shape=origin_shape)
     return out
......
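The hunk above closes out `roll`: after the `roll` op shifts elements, the output is reshaped back to the input's original shape (relevant when the op worked on a flattened view). A rough numpy sketch of the intended behavior, not of the fluid implementation itself (illustrative only):

    import numpy as np

    x = np.arange(6).reshape(2, 3)
    # roll with no axis: shift over the flattened tensor, then restore the shape
    rolled_flat = np.roll(x.reshape(-1), 1).reshape(x.shape)
    # roll along axis 1: each row is shifted independently
    rolled_axis1 = np.roll(x, shift=1, axis=1)
    print(rolled_flat)    # [[5 0 1] [2 3 4]]
    print(rolled_axis1)   # [[2 0 1] [5 3 4]]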