From 08b7f17dbbe1f52cad2e0b276d5f2caa314135fc Mon Sep 17 00:00:00 2001
From: wanghuancoder
Date: Tue, 4 Jan 2022 09:53:49 +0800
Subject: [PATCH] [Eager] Fix benchmark Performance (#38610)

---
 python/paddle/fluid/initializer.py | 32 +++++++++++++++---------------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/python/paddle/fluid/initializer.py b/python/paddle/fluid/initializer.py
index 32e3377e263..5d0b56ed537 100644
--- a/python/paddle/fluid/initializer.py
+++ b/python/paddle/fluid/initializer.py
@@ -160,9 +160,9 @@ class ConstantInitializer(Initializer):
             if var.dtype == VarDesc.VarType.FP16:
                 var_tmp = _C_ops.cast(out_var, 'in_dtype', out_var.dtype,
                                       'out_dtype', var.dtype)
-                var.copy_(var_tmp, True)
+                var.copy_(var_tmp, False)
             else:
-                var.copy_(out_var, True)
+                var.copy_(out_var, False)
             return None
         else:
             # fill constant should set the "str_value" to preserve precision
@@ -279,9 +279,9 @@ class UniformInitializer(Initializer):
             if var.dtype == VarDesc.VarType.FP16:
                 var_tmp = _C_ops.cast(out_var, 'in_dtype', out_var.dtype,
                                       'out_dtype', var.dtype)
-                var.copy_(var_tmp, True)
+                var.copy_(var_tmp, False)
             else:
-                var.copy_(out_var, True)
+                var.copy_(out_var, False)
             return None
         else:
             op = block.append_op(
@@ -382,9 +382,9 @@ class NormalInitializer(Initializer):
             if var.dtype in [VarDesc.VarType.FP16, VarDesc.VarType.BF16]:
                 var_tmp = _C_ops.cast(out_var, 'in_dtype', out_var.dtype,
                                       'out_dtype', var.dtype)
-                var.copy_(var_tmp, True)
+                var.copy_(var_tmp, False)
             else:
-                var.copy_(out_var, True)
+                var.copy_(out_var, False)
             return None
         else:
             op = block.append_op(
@@ -477,9 +477,9 @@ class TruncatedNormalInitializer(Initializer):
             if var.dtype in [VarDesc.VarType.FP16, VarDesc.VarType.BF16]:
                 var_tmp = _C_ops.cast(out_var, 'in_dtype', out_var.dtype,
                                       'out_dtype', var.dtype)
-                var.copy_(var_tmp, True)
+                var.copy_(var_tmp, False)
             else:
-                var.copy_(out_var, True)
+                var.copy_(out_var, False)
             return None
         else:
             op = block.append_op(
@@ -617,9 +617,9 @@ class XavierInitializer(Initializer):
                     var.dtype == VarDesc.VarType.BF16 and not self._uniform):
                 var_tmp = _C_ops.cast(out_var, 'in_dtype', out_var.dtype,
                                       'out_dtype', var.dtype)
-                var.copy_(var_tmp, True)
+                var.copy_(var_tmp, False)
             else:
-                var.copy_(out_var, True)
+                var.copy_(out_var, False)
             return None
         else:
             if self._uniform:
@@ -770,9 +770,9 @@ class MSRAInitializer(Initializer):
                     var.dtype == VarDesc.VarType.BF16 and not self._uniform):
                 var_tmp = _C_ops.cast(out_var, 'in_dtype', out_var.dtype,
                                       'out_dtype', var.dtype)
-                var.copy_(var_tmp, True)
+                var.copy_(var_tmp, False)
             else:
-                var.copy_(out_var, True)
+                var.copy_(out_var, False)
             return None
         else:
             if self._uniform:
@@ -938,9 +938,9 @@ class BilinearInitializer(Initializer):
             ]:
                 var_tmp = _C_ops.cast(out_var, 'in_dtype', out_var.dtype,
                                       'out_dtype', var.dtype)
-                var.copy_(var_tmp, True)
+                var.copy_(var_tmp, False)
             else:
-                var.copy_(out_var, True)
+                var.copy_(out_var, False)
             return None
         else:
             op = block.append_op(
@@ -1044,9 +1044,9 @@ class NumpyArrayInitializer(Initializer):
             if var.dtype in [VarDesc.VarType.FP16, VarDesc.VarType.BF16]:
                 var_tmp = _C_ops.cast(out_var, 'in_dtype', out_var.dtype,
                                       'out_dtype', var.dtype)
-                var.copy_(var_tmp, True)
+                var.copy_(var_tmp, False)
             else:
-                var.copy_(out_var, True)
+                var.copy_(out_var, False)
             return None
         else:
             op = block.append_op(
--
GitLab
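
Note on the change: every hunk above flips the second positional argument of
Tensor.copy_ from True to False in the eager-mode branch of each initializer.
Assuming that argument is the blocking flag of Paddle's eager
Tensor.copy_(src, blocking), the initializers previously forced a synchronous
copy per parameter; passing False lets the copy be issued without stalling the
host, which is where the benchmark speedup would come from. Below is a minimal
sketch of the pattern under those assumptions; the helper name fill_param_ and
the use of paddle.full_like are illustrative, not taken from the patch.

    import paddle

    def fill_param_(param, value):
        # Materialize the initialized values in a temporary tensor,
        # mirroring the out_var -> var flow in the initializers above.
        tmp = paddle.full_like(param, value)
        # The second argument plays the role of the blocking flag: False
        # (the patched value) requests a non-blocking copy instead of
        # waiting for the device-side copy to complete.
        param.copy_(tmp, False)

    w = paddle.ones([4, 4], dtype='float32')
    fill_param_(w, 0.0)
    print(w)

For FP16/BF16 parameters the hunks first cast out_var to the target dtype via
_C_ops.cast and then perform the same in-place copy, so both branches of each
initializer use the non-blocking flag.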