Unverified commit edd5e9a8, authored by Yuang Liu, committed by GitHub

fix bug for fused_linear_grad_add and main_grad (#56030)

Parent 42e0c6b8
@@ -55,16 +55,18 @@ class MixPrecisionLayer(nn.Layer):
             ), "In main_grad node, param.grad should be None, but find param[{}] has grad.".format(
                 param.name
             )
-            if param.main_grad is None:
-                param.main_grad = core.eager.Tensor(
-                    value=tmp_grad.cast(paddle.float32).value(),
-                    place=tmp_grad.place,
-                    name="main_grad@" + param.name,
-                )
-            else:
-                param.main_grad.add_(tmp_grad)
-            tmp_grad._clear_data()
+            if tmp_grad._is_initialized():
+                # Some previous pylayer may return None, should check grad validation.
+                if param.main_grad is None:
+                    param.main_grad = core.eager.Tensor(
+                        value=tmp_grad.cast(paddle.float32).value(),
+                        place=tmp_grad.place,
+                        name="main_grad@" + param.name,
+                    )
+                else:
+                    param.main_grad.add_(tmp_grad)
+                tmp_grad._clear_data()
             return None
         return param_hook
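For context: the hook in the diff accumulates each parameter's low-precision gradient into a float32 main_grad buffer. The bug fixed here is that a preceding PyLayer (per the new comment, e.g. the fused_linear_grad_add path named in the commit title) may hand the hook an uninitialized gradient tensor, on which cast()/add_() would fail; the new tmp_grad._is_initialized() guard skips accumulation in that case. Below is a minimal standalone sketch of the same guard pattern, not the PaddlePaddle source: it simplifies the core.eager.Tensor construction to a plain cast, and the make_main_grad_hook helper is a hypothetical name used only for illustration.

import paddle


def make_main_grad_hook(param):
    # Hypothetical helper mirroring the hook shown in the diff above.
    @paddle.no_grad()
    def param_hook(tmp_grad):
        # Guard introduced by this commit: only accumulate when the incoming
        # grad actually holds data; an uninitialized tensor (e.g. after a
        # previous PyLayer returned None) would make cast()/add_() fail.
        if tmp_grad._is_initialized():
            if param.main_grad is None:
                # First accumulation: keep a float32 master copy of the grad
                # (simplified here; the real code builds a core.eager.Tensor).
                param.main_grad = tmp_grad.cast(paddle.float32)
            else:
                param.main_grad.add_(tmp_grad)
            # Release the low-precision grad storage once it is accumulated.
            tmp_grad._clear_data()
        return None

    return param_hook

Keeping main_grad in float32 while the per-step gradients stay in fp16/bf16 avoids precision loss when many micro-batch gradients are accumulated, which is why the hook clears tmp_grad as soon as it has been folded into the master copy.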