diff --git a/mindspore/ccsrc/pre_activate/common/helper.cc b/mindspore/ccsrc/pre_activate/common/helper.cc
index 4cda390fbb2c8a1b908a4f7799b0f8d79319cd8a..9be537775e151af8d9bbac0e3eaa7a291f9db8a4 100644
--- a/mindspore/ccsrc/pre_activate/common/helper.cc
+++ b/mindspore/ccsrc/pre_activate/common/helper.cc
@@ -687,7 +687,7 @@ bool IsSameNode(const EquivPtr &equiv1, const EquivPtr &equiv2, const VarPtr &va
   MS_EXCEPTION_IF_NULL(equiv1_node);
   auto equiv2_node = GetAnfNodeByVar(equiv2, var_node);
   MS_EXCEPTION_IF_NULL(equiv2_node);
-  return equiv1_node == equiv2_node;
+  return *equiv1_node == *equiv2_node;
 }
 
 AnfNodePtr GetAnfNodeByVar(const EquivPtr &equiv, const VarPtr &var_node) {
diff --git a/mindspore/nn/optim/lamb.py b/mindspore/nn/optim/lamb.py
index b4d478f52ab38be605719c9f9dfa124dcd7b3240..a6a38f164a72216362ebb34268fd30264562c71b 100755
--- a/mindspore/nn/optim/lamb.py
+++ b/mindspore/nn/optim/lamb.py
@@ -180,7 +180,7 @@ class Lamb(Optimizer):
                  beta2=0.999,
                  eps=1e-6,
                  weight_decay=0.0,
-                 decay_filter=lambda x: 'LayerNorm' not in x.name and 'bias' not in x.name):
+                 decay_filter=lambda x: 'layernorm' not in x.name.lower() and 'bias' not in x.name.lower()):
         super(Lamb, self).__init__(start_learning_rate, params)
 
         if self.is_group:
diff --git a/mindspore/ops/_grad/grad_math_ops.py b/mindspore/ops/_grad/grad_math_ops.py
index f457148d513281b7f553dce20f50eaa3429534c7..8edf6d82f2400562be9407b9edd00b680df5afa7 100755
--- a/mindspore/ops/_grad/grad_math_ops.py
+++ b/mindspore/ops/_grad/grad_math_ops.py
@@ -191,8 +191,8 @@ def get_bprop_mul(self):
     mul_func = P.Mul()
 
     def bprop(x, y, out, dout):
-        bc_dx = mul_func(dout, y)
-        bc_dy = mul_func(dout, x)
+        bc_dx = mul_func(y, dout)
+        bc_dy = mul_func(x, dout)
         return binop_grad_common(x, y, bc_dx, bc_dy)
 
     return bprop
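
Note (not part of the patch): the Lamb change makes the default decay_filter case-insensitive, so layer-norm and bias parameters are excluded from weight decay regardless of how they are capitalized in the parameter name. Below is a minimal stand-alone sketch of the behavioral difference; Param is a hypothetical stand-in for a MindSpore Parameter, and only its name attribute is used.

# Hypothetical stand-in for mindspore.Parameter; only `.name` matters here.
from collections import namedtuple

Param = namedtuple("Param", ["name"])

# The old and new default filters from the Lamb constructor in this patch.
old_filter = lambda x: 'LayerNorm' not in x.name and 'bias' not in x.name
new_filter = lambda x: 'layernorm' not in x.name.lower() and 'bias' not in x.name.lower()

params = [
    Param("bert.encoder.layernorm.gamma"),  # lower-case spelling
    Param("bert.encoder.LayerNorm.beta"),   # camel-case spelling
    Param("bert.dense.Bias"),               # capitalized bias
    Param("bert.dense.weight"),             # ordinary weight, decay expected
]

for p in params:
    # True means the parameter receives weight decay.
    print(p.name, old_filter(p), "->", new_filter(p))

# Only the last parameter keeps weight decay under the new filter; the old
# filter let 'layernorm.gamma' and 'Bias' through because it compared exact case.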