提交 ce086181 编写于 作者: M mindspore-ci-bot 提交者: Gitee

!1009 Adapt to the rename of LambNextMVWithDecayV1 to LambNextMVWithDecay

Merge pull request !1009 from zhaozhenlong/adapt-lamb-next-mv-ir-fusion
......@@ -69,7 +69,7 @@ static std::map<string, string> tbe_func_adapter_map = {
{"reduce_sum", "reduce_sum_d"},
{"one_hot", "one_hot_d"},
{"sum", "reduce_sum_d"},
{"lamb_next_mv_with_decay_v1", "lamb_next_m_v_with_decay"},
{"lamb_next_mv_with_decay", "lamb_next_m_v_with_decay"},
{"lamb_next_mv", "lamb_next_m_v"},
{"split", "split_d"},
{"resize_nearest_neighbor", "resize_nearest_neighbor_v2_d"},
......
......@@ -31,7 +31,6 @@
#include "pre_activate/ascend/ir_fusion/confusion_softmax_grad_rule.h"
#include "pre_activate/ascend/ir_fusion/lamb_next_mv_rule.h"
#include "pre_activate/ascend/ir_fusion/lamb_next_mv_with_decay_rule.h"
#include "pre_activate/ascend/ir_fusion/lamb_next_mv_with_decay_v1_rule.h"
#include "pre_activate/ascend/ir_fusion/lamb_next_right_rule.h"
#include "pre_activate/ascend/ir_fusion/lamb_update_with_lr_v2.h"
#include "pre_activate/ascend/ir_fusion/layer_norm_beta_gamma_backprop_fusion.h"
......@@ -82,7 +81,6 @@ void AddAscendBackendOptionalIRFusion(PassManager *ir_fusion_pm) {
ir_fusion_pm->AddPass(std::make_shared<ClipByNormNoDivSquareSumFusion>());
ir_fusion_pm->AddPass(std::make_shared<LambUpdateWithLRRuleFusion>());
ir_fusion_pm->AddPass(std::make_shared<ConfusionSoftmaxGradRule>());
ir_fusion_pm->AddPass(std::make_shared<LambNextMVWithDecayV1Rule>());
ir_fusion_pm->AddPass(std::make_shared<LambNextMVRule>());
ir_fusion_pm->AddPass(std::make_shared<LambNextMVWithDecayRule>());
ir_fusion_pm->AddPass(std::make_shared<LambNextRightRule>());
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册