Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
magicwindyyd
mindspore
提交
683d29c0
M
mindspore
项目概览
magicwindyyd
/
mindspore
与 Fork 源项目一致
Fork自
MindSpore / mindspore
通知
1
Star
1
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
M
mindspore
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
683d29c0
编写于
4月 23, 2020
作者:
M
mindspore-ci-bot
提交者:
Gitee
4月 23, 2020
浏览文件
操作
浏览文件
下载
差异文件
!564 Overlength functions rectification
Merge pull request !564 from YuJianfeng/master
上级
5cf09405
146ac126
变更
2
隐藏空白更改
内联
并排
Showing
2 changed files
with
50 additions
and
34 deletions
+50
-34
mindspore/ccsrc/pre_activate/ascend/ascend_backend_optimization.cc
.../ccsrc/pre_activate/ascend/ascend_backend_optimization.cc
+30
-23
mindspore/ccsrc/pre_activate/ascend/ir_fusion/parameter_and_transop_fusion.cc
...activate/ascend/ir_fusion/parameter_and_transop_fusion.cc
+20
-11
未找到文件。
mindspore/ccsrc/pre_activate/ascend/ascend_backend_optimization.cc
浏览文件 @
683d29c0
...
...
@@ -70,6 +70,35 @@
namespace
mindspore
{
namespace
opt
{
namespace
{
// Registers the optional Ascend backend IR fusion passes on the given pass
// manager. Each AddPass call wraps one rewrite rule (fusion, fission, or
// rule-based substitution) in a shared_ptr and hands it to the manager.
// NOTE(review): passes are presumably executed in registration order, so the
// sequence below is significant — confirm against PassManager before reordering.
//
// @param ir_fusion_pm  Pass manager to populate; must be non-null (checked).
void AddAscendBackendOptionalIRFusion(PassManager *ir_fusion_pm) {
  MS_EXCEPTION_IF_NULL(ir_fusion_pm);
  ir_fusion_pm->AddPass(std::make_shared<SquareSumFusion>());
  ir_fusion_pm->AddPass(std::make_shared<ClipByNormNoDivSquareSumFusion>());
  // Lamb-optimizer related fusion rules.
  ir_fusion_pm->AddPass(std::make_shared<LambUpdateWithLRRuleFusion>());
  ir_fusion_pm->AddPass(std::make_shared<ConfusionSoftmaxGradRule>());
  ir_fusion_pm->AddPass(std::make_shared<LambNextMVWithDecayV1Rule>());
  ir_fusion_pm->AddPass(std::make_shared<LambNextMVRule>());
  ir_fusion_pm->AddPass(std::make_shared<LambNextMVWithDecayRule>());
  ir_fusion_pm->AddPass(std::make_shared<LambNextRightRule>());
  ir_fusion_pm->AddPass(std::make_shared<LambUpdateWithLrV2>());
  // Layout / transpose-reshape simplifications.
  ir_fusion_pm->AddPass(std::make_shared<ReshapeTransposeFusion>());
  ir_fusion_pm->AddPass(std::make_shared<TransposeReshapeFusion>());
  ir_fusion_pm->AddPass(std::make_shared<ClipByValueFusion>());
  ir_fusion_pm->AddPass(std::make_shared<FusedBatchNormFusion>());
  ir_fusion_pm->AddPass(std::make_shared<TopKSplit>());
  // Adam-optimizer related fusion rules.
  ir_fusion_pm->AddPass(std::make_shared<AdamApplyOneWithDecayRule>());
  ir_fusion_pm->AddPass(std::make_shared<AdamApplyOneFusion>());
  ir_fusion_pm->AddPass(std::make_shared<MomentumLossscaleFusion>());
  ir_fusion_pm->AddPass(std::make_shared<MulAddFusion>());
  ir_fusion_pm->AddPass(std::make_shared<MulAddNFusion>());
  ir_fusion_pm->AddPass(std::make_shared<MatmulBiasaddFusion>());
  ir_fusion_pm->AddPass(std::make_shared<AddnFission>());
  ir_fusion_pm->AddPass(std::make_shared<GetitemTuple>());
  ir_fusion_pm->AddPass(std::make_shared<TransposeTransDataFusion>());
}
}
// namespace
void
RunOpAscendDataLayout
(
const
std
::
shared_ptr
<
session
::
KernelGraph
>
&
kernel_graph
)
{
MS_EXCEPTION_IF_NULL
(
kernel_graph
);
auto
optimizer
=
std
::
make_shared
<
GraphOptimizer
>
();
...
...
@@ -164,29 +193,7 @@ void AscendBackendIRFusionOptimization(const std::shared_ptr<session::KernelGrap
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
BnGradSplit
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
AddMemcpyAsync
>
());
if
(
context_ptr
->
ir_fusion_flag
())
{
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
SquareSumFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
ClipByNormNoDivSquareSumFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
LambUpdateWithLRRuleFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
ConfusionSoftmaxGradRule
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
LambNextMVWithDecayV1Rule
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
LambNextMVRule
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
LambNextMVWithDecayRule
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
LambNextRightRule
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
LambUpdateWithLrV2
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
ReshapeTransposeFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
TransposeReshapeFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
ClipByValueFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
FusedBatchNormFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
TopKSplit
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
AdamApplyOneWithDecayRule
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
AdamApplyOneFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
MomentumLossscaleFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
MulAddFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
MulAddNFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
MatmulBiasaddFusion
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
AddnFission
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
GetitemTuple
>
());
ir_fusion_pm
->
AddPass
(
std
::
make_shared
<
TransposeTransDataFusion
>
());
AddAscendBackendOptionalIRFusion
(
ir_fusion_pm
.
get
());
}
if
(
context_ptr
->
enable_task_sink
()
&&
context_ptr
->
loop_sink_flag
()
&&
ConfigManager
::
GetInstance
().
iter_num
()
>
1
)
{
...
...
mindspore/ccsrc/pre_activate/ascend/ir_fusion/parameter_and_transop_fusion.cc
浏览文件 @
683d29c0
...
...
@@ -26,6 +26,7 @@
namespace
mindspore
{
namespace
opt
{
namespace
{
const
AnfNodePtr
ParamTransRoad
(
const
FuncGraphPtr
&
func_graph
,
const
AnfNodePtr
&
node
,
bool
first_flag
,
std
::
vector
<
CNodePtr
>
*
trans_road
)
{
if
(
node
==
nullptr
)
{
...
...
@@ -59,6 +60,24 @@ const AnfNodePtr ParamTransRoad(const FuncGraphPtr &func_graph, const AnfNodePtr
return
nullptr
;
}
// Constructs a fresh KernelBuildInfo for the given cast node: the input/output
// format and device types are replaced by the caller-supplied values, while
// the kernel type, fusion type, and processor are copied from the node's
// currently selected build info.
//
// @param cast         Cast node to derive the build info from; must be non-null.
// @param format       Format applied to both the input and the output.
// @param input_type   Device data type for the (single) input.
// @param output_type  Device data type for the (single) output.
// @return             The newly built KernelBuildInfo.
kernel::KernelBuildInfoPtr GetKernelBuildInfo(const CNodePtr &cast, const string &format, TypeId input_type,
                                              TypeId output_type) {
  MS_EXCEPTION_IF_NULL(cast);
  auto node_kernel_info = cast->kernel_info();
  MS_EXCEPTION_IF_NULL(node_kernel_info);
  auto selected_info = node_kernel_info->select_kernel_build_info();
  MS_EXCEPTION_IF_NULL(selected_info);
  kernel::KernelBuildInfo::KernelBuildInfoBuilder info_builder;
  // Override format and device types with the caller-supplied values.
  info_builder.SetInputsFormat({format});
  info_builder.SetOutputsFormat({format});
  info_builder.SetInputsDeviceType({input_type});
  info_builder.SetOutputsDeviceType({output_type});
  // Keep the remaining attributes from the node's selected build info.
  info_builder.SetKernelType(selected_info->kernel_type());
  info_builder.SetFusionType(selected_info->fusion_type());
  info_builder.SetProcessor(selected_info->processor());
  return info_builder.Build();
}
}
// namespace
bool
ParameterTransOpFusion
::
Run
(
const
FuncGraphPtr
&
func_graph
)
{
if
(
func_graph
==
nullptr
)
{
MS_LOG
(
ERROR
)
<<
"Func graph is nullptr"
;
...
...
@@ -95,17 +114,7 @@ bool ParameterTransOpFusion::Run(const FuncGraphPtr &func_graph) {
auto
param_dtype
=
AnfAlgo
::
GetOutputDeviceDataType
(
final_node
,
0
);
auto
cast
=
trans_road
[
1
];
auto
cast_format
=
AnfAlgo
::
GetOutputFormat
(
cast
,
0
);
auto
cast_build_info
=
cast
->
kernel_info
()
->
select_kernel_build_info
();
kernel
::
KernelBuildInfo
::
KernelBuildInfoBuilder
builder
;
builder
.
SetOutputsFormat
({
format
});
builder
.
SetInputsFormat
({
format
});
builder
.
SetInputsDeviceType
({
param_dtype
});
builder
.
SetOutputsDeviceType
({
dtype
});
builder
.
SetKernelType
(
cast_build_info
->
kernel_type
());
builder
.
SetFusionType
(
cast_build_info
->
fusion_type
());
builder
.
SetProcessor
(
cast_build_info
->
processor
());
AnfAlgo
::
SetSelectKernelBuildInfo
(
builder
.
Build
(),
cast
.
get
());
AnfAlgo
::
SetSelectKernelBuildInfo
(
GetKernelBuildInfo
(
cast
,
format
,
param_dtype
,
dtype
),
cast
.
get
());
if
(
param_format
==
format
&&
param_dtype
!=
dtype
)
{
manager
->
Replace
(
trans_road
[
2
],
final_node
);
manager
->
Replace
(
cur_transop
,
cast
);
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录