magicwindyyd / mindspore — forked from MindSpore / mindspore (in sync with the upstream project)
Commit fa216697
Authored on Jun 22, 2020 by mindspore-ci-bot; committed by Gitee on Jun 22, 2020
!2446 Fix BackendCommonOptimization order
Merge pull request !2446 from zhoufeng/xiu-ba-ge
Parents: 4f3ea801 d4de0c5a
Showing 4 changed files with 36 additions and 21 deletions (+36 -21)
mindspore/ccsrc/session/ascend_session.cc   +28 -17
mindspore/ccsrc/session/ascend_session.h    +1  -0
mindspore/ccsrc/session/session_basic.cc    +5  -3
mindspore/ccsrc/session/session_basic.h     +2  -1
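What the reordering amounts to: before this commit, opt::BackendCommonOptimization ran inside SessionBasic::ConstructKernelGraph, so each kernel graph was optimized as soon as it was built; after it, construction only collects every graph it builds into all_out_graph, and AscendSession::CompileGraph runs the common optimization over the whole set in one pass via the new BackendOptimization helper. Below is a minimal, self-contained sketch of that ordering; every type and helper in it (the KernelGraph struct and the ConstructKernelGraph / BackendCommonOptimization stubs) is a simplified stand-in for illustration, not the real MindSpore API.

#include <memory>
#include <vector>

// Simplified stand-in, not the real mindspore KernelGraph.
struct KernelGraph {
  bool optimized = false;
};
using KernelGraphPtr = std::shared_ptr<KernelGraph>;

// Stand-in for opt::BackendCommonOptimization.
void BackendCommonOptimization(const KernelGraphPtr &graph) { graph->optimized = true; }

// New order, step 1: construction only collects the graphs it builds.
KernelGraphPtr ConstructKernelGraph(std::vector<KernelGraphPtr> *all_out_graph) {
  auto graph = std::make_shared<KernelGraph>();
  // (child graphs would be constructed recursively here and collected as well)
  all_out_graph->push_back(graph);
  return graph;  // no optimization pass here any more
}

// New order, step 2: the common optimization runs once, after every graph exists.
void BackendOptimization(const std::vector<KernelGraphPtr> &all_graphs) {
  for (const auto &graph : all_graphs) {
    BackendCommonOptimization(graph);
  }
}

int main() {
  std::vector<KernelGraphPtr> all_graphs;
  auto root_graph = ConstructKernelGraph(&all_graphs);
  BackendOptimization(all_graphs);  // mirrors the call order in AscendSession::CompileGraph
  return root_graph->optimized ? 0 : 1;
}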
mindspore/ccsrc/session/ascend_session.cc
@@ -29,6 +29,7 @@
 #include "device/ascend/ascend_kernel_runtime.h"
 #include "device/ascend/ascend_device_address.h"
 #include "pre_activate/ascend/ascend_backend_optimization.h"
+#include "pre_activate/common/common_backend_optimization.h"
 #include "device/kernel_adjust.h"
 #include "device/ascend/ascend_stream_assign.h"
 #include "device/ascend/ascend_label_assign.h"
@@ -283,36 +284,38 @@ GraphId AscendSession::CompileGraph(const AnfNodePtrList &lst, const AnfNodePtrL
 GraphId AscendSession::CompileGraph(NotNull<FuncGraphPtr> func_graph) {
   MS_LOG(INFO) << "start";
-  auto graph = ConstructKernelGraph(func_graph);
+  std::vector<KernelGraphPtr> all_graphs;
+  auto root_graph = ConstructKernelGraph(func_graph, &all_graphs);
+  BackendOptimization(all_graphs);
   // split switch
-  SplitGraphs(NOT_NULL(graph));
+  SplitGraphs(NOT_NULL(root_graph));
   // insert goto labels and label_sets
-  LinkChildGraphs(NOT_NULL(graph));
+  LinkChildGraphs(NOT_NULL(root_graph));
   // resource initialize
   InitRuntimeResource();
   // assign label
-  AssignLabel(NOT_NULL(graph));
-  // recurse compile child graph
+  AssignLabel(NOT_NULL(root_graph));
+  // recurse compile child root_graph
   std::set<KernelGraphPtr> memo;
-  RecurseCompileGraph(NOT_NULL(graph), NOT_NULL(&memo));
-  // root graph valiate,include genearte execute order and so on
-  RootGraphExecutorValidate(NOT_NULL(graph));
+  RecurseCompileGraph(NOT_NULL(root_graph), NOT_NULL(&memo));
+  // root root_graph valiate,include genearte execute order and so on
+  RootGraphExecutorValidate(NOT_NULL(root_graph));
   // adjust kernel
-  AdjustKernel(graph);
+  AdjustKernel(root_graph);
   // assign stream
-  AssignStream(graph);
+  AssignStream(root_graph);
   // insert profiling point
-  device::KernelAdjust::GetInstance().Profiling(NOT_NULL(graph.get()));
+  device::KernelAdjust::GetInstance().Profiling(NOT_NULL(root_graph.get()));
   // build kernel
-  BuildKernel(graph);
+  BuildKernel(root_graph);
   // alloc mem
-  MemoryAlloc(graph.get());
+  MemoryAlloc(root_graph.get());
   // task generate
-  GenerateTaskInfo(graph);
+  GenerateTaskInfo(root_graph);
   // load task into device
-  LoadTask(graph);
-  // return the graph id to backend
-  auto graph_id = graph->graph_id();
+  LoadTask(root_graph);
+  // return the root_graph id to backend
+  auto graph_id = root_graph->graph_id();
   return graph_id;
 }
@@ -1569,6 +1572,14 @@ std::vector<AnfNodePtr> AscendSession::ConstructSplitedGraph(const KernelGraphPt
   return call_node_inputs;
 }
+
+void AscendSession::BackendOptimization(const std::vector<KernelGraphPtr> &all_graphs) {
+  MS_LOG(INFO) << "Start BackendCommonOptimization";
+  for (auto &graph : all_graphs) {
+    opt::BackendCommonOptimization(graph);
+  }
+  MS_LOG(INFO) << "End.";
+}
 void AscendSession::SplitGraphs(NotNull<KernelGraphPtr> root_graph) {
   std::set<KernelGraphPtr> memo;
   // if root graph output is a call node ,the root graph is condition graph of 'if' sentence
mindspore/ccsrc/session/ascend_session.h
@@ -102,6 +102,7 @@ class AscendSession : public SessionBasic {
   void SplitGraph(NotNull<KernelGraphPtr> graph, const std::set<PrimitivePtr> &cut_prims);
   // split graphs with recurse from root graph
   void SplitGraphs(NotNull<KernelGraphPtr> root_graph);
+  void BackendOptimization(const std::vector<KernelGraphPtr> &all_graphs);
   void LinkChildGraphs(NotNull<KernelGraphPtr> graph);
   void RootGraphExecutorValidate(NotNull<KernelGraphPtr> graph);
   std::vector<AnfNodePtr> ConstructSplitedGraph(const KernelGraphPtr &new_kernel_graph,
mindspore/ccsrc/session/session_basic.cc
@@ -579,8 +579,10 @@ KernelGraphPtr SessionBasic::ConstructKernelGraph(const AnfNodePtrList &lst, con
   return graph;
 }
-std::shared_ptr<KernelGraph> SessionBasic::ConstructKernelGraph(const FuncGraphPtr &func_graph) {
+std::shared_ptr<KernelGraph> SessionBasic::ConstructKernelGraph(const FuncGraphPtr &func_graph,
+                                                                std::vector<KernelGraphPtr> *all_out_graph) {
   MS_EXCEPTION_IF_NULL(func_graph);
+  MS_EXCEPTION_IF_NULL(all_out_graph);
   auto node_list = TopoSort(func_graph->get_return());
   auto graph = NewKernelGraph();
   front_backend_graph_map_[func_graph] = graph;
@@ -607,7 +609,7 @@ std::shared_ptr<KernelGraph> SessionBasic::ConstructKernelGraph(const FuncGraphP
     if (front_backend_graph_map_.find(child_graph) != front_backend_graph_map_.end()) {
       is_trace_back = true;
     } else {
-      (void)ConstructKernelGraph(child_graph);
+      (void)ConstructKernelGraph(child_graph, all_out_graph);
     }
     (void)CreateValueNodeKernelGraph(node, graph.get());
   }
@@ -634,7 +636,7 @@ std::shared_ptr<KernelGraph> SessionBasic::ConstructKernelGraph(const FuncGraphP
   if (ExistSummaryNode(graph.get())) {
     graph->set_summary_node_exist(true);
   }
-  opt::BackendCommonOptimization(graph);
+  all_out_graph->push_back(graph);
   return graph;
 }
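A side note on the session_basic.cc change: judging from this diff, the recursive ConstructKernelGraph(child_graph, all_out_graph) calls sit in the middle of the function while all_out_graph->push_back(graph) sits just before the return, so child graphs are appended before their parents and the root graph ends up last in all_graphs; BackendOptimization then visits them in that order. The sketch below illustrates that collection order with hypothetical stand-in types (the FuncGraph and KernelGraph structs here are simplified placeholders, not the real MindSpore classes).

#include <iostream>
#include <memory>
#include <string>
#include <vector>

// Hypothetical stand-in types, not MindSpore code.
struct FuncGraph {
  std::string name;
  std::vector<std::shared_ptr<FuncGraph>> children;
};
struct KernelGraph {
  std::string name;
};
using FuncGraphPtr = std::shared_ptr<FuncGraph>;
using KernelGraphPtr = std::shared_ptr<KernelGraph>;

// Collection pattern: recurse into child graphs first, append the finished
// graph last, so the output vector holds children before their parents.
KernelGraphPtr ConstructKernelGraph(const FuncGraphPtr &func_graph,
                                    std::vector<KernelGraphPtr> *all_out_graph) {
  for (const auto &child : func_graph->children) {
    (void)ConstructKernelGraph(child, all_out_graph);
  }
  auto graph = std::make_shared<KernelGraph>();
  graph->name = func_graph->name;
  all_out_graph->push_back(graph);
  return graph;
}

int main() {
  auto child = std::make_shared<FuncGraph>();
  child->name = "child";
  auto root = std::make_shared<FuncGraph>();
  root->name = "root";
  root->children.push_back(child);

  std::vector<KernelGraphPtr> all_graphs;
  (void)ConstructKernelGraph(root, &all_graphs);
  for (const auto &graph : all_graphs) {
    std::cout << graph->name << "\n";  // prints "child" then "root"
  }
  return 0;
}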
mindspore/ccsrc/session/session_basic.h
@@ -75,7 +75,8 @@ class SessionBasic {
   virtual void RegisterSummaryCallBackFunc(const CallBackFunc &callback);
   std::shared_ptr<KernelGraph> ConstructKernelGraph(const AnfNodePtrList &lst, const AnfNodePtrList &outputs);
-  std::shared_ptr<KernelGraph> ConstructKernelGraph(const FuncGraphPtr &func_graph);
+  std::shared_ptr<KernelGraph> ConstructKernelGraph(const FuncGraphPtr &func_graph,
+                                                    std::vector<KernelGraphPtr> *all_out_graph);
   CNodePtr CreateNewCNode(const CNodePtr &cnode, bool valid_input, KernelGraph *graph, bool *from_other_graph,
                           std::unordered_map<AnfNodePtr, AnfNodePtr> *other_graph_cnode);