magicwindyyd / mindspore (forked from MindSpore / mindspore)

Commit 05fde3d2
Authored May 11, 2020 by yangzhenzhang
add slice shape for param info
Parent: 95d4665d
Showing 3 changed files with 12 additions and 11 deletions.
mindspore/ccsrc/parallel/graph_util/get_parallel_info.cc  +2 -1
mindspore/parallel/_tensor.py  +8 -8
tests/ut/python/parallel/test_get_parameter_layout.py  +2 -2
mindspore/ccsrc/parallel/graph_util/get_parallel_info.cc
@@ -42,7 +42,8 @@ py::dict GetParameterLayout(const FuncGraphPtr &graph) {
     } else {
       auto device_arrangement = tensor_layout->device_arrangement().array();
       auto tensor_map = tensor_layout->tensor_map().array();
-      std::pair<std::vector<int32_t>, std::vector<int32_t>> layout(device_arrangement, tensor_map);
+      auto slice_shape = tensor_layout->slice_shape().array();
+      std::vector<std::vector<int32_t>> layout = {device_arrangement, tensor_map, slice_shape};
       dict[py::str(name)] = layout;
       MS_LOG(INFO) << "GetParameterLayout name = " << name << ", layout " << tensor_layout->ToString();
     }
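The change above extends the per-parameter layout entry exposed to Python from a (device_arrangement, tensor_map) pair to a three-element list that also carries the slice shape. A minimal sketch of what one entry of net.parameter_layout_dict looks like after this commit; the concrete values mirror the updated unit test further down and are illustrative only, not produced by running any code here:

# Illustrative sketch of one parameter_layout_dict entry after this commit.
layout = [
    [2, 4],    # device_arrangement: the device matrix
    [1, -1],   # tensor_map: which device dimension splits each tensor dimension (-1 = not split)
    [16, 32],  # slice_shape: shape of the local slice on each device (new in this commit)
]
device_arrangement, tensor_map, slice_shape = layout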
mindspore/parallel/_tensor.py
@@ -203,19 +203,19 @@ def _load_tensor_by_layout(tensor, layout):
     Args:
         tensor (Tensor): The input tensor.
-        layout (tuple): The tensor layout in auto parallel.
+        layout (list): The tensor layout in auto parallel.
 
     Returns:
-        Tensor, the sliced tensor..
+        Tensor, the sliced tensor.
 
     Raises:
-        TypeError: If layout is not tuple.
-        ValueError: If the length of layout is not 2.
+        TypeError: If layout is not list.
+        ValueError: If the length of layout is not 3.
     """
-    if not isinstance(layout, tuple):
-        raise TypeError("layout should be tuple! layout is {}".format(layout))
-    if len(layout) != 2:
-        raise ValueError("The length of layout must be 2! layout is {}".format(layout))
+    if not isinstance(layout, list):
+        raise TypeError("The layout should be list! layout is {}".format(layout))
+    if len(layout) != 3:
+        raise ValueError("The length of layout must be 3! layout is {}".format(layout))
     dev_mat = layout[0]
     tensor_map = layout[1]
     if tensor.size() == 1:
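The loader now expects the three-element list instead of the old two-element tuple. Below is a self-contained sketch of just the validation and unpacking seen in the hunk above; the helper name _check_layout is hypothetical, and the actual slicing that _load_tensor_by_layout performs afterwards is outside this hunk and not reproduced:

def _check_layout(layout):
    """Validate a layout the way _load_tensor_by_layout now expects it (sketch only)."""
    if not isinstance(layout, list):
        raise TypeError("The layout should be list! layout is {}".format(layout))
    if len(layout) != 3:
        raise ValueError("The length of layout must be 3! layout is {}".format(layout))
    dev_mat, tensor_map, slice_shape = layout
    return dev_mat, tensor_map, slice_shape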
tests/ut/python/parallel/test_get_parameter_layout.py
@@ -48,8 +48,8 @@ def test_get_parameter_layout():
     net.set_auto_parallel()
     exe = me._executor
     exe.compile(net, x, auto_parallel_mode=True)
-    x_layout = ([2, 4], [1, -1])  # device_arrangement = [2, 4], tensor_map = [1, -1]
-    weight_layout = ([2, 4], [0, -1])  # device_arrangement = [2, 4], tensor_map = [0, -1]
+    x_layout = [[2, 4], [1, -1], [16, 32]]  # device_arrangement = [2, 4], tensor_map = [1, -1]
+    weight_layout = [[2, 4], [0, -1], [16, 32]]  # device_arrangement = [2, 4], tensor_map = [0, -1]
     expect_dict = {'x': x_layout, 'w1': weight_layout}
     # to be resovled: static local variable count_p is used in step_parallel.cc, it needs to be reset between each ut
     assert (net.parameter_layout_dict == expect_dict)
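The test now expects a slice shape of [16, 32] for both parameters. Assuming the usual MindSpore convention that a tensor_map value k refers to the device-matrix dimension counted from the right (with -1 meaning "not split"), the slice shape follows from dividing each tensor dimension by the size of the device dimension it is mapped to. The helper below is hypothetical (not a MindSpore API) and only illustrates that arithmetic; the full parameter shapes used are examples, not the ones defined in the test:

def slice_shape(full_shape, dev_mat, tensor_map):
    # For each tensor dimension, divide by the device dimension it is mapped to;
    # tensor_map indexes dev_mat from the right, and -1 leaves the dimension whole.
    result = []
    for dim, m in zip(full_shape, tensor_map):
        divisor = 1 if m == -1 else dev_mat[len(dev_mat) - 1 - m]
        result.append(dim // divisor)
    return result

print(slice_shape([32, 32], [2, 4], [1, -1]))  # [16, 32]
print(slice_shape([64, 32], [2, 4], [0, -1]))  # [16, 32]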