Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
magicwindyyd
mindspore
提交
24d1991c
M
mindspore
项目概览
magicwindyyd
/
mindspore
与 Fork 源项目一致
Fork自
MindSpore / mindspore
通知
1
Star
1
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
M
mindspore
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
24d1991c
编写于
3月 25, 2020
作者:
B
buxue
提交者:
高东海
4月 08, 2020
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
Fix structured-output bug when the outputs contain a Depend whose first input is a constant
上级
a6f6a376
变更
2
隐藏空白更改
内联
并排
Showing
2 changed files
with
84 additions
and
29 deletions
+84
-29
mindspore/ccsrc/pipeline/pipeline.cc
mindspore/ccsrc/pipeline/pipeline.cc
+57
-24
tests/ut/python/nn/test_structure_output.py
tests/ut/python/nn/test_structure_output.py
+27
-5
未找到文件。
mindspore/ccsrc/pipeline/pipeline.cc
浏览文件 @
24d1991c
...
...
@@ -725,23 +725,15 @@ py::object ExecutorPy::Run(const py::tuple& args, const py::object& phase) {
return
BaseRefToPyData
(
value
);
}
py
::
object
StructureOutput
(
const
AbstractBasePtr
&
output
,
const
py
::
tuple
&
data
,
size_t
*
count
)
{
MS_EXCEPTION_IF_NULL
(
output
);
py
::
object
ExtractGeneralCnodeRet
(
const
AbstractBasePtr
&
cnode_data
,
const
py
::
tuple
&
data
,
size_t
*
count
)
{
MS_EXCEPTION_IF_NULL
(
cnode_data
);
if
(
*
count
>=
data
.
size
())
{
MS_LOG
(
EXCEPTION
)
<<
"The number of elements in the outputs : "
<<
data
.
size
()
<<
" less than the number of elements required. "
;
}
if
(
!
output
->
isa
<
AbstractTuple
>
())
{
ValuePtr
value
=
output
->
BuildValue
();
if
(
value
!=
kAnyValue
)
{
return
ValuePtrToPyData
(
value
);
}
if
(
!
output
->
isa
<
AbstractTensor
>
())
{
MS_LOG
(
EXCEPTION
)
<<
"Output can only be tensor except for constants, but got "
<<
output
->
BuildValue
()
->
ToString
()
<<
"."
;
}
if
(
*
count
>=
data
.
size
())
{
MS_LOG
(
EXCEPTION
)
<<
"The number of elements in the outputs : "
<<
data
.
size
()
<<
" less than the number of elements required. "
;
}
auto
shape
=
output
->
BuildShape
();
if
(
cnode_data
->
isa
<
AbstractTensor
>
())
{
BaseShapePtr
shape
=
cnode_data
->
BuildShape
();
auto
shape_act
=
shape
->
cast
<
abstract
::
ShapePtr
>
()
->
shape
();
Tensor
tensor_exp
=
py
::
cast
<
Tensor
>
(
data
[
*
count
]);
if
(
shape_act
!=
tensor_exp
.
shape
())
{
...
...
@@ -751,16 +743,58 @@ py::object StructureOutput(const AbstractBasePtr& output, const py::tuple& data,
return
data
[(
*
count
)
++
];
}
auto
tuple_output
=
output
->
cast
<
AbstractTuplePtr
>
();
AbstractBasePtrList
elements
=
tuple_output
->
elements
();
size_t
size
=
elements
.
size
();
if
(
!
cnode_data
->
isa
<
AbstractTuple
>
())
{
MS_LOG
(
EXCEPTION
)
<<
"The output of operator in the final anf graph could "
<<
"only be a tensor or a tuple of tensor, but got "
<<
cnode_data
->
BuildValue
()
->
ToString
()
<<
"."
;
}
auto
data_tp
=
cnode_data
->
cast
<
AbstractTuplePtr
>
();
auto
elements
=
data_tp
->
elements
();
size_t
size
=
data_tp
->
size
();
py
::
tuple
tp
=
py
::
tuple
(
size
);
for
(
size_t
i
=
0
;
i
<
size
;
i
++
)
{
tp
[
i
]
=
StructureOutpu
t
(
elements
[
i
],
data
,
count
);
tp
[
i
]
=
ExtractGeneralCnodeRe
t
(
elements
[
i
],
data
,
count
);
}
return
std
::
move
(
tp
);
}
// Convert the graph's output node into the Python object structure expected by
// the caller.  Constants come straight from their ValueNode; Parameters and
// general CNode results are pulled from `data` in order, tracked by *count;
// MakeTuple fans out recursively and Depend is transparent (only its first
// input contributes to the result).
py::object StructureOutput(const AnfNodePtr& output_node, const py::tuple& data, size_t* count) {
  MS_EXCEPTION_IF_NULL(output_node);

  // A constant output never consumes an element of `data`.
  if (output_node->isa<ValueNode>()) {
    return ValuePtrToPyData(GetValueNode(output_node));
  }

  // Everything below consumes at least one runtime output; make sure one is left.
  if (*count >= data.size()) {
    MS_LOG(EXCEPTION) << "The number of elements in the outputs : " << data.size()
                      << " less than the number of elements required. ";
  }

  if (output_node->isa<Parameter>()) {
    return data[(*count)++];
  }

  auto cnode = output_node->cast<CNodePtr>();
  if (cnode == nullptr) {
    MS_LOG(EXCEPTION) << "The final anf graph could only have constant, parameter, and operator, but got "
                      << output_node->ToString();
  }

  if (cnode->IsApply(prim::kPrimMakeTuple)) {
    // inputs()[0] is the MakeTuple primitive itself; real elements start at 1.
    auto inputs = cnode->inputs();
    size_t input_size = inputs.size();
    py::tuple converted = py::tuple(input_size - 1);
    for (size_t idx = 1; idx < input_size; idx++) {
      converted[idx - 1] = StructureOutput(inputs[idx], data, count);
    }
    return std::move(converted);
  }

  if (cnode->IsApply(prim::kPrimDepend)) {
    // Depend only forwards its first input; the dependency edge carries no value.
    return StructureOutput(cnode->input(1), data, count);
  }

  // Any other operator: rebuild the result from its abstract value.
  return ExtractGeneralCnodeRet(cnode->abstract(), data, count);
}
std
::
shared_ptr
<
py
::
object
>
DoExecGraph
(
const
FuncGraphPtr
&
graph
,
const
std
::
vector
<
MeTensorPtr
>&
inputs
,
const
std
::
string
&
phase
)
{
std
::
vector
<
GeTensorPtr
>
ge_tensors
=
TransformUtil
::
ConvertInputTensors
(
inputs
,
kOpFormat_NCHW
);
...
...
@@ -806,11 +840,10 @@ std::shared_ptr<py::object> DoExecGraph(const FuncGraphPtr& graph, const std::ve
std
::
shared_ptr
<
py
::
object
>
ret
=
nullptr
;
#ifdef ENABLE_GE
AnfNodePtr
root
=
graph
->
get_return
();
MS_EXCEPTION_IF_NULL
(
root
);
AbstractBasePtr
output
=
root
->
abstract
();
AnfNodePtr
output_node
=
graph
->
get_return
()
->
input
(
1
);
MS_EXCEPTION_IF_NULL
(
output_node
);
size_t
count
=
0
;
py
::
object
oj
=
StructureOutput
(
output
,
outputs
,
&
count
);
py
::
object
oj
=
StructureOutput
(
output
_node
,
outputs
,
&
count
);
ret
=
std
::
make_shared
<
py
::
object
>
(
oj
);
#else
if
(
outputs
.
size
()
==
1
)
{
...
...
tests/ut/python/nn/test_structure_output.py
浏览文件 @
24d1991c
...
...
@@ -236,7 +236,7 @@ def test_soft():
def
__init__
(
self
):
super
(
SoftmaxCrossEntropyWithLogitsNet
,
self
).
__init__
()
self
.
soft
=
P
.
SoftmaxCrossEntropyWithLogits
()
self
.
value
=
(
Tensor
(
np
.
zeros
((
2
,
)).
astype
(
np
.
float32
)),
Tensor
(
np
.
ones
((
2
,
)).
astype
(
np
.
float32
)))
self
.
value
=
(
Tensor
(
np
.
zeros
((
2
,
2
)).
astype
(
np
.
float32
)),
Tensor
(
np
.
ones
((
2
,
2
)).
astype
(
np
.
float32
)))
def
construct
(
self
,
x
,
y
,
z
):
xx
=
x
+
y
...
...
@@ -246,8 +246,30 @@ def test_soft():
ret
=
(
ret
,
self
.
value
)
return
ret
input1
=
Tensor
(
np
.
zeros
((
2
,)).
astype
(
np
.
float32
))
input2
=
Tensor
(
np
.
ones
((
2
,)).
astype
(
np
.
float32
))
input3
=
Tensor
((
np
.
ones
((
2
,
))
+
np
.
ones
((
2
,
))).
astype
(
np
.
float32
))
input1
=
Tensor
(
np
.
zeros
((
2
,
2
)).
astype
(
np
.
float32
))
input2
=
Tensor
(
np
.
ones
((
2
,
2
)).
astype
(
np
.
float32
))
input3
=
Tensor
((
np
.
ones
((
2
,
2
))
+
np
.
ones
((
2
,
2
))).
astype
(
np
.
float32
))
net
=
SoftmaxCrossEntropyWithLogitsNet
()
print
(
net
(
input1
,
input2
,
input3
))
net
(
input1
,
input2
,
input3
)
def test_const_depend():
    """Exercise structured outputs when a Depend's first input is a constant tuple."""

    class ConstDepend(Cell):
        def __init__(self):
            super(ConstDepend, self).__init__()
            # Constant tuple that becomes the first input of the Depend node.
            self.value = (Tensor(np.zeros((2, 3)).astype(np.float32)),
                          Tensor(np.ones((2, 3)).astype(np.float32)))
            self.soft = P.SoftmaxCrossEntropyWithLogits()
            self.depend = depend

        def construct(self, x, y, z):
            ret = x + y
            ret = ret * z
            # The constant tuple depends on the computed value; only the
            # constant is forwarded as the data output.
            ret = self.depend(self.value, ret)
            ret = (ret, self.soft(x, y))
            return ret

    first = Tensor(np.zeros((2, 2)).astype(np.float32))
    second = Tensor(np.ones((2, 2)).astype(np.float32))
    third = Tensor((np.ones((2, 2)) + np.ones((2, 2))).astype(np.float32))
    net = ConstDepend()
    net(first, second, third)
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录