magicwindyyd / mindspore (forked from MindSpore / mindspore)

Commit 50cb2ad9
Authored Apr 09, 2020 by mindspore-ci-bot; committed via Gitee, Apr 09, 2020

!192 Modify api name Stack -> Pack, Unstack -> Unpack in ME.
Merge pull request !192 from liuxiao/temp

Parents: 002fd9a4, 0f0f83e2
Changes: 5 changed files with 64 additions and 72 deletions (+64 -72)

mindspore/ccsrc/transform/convert.cc     +4  -4
mindspore/ops/_grad/grad_array_ops.py    +10 -10
mindspore/ops/operations/__init__.py     +3  -3
mindspore/ops/operations/array_ops.py    +31 -39
tests/ut/python/ops/test_ops.py          +16 -16
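For callers, the rename is a drop-in substitution: only the operator class names change, not the call signatures. A minimal usage sketch of the old and new spellings (hypothetical user code based on the docstring examples in this commit, not part of the diff):

import numpy as np
from mindspore import Tensor
from mindspore.ops import operations as P

data1 = Tensor(np.array([0, 1]).astype(np.float32))
data2 = Tensor(np.array([2, 3]).astype(np.float32))

# Before this commit: op = P.Stack(); parts_op = P.Unstack(axis=0)
pack = P.Pack()                 # packs a tuple/list of tensors along a new axis
output = pack([data1, data2])   # [[0, 1], [2, 3]]

unpack = P.Unpack(axis=0)       # inverse operation: splits along axis 0
parts = unpack(output)          # ([0, 1], [2, 3])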
mindspore/ccsrc/transform/convert.cc

@@ -148,8 +148,8 @@ const char kNameSlice[] = "Slice";
 const char kNameAddN[] = "AddN";
 const char kNameLess[] = "Less";
 const char kNameGreater[] = "Greater";
-const char kNameStack[] = "Stack";
-const char kNameUnstack[] = "Unstack";
+const char kNamePack[] = "Pack";
+const char kNameUnpack[] = "Unpack";
 const char kNameMerge[] = "Merge";
 const char kNameGeSwitch[] = "GeSwitch";
@@ -202,8 +202,8 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_ma
     {string(kNameAvgPool), ADPT_DESC(AvgPool)},
     {string(kNameMaxPoolWithArgmax), ADPT_DESC(MaxPoolWithArgmax)},
     {string(kNameTopK), ADPT_DESC(TopKV2)},
-    {string(kNameStack), ADPT_DESC(Pack)},
-    {string(kNameUnstack), ADPT_DESC(Unpack)},
+    {string(kNamePack), ADPT_DESC(Pack)},
+    {string(kNameUnpack), ADPT_DESC(Unpack)},
     {string(kNameSplitD), ADPT_DESC(SplitD)},
     {string(kNameAllReduce), ADPT_DESC(HcomAllReduce)},
     {string(kNameBroadcast), ADPT_DESC(HcomBroadcast)},
mindspore/ops/_grad/grad_array_ops.py

@@ -266,26 +266,26 @@ def get_bprop_gather_v2(self):
     return bprop
 
 
-@bprop_getters.register(P.Stack)
-def get_bprop_stack(self):
-    """Generate bprop for Stack"""
+@bprop_getters.register(P.Pack)
+def get_bprop_pack(self):
+    """Generate bprop for Pack"""
     axis = self.axis
 
     def bprop(x, out, dout):
-        stack_grad = P.Unstack(axis)
-        out = stack_grad(dout)
+        pack_grad = P.Unpack(axis)
+        out = pack_grad(dout)
         return (out,)
 
     return bprop
 
 
-@bprop_getters.register(P.Unstack)
-def get_bprop_unstack(self):
-    """Generate bprop for Unstack"""
+@bprop_getters.register(P.Unpack)
+def get_bprop_unpack(self):
+    """Generate bprop for Unpack"""
     axis = self.axis
 
     def bprop(x, out, dout):
-        unstack_grad = P.Stack(axis)
-        out = unstack_grad(dout)
+        unpack_grad = P.Pack(axis)
+        out = unpack_grad(dout)
         return (out,)
 
     return bprop
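The two gradient registrations above are mirror images: the backward pass of Pack unpacks the upstream gradient along the same axis, and the backward pass of Unpack packs it again. A quick numpy illustration of that duality (illustration only, with numpy standing in for the MindSpore ops):

import numpy as np

axis = 1
xs = [np.random.rand(2, 3).astype(np.float32) for _ in range(4)]

packed = np.stack(xs, axis=axis)    # forward of Pack: shape (2, 4, 3)
dout = np.ones_like(packed)         # upstream gradient w.r.t. the packed output

# bprop of Pack: unpack dout along the same axis, one gradient per input
dxs = [np.squeeze(g, axis=axis) for g in np.split(dout, packed.shape[axis], axis=axis)]
assert all(dx.shape == x.shape for dx, x in zip(dxs, xs))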
mindspore/ops/operations/__init__.py

@@ -19,7 +19,7 @@ Primitive operator classes.
 
 A collection of operators to build nerual networks or computing functions.
 """
 
-from .array_ops import (Argmax, Argmin, Cast, ConcatOffset, Concat, Stack, Unstack,
+from .array_ops import (Argmax, Argmin, Cast, ConcatOffset, Concat, Pack, Unpack,
                         Diag, DiagPart, DType, ExpandDims, Eye,
                         Fill, GatherNd, GatherV2, InvertPermutation,
                         IsInstance, IsSubClass, ArgMaxWithValue, OnesLike, ZerosLike,
@@ -112,8 +112,8 @@ __all__ = [
     'OneHot',
     'GatherV2',
     'Concat',
-    'Stack',
-    'Unstack',
+    'Pack',
+    'Unpack',
     'Tile',
     'BiasAdd',
     'Gelu',
mindspore/ops/operations/array_ops.py

@@ -1350,8 +1350,8 @@ class Concat(PrimitiveWithInfer):
         return out
 
 
-def _get_stack_shape(x_shape, x_type, axis):
-    """for satck output shape"""
+def _get_pack_shape(x_shape, x_type, axis):
+    """for pack output shape"""
     validator.check_type("shape", x_shape, [tuple])
     validator.check_integer("len of input_x shape", len(x_shape), 0, Rel.GT)
     validator.check_subclass("shape0", x_type[0], mstype.tensor)
@@ -1368,43 +1368,40 @@ def _get_stack_shape(x_shape, x_type, axis):
         validator.check('x_type[%d]' % i, x_type[i], 'base', x_type[0])
         for j in range(rank_base):
             if v[j] != x_shape[0][j]:
-                raise ValueError("Stack evaluator element %d shape in input can not stack with first element" % i)
+                raise ValueError("Pack evaluator element %d shape in input can not pack with first element" % i)
     out_shape.insert(axis, N)
     return out_shape
 
 
-class Stack(PrimitiveWithInfer):
+class Pack(PrimitiveWithInfer):
     r"""
-    Stacks a list of rank-`R` tensors into one rank-`(R+1)` tensor.
-
-    Packs the list of tensors in `input_x` into a tensor with rank one higher than
-    each tensor in `input_x`, by packing them along the `axis` dimension.
-    Given a list of length `N` of tensors of shape `(A, B, C)`;
-    If `axis == 0` then the `output` tensor will have the shape `(N, A, B, C)`.
-    If `axis == 1` then the `output` tensor will have the shape `(A, N, B, C)`. Etc.
+    Packs a list of tensors in specified axis.
+
+    Packs the list of input tensors with the same rank `R`, output is a tensor of rank `(R+1)`.
+
+    Given input tensors of shape :math:`(x_1, x_2, ..., x_R)`. Set the number of input tensors as `N`.
+    If :math:`0 \le axis`, the output tensor shape is :math:`(x_1, x_2, ..., x_{axis}, N, x_{axis+1}, ..., x_R)`.
 
     Args:
-        axis (int): The axis to stack along. Negative values wrap around,
-            so the valid range is [-(R+1), R+1). Default: 0.
+        axis (int): Dimension along which to pack. Default: 0.
+            Negative values wrap around. The range is [-(R+1), R+1).
 
     Inputs:
         - **input_x** (Union[tuple, list]) - A Tuple or list of Tensor objects with the same shape and type.
 
     Outputs:
-        Tensor. A stacked Tensor with the same type as values.
+        Tensor. A packed Tensor with the same type as `input_x`.
 
     Examples:
         >>> data1 = Tensor(np.array([0, 1]).astype(np.float32))
         >>> data2 = Tensor(np.array([2, 3]).astype(np.float32))
-        >>> op = P.Stack()
-        >>> output = op([data1, data2])
+        >>> pack = P.Pack()
+        >>> output = pack([data1, data2])
         [[0, 1], [2, 3]]
     """
 
     @prim_attr_register
     def __init__(self, axis=0):
-        """init Stack"""
+        """init Pack"""
         self.__setattr_flag__ = True
         validator.check_type("axis", axis, [int])
         self.axis = axis
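The shape rule in the new Pack docstring (N inputs of shape (x_1, ..., x_R) packed at `axis` produce (x_1, ..., x_{axis}, N, x_{axis+1}, ..., x_R)) is the usual stacking semantics. A short numpy sketch of the rule (numpy used as an analogue, not MindSpore code):

import numpy as np

N, axis = 4, 1
xs = [np.zeros((5, 6, 7), np.float32) for _ in range(N)]

out = np.stack(xs, axis=axis)     # analogue of P.Pack(axis=1)([x_0, ..., x_3])
assert out.shape == (5, N, 6, 7)  # N is inserted at position `axis`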
@@ -1413,38 +1410,33 @@ class Stack(PrimitiveWithInfer):
         x_shape = value['shape']
         x_type = value['dtype']
         self.add_prim_attr('num', len(x_shape))
-        all_shape = _get_stack_shape(x_shape, x_type, self.axis)
+        all_shape = _get_pack_shape(x_shape, x_type, self.axis)
         out = {'shape': all_shape,
                'dtype': x_type[0],
                'value': None}
         return out
 
 
-class Unstack(PrimitiveWithInfer):
+class Unpack(PrimitiveWithInfer):
     r"""
-    Unpacks the given dimension of a rank-`R` tensor into rank-`(R-1)` tensors.
-
-    Unpacks num tensors from value by chipping it along the axis dimension.
-    If num is not specified (the default), it is inferred from value's shape.
-    If value.shape[axis] is not known, ValueError is raised.
-    For example, given a tensor of shape (A, B, C, D);
-    If axis == 0 then the i'th tensor in output is the slice value[i, :, :, :] and
-    each tensor in output will have shape (B, C, D). (Note that the dimension unpacked along is gone, unlike split).
-    If axis == 1 then the i'th tensor in output is the slice value[:, i, :, :] and
-    each tensor in output will have shape (A, C, D). Etc.
-    This is the opposite of stack.
+    Unpacks tensor in specified axis.
+
+    Unpacks a tensor of rank `R` along axis dimension, output tensors will have rank `(R-1)`.
+
+    Given a tensor of shape :math:`(x_1, x_2, ..., x_R)`. If :math:`0 \le axis`,
+    the shape of tensor in output is :math:`(x_1, x_2, ..., x_{axis}, x_{axis+2}, ..., x_R)`.
+
+    This is the opposite of pack.
 
     Args:
-        axis (int): The axis to unstack along. Defaults to the first dimension.
-            Negative values wrap around, so the valid range is [-R, R).
+        axis (int): Dimension along which to pack. Default: 0.
+            Negative values wrap around. The range is [-R, R).
         num (int): The number of tensors to be unpacked to. Default : "None".
             If `num` is not specified, it is inferred from the shape of `input_x`.
 
     Inputs:
         - **input_x** (Tensor) - The shape is :math:`(x_1, x_2, ..., x_R)`.
-          A rank R > 0 Tensor to be unstacked.
+          A rank R > 0 Tensor to be unpacked.
 
     Outputs:
         A tuple of Tensors, the shape of each objects is same.
@@ -1454,15 +1446,15 @@ class Unstack(PrimitiveWithInfer):
             or if len(input_x.shape[axis]) not equal to num.
 
     Examples:
-        >>> unstack = P.Unstack()
-        >>> x = Tensor(np.array([[1, 1, 1, 1], [2, 2, 2, 2]]))
-        >>> output = unstack(x)
+        >>> unpack = P.Unpack()
+        >>> input_x = Tensor(np.array([[1, 1, 1, 1], [2, 2, 2, 2]]))
+        >>> output = unpack(input_x)
         ([1, 1, 1, 1], [2, 2, 2, 2])
     """
 
     @prim_attr_register
     def __init__(self, axis=0):
-        """init Unstack"""
+        """init Unpack"""
         self.__setattr_flag__ = True
         validator.check_type("axis", axis, [int])
         self.axis = axis
@@ -1479,7 +1471,7 @@ class Unstack(PrimitiveWithInfer):
         validator.check_integer("output_num", output_num, 0, Rel.GT)
         self.add_prim_attr('num', output_num)
         output_valid_check = x_shape[self.axis] - output_num
-        validator.check_integer("the dimension which to unstack divides output_num", output_valid_check, 0, Rel.EQ)
+        validator.check_integer("The dimension which to unpack divides output_num", output_valid_check, 0, Rel.EQ)
         out_shapes = []
         out_dtypes = []
         out_shape = x_shape[:self.axis] + x_shape[self.axis + 1:]
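The last hunk keeps the Unpack shape check intact: the number of outputs must equal the size of the unpacked dimension, and each output drops that dimension. A sketch of the inference logic for non-negative `axis` (my paraphrase of the check above, not the actual implementation):

def unpack_infer_shapes(x_shape, axis, num=None):
    # output_num defaults to the size of the unpacked dimension
    output_num = x_shape[axis] if num is None else num
    # mirrors: validator.check_integer(..., x_shape[self.axis] - output_num, 0, Rel.EQ)
    assert x_shape[axis] - output_num == 0, "num must equal the unpacked dimension"
    # each output keeps every dimension except `axis`
    out_shape = x_shape[:axis] + x_shape[axis + 1:]
    return [out_shape] * output_num

print(unpack_infer_shapes([2, 4], axis=0))  # [[4], [4]]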
tests/ut/python/ops/test_ops.py

@@ -80,9 +80,9 @@ class NetForConcat1(nn.Cell):
         return self.concat((x1, x2))
 
 
-class NetForStackInput(nn.Cell):
+class NetForPackInput(nn.Cell):
     def __init__(self, op):
-        super(NetForStackInput, self).__init__()
+        super(NetForPackInput, self).__init__()
         self.op = op
         self.mul = P.Mul()
@@ -93,9 +93,9 @@ class NetForStackInput(nn.Cell):
         return self.op(t)
 
 
-class NetForUnstackInput(nn.Cell):
+class NetForUnpackInput(nn.Cell):
     def __init__(self, op):
-        super(NetForUnstackInput, self).__init__()
+        super(NetForUnpackInput, self).__init__()
         self.op = op
         self.mul = P.Mul()
@@ -991,33 +991,33 @@ test_case_array_ops = [
                             Tensor(np.array([1], np.float32)),
                             Tensor(np.array([1], np.float32)))],
         'desc_bprop': [[3,]]}),
-    ('StackV2_0', {
-        'block': NetForStackInput(P.Stack()),
+    ('Pack_0', {
+        'block': NetForPackInput(P.Pack()),
         'desc_inputs':[[2, 2], [2, 2], [2, 2]],
         'desc_bprop':[[3, 2, 2]],
     }),
-    ('StackV2_1', {
-        'block': NetForStackInput(P.Stack(axis=-2)),
+    ('Pack_1', {
+        'block': NetForPackInput(P.Pack(axis=-2)),
         'desc_inputs':[[3, 2, 3], [3, 2, 3], [3, 2, 3]],
         'desc_bprop':[[3, 2, 3, 3]],
     }),
-    ('StackV2_2', {
-        'block': NetForStackInput(P.Stack()),
+    ('Pack_2', {
+        'block': NetForPackInput(P.Pack()),
         'desc_inputs':[[2, 2]],
         'desc_bprop':[[2, 2, 2]],
     }),
-    ('StackV2_3', {
-        'block': NetForStackInput(P.Stack()),
+    ('Pack_3', {
+        'block': NetForPackInput(P.Pack()),
         'desc_inputs':[[128, 128], [128, 128]],
         'desc_bprop':[[2, 128, 128]],
     }),
-    ('UnstackV2_0', {
-        'block': NetForUnstackInput(P.Unstack(axis=0)),
+    ('Unpack_0', {
+        'block': NetForUnpackInput(P.Unpack(axis=0)),
         'desc_inputs':[[2, 4]],
         'desc_bprop':[[4], [4]],
     }),
-    ('UnstackV2_1', {
-        'block': NetForUnstackInput(P.Unstack(axis=-1)),
+    ('Unpack_1', {
+        'block': NetForUnpackInput(P.Unpack(axis=-1)),
         'desc_inputs':[Tensor(np.array([[1, 1, 1]], np.float32))],
         'desc_bprop':[[1], [1], [1]],
     }),
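The `desc_bprop` shapes in the renamed test entries follow from the Pack/Unpack shape rules above. A quick numpy check of two of them (a sanity sketch, not part of the test suite):

import numpy as np

# Pack_1: three (3, 2, 3) inputs packed at axis=-2 -> output/gradient shape (3, 2, 3, 3)
xs = [np.zeros((3, 2, 3), np.float32) for _ in range(3)]
assert np.stack(xs, axis=-2).shape == (3, 2, 3, 3)

# Unpack_0: one (2, 4) input unpacked at axis=0 -> two outputs/gradients of shape (4,)
parts = [np.squeeze(p, axis=0) for p in np.split(np.zeros((2, 4), np.float32), 2, axis=0)]
assert [p.shape for p in parts] == [(4,), (4,)]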