magicwindyyd / mindspore (forked from MindSpore / mindspore)

Commit 8566f893
Authored on Apr 03, 2020 by mindspore-ci-bot; committed via Gitee on Apr 03, 2020

!2 adapting TBE operators IR changed

Merge pull request !2 from mxm/for_adapting_tbe

Parents: c24252b2, b53c9745
Showing 5 changed files with 75 additions and 101 deletions (+75 −101)
.gitmodules                               +1  −1
graphengine                               +1  −1
mindspore/ccsrc/transform/convert.cc      +7  −7
mindspore/ccsrc/transform/op_declare.cc   +60 −86
mindspore/ccsrc/transform/op_declare.h    +6  −6
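All five files belong to MindSpore's ANF-to-GE conversion layer, which binds each MindSpore primitive name to a GE/TBE operator adapter; this commit tracks a GE IR change that renamed several operators and their ports (TopKV2→TopK, Softmax→SoftmaxV2, LogSoftmax→LogSoftmaxV2, axis→axes, and so on) and bumps the graphengine submodule accordingly. As orientation for the diffs below, here is a minimal sketch of the registration pattern being edited — the names are illustrative stand-ins, not the real ADPT_DESC/OpAdapterDesc definitions:

```cpp
#include <map>
#include <memory>
#include <string>

// Illustrative stand-in for MindSpore's OpAdapterDesc: it records which
// GE/TBE operator a MindSpore primitive name converts to.
struct OpAdapterDesc {
  std::string ge_op_type;
};
using OpAdapterDescPtr = std::shared_ptr<OpAdapterDesc>;

// Sketch of the get_adpt_map() pattern edited in convert.cc below: one
// entry per primitive. When the GE IR renames an operator, only the
// adapter named on the right-hand side of the matching entry changes.
std::map<std::string, OpAdapterDescPtr> &GetAdapterMapSketch() {
  static std::map<std::string, OpAdapterDescPtr> adpt_map = {
      {"TopK", std::make_shared<OpAdapterDesc>(OpAdapterDesc{"TopK"})},          // was TopKV2
      {"Softmax", std::make_shared<OpAdapterDesc>(OpAdapterDesc{"SoftmaxV2"})},  // was Softmax
      {"LogSoftmax", std::make_shared<OpAdapterDesc>(OpAdapterDesc{"LogSoftmaxV2"})},
  };
  return adpt_map;
}
```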
.gitmodules  (view file @ 8566f893)

```diff
@@ -12,4 +12,4 @@
 url = https://github.com/protocolbuffers/protobuf.git
 [submodule "graphengine"]
 path = graphengine
-url = https://gitee.com/mindspore/graphengine.git
+url = https://gitee.com/ms-incubator/graphengine.git
```
graphengine @ 092c7a1f

```diff
-Subproject commit 5f763679fa33de1608d07f7651c6f16012b953ea
+Subproject commit 092c7a1f6548cac7d40e677af3498c3c49ea2bfd
```
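Because the submodule's upstream URL changes here, an existing checkout keeps fetching the old remote until it is re-synced; after pulling a commit like this one, `git submodule sync --recursive` followed by `git submodule update --init graphengine` is the usual remedy.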
mindspore/ccsrc/transform/convert.cc  (view file @ 8566f893)

```diff
@@ -189,7 +189,7 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_map()
     {string(kNameApplyMomentum), ADPT_DESC(ApplyMomentum)},
     {string(kNameMaxPool), ADPT_DESC(MaxPool)},
     {string(kNameAvgPool), ADPT_DESC(AvgPool)},
-    {string(kNameTopK), ADPT_DESC(TopKV2)},
+    {string(kNameTopK), ADPT_DESC(TopK)},
     {string(kNamePack), ADPT_DESC(Pack)},
     {string(kNameSplitD), ADPT_DESC(SplitD)},
     {string(kNameAllReduce), ADPT_DESC(HcomAllReduce)},
@@ -310,7 +310,7 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_map()
     {prim::kPrimMinimum->name(), ADPT_DESC(Minimum)},
     {prim::kPrimSelect->name(), ADPT_DESC(Select)},
     {string(kNameLessEqual), ADPT_DESC(LessEqual)},
-    {prim::kPrimLogSoftmax->name(), ADPT_DESC(LogSoftmax)},
+    {prim::kPrimLogSoftmax->name(), ADPT_DESC(LogSoftmaxV2)},
     {string(kNameTruncatedNormal), ADPT_DESC(TruncatedNormal)},
     {string(kNameStridedSliceGrad), ADPT_DESC(StridedSliceGrad)},
     {prim::kPrimGelu->name(), ADPT_DESC(Gelu)},
@@ -343,7 +343,7 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_map()
     {prim::kPrimMatMul->name(), ADPT_DESC(MatMul)},
     {string(kNameConst), ADPT_DESC(Constant, Const)},
-    {string(kNameSoftmax), ADPT_DESC(Softmax)},
+    {string(kNameSoftmax), ADPT_DESC(SoftmaxV2)},
     {string(kNameSoftmaxGrad), ADPT_DESC(SoftmaxGrad)},
     {string(kNameParam), ADPT_DESC(Data)},
     {string(kNameROIAlign), ADPT_DESC(ROIAlign)},
@@ -1017,8 +1017,8 @@ DfGraphConvertor &DfGraphConvertor::BuildGraph() {
     }
   }
-  // set up dependices
-  MS_LOG(DEBUG) << "set up dependices";
+  // set up dependencies
+  MS_LOG(DEBUG) << "set up dependencies";
   std::vector<AnfNodePtr> nodes = ::mindspore::TopoSort(anf_graph_->get_return());
   for (auto &it : nodes) {
     SetNodeInput(it);
@@ -1115,8 +1115,8 @@ void DfGraphConvertor::UpdateDataOpDesc(const AnfNodePtr &it, const OperatorPtr
   if (desc == nullptr) {
     MS_LOG(ERROR) << "Update data op descriptor failed! TensorDesc is null.";
   } else {
-    (void)std::static_pointer_cast<Data>(op)->update_input_desc_data(*desc);
-    (void)std::static_pointer_cast<Data>(op)->update_output_desc_out(*desc);
+    (void)std::static_pointer_cast<Data>(op)->update_input_desc_x(*desc);
+    (void)std::static_pointer_cast<Data>(op)->update_output_desc_y(*desc);
   }
 }
```
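The last hunk follows a port rename on GE's Data operator: the old input/output ports "data"/"out" became "x"/"y", which renames the generated update_input_desc_* / update_output_desc_* accessors. A minimal sketch of that generated-accessor pattern, with stand-in types rather than the real GE API:

```cpp
#include <memory>

// Stand-ins only; GE's real TensorDesc/Data types are richer.
struct TensorDesc {
  int dim_count = 0;
};

struct Data {
  TensorDesc input_x;
  TensorDesc output_y;
  // GE generates one accessor per named port, so renaming the port
  // ("data" -> "x", "out" -> "y") renames the accessor with it.
  void update_input_desc_x(const TensorDesc &d) { input_x = d; }
  void update_output_desc_y(const TensorDesc &d) { output_y = d; }
};

int main() {
  std::shared_ptr<Data> op = std::make_shared<Data>();
  TensorDesc desc{4};
  op->update_input_desc_x(desc);   // mirrors the calls in UpdateDataOpDesc
  op->update_output_desc_y(desc);
  return op->input_x.dim_count == 4 ? 0 : 1;
}
```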
mindspore/ccsrc/transform/op_declare.cc  (view file @ 8566f893)

```diff
@@ -138,11 +138,10 @@ OUTPUT_MAP(ApplyMomentum) = {{0, OUTPUT_DESC(var)}};
 INPUT_MAP(Summary) = {{2, INPUT_DESC(x)}};
 ATTR_MAP(Summary) = EMPTY_ATTR_MAP;
-// data
+// Data
 INPUT_MAP(Data) = EMPTY_INPUT_MAP;
 ATTR_MAP(Data) = EMPTY_ATTR_MAP;
-// resnet ops in ge
 // BatchNorm
 INPUT_MAP(BatchNorm) = {{1, INPUT_DESC(x)},
                         {2, INPUT_DESC(scale)},
@@ -194,9 +193,9 @@ OUTPUT_MAP(PRelu) = {{0, OUTPUT_DESC(y)}};
 // PReluGrad
 INPUT_MAP(PReluGrad) = {
-    {1, INPUT_DESC(input_gradients)}, {2, INPUT_DESC(input_features)}, {3, INPUT_DESC(input_weights)}};
+    {1, INPUT_DESC(grads)}, {2, INPUT_DESC(features)}, {3, INPUT_DESC(weights)}};
 ATTR_MAP(PReluGrad) = EMPTY_ATTR_MAP;
-OUTPUT_MAP(PReluGrad) = {{0, OUTPUT_DESC(output_backprops_dx)}, {1, OUTPUT_DESC(output_backprops_da)}};
+OUTPUT_MAP(PReluGrad) = {{0, OUTPUT_DESC(dx)}, {1, OUTPUT_DESC(da)}};
 // Sigmoid
 INPUT_MAP(Sigmoid) = {{1, INPUT_DESC(x)}};
@@ -241,12 +240,12 @@ ATTR_MAP(CumsumD) = {{"exclusive", ATTR_DESC(exclusive, AnyTraits<bool>())},
                      {"reverse", ATTR_DESC(reverse, AnyTraits<bool>())}};
 OUTPUT_MAP(CumsumD) = {{0, OUTPUT_DESC(y)}};
-// softmax
-INPUT_MAP(Softmax) = {{1, INPUT_DESC(x)}};
-ATTR_MAP(Softmax) = {
-    {"axis", ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
+// SoftmaxV2
+INPUT_MAP(SoftmaxV2) = {{1, INPUT_DESC(x)}};
+ATTR_MAP(SoftmaxV2) = {
+    {"axis", ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
 };
-OUTPUT_MAP(Softmax) = {{0, OUTPUT_DESC(y)}};
+OUTPUT_MAP(SoftmaxV2) = {{0, OUTPUT_DESC(y)}};
 // SoftmaxGrad
 INPUT_MAP(SoftmaxGrad) = {{1, INPUT_DESC(softmax)}, {2, INPUT_DESC(grad_softmax)}};
```
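The axis→axes rename recurs through the rest of this file. Each ATTR_MAP entry pairs the MindSpore-side attribute key (the string on the left) with the GE-side attribute identifier (the first ATTR_DESC argument), so the TBE IR rename only touches the right-hand name. A hedged sketch of that mapping idea, not the real ATTR_DESC machinery:

```cpp
#include <cstdint>
#include <map>
#include <string>
#include <utility>
#include <vector>

using AttrValue = std::vector<int64_t>;

// Stand-in for a GE operator that stores attributes by GE-side name.
struct GeOp {
  std::map<std::string, AttrValue> attrs;
  void SetAttr(const std::string &ge_name, AttrValue v) { attrs[ge_name] = std::move(v); }
};

// Sketch of what an entry like {"axis", ATTR_DESC(axes, ...)} accomplishes:
// read the attribute under MindSpore's key "axis" and write it under the
// GE operator's (renamed) key "axes".
void ConvertSoftmaxAxisAttr(const std::map<std::string, AttrValue> &ms_attrs, GeOp *op) {
  auto it = ms_attrs.find("axis");
  if (it != ms_attrs.end()) {
    op->SetAttr("axes", it->second);
  }
}
```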
```diff
@@ -269,21 +268,21 @@ ATTR_MAP(GatherV2) = EMPTY_ATTR_MAP;
 OUTPUT_MAP(GatherV2) = {{0, OUTPUT_DESC(y)}};
 // ReduceSum
-INPUT_MAP(ReduceSum) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(axis)}};
+INPUT_MAP(ReduceSum) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(axes)}};
 ATTR_MAP(ReduceSum) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
 OUTPUT_MAP(ReduceSum) = {{0, OUTPUT_DESC(y)}};
 // ReduceSumD
 INPUT_MAP(ReduceSumD) = {{1, INPUT_DESC(x)}};
 INPUT_ATTR_MAP(ReduceSumD) = {
-    {2, ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
+    {2, ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
 ATTR_MAP(ReduceSumD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
 OUTPUT_MAP(ReduceSumD) = {{0, OUTPUT_DESC(y)}};
 // ReduceProdD
 INPUT_MAP(ReduceProdD) = {{1, INPUT_DESC(x)}};
 INPUT_ATTR_MAP(ReduceProdD) = {
-    {2, ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
+    {2, ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
 ATTR_MAP(ReduceProdD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
 OUTPUT_MAP(ReduceProdD) = {{0, OUTPUT_DESC(y)}};
@@ -294,7 +293,7 @@ ATTR_MAP(CumprodD) = {{"exclusive", ATTR_DESC(exclusive, AnyTraits<bool>())},
                       {"reverse", ATTR_DESC(reverse, AnyTraits<bool>())}};
 OUTPUT_MAP(CumprodD) = {{0, OUTPUT_DESC(y)}};
-// SoftmaxCrossEntropyWithLogits/
+// SoftmaxCrossEntropyWithLogits
 INPUT_MAP(SoftmaxCrossEntropyWithLogits) = {{1, INPUT_DESC(features)}, {2, INPUT_DESC(labels)}};
 ATTR_MAP(SoftmaxCrossEntropyWithLogits) = EMPTY_ATTR_MAP;
 OUTPUT_MAP(SoftmaxCrossEntropyWithLogits) = {{0, OUTPUT_DESC(loss)}, {1, OUTPUT_DESC(backprop)}};
@@ -306,7 +305,7 @@ INPUT_ATTR_MAP(MeanGrad) = {{2, ATTR_DESC(mean_grad_output_shape_value, kOpForma
 ATTR_MAP(MeanGrad) = {{"mode", ATTR_DESC(mode, AnyTraits<int64_t>())}};
 INPUT_MAP(SliceD) = {{1, INPUT_DESC(x)}};
-INPUT_ATTR_MAP(SliceD) = {{2, ATTR_DESC(begin, AnyTraits<int>(), AnyTraits<std::vector<int64_t>>())},
+INPUT_ATTR_MAP(SliceD) = {{2, ATTR_DESC(offsets, AnyTraits<int>(), AnyTraits<std::vector<int64_t>>())},
                           {3, ATTR_DESC(size, AnyTraits<int>(), AnyTraits<std::vector<int64_t>>())}};
 ATTR_MAP(SliceD) = EMPTY_ATTR_MAP;
 OUTPUT_MAP(SliceD) = {{0, OUTPUT_DESC(y)}};
```
```diff
@@ -401,42 +400,10 @@ ATTR_MAP(BoundingBoxDecode) = {
 };
 OUTPUT_MAP(BoundingBoxDecode) = {{0, OUTPUT_DESC(bboxes)}};
-#ifdef VALID_CODE
-// Less
-INPUT_MAP(Less) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(y)}};
-ATTR_MAP(Less) = EMPTY_ATTR_MAP;
-OUTPUT_MAP(Less) = {{0, OUTPUT_DESC(z)}};
-// Cast
-INPUT_MAP(Cast) = {{1, INPUT_DESC(x)}};
-INPUT_ATTR_MAP(Cast) = {{2, ATTR_DESC(dst_type, AnyTraits<GEType>())}};
-ATTR_MAP(Cast) = {{"Truncate", ATTR_DESC(truncate, AnyTraits<bool>())}};
-OUTPUT_MAP(Cast) = {{0, OUTPUT_DESC(y)}};
-// Minimum
-INPUT_MAP(Minimum) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(y)}};
-ATTR_MAP(Minimum) = {{"alpha", ATTR_DESC(alpha, AnyTraits<float>())}, {"beta", ATTR_DESC(beta, AnyTraits<float>())}};
-OUTPUT_MAP(Minimum) = {{0, OUTPUT_DESC(z)}};
-// Sub
-INPUT_MAP(Sub) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
-ATTR_MAP(Sub) = {{"alpha", ATTR_DESC(alpha, AnyTraits<float>())}, {"beta", ATTR_DESC(beta, AnyTraits<float>())}};
-#endif
-// TopKV2
-INPUT_MAP(TopKV2) = {
-    {1, INPUT_DESC(input)},
-    {2, INPUT_DESC(k)},
-};
-ATTR_MAP(TopKV2) = {{"T", ATTR_DESC(T, AnyTraits<GEType>())}, {"sorted", ATTR_DESC(sorted, AnyTraits<bool>())}};
-OUTPUT_MAP(TopKV2) = {
-    {0, OUTPUT_DESC(values)},
-    {1, OUTPUT_DESC(indices)},
-};
+// TopK
+INPUT_MAP(TopK) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(k)}};
+ATTR_MAP(TopK) = {{"sorted", ATTR_DESC(sorted, AnyTraits<bool>())}};
+OUTPUT_MAP(TopK) = {{0, OUTPUT_DESC(values)}, {1, OUTPUT_DESC(indices)}};
 // Multiply
 INPUT_MAP(Multiply) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(y)}};
```
```diff
@@ -476,7 +443,7 @@ ATTR_MAP(Iou) = {{"mode", ATTR_DESC(mode, AnyTraits<std::string>())}};
 OUTPUT_MAP(Iou) = {{0, OUTPUT_DESC(overlap)}};
 // ResizeNearestNeighborD
-INPUT_MAP(ResizeNearestNeighborD) = {{1, INPUT_DESC(images)}};
+INPUT_MAP(ResizeNearestNeighborD) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(ResizeNearestNeighborD) = {
     {"size", ATTR_DESC(size, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
     {"align_corners", ATTR_DESC(align_corners, AnyTraits<bool>())}};
@@ -506,17 +473,17 @@ ATTR_MAP(Relu6) = EMPTY_ATTR_MAP;
 OUTPUT_MAP(Relu6) = {{0, OUTPUT_DESC(activations)}};
 // Relu6Grad
-INPUT_MAP(Relu6Grad) = {{1, INPUT_DESC(dy)}, {2, INPUT_DESC(y)}};
+INPUT_MAP(Relu6Grad) = {{1, INPUT_DESC(features)}, {2, INPUT_DESC(gradients)}};
 ATTR_MAP(Relu6Grad) = EMPTY_ATTR_MAP;
-OUTPUT_MAP(Relu6Grad) = {{0, OUTPUT_DESC(z)}};
+OUTPUT_MAP(Relu6Grad) = {{0, OUTPUT_DESC(backprops)}};
 // ResizeBilinearGrad
 INPUT_MAP(ResizeBilinearGrad) = {{1, INPUT_DESC(grads)}, {2, INPUT_DESC(original_image)}};
 ATTR_MAP(ResizeBilinearGrad) = {{"align_corners", ATTR_DESC(align_corners, AnyTraits<bool>())}};
 OUTPUT_MAP(ResizeBilinearGrad) = {{0, OUTPUT_DESC(y)}};
-// ResizeBilinear
-INPUT_MAP(ResizeBilinearD) = {{1, INPUT_DESC(images)}};
+// ResizeBilinearD
+INPUT_MAP(ResizeBilinearD) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(ResizeBilinearD) = {
     {"size", ATTR_DESC(size, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
     {"align_corners", ATTR_DESC(align_corners, AnyTraits<bool>())}};
@@ -539,9 +506,9 @@ OUTPUT_MAP(NMSWithMask) = {
     {0, OUTPUT_DESC(selected_boxes)}, {1, OUTPUT_DESC(selected_idx)}, {2, OUTPUT_DESC(selected_mask)}};
 // Unpack
-INPUT_MAP(Unpack) = {{1, INPUT_DESC(value)}};
+INPUT_MAP(Unpack) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(Unpack) = {{"axis", ATTR_DESC(axis, AnyTraits<int>())}, {"num", ATTR_DESC(num, AnyTraits<int>())}};
-DYN_OUTPUT_MAP(Unpack) = {{0, DYN_OUTPUT_DESC(output)}};
+DYN_OUTPUT_MAP(Unpack) = {{0, DYN_OUTPUT_DESC(y)}};
 // ScatterNdUpdate
 INPUT_MAP(ScatterNdUpdate) = {{1, INPUT_DESC(var)}, {2, INPUT_DESC(indices)}, {3, INPUT_DESC(updates)}};
@@ -574,8 +541,8 @@ INPUT_MAP(SigmoidCrossEntropyWithLogitsGrad) = {
 ATTR_MAP(SigmoidCrossEntropyWithLogitsGrad) = EMPTY_ATTR_MAP;
 OUTPUT_MAP(SigmoidCrossEntropyWithLogitsGrad) = {{0, OUTPUT_DESC(gradient)}};
-// ScatterNd
-INPUT_MAP(ScatterNdD) = {{1, INPUT_DESC(indices)}, {2, INPUT_DESC(updates)}};
+// ScatterNdD
+INPUT_MAP(ScatterNdD) = {{1, INPUT_DESC(indices)}, {2, INPUT_DESC(x)}};
 INPUT_ATTR_MAP(ScatterNdD) = {
     {3, ATTR_DESC(shape, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
 ATTR_MAP(ScatterNdD) = EMPTY_ATTR_MAP;
@@ -587,7 +554,7 @@ ATTR_MAP(PadD) = {{"paddings", ATTR_DESC(paddings, AnyTraits<std::vector<std::ve
 OUTPUT_MAP(PadD) = {{0, OUTPUT_DESC(y)}};
 // GatherNd
-INPUT_MAP(GatherNd) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
+INPUT_MAP(GatherNd) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(indices)}};
 ATTR_MAP(GatherNd) = EMPTY_ATTR_MAP;
 OUTPUT_MAP(GatherNd) = {{0, OUTPUT_DESC(y)}};
```
```diff
@@ -612,13 +579,13 @@ ATTR_MAP(ROIAlignGrad) = {
 // ArgMaxD
 INPUT_MAP(ArgMaxD) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(ArgMaxD) = {{"axis", ATTR_DESC(dimension, AnyTraits<int>())},
-                     {"output_type", ATTR_DESC(output_type, AnyTraits<GEType>())}};
+                     {"output_type", ATTR_DESC(dtype, AnyTraits<GEType>())}};
 OUTPUT_MAP(ArgMaxD) = {{0, OUTPUT_DESC(y)}};
 // ArgMinD
 INPUT_MAP(ArgMinD) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(ArgMinD) = {{"axis", ATTR_DESC(dimension, AnyTraits<int>())},
-                     {"output_type", ATTR_DESC(output_type, AnyTraits<GEType>())}};
+                     {"output_type", ATTR_DESC(dtype, AnyTraits<GEType>())}};
 OUTPUT_MAP(ArgMinD) = {{0, OUTPUT_DESC(y)}};
 // ArgMaxWithValue
@@ -634,14 +601,14 @@ ATTR_MAP(ArgMinWithValue) = {{"axis", ATTR_DESC(dimension, AnyTraits<int>())},
 OUTPUT_MAP(ArgMinWithValue) = {{0, OUTPUT_DESC(indice)}, {1, OUTPUT_DESC(values)}};
 // ReduceAll
-INPUT_MAP(ReduceAll) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(axis)}};
+INPUT_MAP(ReduceAll) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(axes)}};
 ATTR_MAP(ReduceAll) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
 OUTPUT_MAP(ReduceAll) = {{0, OUTPUT_DESC(y)}};
 // ReduceMeanD
 INPUT_MAP(ReduceMeanD) = {{1, INPUT_DESC(x)}};
 INPUT_ATTR_MAP(ReduceMeanD) = {
-    {2, ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
+    {2, ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
 ATTR_MAP(ReduceMeanD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
 OUTPUT_MAP(ReduceMeanD) = {{0, OUTPUT_DESC(y)}};
@@ -708,11 +675,12 @@ INPUT_MAP(BiasAddGrad) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(BiasAddGrad) = {{"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
 OUTPUT_MAP(BiasAddGrad) = {{0, OUTPUT_DESC(y)}};
-// maxpoolgrad
+// MaxPoolGrad
 INPUT_MAP(MaxPoolGrad) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}, {3, INPUT_DESC(grad)}};
 ATTR_MAP(MaxPoolGrad) = {{"ksize", ATTR_DESC(ksize, AnyTraits<int>(), AnyTraits<std::vector<int64_t>>())},
                          {"strides", ATTR_DESC(strides, AnyTraits<int>(), AnyTraits<std::vector<int64_t>>())},
-                         {"padding", ATTR_DESC(padding, AnyTraits<std::string>())}};
+                         {"padding", ATTR_DESC(padding, AnyTraits<std::string>())},
+                         {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
 OUTPUT_MAP(MaxPoolGrad) = {{0, OUTPUT_DESC(y)}};
 // avgpoolgrad
```
```diff
@@ -739,28 +707,34 @@ ATTR_MAP(Conv2D) = {
     {"stride", ATTR_DESC(strides, "pad", AnyTraits<std::vector<int64_t>>())},
     {"pad_list", ATTR_DESC(pads, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
     {"dilation", ATTR_DESC(dilations, "pad", AnyTraits<std::vector<int64_t>>())},
     {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
     {"group", ATTR_DESC(groups, AnyTraits<int>())}
 };
 OUTPUT_MAP(Conv2D) = {{0, OUTPUT_DESC(y)}};
 // Conv2DBackpropInputD
-INPUT_MAP(Conv2DBackpropInputD) = {{1, INPUT_DESC(out_backprop)}, {2, INPUT_DESC(filters)}};
+INPUT_MAP(Conv2DBackpropInputD) = {{1, INPUT_DESC(out_backprop)}, {2, INPUT_DESC(filter)}};
 INPUT_ATTR_MAP(Conv2DBackpropInputD) = {
-    {3, ATTR_DESC(input_sizes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
+    {3, ATTR_DESC(input_size, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
 ATTR_MAP(Conv2DBackpropInputD) = {
     {"pad_list", ATTR_DESC(pads, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
-    {"stride", ATTR_DESC(strides, "strides", AnyTraits<std::vector<int64_t>>())},
+    {"stride", ATTR_DESC(strides, "pad", AnyTraits<std::vector<int64_t>>())},
+    {"dilation", ATTR_DESC(dilations, "pad", AnyTraits<std::vector<int64_t>>())},
+    {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
+    {"group", ATTR_DESC(groups, AnyTraits<int>())}
 };
 OUTPUT_MAP(Conv2DBackpropInputD) = {{0, OUTPUT_DESC(y)}};
 // Conv2DBackpropFilterD
 INPUT_MAP(Conv2DBackpropFilterD) = {{1, INPUT_DESC(out_backprop)}, {2, INPUT_DESC(x)}};
 INPUT_ATTR_MAP(Conv2DBackpropFilterD) = {
-    {3, ATTR_DESC(filter_sizes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
+    {3, ATTR_DESC(filter_size, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
 ATTR_MAP(Conv2DBackpropFilterD) = {
     {"pad_list", ATTR_DESC(pads, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
-    {"stride", ATTR_DESC(strides, "strides", AnyTraits<std::vector<int64_t>>())},
+    {"stride", ATTR_DESC(strides, "pad", AnyTraits<std::vector<int64_t>>())},
+    {"dilation", ATTR_DESC(dilations, "pad", AnyTraits<std::vector<int64_t>>())},
+    {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
+    {"group", ATTR_DESC(groups, AnyTraits<int>())}
 };
 OUTPUT_MAP(Conv2DBackpropFilterD) = {{0, OUTPUT_DESC(y)}};
```
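The Conv2D backprop hunks also exercise INPUT_ATTR_MAP, which takes a value supplied as a positional MindSpore input (here input 3, the target shape) and emits it as a GE attribute (the renamed input_size/filter_size). A hedged sketch of that input-to-attribute lowering, with invented names:

```cpp
#include <cstdint>
#include <map>
#include <string>
#include <vector>

using Int64List = std::vector<int64_t>;

// Stand-in for a GE operator that only carries attributes.
struct GeOpSketch {
  std::map<std::string, Int64List> attrs;
};

// Sketch of the INPUT_ATTR_MAP idea: the frontend passes the shape as
// input #3 of Conv2DBackpropInputD, but the GE operator expects it as the
// "input_size" attribute, so conversion consumes the input and sets the attr.
void LowerInputToAttr(const std::vector<Int64List> &ms_inputs, GeOpSketch *op) {
  const std::size_t kShapeInputIndex = 3;  // 1-based index, as in the maps
  if (ms_inputs.size() >= kShapeInputIndex) {
    op->attrs["input_size"] = ms_inputs[kShapeInputIndex - 1];
  }
}
```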
```diff
@@ -798,8 +772,8 @@ OUTPUT_MAP(DepthwiseConv2DBackpropFilterD) = {{0, OUTPUT_DESC(filter_grad)}};
 // MatMul
 INPUT_MAP(MatMul) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
-ATTR_MAP(MatMul) = {{"transpose_a", ATTR_DESC(transpose_a, AnyTraits<bool>())},
-                    {"transpose_b", ATTR_DESC(transpose_b, AnyTraits<bool>())}};
+ATTR_MAP(MatMul) = {{"transpose_a", ATTR_DESC(transpose_x1, AnyTraits<bool>())},
+                    {"transpose_b", ATTR_DESC(transpose_x2, AnyTraits<bool>())}};
 OUTPUT_MAP(MatMul) = {{0, OUTPUT_DESC(y)}};
 // Merge
@@ -846,10 +820,10 @@ ATTR_MAP(Sub) = EMPTY_ATTR_MAP;
 OUTPUT_MAP(Sub) = {{0, OUTPUT_DESC(y)}};
 // SplitD
-INPUT_MAP(SplitD) = {{1, INPUT_DESC(value)}};
+INPUT_MAP(SplitD) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(SplitD) = {{"axis", ATTR_DESC(split_dim, AnyTraits<int>())},
                     {"output_num", ATTR_DESC(num_split, AnyTraits<int>())}};
-DYN_OUTPUT_MAP(SplitD) = {{0, DYN_OUTPUT_DESC(output)}};
+DYN_OUTPUT_MAP(SplitD) = {{0, DYN_OUTPUT_DESC(y)}};
 // Neg
 INPUT_MAP(Neg) = {{1, INPUT_DESC(x)}};
@@ -876,12 +850,12 @@ OUTPUT_MAP(Pack) = {{0, OUTPUT_DESC(y)}};
 // ConcatD
 INPUT_MAP(ConcatD) = EMPTY_INPUT_MAP;
-DYN_INPUT_MAP(ConcatD) = {{1, DYN_INPUT_DESC(input_values)}};
+DYN_INPUT_MAP(ConcatD) = {{1, DYN_INPUT_DESC(x)}};
 ATTR_MAP(ConcatD) = {
     {"axis", ATTR_DESC(concat_dim, AnyTraits<int>())},
     {"inputNums", ATTR_DESC(N, AnyTraits<int>())},
 };
-OUTPUT_MAP(ConcatD) = {{0, OUTPUT_DESC(output_data)}};
+OUTPUT_MAP(ConcatD) = {{0, OUTPUT_DESC(y)}};
 // Less
 INPUT_MAP(Less) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
@@ -916,14 +890,14 @@ OUTPUT_MAP(TanhGrad) = {{0, OUTPUT_DESC(z)}};
 // ReduceMinD
 INPUT_MAP(ReduceMinD) = {{1, INPUT_DESC(x)}};
 INPUT_ATTR_MAP(ReduceMinD) = {
-    {2, ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
+    {2, ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
 ATTR_MAP(ReduceMinD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
 OUTPUT_MAP(ReduceMinD) = {{0, OUTPUT_DESC(y)}};
 // ReduceMaxD
 INPUT_MAP(ReduceMaxD) = {{1, INPUT_DESC(x)}};
 INPUT_ATTR_MAP(ReduceMaxD) = {
-    {2, ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
+    {2, ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
 ATTR_MAP(ReduceMaxD) = {{"keep_dims", ATTR_DESC(keep_dims, AnyTraits<bool>())}};
 OUTPUT_MAP(ReduceMaxD) = {{0, OUTPUT_DESC(y)}};
@@ -1008,11 +982,11 @@ INPUT_MAP(LessEqual) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
 ATTR_MAP(LessEqual) = EMPTY_ATTR_MAP;
 OUTPUT_MAP(LessEqual) = {{0, OUTPUT_DESC(y)}};
-// LogSoftmax
-INPUT_MAP(LogSoftmax) = {{1, INPUT_DESC(logits)}};
-ATTR_MAP(LogSoftmax) = {
-    {"axis", ATTR_DESC(axis, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
-OUTPUT_MAP(LogSoftmax) = {{0, OUTPUT_DESC(logsoftmax)}};
+// LogSoftmaxV2
+INPUT_MAP(LogSoftmaxV2) = {{1, INPUT_DESC(logits)}};
+ATTR_MAP(LogSoftmaxV2) = {
+    {"axis", ATTR_DESC(axes, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())}};
+OUTPUT_MAP(LogSoftmaxV2) = {{0, OUTPUT_DESC(logsoftmax)}};
 // RandomChoiceWithMask
 INPUT_MAP(RandomChoiceWithMask) = {{1, INPUT_DESC(x)}};
@@ -1094,8 +1068,8 @@ OUTPUT_MAP(LayerNormGrad) = {{0, OUTPUT_DESC(pd_x)}, {1, OUTPUT_DESC(pd_gamma)},
 // BatchMatMul
 INPUT_MAP(BatchMatMul) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
-ATTR_MAP(BatchMatMul) = {{"transpose_x1", ATTR_DESC(adj_x, AnyTraits<bool>())},
-                         {"transpose_x2", ATTR_DESC(adj_y, AnyTraits<bool>())}};
+ATTR_MAP(BatchMatMul) = {{"transpose_x1", ATTR_DESC(adj_x1, AnyTraits<bool>())},
+                         {"transpose_x2", ATTR_DESC(adj_x2, AnyTraits<bool>())}};
 OUTPUT_MAP(BatchMatMul) = {{0, OUTPUT_DESC(y)}};
 // DropoutDoMask
```
mindspore/ccsrc/transform/op_declare.h  (view file @ 8566f893)

```diff
@@ -209,8 +209,8 @@ DECLARE_OP_USE_OUTPUT(Merge)
 DECLARE_OP_ADAPTER(Switch)
 DECLARE_OP_USE_OUTPUT(Switch)
-DECLARE_OP_ADAPTER(TopKV2)
-DECLARE_OP_USE_OUTPUT(TopKV2)
+DECLARE_OP_ADAPTER(TopK)
+DECLARE_OP_USE_OUTPUT(TopK)
 DECLARE_OP_ADAPTER(RealDiv)
 DECLARE_OP_USE_OUTPUT(RealDiv)
@@ -260,8 +260,8 @@ DECLARE_OP_ADAPTER(Select)
 DECLARE_OP_USE_OUTPUT(Select)
 DECLARE_OP_ADAPTER(LessEqual)
 DECLARE_OP_USE_OUTPUT(LessEqual)
-DECLARE_OP_ADAPTER(LogSoftmax)
-DECLARE_OP_USE_OUTPUT(LogSoftmax)
+DECLARE_OP_ADAPTER(LogSoftmaxV2)
+DECLARE_OP_USE_OUTPUT(LogSoftmaxV2)
 DECLARE_OP_ADAPTER(TruncatedNormal)
 DECLARE_OP_USE_OUTPUT(TruncatedNormal)
 DECLARE_OP_ADAPTER(StridedSliceGrad)
@@ -391,8 +391,8 @@ DECLARE_OP_ADAPTER(Sigmoid)
 DECLARE_OP_USE_OUTPUT(Sigmoid)
 DECLARE_OP_ADAPTER(SigmoidGrad)
 DECLARE_OP_USE_OUTPUT(SigmoidGrad)
-DECLARE_OP_ADAPTER(Softmax)
-DECLARE_OP_USE_OUTPUT(Softmax)
+DECLARE_OP_ADAPTER(SoftmaxV2)
+DECLARE_OP_USE_OUTPUT(SoftmaxV2)
 DECLARE_OP_ADAPTER(SoftmaxGrad)
 DECLARE_OP_USE_OUTPUT(SoftmaxGrad)
 DECLARE_OP_ADAPTER(Greater)
```
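The header only declares the per-operator adapter tables that op_declare.cc then populates, which is why each rename touches both files in lockstep. As a rough, invented-name sketch of what such declaration macros can expand to (the real DECLARE_OP_ADAPTER differs):

```cpp
#include <map>
#include <string>

// Hedged sketch: a DECLARE_OP_ADAPTER-style macro plausibly declares the
// static input/attr/output tables for one operator, which the .cc file
// then defines via INPUT_MAP(T) / ATTR_MAP(T) / OUTPUT_MAP(T). All names
// here are illustrative, not MindSpore's.
#define DECLARE_OP_ADAPTER_SKETCH(T)                       \
  struct T##AdapterSketch {                                \
    static std::map<int, std::string> input_map_;          \
    static std::map<std::string, std::string> attr_map_;   \
    static std::map<int, std::string> output_map_;         \
  };

DECLARE_OP_ADAPTER_SKETCH(TopK)          // replaces the removed TopKV2 declaration
DECLARE_OP_ADAPTER_SKETCH(SoftmaxV2)     // replaces Softmax
DECLARE_OP_ADAPTER_SKETCH(LogSoftmaxV2)  // replaces LogSoftmax
```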