magicwindyyd / mindspore (forked from MindSpore / mindspore, in sync with the upstream project)
Commit 7f53253b

Authored on Aug 10, 2020 by mindspore-ci-bot; committed via Gitee on Aug 10, 2020.
!4144 fix anf transform bug
Merge pull request !4144 from zhengjun10/master
Parents: 0a1fac92, 0a1d090b
Showing 4 changed files with 22 additions and 22 deletions (+22 / -22).
mindspore/lite/tools/converter/converter.cc                     +1  -1
mindspore/lite/tools/converter/graphdef_transform.cc            +14 -14
mindspore/lite/tools/optimizer/fusion/conv_biasadd_fusion.cc    +4  -4
mindspore/lite/tools/optimizer/fusion/conv_transform_fusion.cc  +3  -3
mindspore/lite/tools/converter/converter.cc

@@ -90,7 +90,7 @@ MetaGraphT *Converter::Convert(const converter::Flags *flag) {
     return nullptr;
   }
-  //  graph = anfTransform->Transform(graph);
+  graph = anfTransform->Transform(graph);
   CreateQuantizer(graph, flag);
   if (mQuantizer != nullptr) {
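The change above re-enables the ANF-level transform between graph import and quantizer creation. As a rough, self-contained illustration of that ordering (transform first, then quantize, with each step checked for failure), here is a sketch in which GraphT, RunAnfTransform, and RunQuantizer are made-up names rather than the converter's real API:

```cpp
#include <iostream>
#include <memory>

// Illustrative stand-ins only; these are not MindSpore types or functions.
struct GraphT {};

std::unique_ptr<GraphT> RunAnfTransform(std::unique_ptr<GraphT> graph) {
  // A graph-level rewrite (e.g. operator fusion); may return nullptr on failure.
  return graph;
}

bool RunQuantizer(const GraphT *graph) { return graph != nullptr; }

int main() {
  auto graph = std::make_unique<GraphT>();
  graph = RunAnfTransform(std::move(graph));  // transform first, as the re-enabled line does
  if (graph == nullptr) {
    std::cerr << "anf transform failed" << std::endl;
    return 1;
  }
  if (!RunQuantizer(graph.get())) {  // quantization only runs on the transformed graph
    std::cerr << "quantization failed" << std::endl;
    return 1;
  }
  std::cout << "convert ok" << std::endl;
  return 0;
}
```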
mindspore/lite/tools/converter/graphdef_transform.cc

@@ -100,20 +100,20 @@ int GraphDefTransform::Transform(const converter::Flags &ctx) {
   // }
   // fusion
-  {
-    Optimizer fusionOptimizer;
-    fusionOptimizer.AddPass(new (std::nothrow) ConvBiasAddFusionPass());
-    fusionOptimizer.AddPass(new (std::nothrow) ConvBNFusionPass());
-    fusionOptimizer.AddPass(new (std::nothrow) ConvScaleFusionPass());
-    fusionOptimizer.AddPass(new (std::nothrow) ConvReluFusionPass());
-    fusionOptimizer.AddPass(new (std::nothrow) ConvRelu6FusionPass());
-    fusionOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass());
-    status = fusionOptimizer.Run(graphDefT);
-    if (status != RET_OK && status != RET_NO_CHANGE) {
-      MS_LOG(ERROR) << "Run fusionOptimizer graphPasses Failed";
-      return status;
-    }
-  }
+  //  {
+  //    Optimizer fusionOptimizer;
+  //    fusionOptimizer.AddPass(new (std::nothrow) ConvBiasAddFusionPass());
+  //    fusionOptimizer.AddPass(new (std::nothrow) ConvBNFusionPass());
+  //    fusionOptimizer.AddPass(new (std::nothrow) ConvScaleFusionPass());
+  //    fusionOptimizer.AddPass(new (std::nothrow) ConvReluFusionPass());
+  //    fusionOptimizer.AddPass(new (std::nothrow) ConvRelu6FusionPass());
+  //    fusionOptimizer.AddPass(new (std::nothrow) IsolatedNodeRemovePass());
+  //    status = fusionOptimizer.Run(graphDefT);
+  //    if (status != RET_OK && status != RET_NO_CHANGE) {
+  //      MS_LOG(ERROR) << "Run fusionOptimizer graphPasses Failed";
+  //      return status;
+  //    }
+  //  }
   // weight format trans
   if (ctx.formatTrans) {
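Together with the converter.cc change, this hunk retires the GraphDef-level fusion block, presumably because the equivalent fusions now run during the ANF transform that the commit re-enables; the old registration survives only as a comment. For readers unfamiliar with the AddPass/Run idiom used in that block, the sketch below shows the general shape of such a pass manager; Optimizer, Pass, and the status values here are simplified stand-ins, not MindSpore's actual classes:

```cpp
#include <iostream>
#include <memory>
#include <vector>

// Simplified stand-ins that mirror the AddPass/Run pattern from the hunk above.
enum Status { RET_OK = 0, RET_NO_CHANGE = 1, RET_ERROR = 2 };

struct Graph {};

class Pass {
 public:
  virtual ~Pass() = default;
  virtual Status Run(Graph *graph) = 0;
};

class FusionPass : public Pass {
 public:
  Status Run(Graph *graph) override { return RET_NO_CHANGE; }  // stub: nothing to fuse
};

class Optimizer {
 public:
  // Takes ownership of the pass; a nullptr from new (std::nothrow) is simply skipped.
  void AddPass(Pass *pass) {
    if (pass != nullptr) {
      passes_.emplace_back(pass);
    }
  }
  Status Run(Graph *graph) {
    for (auto &pass : passes_) {
      Status status = pass->Run(graph);
      if (status != RET_OK && status != RET_NO_CHANGE) {
        return status;  // same early-exit rule the hunk applies after fusionOptimizer.Run
      }
    }
    return RET_OK;
  }

 private:
  std::vector<std::unique_ptr<Pass>> passes_;
};

int main() {
  Graph graph;
  Optimizer optimizer;
  optimizer.AddPass(new (std::nothrow) FusionPass());
  std::cout << "optimizer status: " << optimizer.Run(&graph) << std::endl;
  return 0;
}
```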
mindspore/lite/tools/optimizer/fusion/conv_biasadd_fusion.cc

@@ -89,10 +89,10 @@ void GenConvNewBias(const FuncGraphPtr &func_graph, const CNodePtr &conv_node, c
   auto add_weight_param = bias_add_weight->cast<ParameterPtr>()->default_param();
   auto add_weight_tensor = std::dynamic_pointer_cast<ParamValueLite>(add_weight_param);
   auto add_weight_data = reinterpret_cast<float *>(add_weight_tensor->tensor_addr());
-  if (add_weight_tensor->tensor_shape().empty()) {
-    if (EOK != memset_s(add_bias_data, kernel_nums * sizeof(float), *add_weight_data, kernel_nums * sizeof(float))) {
-      MS_LOG(EXCEPTION) << "memset_s conv_bias_data failed";
+  auto add_weight_shape = add_weight_tensor->tensor_shape();
+  if (add_weight_shape.empty() || (add_weight_shape.size() == 1 && add_weight_shape[0] == 1)) {
+    for (size_t i = 0; i < kernel_nums; i++) {
+      add_bias_data[i] = *add_weight_data;
     }
   } else {
     if (EOK != memcpy_s(add_bias_data, kernel_nums * sizeof(float), add_weight_data, kernel_nums * sizeof(float))) {
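The removed branch tried to fill the new bias buffer with memset_s, passing *add_weight_data as the fill value; memset-style functions replicate a single byte, so they cannot broadcast an arbitrary float, and the replacement loop assigns the scalar weight to every output channel instead (it also accepts a weight of shape {1}, not only an empty shape). The standalone snippet below illustrates the difference, using plain std::memset rather than the securec memset_s:

```cpp
#include <cstring>
#include <iostream>

int main() {
  const int kernel_nums = 4;
  const float bias_value = 1.5f;
  float bias[kernel_nums];

  // memset writes the same byte into every position, so it can only "broadcast"
  // values whose byte pattern is uniform (such as 0.0f); 1.5f cannot be set this way.
  std::memset(bias, 0, sizeof(bias));
  std::cout << "after memset(0): " << bias[0] << std::endl;  // prints 0

  // Broadcasting a scalar bias to every output channel needs an element-wise copy,
  // which is what the fixed pass does with its for loop.
  for (int i = 0; i < kernel_nums; ++i) {
    bias[i] = bias_value;
  }
  std::cout << "after loop: " << bias[0] << " ... " << bias[kernel_nums - 1] << std::endl;  // 1.5 ... 1.5
  return 0;
}
```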
mindspore/lite/tools/optimizer/fusion/conv_transform_fusion.cc

@@ -145,8 +145,8 @@ const {
   // conv has bias,bias_flag true
   bool bias_flag = false;
   if (conv_bias_node != nullptr) {
-    auto bias_weight_param = conv_weight_node->cast<ParameterPtr>()->default_param();
-    auto bias_tensor = std::dynamic_pointer_cast<ParamValueLite>(bias_weight_param);
+    auto conv_bias_param = conv_bias_node->cast<ParameterPtr>()->default_param();
+    auto bias_tensor = std::dynamic_pointer_cast<ParamValueLite>(conv_bias_param);
     bias_data = reinterpret_cast<float *>(bias_tensor->tensor_addr());
     bias_flag = true;
   } else {

@@ -187,7 +187,7 @@ const void ConvTransformFusion::CalNewBiasTensor(float *bias_data, int kernel_nu
   MS_ASSERT(bias_data != nullptr);
   if (bias_flag) {
     auto tmp_bias_data = new (std::nothrow) float[kernel_num];
-    if (EOK != memset_s(bias_data, kernel_num * sizeof(float), 0, kernel_num * sizeof(float))) {
+    if (EOK != memset_s(tmp_bias_data, kernel_num * sizeof(float), 0, kernel_num * sizeof(float))) {
       MS_LOG(EXCEPTION) << "memset bias data failed";
     }
     for (size_t i = 0; i < kernel_num; i++) {
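The first hunk reads the existing bias from conv_bias_node rather than conv_weight_node, so the folded bias is built from the correct parameter; the second zeroes the freshly allocated tmp_bias_data scratch buffer instead of bias_data before it is filled. A small standalone sketch of that second pattern, with std::memset standing in for the securec memset_s and a placeholder for the real scale arithmetic:

```cpp
#include <cstring>
#include <iostream>

int main() {
  const int kernel_num = 4;
  float bias_data[kernel_num] = {1.0f, 2.0f, 3.0f, 4.0f};  // caller-provided bias buffer

  // The scratch buffer comes back uninitialized from new (std::nothrow), so it is the
  // one that must be zeroed; clearing bias_data instead would overwrite the bias it
  // already holds.
  float *tmp_bias_data = new (std::nothrow) float[kernel_num];
  if (tmp_bias_data == nullptr) {
    std::cerr << "allocation failed" << std::endl;
    return 1;
  }
  std::memset(tmp_bias_data, 0, kernel_num * sizeof(float));

  for (int i = 0; i < kernel_num; ++i) {
    tmp_bias_data[i] += bias_data[i];  // placeholder for the real scale/fold arithmetic
  }
  std::cout << "tmp_bias_data[0] = " << tmp_bias_data[0] << std::endl;
  delete[] tmp_bias_data;
  return 0;
}
```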