Commit fc7a469c
Authored Jan 19, 2018 by 叶剑武
Merge branch 'activation' into 'master'

Add activations support

See merge request !3
Parents: c264d3b2, 72bf66a9
Showing 1 changed file with 31 additions and 5 deletions:

python/tools/tf_converter_lib.py  (+31, -5)
@@ -216,6 +216,9 @@ class TFConverter(object):
         and self.tf_graph[final_op.name][0].type == 'Relu':
       relu_op = self.tf_graph[final_op.name][0]
       op_def.type = "FusedConv2D"
+      fused_relu_arg = op_def.arg.add()
+      fused_relu_arg.name = 'activation'
+      fused_relu_arg.s = "RELU"
       final_op = relu_op
       self.resolved_ops[relu_op.name] = 1
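Note: both Conv2D fusion sites (this hunk and the matching one at old line 531 below) now record the fused activation as a string-valued argument on the operator. A minimal sketch of the step, assuming self.tf_graph maps an op's name to the list of ops consuming its output, as the surrounding code suggests; the helper name is illustrative, not part of the diff:

def maybe_fuse_relu(converter, op_def, final_op):
    """Hypothetical standalone rendering of the fusion step above."""
    consumers = converter.tf_graph[final_op.name]
    if len(consumers) == 1 and consumers[0].type == 'Relu':
        relu_op = consumers[0]
        op_def.type = "FusedConv2D"          # conv and ReLU become one kernel
        fused_relu_arg = op_def.arg.add()    # protobuf repeated field: add() appends
        fused_relu_arg.name = 'activation'
        fused_relu_arg.s = "RELU"
        converter.resolved_ops[relu_op.name] = 1  # ReLU is consumed by the fusion
        return relu_op                       # caller treats the ReLU as the final op
    return final_op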
@@ -273,8 +276,8 @@ class TFConverter(object):
       relu_op = self.tf_graph[op.name][0]
       final_op = relu_op
       fused_relu_arg = op_def.arg.add()
-      fused_relu_arg.name = 'fused_relu'
-      fused_relu_arg.i = 1
+      fused_relu_arg.name = 'activation'
+      fused_relu_arg.s = "RELU"
       self.resolved_ops[relu_op.name] = 1
     op_def.output.extend([final_op.outputs[0].name])
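Note: the old encoding (a fused_relu argument with integer flag i = 1) is replaced by the same string-valued activation argument used at the other fusion site, so consumers of the converted model only have to look for one key. A hedged sketch of reading it back from the NetDef (hypothetical helper; MACE's runtime does the equivalent in its own code):

def get_string_arg(op_def, name, default=''):
    """Return the string value of the first arg named `name`, else `default`."""
    for arg in op_def.arg:          # protobuf repeated field iterates like a list
        if arg.name == name:
            return arg.s
    return default

# After this change, both fusion paths yield the same lookup result:
# get_string_arg(op_def, 'activation') == 'RELU'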
@@ -362,16 +365,34 @@ class TFConverter(object):
     data_format_arg.s = 'NHWC'
     self.resolved_ops[op.name] = 1
 
+  def convert_relu(self, op):
+    op_def = self.net_def.op.add()
+    arg = op_def.arg.add()
+    arg.name = 'T'
+    arg.i = self.dt
+    op_def.name = op.name
+    op_def.type = 'Activation'
+    activation_arg = op_def.arg.add()
+    activation_arg.name = 'activation'
+    activation_arg.s = "RELU"
+    op_def.input.extend([input.name for input in op.inputs])
+    op_def.output.extend([output.name for output in op.outputs])
+    self.add_output_shape(op.outputs, op_def)
+    self.resolved_ops[op.name] = 1
+
   def convert_relu6(self, op):
     op_def = self.net_def.op.add()
     arg = op_def.arg.add()
     arg.name = 'T'
     arg.i = self.dt
     op_def.name = op.name
-    op_def.type = 'Relu'
+    op_def.type = 'Activation'
     op_def.input.extend([input.name for input in op.inputs])
     op_def.output.extend([output.name for output in op.outputs])
     self.add_output_shape(op.outputs, op_def)
+    activation_arg = op_def.arg.add()
+    activation_arg.name = 'activation'
+    activation_arg.s = "RELUX"
     max_limit_arg = op_def.arg.add()
     max_limit_arg.name = 'max_limit'
     max_limit_arg.f = 6
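Note: the new convert_relu emits a generic Activation op with activation = 'RELU', and convert_relu6 now reuses the same op type with activation = 'RELUX' plus a max_limit argument of 6. The 'RELUX' semantics this encodes, as a plain-Python sketch for illustration (the actual computation happens in MACE's kernels, not in this converter):

def relux(x, max_limit=6.0):
    """Bounded ReLU: what Activation(activation='RELUX', max_limit=6) denotes."""
    return min(max(x, 0.0), max_limit)

assert relux(-1.0) == 0.0   # negatives clamp to zero, as in plain ReLU
assert relux(3.5) == 3.5    # values inside [0, max_limit] pass through
assert relux(10.0) == 6.0   # values above max_limit saturate (this is ReLU6)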
@@ -531,6 +552,9 @@ class TFConverter(object):
         and self.tf_graph[final_op.name][0].type == 'Relu':
       relu_op = self.tf_graph[final_op.name][0]
       op_def.type = "FusedConv2D"
+      fused_relu_arg = op_def.arg.add()
+      fused_relu_arg.name = 'activation'
+      fused_relu_arg.s = "RELU"
       final_op = relu_op
       self.resolved_ops[relu_op.name] = 1
@@ -602,6 +626,8 @@ class TFConverter(object):
       self.convert_batchnorm(op)
     elif op.type == 'AvgPool' or op.type == 'MaxPool':
       self.convert_pooling(op)
+    elif op.type == 'Relu':
+      self.convert_relu(op)
     elif op.type == 'Relu6':
       self.convert_relu6(op)
     elif op.type == 'Add':
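Note: with this branch in place, a standalone Relu node is routed through convert_relu instead of the generic fallback (which the next hunk comments out). For a node named, say, relu1, the emitted operator would look roughly like this, rendered as a Python dict for illustration (the node and tensor names are hypothetical; the real object is a protobuf OperatorDef):

# Hypothetical NetDef entry produced by convert_relu for a node 'relu1':
expected_op = {
    'name': 'relu1',
    'type': 'Activation',
    'arg': [
        {'name': 'T', 'i': 1},                # data type code, copied from self.dt
        {'name': 'activation', 's': 'RELU'},  # which activation the kernel applies
    ],
    'input': ['conv1/BiasAdd:0'],             # hypothetical producer tensor
    'output': ['relu1:0'],                    # shape recorded via add_output_shape
}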
@@ -618,8 +644,8 @@ class TFConverter(object):
       self.convert_space_to_batch(op, True)
     elif self.is_softmax(op):
       self.convert_softmax(op)
-    elif op.type in ['Relu']:
-      self.convert_normal_op(op)
+    #elif op.type in ['']:
+      #self.convert_normal_op(op)
     else:
       raise Exception('Unknown Op: %s, type: %s' % (op.name, op.type))