PaddlePaddle / X2Paddle
Commit c0dabbac
Authored Aug 01, 2019 by jiangjiajun
add save inference model func
Parent: 63a3bd17
Showing 7 changed files with 110 additions and 31 deletions (+110 -31)
x2paddle/convert.py                    +2  -4
x2paddle/core/graph.py                 +7  -0
x2paddle/core/op_mapper.py             +76 -7
x2paddle/core/util.py                  +8  -3
x2paddle/decoder/tf_decoder.py         +4  -0
x2paddle/op_mapper/caffe_op_mapper.py  +5  -5
x2paddle/op_mapper/tf_op_mapper.py     +8  -12
x2paddle/convert.py

@@ -58,8 +58,7 @@ def tf2paddle(model_path, save_dir):
    print("Now translating model from tensorflow to paddle.")
    model = TFDecoder(model_path)
    mapper = TFOpMapper(model)
    mapper.run()
    mapper.save_python_model(save_dir)
    mapper.save_inference_model(save_dir)


def caffe2paddle(proto, weight, save_dir, caffe_proto):

@@ -69,8 +68,7 @@ def caffe2paddle(proto, weight, save_dir, caffe_proto):
    print("Now translating model from caffe to paddle.")
    model = CaffeDecoder(proto, weight, caffe_proto)
    mapper = CaffeOpMapper(model)
    mapper.run()
    mapper.save_python_model(save_dir)
    mapper.save_inference_model(save_dir)


def main():
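For reference, a minimal sketch of how the updated TensorFlow entry point above would be driven by hand, assuming the import paths match the file layout in this commit and using hypothetical input/output paths:

    # Sketch only; "frozen_model.pb" and "output_dir" are hypothetical paths.
    from x2paddle.decoder.tf_decoder import TFDecoder
    from x2paddle.op_mapper.tf_op_mapper import TFOpMapper

    model = TFDecoder("frozen_model.pb")
    mapper = TFOpMapper(model)
    mapper.run()
    # The new save_inference_model() first writes the generated Python model
    # code via save_python_model(), then exports a fluid inference model
    # under <save_dir>/inference_model, as shown in op_mapper.py below.
    mapper.save_inference_model("output_dir")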
x2paddle/core/graph.py

@@ -113,6 +113,13 @@ class Graph(object):
        idx = self.topo_sort.index(node_name)
        del self.topo_sort[idx]
        if node_name in self.input_nodes:
            idx = self.input_nodes.index(node_name)
            del self.input_nodes[idx]
        if node_name in self.output_nodes:
            idx = self.output_nodes.index(node_name)
            del self.output_nodes[idx]

    def print(self):
        for i, tmp in enumerate(self.topo_sort):
            print(tmp, self.node_map[tmp].layer_type, self.node_map[tmp].inputs,
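A tiny standalone sketch of the bookkeeping this hunk adds, with made-up node names: when a node is dropped, its name is removed not only from the topological order but also from the input/output lists that may still reference it.

    # Illustrative only; the node names are hypothetical.
    topo_sort = ["conv1", "relu1", "fc1"]
    input_nodes = ["conv1"]
    output_nodes = ["fc1"]

    node_name = "fc1"
    del topo_sort[topo_sort.index(node_name)]
    if node_name in input_nodes:
        del input_nodes[input_nodes.index(node_name)]
    if node_name in output_nodes:
        del output_nodes[output_nodes.index(node_name)]

    print(topo_sort, input_nodes, output_nodes)  # ['conv1', 'relu1'] ['conv1'] []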
x2paddle/core/op_mapper.py

@@ -23,6 +23,8 @@ class OpMapper(object):
        self.tab = "    "
        self.net_code = list()
        self.weights = dict()
        self.inputs = list()
        self.outputs = list()

    def op_checker(self):
        unsupported_ops = set()

@@ -56,16 +58,83 @@ class OpMapper(object):
        self.add_codes("import paddle.fluid as fluid")
        self.add_codes("")

    def save_inference_model(self):
        print("Not Implement")

    def save_inference_model(self, save_dir):
        self.save_python_model(save_dir)

        import sys
        import paddle.fluid as fluid
        py_code_dir = os.path.join(save_dir, "model_with_code")
        sys.path.append(py_code_dir)
        import model
        try:
            inputs, outputs = model.x2paddle_net()
            input_names = [input.name for input in inputs]
            exe = fluid.Executor(fluid.CPUPlace())
            exe.run(fluid.default_startup_program())

            def if_exist(var):
                b = os.path.exists(
                    os.path.join(os.path.join(save_dir, var.name)))
                return b

            fluid.io.load_vars(exe,
                               save_dir,
                               fluid.default_main_program(),
                               predicate=if_exist)
            fluid.io.save_inference_model(
                dirname=os.path.join(save_dir, "inference_model"),
                feeded_var_names=input_names,
                target_vars=outputs,
                executor=exe,
                params_filename="__params__")
        except:
            raise Exception(
                "Paddle code was saved in {}/model.py, but seems there's wrong exist, please check model.py manually."
                .format(py_code_dir))

    def save_python_model(self, save_dir):
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        py_code_dir = os.path.join(save_dir, "model_with_code")
        if not os.path.exists(py_code_dir):
            os.makedirs(py_code_dir)

        for name, param in self.weights.items():
            export_paddle_param(param, name, save_dir)
            export_paddle_param(param, name, py_code_dir)
        self.add_heads()
        self.add_codes(self.net_code)
        self.add_codes("")
        self.add_codes(inspect.getsourcelines(init_net)[0])
        fp = open(os.path.join(save_dir, "model.py"), 'w')

        if hasattr(self, "used_custom_layers"):
            for _, layer_code in self.used_custom_layers.items():
                self.add_codes(layer_code, 0)

        self.add_codes("\ndef x2paddle_net():", 0)
        for i in range(len(self.graph.topo_sort)):
            node_name = self.graph.topo_sort[i]
            if hasattr(self, "omit_nodes") and node_name in self.omit_nodes:
                continue
            node = self.graph.get_node(node_name)
            self.add_codes(node.fluid_code.gen_codes(), 1)

        self.add_codes("", 0)
        input_str = "["
        for name in self.graph.input_nodes:
            input_str += (name + ", ")
        input_str = input_str.strip(", ") + "]"
        output_str = "["
        for name in self.graph.output_nodes:
            output_str += (name + ", ")
        output_str = output_str.strip(", ") + "]"

        return_code = "return {}, {}".format(input_str, output_str)
        self.add_codes(return_code, 1)
        self.add_codes("", 0)

        self.add_codes(inspect.getsourcelines(run_net)[0])
        fp = open(os.path.join(py_code_dir, "model.py"), 'w')
        fp.write(self.paddle_codes)
        fp.close()
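The exported directory can then be loaded back with the standard fluid API. A minimal sketch, assuming the converter was run with save_dir="output_dir" and using a hypothetical input shape:

    # Sketch only; the input shape is hypothetical and depends on the model.
    import numpy as np
    import paddle.fluid as fluid

    exe = fluid.Executor(fluid.CPUPlace())
    [program, feed_names, fetch_targets] = fluid.io.load_inference_model(
        dirname="output_dir/inference_model",
        executor=exe,
        params_filename="__params__")  # matches the save call above

    data = np.random.rand(1, 3, 224, 224).astype("float32")
    results = exe.run(program,
                      feed={feed_names[0]: data},
                      fetch_list=fetch_targets)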
x2paddle/core/util.py

@@ -55,8 +55,6 @@ def export_paddle_param(param, param_name, dir):
        assert param.size == 1, "Unexpected situation happend!"
        shape = [1]
    assert str(param.dtype) in dtype_map, "Unknown dtype of params."
    if not os.path.exists(dir):
        os.makedirs(dir)
    fp = open(os.path.join(dir, param_name), 'wb')
    numpy.array([0], dtype='int32').tofile(fp)

@@ -72,8 +70,9 @@ def export_paddle_param(param, param_name, dir):
    fp.close()


def init_net(param_dir="./"):
def run_net(param_dir="./"):
    import os
    inputs, outputs = x2paddle_net()
    exe = fluid.Executor(fluid.CUDAPlace(0))
    exe.run(fluid.default_startup_program())

@@ -85,3 +84,9 @@ def init_net(param_dir="./"):
                       param_dir,
                       fluid.default_main_program(),
                       predicate=if_exist)

    fluid.io.save_inference_model(dirname='inference_model',
                                  feeded_var_names=[i.name for i in inputs],
                                  target_vars=outputs,
                                  executor=exe,
                                  params_filename="__params__")
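The run_net() helper above is the function appended to the generated model_with_code/model.py, so the emitted module can rebuild the network, reload the exported parameters, and save an inference model on its own. A minimal usage sketch, assuming save_dir was "output_dir", that the parameters were exported next to the generated code as in save_python_model above, and noting that run_net() uses fluid.CUDAPlace(0), so a GPU build of Paddle is expected:

    # Sketch only; "output_dir" is a hypothetical save_dir.
    import sys
    sys.path.append("output_dir/model_with_code")

    import model
    # Rebuilds the graph from x2paddle_net(), loads the exported parameters
    # from param_dir, then writes an inference_model directory relative to
    # the current working directory, as run_net() does above.
    model.run_net(param_dir="output_dir/model_with_code")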
x2paddle/decoder/tf_decoder.py

@@ -163,6 +163,10 @@ class TFGraph(Graph):
        idx = self.topo_sort.index(node_name)
        del self.topo_sort[idx]

        if node_name in self.output_nodes:
            idx = self.output_nodes.index(node_name)
            self.output_nodes[idx] = input_node.layer_name


class TFDecoder(object):
    def __init__(self, pb_model):
x2paddle/op_mapper/caffe_op_mapper.py

@@ -26,7 +26,7 @@ class CaffeOpMapper(OpMapper):
        self.graph = decoder.caffe_graph
        self.weights = dict()
        resolver = decoder.resolver
        self.mylayers = {}
        self.used_custom_layers = {}
        self.inputs = self.graph.input_nodes
        self.outputs = self.graph.output_nodes
        if resolver.has_pycaffe():

@@ -67,8 +67,8 @@ class CaffeOpMapper(OpMapper):
                self.deal_custom_layer(node)
            else:
                raise Exception("Model are not supported yet.")

        for key in self.mylayers:
            self.net_code.append(self.mylayers[key])
        for key in self.used_custom_layers:
            self.net_code.append(self.used_custom_layers[key])
        for i in range(len(self.graph.topo_sort)):
            node_name = self.graph.topo_sort[i]

@@ -1050,5 +1050,5 @@ class CaffeOpMapper(OpMapper):
                              output=node,
                              param_attr=kwargs,
                              is_custom_layer=True)
        if op not in self.mylayers:
            self.mylayers[op] = custom_code
        if op not in self.used_custom_layers:
            self.used_custom_layers[op] = custom_code
x2paddle/op_mapper/tf_op_mapper.py

@@ -51,12 +51,15 @@ class TFOpMapper(OpMapper):
        self.weights = dict()
        self.omit_nodes = list()

    def run(self):
        print("Total nodes: {}".format(len(self.graph.topo_sort)))

        # check if ops in model are all supported
        # TODO

        not_placeholder = list()
        for name in self.graph.input_nodes:
            if self.graph.get_node(name).layer_type != "Placeholder":
                not_placeholder.append(name)
        for name in not_placeholder:
            idx = self.graph.input_nodes.index(name)
            del self.graph.input_nodes[idx]

        print("Total nodes: {}".format(len(self.graph.topo_sort)))
        for node_name in self.graph.topo_sort:
            node = self.graph.get_node(node_name)
            op = node.layer_type

@@ -70,13 +73,6 @@ class TFOpMapper(OpMapper):
            else:
                raise Exception("OP: [{}] not support yet".format(op))

        for i in range(len(self.graph.topo_sort)):
            node_name = self.graph.topo_sort[i]
            if node_name in self.omit_nodes:
                continue
            node = self.graph.get_node(node_name)
            self.net_code += node.fluid_code.gen_codes()

    def directly_map(self, node):
        assert node.layer_type in self.directly_map_ops
        op_info = self.directly_map_ops[node.layer_type]