BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)

Commit 5d579e1a (unverified)
Authored on Aug 14, 2018 by Qiao Longfei
Committed via GitHub on Aug 14, 2018
add export_for_deployment flag to save_inference_model (#12582)

Parent: 83c85f34
Showing 4 changed files with 59 additions and 16 deletions (+59 -16)
paddle/fluid/API.spec                                  +2   -2
python/paddle/fluid/framework.py                       +17  -12
python/paddle/fluid/io.py                              +6   -2
python/paddle/fluid/tests/unittests/test_program.py    +34  -0
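For orientation, the user-facing effect of the new flag is on fluid.io.save_inference_model. A minimal sketch of the default (deployment) path, assuming a simple feed-based network; the layer names, sizes, and the './inference_model' directory are illustrative and not part of this commit:

import paddle.fluid as fluid

# Illustrative toy network; only the new keyword argument below comes from this commit.
image = fluid.layers.data(name='image', shape=[784], dtype='float32')
pred = fluid.layers.fc(input=image, size=10, act='softmax')

exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())

# export_for_deployment=True (the default) strips read ops added by py_reader,
# which is the program layout the C++ inference library expects.
fluid.io.save_inference_model(
    dirname='./inference_model',
    feeded_var_names=['image'],
    target_vars=[pred],
    executor=exe,
    export_for_deployment=True)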
paddle/fluid/API.spec

@@ -6,7 +6,7 @@ paddle.fluid.Program.create_block ArgSpec(args=['self', 'parent_idx'], varargs=N
 paddle.fluid.Program.current_block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.get_desc ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.global_block ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.Program.inference_optimize ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
+paddle.fluid.Program.inference_optimize ArgSpec(args=['self', 'export_for_deployment'], varargs=None, keywords=None, defaults=(True,))
 paddle.fluid.Program.list_vars ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Program.optimized_guard ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
 paddle.fluid.Program.parse_from_string ArgSpec(args=['binary_str'], varargs=None, keywords=None, defaults=None)

@@ -74,7 +74,7 @@ paddle.fluid.io.save_persistables ArgSpec(args=['executor', 'dirname', 'main_pro
 paddle.fluid.io.load_vars ArgSpec(args=['executor', 'dirname', 'main_program', 'vars', 'predicate', 'filename'], varargs=None, keywords=None, defaults=(None, None, None, None))
 paddle.fluid.io.load_params ArgSpec(args=['executor', 'dirname', 'main_program', 'filename'], varargs=None, keywords=None, defaults=(None, None))
 paddle.fluid.io.load_persistables ArgSpec(args=['executor', 'dirname', 'main_program', 'filename'], varargs=None, keywords=None, defaults=(None, None))
-paddle.fluid.io.save_inference_model ArgSpec(args=['dirname', 'feeded_var_names', 'target_vars', 'executor', 'main_program', 'model_filename', 'params_filename'], varargs=None, keywords=None, defaults=(None, None, None))
+paddle.fluid.io.save_inference_model ArgSpec(args=['dirname', 'feeded_var_names', 'target_vars', 'executor', 'main_program', 'model_filename', 'params_filename', 'export_for_deployment'], varargs=None, keywords=None, defaults=(None, None, None, True))
 paddle.fluid.io.load_inference_model ArgSpec(args=['dirname', 'executor', 'model_filename', 'params_filename'], varargs=None, keywords=None, defaults=(None, None))
 paddle.fluid.io.get_inference_program ArgSpec(args=['target_vars', 'main_program'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.initializer.ConstantInitializer.__init__ ArgSpec(args=['self', 'value', 'force_cpu'], varargs=None, keywords=None, defaults=(0.0, False))
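paddle/fluid/API.spec records the ArgSpec of each public API, so the two edited lines simply reflect the new keyword argument and its default. As a rough illustration (assuming a Python 3 interpreter with a Paddle build containing this commit), the updated save_inference_model entry corresponds to:

import inspect
import paddle.fluid as fluid

# The argument names and defaults below are what the API.spec line encodes.
spec = inspect.getfullargspec(fluid.io.save_inference_model)
print(spec.args)      # ends with ['params_filename', 'export_for_deployment']
print(spec.defaults)  # (None, None, None, True)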
python/paddle/fluid/framework.py

@@ -1518,7 +1518,7 @@ class Program(object):
         The two code snippets above will generate same programs.
         """
         if for_test:
-            p = self.inference_optimize()
+            p = self.inference_optimize(export_for_deployment=False)
         else:
             p = Program()
             p.desc = core.ProgramDesc(self.desc)

@@ -1578,7 +1578,7 @@ class Program(object):
         res._sync_with_cpp()
         return res

-    def inference_optimize(self):
+    def inference_optimize(self, export_for_deployment=True):
         """
         This method will create a new program and do following adjustments on it:
         1. Remove all reader variables and their creator ops if exist.

@@ -1589,6 +1589,10 @@ class Program(object):
         attribute of operators to :code:`True`. All the :code:`Parameter`
         information will be lost.

+        Args:
+            export_for_deployment(bool): remove the read ops that are added by py_reader
+                                         for cpp inference library
+
         Notes: This API is a very low level API. Use
         :code:`Program.clone(for_test=True)` instead.

@@ -1603,16 +1607,17 @@ class Program(object):
         # remove all readers and the read_op if exist
         read_op_idx = 0
         root_block = res.desc.block(0)
-        while True:
-            if read_op_idx >= root_block.op_size() or root_block.op(
-                    read_op_idx).type() == 'read':
-                break
-            read_op_idx += 1
-        if read_op_idx < root_block.op_size():
-            root_block._remove_op(0, read_op_idx + 1)
-        for var in root_block.all_vars():
-            if var.type() == core.VarDesc.VarType.READER:
-                root_block._remove_var(var.name())
+        if export_for_deployment:
+            while True:
+                if read_op_idx >= root_block.op_size() or root_block.op(
+                        read_op_idx).type() == 'read':
+                    break
+                read_op_idx += 1
+            if read_op_idx < root_block.op_size():
+                root_block._remove_op(0, read_op_idx + 1)
+            for var in root_block.all_vars():
+                if var.type() == core.VarDesc.VarType.READER:
+                    root_block._remove_var(var.name())

         # change all `is_test` attributes to True
         for i in range(res.desc.num_blocks()):
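With this change, Program.clone(for_test=True) calls inference_optimize(export_for_deployment=False), so reader variables and read ops added by py_reader survive in the test-mode clone, while a plain inference_optimize() still strips them. A short sketch of the difference, using a toy py_reader network similar to the new unit test (the network details are illustrative assumptions):

import paddle.fluid as fluid

main = fluid.Program()
startup = fluid.Program()
with fluid.program_guard(main, startup):
    reader = fluid.layers.py_reader(
        capacity=4,
        shapes=[[-1, 10], [-1, 1]],
        lod_levels=[0, 0],
        dtypes=['float32', 'int64'])
    in_data, label = fluid.layers.read_file(reader)
    pred = fluid.layers.fc(in_data, size=2, act='softmax')

stripped = main.inference_optimize()   # default True: readers removed
kept = main.clone(for_test=True)       # now keeps the reader and read op

print([op.type for op in stripped.global_block().ops].count('read'))  # expected 0
print([op.type for op in kept.global_block().ops].count('read'))      # expected 1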
python/paddle/fluid/io.py

@@ -555,7 +555,8 @@ def save_inference_model(dirname,
                          executor,
                          main_program=None,
                          model_filename=None,
-                         params_filename=None):
+                         params_filename=None,
+                         export_for_deployment=True):
     """
     Prune the given `main_program` to build a new program especially for inference,
     and then save it and all related parameters to given `dirname` by the `executor`.

@@ -577,6 +578,8 @@ def save_inference_model(dirname,
         params_filename(str|None): The name of file to save all related parameters.
                                    If it is setted None, parameters will be saved
                                    in separate files .
+        export_for_deployment(bool): remove the read ops that are added by py_reader
+                                     for cpp inference lib. Default True

     Returns:
         None

@@ -643,7 +646,8 @@ def save_inference_model(dirname,
     copy_program.desc.flush()

     pruned_program = copy_program.prune(targets=target_vars)
-    inference_program = pruned_program.inference_optimize()
+    inference_program = pruned_program.inference_optimize(
+        export_for_deployment=export_for_deployment)
     fetch_var_names = [v.name for v in target_vars]

     prepend_feed_ops(inference_program, feeded_var_names)
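When the flag is left at its default, the saved directory is consumed exactly as before: the Python side reads it back with fluid.io.load_inference_model (unchanged by this commit), and the C++ inference library expects the same layout. A sketch that reuses the illustrative './inference_model' directory saved in the earlier example, and assumes that save already ran:

import numpy as np
import paddle.fluid as fluid

exe = fluid.Executor(fluid.CPUPlace())
# Returns the pruned inference program plus its feed names and fetch targets.
[infer_prog, feed_names, fetch_targets] = fluid.io.load_inference_model(
    dirname='./inference_model', executor=exe)
result = exe.run(infer_prog,
                 feed={feed_names[0]: np.random.rand(1, 784).astype('float32')},
                 fetch_list=fetch_targets)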
...
python/paddle/fluid/tests/unittests/test_program.py
浏览文件 @
5d579e1a
...
@@ -17,6 +17,7 @@ import unittest
...
@@ -17,6 +17,7 @@ import unittest
from
paddle.fluid.framework
import
Program
,
default_main_program
,
program_guard
,
grad_var_name
from
paddle.fluid.framework
import
Program
,
default_main_program
,
program_guard
,
grad_var_name
import
paddle.fluid.layers
as
layers
import
paddle.fluid.layers
as
layers
import
paddle.fluid
as
fluid
main_program
=
default_main_program
()
main_program
=
default_main_program
()
...
@@ -98,6 +99,39 @@ class TestProgram(unittest.TestCase):
...
@@ -98,6 +99,39 @@ class TestProgram(unittest.TestCase):
new_program
=
main_program
.
clone
()
new_program
=
main_program
.
clone
()
self
.
assertNotEqual
(
0
,
len
(
new_program
.
blocks
[
0
].
all_parameters
()))
self
.
assertNotEqual
(
0
,
len
(
new_program
.
blocks
[
0
].
all_parameters
()))
def
test_program_inference_optimize
(
self
):
def
net
():
reader
=
fluid
.
layers
.
py_reader
(
capacity
=
10
,
shapes
=
[[
-
1
,
10
],
[
-
1
,
1
]],
lod_levels
=
[
0
,
0
],
dtypes
=
[
'float32'
,
'int64'
],
use_double_buffer
=
True
)
in_data
,
label
=
fluid
.
layers
.
read_file
(
reader
)
predict_label
=
fluid
.
layers
.
fc
(
in_data
,
size
=
2
,
act
=
'softmax'
)
loss
=
fluid
.
layers
.
mean
(
fluid
.
layers
.
cross_entropy
(
input
=
predict_label
,
label
=
label
))
optimizer
=
fluid
.
optimizer
.
Adam
()
optimizer
.
minimize
(
loss
)
startup_program
=
fluid
.
Program
()
main_program
=
fluid
.
Program
()
with
fluid
.
program_guard
(
main_program
,
startup_program
):
net
()
no_read_program
=
main_program
.
inference_optimize
()
keep_read_program
=
main_program
.
inference_optimize
(
export_for_deployment
=
False
)
no_read_ops
=
no_read_program
.
global_block
().
ops
keep_read_ops
=
keep_read_program
.
global_block
().
ops
self
.
assertEqual
(
len
(
keep_read_ops
)
-
len
(
no_read_ops
),
2
)
self
.
assertEqual
(
keep_read_ops
[
0
].
type
,
'create_double_buffer_reader'
)
self
.
assertEqual
(
keep_read_ops
[
1
].
type
,
'read'
)
for
i
in
range
(
len
(
no_read_ops
)):
self
.
assertEqual
(
no_read_ops
[
i
].
type
,
keep_read_ops
[
i
+
2
].
type
)
if
__name__
==
'__main__'
:
if
__name__
==
'__main__'
:
unittest
.
main
()
unittest
.
main
()