BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit eba71774
Authored Aug 17, 2018 by tangwei12

add unit test and code fix

Parent: ed937bc6
Showing 3 changed files with 55 additions and 7 deletions (+55 −7)
paddle/fluid/API.spec (+1 −1)
python/paddle/fluid/io.py (+5 −6)
python/paddle/fluid/tests/unittests/test_dist_transpiler.py (+49 −0)
paddle/fluid/API.spec

@@ -77,8 +77,8 @@ paddle.fluid.io.save_persistables ArgSpec(args=['executor', 'dirname', 'main_pro
 paddle.fluid.io.load_vars ArgSpec(args=['executor', 'dirname', 'main_program', 'vars', 'predicate', 'filename'], varargs=None, keywords=None, defaults=(None, None, None, None))
 paddle.fluid.io.load_params ArgSpec(args=['executor', 'dirname', 'main_program', 'filename'], varargs=None, keywords=None, defaults=(None, None))
 paddle.fluid.io.load_persistables ArgSpec(args=['executor', 'dirname', 'main_program', 'filename'], varargs=None, keywords=None, defaults=(None, None))
-paddle.fluid.io.load_inference_model ArgSpec(args=['dirname', 'executor', 'model_filename', 'params_filename', 'pserver_endpoints'], varargs=None, keywords=None, defaults=(None, None))
 paddle.fluid.io.save_inference_model ArgSpec(args=['dirname', 'feeded_var_names', 'target_vars', 'executor', 'main_program', 'model_filename', 'params_filename', 'export_for_deployment'], varargs=None, keywords=None, defaults=(None, None, None, True))
+paddle.fluid.io.load_inference_model ArgSpec(args=['dirname', 'executor', 'model_filename', 'params_filename', 'pserver_endpoints'], varargs=None, keywords=None, defaults=(None, None))
 paddle.fluid.io.get_inference_program ArgSpec(args=['target_vars', 'main_program'], varargs=None, keywords=None, defaults=(None,))
 paddle.fluid.initializer.ConstantInitializer.__init__ ArgSpec(args=['self', 'value', 'force_cpu'], varargs=None, keywords=None, defaults=(0.0, False))
 paddle.fluid.initializer.UniformInitializer.__init__ ArgSpec(args=['self', 'low', 'high', 'seed'], varargs=None, keywords=None, defaults=(-1.0, 1.0, 0))
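The hunk above only repositions the load_inference_model entry; the signature itself, including the trailing pserver_endpoints argument, is unchanged from the parent revision. Below is a minimal sketch (not part of the commit) of how that signature is invoked, with an illustrative model directory and the endpoint list left at its default:

    # Sketch only: the directory name is a placeholder; with pserver_endpoints=None
    # the call behaves like the pre-existing single-machine API.
    import paddle.fluid as fluid

    exe = fluid.Executor(fluid.CPUPlace())
    [inference_program, feed_names, fetch_targets] = fluid.io.load_inference_model(
        dirname="./model_dir", executor=exe, pserver_endpoints=None)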
python/paddle/fluid/io.py

@@ -672,12 +672,11 @@ def save_inference_model(dirname,
     save_persistables(executor, dirname, inference_program, params_filename)

     # if there is lookup table, the trainer 0 will notify all pserver to save.
-    if main_program._is_distributed and main_program._is_chief:
-        if main_program._distributed_lookup_table:
-            lookup_table_filename = os.path.join(dirname, "__lookup_table__")
-            _save_lookup_tables_by_notify(executor, lookup_table_filename,
-                                          main_program._distributed_lookup_table,
-                                          main_program._endpoints)
+    if main_program._is_distributed and main_program._is_chief and main_program._distributed_lookup_table:
+        lookup_table_filename = os.path.join(dirname, "__lookup_table__")
+        _save_lookup_tables_by_notify(executor, lookup_table_filename,
+                                      main_program._distributed_lookup_table,
+                                      main_program._endpoints)


 def load_inference_model(dirname,
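The change above only flattens the guard around the pserver notification: joining the two nested conditions with `and` fires in exactly the same cases as the nested ifs it replaces. A tiny standalone sketch of that equivalence, using made-up stand-ins for the main_program attributes:

    # Illustrative truth-table check; the three flags stand in for
    # main_program._is_distributed, ._is_chief and ._distributed_lookup_table.
    for is_distributed in (True, False):
        for is_chief in (True, False):
            for lookup_table in ("emb_table", None):
                nested = False
                if is_distributed and is_chief:
                    if lookup_table:
                        nested = True
                flat = bool(is_distributed and is_chief and lookup_table)
                assert nested == flat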
python/paddle/fluid/tests/unittests/test_dist_transpiler.py

@@ -536,6 +536,19 @@ class TestAsyncDistLookupTable(TestDistLookupTableBase):
         self.assertEqual([op.type for op in trainer.blocks[0].ops], ops)


+class TestDistArgsInProgram(TestDistLookupTableBase):
+    def net_conf(self):
+        self.network_with_table(is_sparse=True, is_distributed=True)
+
+    def transpiler_test_impl(self):
+        config = fluid.DistributeTranspilerConfig()
+        pserver1, _ = self.get_pserver(self.pserver1_ep, config, False)
+
+        self.assertTrue(pserver1._is_chief)
+        self.assertTrue(pserver1._is_distributed)
+        self.assertEqual(pserver1._distributed_lookup_table)
+
+
 class TestRMSPropOptimizer(TranspilerTest):
     def net_conf(self):
         x = fluid.layers.data(name='x', shape=[1000], dtype='float32')
@@ -566,5 +579,41 @@ class TestRMSPropOptimizer(TranspilerTest):
         self.assertEqual(moment_var.shape, (500, 1000))


+class TestLoadSliceVar(TranspilerTest):
+    def net_conf(self):
+        x = fluid.layers.data(name='x', shape=[1000], dtype='float32')
+        y_predict = fluid.layers.fc(input=x,
+                                    size=1000,
+                                    act=None,
+                                    param_attr=fluid.ParamAttr(name='fc_w'),
+                                    bias_attr=fluid.ParamAttr(name='fc_b'))
+        y = fluid.layers.data(name='y', shape=[1], dtype='float32')
+        cost = fluid.layers.square_error_cost(input=y_predict, label=y)
+        avg_cost = fluid.layers.mean(cost)
+        optimizer = fluid.optimizer.RMSProp(learning_rate=0.1)
+        optimizer.minimize(avg_cost)
+        return
+
+    def transpiler_test_impl(self):
+        pserver, _ = self.get_pserver(self.pserver1_ep)
+        pserver2, _ = self.get_pserver(self.pserver2_ep)
+
+        self.assertTrue(pserver._slice_vars_and_atts)
+        self.assertTrue(pserver2._slice_vars_and_atts)
+
+        for idx in xrange(len(pserver._slice_vars_and_atts)):
+            self.assertEqual(pserver._slice_vars_and_atts[idx][0],
+                             pserver2._slice_vars_and_atts[idx][0])
+
+            total_numel = reduce(lambda x, y: x * y,
+                                 pserver._slice_vars_and_atts[idx][0].shape)
+            self.assertEqual(
+                total_numel,
+                reduce(lambda x, y: x * y,
+                       pserver._slice_vars_and_atts[idx][2].shape) + reduce(
+                           lambda x, y: x * y,
+                           pserver2._slice_vars_and_atts[idx][2].shape))
+
+
 if __name__ == "__main__":
     unittest.main()
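TestLoadSliceVar asserts that slicing a variable across the two pservers preserves its element count: for each entry of _slice_vars_and_atts, the numel of the shape at index 0 (which the assertions read as the full variable) equals the sum of the numels of the index-2 shapes (the per-pserver slices). A self-contained sketch of that invariant follows, with shapes mirroring the test's 'fc_w' parameter; note the test itself relies on the Python 2 builtins xrange and reduce, so the sketch imports functools.reduce for Python 3:

    # Hedged illustration of the invariant only; the [0]/[2] indexing is read
    # off the assertions above, and the shapes mirror the 1000x1000 'fc_w'
    # weight split in half across the two pservers.
    from functools import reduce

    def numel(shape):
        return reduce(lambda a, b: a * b, shape)

    full_shape = (1000, 1000)        # original parameter
    slice_on_pserver1 = (500, 1000)  # slice placed on pserver 1
    slice_on_pserver2 = (500, 1000)  # slice placed on pserver 2

    assert numel(full_shape) == numel(slice_on_pserver1) + numel(slice_on_pserver2)

Since the module ends in unittest.main(), the new cases run together with the rest of the file, e.g. by executing test_dist_transpiler.py from python/paddle/fluid/tests/unittests under the Python 2 interpreter this test targets.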