机器未来 / Paddle (forked from PaddlePaddle / Paddle)
Commit bc4f16ca

remove some comments

Authored by dongdaxiang on Dec 17, 2018; committed by heqiaozhi on Dec 17, 2018.
Parent commit: 921b7f45
Showing 4 changed files with 3 additions and 14 deletions (+3, -14):
paddle/fluid/API.spec                            +2  -2
python/paddle/fluid/distributed/helper.py        +1  -5
python/paddle/fluid/distributed/node.py          +0  -4
python/paddle/fluid/distributed/ps_instance.py   +0  -3
paddle/fluid/API.spec

@@ -37,14 +37,14 @@ paddle.fluid.DataFeedDesc.desc ArgSpec(args=['self'], varargs=None, keywords=Non
 paddle.fluid.DataFeedDesc.set_batch_size ArgSpec(args=['self', 'batch_size'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.DataFeedDesc.set_dense_slots ArgSpec(args=['self', 'dense_slots_name'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.DataFeedDesc.set_use_slots ArgSpec(args=['self', 'use_slots_name'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.AsyncExecutor.__init__ ArgSpec(args=['self', 'place'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.AsyncExecutor.__init__ ArgSpec(args=['self', 'place', 'run_mode'], varargs=None, keywords=None, defaults=(None, ''))
 paddle.fluid.AsyncExecutor.config_distributed_nodes ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.AsyncExecutor.download_data ArgSpec(args=['self', 'afs_path', 'local_path', 'fs_default_name', 'ugi', 'file_cnt', 'hadoop_home', 'process_num'], varargs=None, keywords=None, defaults=('$HADOOP_HOME', 12))
 paddle.fluid.AsyncExecutor.get_instance ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.AsyncExecutor.init_model ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.AsyncExecutor.init_server ArgSpec(args=['self', 'dist_desc'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.AsyncExecutor.init_worker ArgSpec(args=['self', 'dist_desc', 'startup_program'], varargs=None, keywords=None, defaults=None)
-paddle.fluid.AsyncExecutor.run ArgSpec(args=['self', 'program', 'data_feed', 'filelist', 'thread_num', 'fetch', 'debug'], varargs=None, keywords=None, defaults=(False,))
+paddle.fluid.AsyncExecutor.run ArgSpec(args=['self', 'program', 'data_feed', 'filelist', 'thread_num', 'fetch', 'mode', 'debug'], varargs=None, keywords=None, defaults=('', False))
 paddle.fluid.AsyncExecutor.save_model ArgSpec(args=['self', 'save_path'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.AsyncExecutor.stop ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.io.save_vars ArgSpec(args=['executor', 'dirname', 'main_program', 'vars', 'predicate', 'filename'], varargs=None, keywords=None, defaults=(None, None, None, None))
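The two updated entries record that paddle.fluid.AsyncExecutor.__init__ now carries a run_mode argument (default '') and that run() gains a mode argument (default '') ahead of debug. The entries follow the ArgSpec format produced by inspect, so the change can be checked directly against an installed build; the snippet below is a minimal sketch assuming a Paddle build that contains this commit:

import inspect

import paddle.fluid as fluid

# Compare the live signatures against the updated API.spec entries.
init_spec = inspect.getargspec(fluid.AsyncExecutor.__init__)
print(init_spec.args)      # expected: ['self', 'place', 'run_mode']
print(init_spec.defaults)  # expected: (None, '')

run_spec = inspect.getargspec(fluid.AsyncExecutor.run)
print(run_spec.args)       # expected: ['self', 'program', 'data_feed', 'filelist',
                           #            'thread_num', 'fetch', 'mode', 'debug']
print(run_spec.defaults)   # expected: ('', False)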
python/paddle/fluid/distributed/helper.py

@@ -28,7 +28,7 @@ class FileSystem(object):
     def __init__(self,
                  fs_type="afs",
-                 uri="afs://tianqi.afs.baidu.com:9902",
+                 uri="afs://xx",
                  user=None,
                  passwd=None,
                  hadoop_bin=""):
@@ -37,10 +37,6 @@ class FileSystem(object):
         assert hadoop_bin != None
         import ps_pb2 as pslib
         self.fs_client = pslib.FsClientParameter()
-        #if fs_type == "afs":
-        #    fs_client.fs_type = pslib.FsApiType.AFS
-        #else:
-        #    fs_client.fs_type = pslib.FsApiType.HDFS
         self.fs_client.uri = uri
         self.fs_client.user = user
         self.fs_client.passwd = passwd
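For orientation, FileSystem is built from the keyword arguments shown in the first hunk, and its constructor imports the ps_pb2 protobuf module and asserts that hadoop_bin is not None. A minimal construction sketch, assuming ps_pb2 is importable in the environment and using placeholder credentials and paths that do not come from this commit:

# Construction sketch for FileSystem as it reads after this change.
# All values below are placeholders, not real endpoints or credentials.
from paddle.fluid.distributed.helper import FileSystem

fs = FileSystem(fs_type="afs",
                uri="afs://xx",                  # default shown in the diff
                user="hadoop_user",              # placeholder
                passwd="hadoop_passwd",          # placeholder
                hadoop_bin="/usr/local/hadoop/bin/hadoop")  # must not be None per the assert

The deleted comment block hints that fs_client.fs_type was once meant to be chosen from fs_type (AFS vs. HDFS); after this commit only uri, user, and passwd are copied into the FsClientParameter in the shown hunk.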
python/paddle/fluid/distributed/node.py

@@ -75,8 +75,6 @@ class DownpourServer(Server):
         table.accessor.embedx_dim = 8
         table.accessor.embedx_threshold = 5
         table.accessor.fea_dim = 11
-        #table.accessor.fea_dim = abs(reduce(lambda x, y: x * y,
-        #                                    slot_value_var[0].shape, 1))
         table.accessor.downpour_accessor_param.nonclk_coeff = 0.1
         table.accessor.downpour_accessor_param.click_coeff = 2
         table.accessor.downpour_accessor_param.base_threshold = 0.2
@@ -134,8 +132,6 @@ class DownpourWorker(Worker):
     def __init__(self, window):
         self.window = window
         self.worker_ = pslib.DownpourTrainerParameter()
-        #self.worker_.pull_dense_per_batch = window
-        #self.worker_.push_dense_per_batch = window

     def add_sparse_table(self, table_id, learning_rate, slot_key_vars,
                          slot_value_vars):
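The first deleted comment captured an alternative way to derive fea_dim: the product of the first slot value variable's shape dimensions, which the code now hardcodes as 11. Purely as an illustration of that expression (the shape tuple below is made up, not taken from the commit):

from functools import reduce  # builtin in Python 2, lives in functools in Python 3

shape = (11, 1)  # hypothetical slot_value_var[0].shape
fea_dim = abs(reduce(lambda x, y: x * y, shape, 1))
print(fea_dim)   # 11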
python/paddle/fluid/distributed/ps_instance.py

@@ -59,9 +59,6 @@ class PaddlePSInstance(object):
         else:
             self._node_type = -1

-        #if self._rankid == 0:
-            #print "node type: ", self._node_type
-
     def _split_comm(self):
         if self.is_server():
             self._comm = self.dh.comm.Split(self._node_type)
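_split_comm partitions a communicator by node type: self.dh.comm.Split(self._node_type) groups processes that share the same value. The helper object dh is not part of this diff, so the standalone sketch below assumes an mpi4py-style communicator underneath and uses a made-up coloring rule purely to illustrate Split:

# Run under MPI, e.g.: mpirun -n 4 python split_demo.py
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()

# Hypothetical coloring: even ranks form one group, odd ranks the other.
node_type = rank % 2
sub_comm = comm.Split(node_type)  # processes with the same color share a sub-communicator
print("world rank %d -> group %d, sub rank %d"
      % (rank, node_type, sub_comm.Get_rank()))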