Commit 6de9ebc6 in 机器未来 / Paddle (forked from PaddlePaddle / Paddle)
Authored on Feb 03, 2019 by dongdaxiang
Parent: 97d5cd30

refine VLOG in fleet_wrapper.h

test=develop
3 changed files with 11 additions and 8 deletions (+11, -8):

paddle/fluid/framework/fleet/fleet_wrapper.cc   (+7, -7)
paddle/fluid/framework/multi_trainer.cc         (+1, -0)
python/paddle/fluid/trainer_desc.py             (+3, -1)
paddle/fluid/framework/fleet/fleet_wrapper.cc

@@ -42,13 +42,13 @@ std::shared_ptr<paddle::distributed::PSlib> FleetWrapper::pslib_ptr_ = NULL;
 void FleetWrapper::InitServer(const std::string& dist_desc, int index) {
 #ifdef PADDLE_WITH_PSLIB
   if (!is_initialized_) {
-    LOG(WARNING) << "Going to init server";
+    VLOG(3) << "Going to init server";
     pslib_ptr_ = std::shared_ptr<paddle::distributed::PSlib>(
         new paddle::distributed::PSlib());
     pslib_ptr_->init_server(dist_desc, index);
     is_initialized_ = true;
   } else {
-    LOG(WARNING) << "Server can be initialized only once";
+    VLOG(3) << "Server can be initialized only once";
   }
 #endif
 }
@@ -58,7 +58,7 @@ void FleetWrapper::InitWorker(const std::string& dist_desc,
                               int node_num, int index) {
 #ifdef PADDLE_WITH_PSLIB
   if (!is_initialized_) {
-    LOG(WARNING) << "Going to init server";
+    VLOG(3) << "Going to init worker";
     pslib_ptr_ = std::shared_ptr<paddle::distributed::PSlib>(
         new paddle::distributed::PSlib());
     pslib_ptr_->init_worker(dist_desc,
@@ -66,21 +66,21 @@ void FleetWrapper::InitWorker(const std::string& dist_desc,
                             node_num, index);
     is_initialized_ = true;
   } else {
-    LOG(WARNING) << "Worker can be initialized only once";
+    VLOG(3) << "Worker can be initialized only once";
   }
 #endif
 }

 void FleetWrapper::StopServer() {
 #ifdef PADDLE_WITH_PSLIB
-  LOG(WARNING) << "Going to stop server";
+  VLOG(3) << "Going to stop server";
   pslib_ptr_->stop_server();
 #endif
 }

 uint64_t FleetWrapper::RunServer() {
 #ifdef PADDLE_WITH_PSLIB
-  LOG(WARNING) << "Going to run server";
+  VLOG(3) << "Going to run server";
   return pslib_ptr_->run_server();
 #else
   return 0;
@@ -90,7 +90,7 @@ uint64_t FleetWrapper::RunServer() {
 void FleetWrapper::GatherServers(const std::vector<uint64_t>& host_sign_list,
                                  int node_num) {
 #ifdef PADDLE_WITH_PSLIB
-  LOG(WARNING) << "Going to gather server ips";
+  VLOG(3) << "Going to gather server ips";
   pslib_ptr_->gather_servers(const_cast<uint64_t*>(host_sign_list.data()),
                              node_num);
 #endif
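A note on the logging change: in glog (the library behind Paddle's LOG and VLOG macros), LOG(WARNING) always prints, while VLOG(3) prints only when the runtime verbosity level is at least 3, so these routine server/worker lifecycle messages stay quiet by default. A minimal standalone sketch of the difference, assuming glog is installed (illustrative only, not Paddle code):

// vlog_demo.cc: minimal sketch of glog verbosity levels.
#include <glog/logging.h>

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);  // initialize glog once per process
  FLAGS_logtostderr = true;            // log to stderr instead of files

  LOG(WARNING) << "always printed, regardless of verbosity";
  VLOG(3) << "printed only when verbosity >= 3";
  return 0;
}

Run plainly, only the warning appears; run with GLOG_v=3 set in the environment, the VLOG(3) line appears as well.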
paddle/fluid/framework/multi_trainer.cc

@@ -39,6 +39,7 @@ void MultiTrainer::Initialize(const TrainerDesc& trainer_desc) {
   for (unsigned i = 0; i < trainer_desc.filelist_size(); ++i) {
     filelist_vec.push_back(trainer_desc.filelist(i));
   }
+  readers_[0]->SetFileList(filelist_vec);
 }

 // call only after all resources are set in current trainer
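The single added line applies the file list collected from the TrainerDesc proto to the first reader; before this change, filelist_vec was built in the loop but never handed to anything. A small sketch of the build-then-apply pattern, with hypothetical names (Reader and its SetFileList here are illustrative stand-ins, not Paddle's actual reader interface):

// build a file list, then apply it in one call so the reader
// sees the complete list (hypothetical stand-in types).
#include <cstdio>
#include <string>
#include <vector>

struct Reader {
  std::vector<std::string> files;
  void SetFileList(const std::vector<std::string>& f) { files = f; }
};

int main() {
  std::vector<std::string> filelist_vec = {"part-000", "part-001"};
  Reader reader;
  reader.SetFileList(filelist_vec);  // the step the patch adds: without it,
                                     // the collected list is never used
  std::printf("reader has %zu files\n", reader.files.size());
  return 0;
}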
python/paddle/fluid/trainer_desc.py

@@ -29,7 +29,9 @@ class TrainerDesc(object):
         text_format.Parse(f.read(), self.proto_desc)
         '''
         self.proto_desc = trainer_desc_pb2.TrainerDesc()
-        self.proto_desc.thread_num = 12
+        import multiprocessing as mp
+        # set default thread num == cpu count
+        self.proto_desc.thread_num = mp.cpu_count()

     def set_thread(self, thread_num):
         self.proto_desc.thread_num = thread_num
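This change replaces the hardcoded default of 12 threads with the machine's logical CPU count from multiprocessing.cpu_count(), so the default scales with the host instead of over- or under-subscribing it. For comparison, a hedged C++ sketch of the same default using std::thread::hardware_concurrency(), which, unlike cpu_count(), may return 0 when the count is unknown:

#include <cstdio>
#include <thread>

int main() {
  // hardware_concurrency() may return 0 if the value cannot be determined,
  // so fall back to a conservative default in that case.
  unsigned n = std::thread::hardware_concurrency();
  unsigned thread_num = (n != 0) ? n : 1;
  std::printf("default thread_num = %u\n", thread_num);
  return 0;
}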