BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit 40d0fff2
Authored on Dec 14, 2017 by typhoonzero

single pserver workable version

Parent: 2b47fb3d
2 changed files with 39 additions and 35 deletions (+39 −35):

paddle/operators/recv_op.cc          +38 −34
python/paddle/v2/fluid/executor.py    +1 −1
paddle/operators/recv_op.cc  (+38 −34)

@@ -69,43 +69,47 @@ class RecvOp : public framework::OperatorBase {

Before this commit, RecvOp ran the receive → optimize → send-back sequence a single time:

    auto param_list = Attr<std::vector<std::string>>("ParamList");
    auto grad_list = Attr<std::vector<std::string>>("GradList");
    size_t param_count = param_list.size();
    for (size_t i = 0; i < param_count; ++i) {
      // blocking get one var from client.
      const detail::TensorWithName &v = rpc_service_->Get();
      auto grad_var_name = v.first;
      auto it = std::find(grad_list.begin(), grad_list.end(), grad_var_name);
      std::string param_var_name;
      if (it != grad_list.end()) {
        param_var_name = param_list[it - grad_list.begin()];
      }
      VLOG(10) << "recved grad: " << grad_var_name
               << " updating param: " << param_var_name;
      auto *var = recv_scope.Var(grad_var_name);
      auto *tensor = var->GetMutable<framework::LoDTensor>();
      // FIXME(typhoonzero): do not copy
      framework::CopyFrom(v.second, dev_ctx.GetPlace(), dev_ctx, tensor);
    }

    std::string program_str = Attr<std::string>("OptimizeProgram");
    framework::ProgramDesc program_desc;
    program_desc.ParseFromString(program_str);
    framework::ProgramDescBind program(program_desc);
    framework::Executor executor(dev_ctx);
    // Run sub graph to get optimized tensor
    try {
      executor.Run(program, &recv_scope, 0, /*global_block*/
                   false /*create_local_scope*/, false /*create_vars*/);
    } catch (std::exception &e) {
      LOG(ERROR) << "run sub program error " << e.what();
    }

    for (size_t i = 0; i < param_count; ++i) {
      auto *out_var = recv_scope.FindVar(param_list[i]);
      detail::TensorWithName out;
      out.first = param_list[i];
      out.second = out_var->Get<framework::LoDTensor>();
      rpc_service_->Push(out);
    }
  }

 protected:

After this commit, the same sequence is wrapped in an endless serve loop, so a single pserver keeps handling one cluster batch after another:

    auto param_list = Attr<std::vector<std::string>>("ParamList");
    auto grad_list = Attr<std::vector<std::string>>("GradList");
    size_t param_count = param_list.size();
    // TODO(typhoonzero): change this to a while_op for every cluster-batch.
    while (true) {
      // TODO(typhoonzero): get from multiple trainers.
      for (size_t i = 0; i < param_count; ++i) {
        // blocking get one var from client.
        const detail::TensorWithName &v = rpc_service_->Get();
        auto grad_var_name = v.first;
        auto it = std::find(grad_list.begin(), grad_list.end(), grad_var_name);
        std::string param_var_name;
        if (it != grad_list.end()) {
          param_var_name = param_list[it - grad_list.begin()];
        }
        VLOG(10) << "recved grad: " << grad_var_name
                 << " updating param: " << param_var_name;
        auto *var = recv_scope.Var(grad_var_name);
        auto *tensor = var->GetMutable<framework::LoDTensor>();
        // FIXME(typhoonzero): do not copy
        framework::CopyFrom(v.second, dev_ctx.GetPlace(), dev_ctx, tensor);
      }

      std::string program_str = Attr<std::string>("OptimizeProgram");
      framework::ProgramDesc program_desc;
      program_desc.ParseFromString(program_str);
      framework::ProgramDescBind program(program_desc);
      framework::Executor executor(dev_ctx);
      // Run sub graph to get optimized tensor
      try {
        executor.Run(program, &recv_scope, 0, /*global_block*/
                     false /*create_local_scope*/, false /*create_vars*/);
      } catch (std::exception &e) {
        LOG(ERROR) << "run sub program error " << e.what();
      }

      for (size_t i = 0; i < param_count; ++i) {
        auto *out_var = recv_scope.FindVar(param_list[i]);
        detail::TensorWithName out;
        out.first = param_list[i];
        out.second = out_var->Get<framework::LoDTensor>();
        rpc_service_->Push(out);
      }
    }  // while(true)
  }

 protected:
...
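For intuition, below is a minimal, runnable sketch of the serve-loop pattern this hunk implements, written in plain Python with hypothetical names (BlockingService, serve, a plain SGD step). It is not Paddle code and does not use the real rpc_service_, Executor, or OptimizeProgram machinery: the server blocks on incoming (gradient name, value) pairs, maps each gradient to its parameter through the paired ParamList/GradList attributes, applies an optimize step, and pushes the updated parameters back to the trainer, repeating instead of exiting after one pass.

    # Minimal sketch of the pserver serve loop; all names are hypothetical
    # stand-ins, not Paddle APIs. Values are plain floats to stay runnable.
    import queue
    import threading

    class BlockingService:
        """Stands in for rpc_service_: trainers push gradients, the server gets them."""
        def __init__(self):
            self.incoming = queue.Queue()    # gradients sent by trainers
            self.outgoing = queue.Queue()    # updated parameters sent back

        def get(self):
            return self.incoming.get()       # blocking, like rpc_service_->Get()

        def push(self, named_tensor):
            self.outgoing.put(named_tensor)  # like rpc_service_->Push(out)

    def serve(service, param_list, grad_list, params, lr=0.1, max_batches=3):
        """Receive one gradient per parameter, run an optimize step, send params back.

        The real RecvOp wraps this body in `while (true)`; max_batches exists
        only so the sketch terminates.
        """
        for _ in range(max_batches):              # stands in for while (true)
            grads = {}
            for _ in range(len(param_list)):      # blocking get one var from client
                grad_name, value = service.get()
                if grad_name in grad_list:        # map gradient name -> parameter name
                    param_name = param_list[grad_list.index(grad_name)]
                    grads[param_name] = value
            for name in param_list:               # "OptimizeProgram" stand-in: plain SGD
                params[name] -= lr * grads[name]
            for name in param_list:               # push updated parameters back
                service.push((name, params[name]))

    if __name__ == "__main__":
        service = BlockingService()
        param_list, grad_list = ["w"], ["w@GRAD"]
        params = {"w": 1.0}

        server = threading.Thread(
            target=serve, args=(service, param_list, grad_list, params))
        server.start()
        for _ in range(3):                        # a fake trainer sends three batches
            service.incoming.put(("w@GRAD", 0.5))
            print("updated param:", service.outgoing.get())
        server.join()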
python/paddle/v2/fluid/executor.py  (+1 −1)

@@ -93,7 +93,7 @@ class Executor(object):
             dtype=var.dtype,
             type=var.type,
             lod_level=var.lod_level,
-            persistable=True)
+            persistable=var.persistable)

     def _optimize_distributed(self, optimize_ops, program, params_and_grads,
                               **kwargs):
...
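The one-line Python change stops forcing every mirrored variable to be persistable and instead carries over the source variable's own flag. A tiny illustration of the difference, using a hypothetical Var class rather than the fluid API:

    # Hypothetical illustration only -- Var and mirror() are not the fluid API.
    class Var(object):
        def __init__(self, name, dtype, type, lod_level, persistable):
            self.name = name
            self.dtype = dtype
            self.type = type
            self.lod_level = lod_level
            self.persistable = persistable

    def mirror(var):
        # Before: persistable=True, so every mirrored variable became persistable.
        # After:  persistable=var.persistable, so the copy keeps the source's flag.
        return Var(var.name, var.dtype, var.type, var.lod_level,
                   persistable=var.persistable)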