Commit e6079390 authored by typhoonzero

add example doc in transpiler

Parent 17f9be55
...
@@ -98,7 +98,8 @@ class RecvOp : public framework::OperatorBase {
     auto *merged_grad = recv_scope.FindVar(grad_var_name);
     if (merged_grad == nullptr) {
       // create output of merged var.
-      recv_scope.Var(grad_var_name);
+      auto merged_var = recv_scope.Var(grad_var_name);
+      merged_var->GetMutable<framework::LoDTensor>();
     }
     if (trainer_count > 1) {
...
...
@@ -66,6 +66,24 @@ class DistributeTranspiler:
         Use different methods to split trainable variables to different
         parameter servers.
 
+        Example to run:
+
+        exe = fluid.Executor(place)
+        t = fluid.DistributeTranspiler()
+        t.transpile(optimize_ops, params_grads, pservers="127.0.0.1:6174", trainers=1)
+
+        pserver_endpoint = os.getenv("PSERVER")
+        if pserver_endpoint:
+            pserver_prog = t.get_pserver_program(pserver_endpoint, optimize_ops)
+            exe.run(fluid.default_startup_program())
+            exe.run(pserver_prog)
+        else:
+            feeder = fluid.DataFeeder(feed_list=[images, label], place=place)
+            exe.run(fluid.default_startup_program())
+
+            for pass_id in range(PASS_NUM):
+                ...
+
         :param optimize_ops: op list of optimization, should be the
                              return value of Optimizer.minimize
         :type optimize_ops: list
...
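
The docstring example above elides the model definition, the data pipeline, and the trainer loop. The following is a fuller sketch of the same pserver/trainer flow, assuming the paddle.v2.fluid API of this period; the softmax-regression network, the MNIST reader, PASS_NUM, and the learning rate are illustrative assumptions rather than part of the commit, and fluid's import paths changed across releases.

import os
import paddle.v2 as paddle
import paddle.v2.fluid as fluid

PASS_NUM = 10  # illustrative placeholder, not taken from the commit

# Build a small softmax-regression network so that Optimizer.minimize can
# produce the optimize_ops / params_grads pair that transpile() consumes.
images = fluid.layers.data(name='x', shape=[784], dtype='float32')
label = fluid.layers.data(name='y', shape=[1], dtype='int64')
predict = fluid.layers.fc(input=images, size=10, act='softmax')
cost = fluid.layers.cross_entropy(input=predict, label=label)
avg_cost = fluid.layers.mean(x=cost)
optimize_ops, params_grads = fluid.optimizer.SGD(
    learning_rate=0.01).minimize(avg_cost)

place = fluid.CPUPlace()
exe = fluid.Executor(place)

# Rewrite the program for distributed execution: one pserver, one trainer.
t = fluid.DistributeTranspiler()
t.transpile(optimize_ops, params_grads, pservers="127.0.0.1:6174", trainers=1)

pserver_endpoint = os.getenv("PSERVER")
if pserver_endpoint:
    # Parameter-server role: serve parameter updates until terminated.
    pserver_prog = t.get_pserver_program(pserver_endpoint, optimize_ops)
    exe.run(fluid.default_startup_program())
    exe.run(pserver_prog)
else:
    # Trainer role: feed minibatches into the transpiled main program.
    feeder = fluid.DataFeeder(feed_list=[images, label], place=place)
    exe.run(fluid.default_startup_program())
    train_reader = paddle.batch(paddle.dataset.mnist.train(), batch_size=128)
    for pass_id in range(PASS_NUM):
        for data in train_reader():
            avg_loss, = exe.run(fluid.default_main_program(),
                                feed=feeder.feed(data),
                                fetch_list=[avg_cost])

Launched with PSERVER=127.0.0.1:6174 set, the script plays the parameter-server role; launched without it, the script plays the trainer role, matching the two branches in the docstring example.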