From e6079390a930b58f5726f7182a83fd2acf61326e Mon Sep 17 00:00:00 2001
From: typhoonzero
Date: Mon, 18 Dec 2017 15:28:27 +0800
Subject: [PATCH] add example doc in transpiler

---
 paddle/operators/recv_op.cc                    |  3 ++-
 .../paddle/v2/fluid/distribute_transpiler.py   | 18 ++++++++++++++++++
 2 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/paddle/operators/recv_op.cc b/paddle/operators/recv_op.cc
index 731e5e4756..9c3e8953bb 100644
--- a/paddle/operators/recv_op.cc
+++ b/paddle/operators/recv_op.cc
@@ -98,7 +98,8 @@ class RecvOp : public framework::OperatorBase {
     auto *merged_grad = recv_scope.FindVar(grad_var_name);
     if (merged_grad == nullptr) {
       // create output of merged var.
-      recv_scope.Var(grad_var_name);
+      auto merged_var = recv_scope.Var(grad_var_name);
+      merged_var->GetMutable();
     }

     if (trainer_count > 1) {
diff --git a/python/paddle/v2/fluid/distribute_transpiler.py b/python/paddle/v2/fluid/distribute_transpiler.py
index 4919dce20d..13006bfd13 100644
--- a/python/paddle/v2/fluid/distribute_transpiler.py
+++ b/python/paddle/v2/fluid/distribute_transpiler.py
@@ -66,6 +66,24 @@ class DistributeTranspiler:
         Use different methods to split trainable varialbles to different
         parameter servers.

+        Example to run:
+
+        exe = fluid.Executor(place)
+        t = fluid.DistributeTranspiler()
+        t.transpile(optimize_ops, params_grads, pservers="127.0.0.1:6174", trainers=1)
+
+        pserver_endpoint = os.getenv("PSERVER")
+        if pserver_endpoint:
+            pserver_prog = t.get_pserver_program(pserver_endpoint, optimize_ops)
+            exe.run(fluid.default_startup_program())
+            exe.run(pserver_prog)
+        else:
+            feeder = fluid.DataFeeder(feed_list=[images, label], place=place)
+            exe.run(fluid.default_startup_program())
+
+            for pass_id in range(PASS_NUM):
+                ...
+
         :param optimize_ops: op list of optimization, should be the return
             value of Optimizer.minimize
         :type optimize_ops: list
--
GitLab
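
For context, the sketch below shows one way the docstring example added by this patch might be fleshed out into a runnable script. It is only a sketch against the 2017-era paddle.v2.fluid API: the network definition (images, label, fc/cross_entropy/mean layers), the SGD optimizer, the MNIST reader, PASS_NUM, and the use of fluid.default_main_program() for the trainer loop are illustrative assumptions, not part of the patch.

    # Sketch only: assumes paddle.v2.fluid (circa Dec 2017); names that do not
    # appear in the patch (network layers, reader, PASS_NUM) are hypothetical.
    import os

    import paddle.v2 as paddle
    import paddle.v2.fluid as fluid

    PASS_NUM = 10  # assumed number of training passes

    # Assumed minimal network: one fully connected layer on MNIST-like input.
    images = fluid.layers.data(name='images', shape=[784], dtype='float32')
    label = fluid.layers.data(name='label', shape=[1], dtype='int64')
    predict = fluid.layers.fc(input=images, size=10, act='softmax')
    cost = fluid.layers.cross_entropy(input=predict, label=label)
    avg_cost = fluid.layers.mean(x=cost)

    optimizer = fluid.optimizer.SGD(learning_rate=0.01)
    # Assumed to return the op list and (param, grad) pairs the transpiler needs.
    optimize_ops, params_grads = optimizer.minimize(avg_cost)

    place = fluid.CPUPlace()
    exe = fluid.Executor(place)

    # Rewrite the program for distributed execution, as in the docstring example.
    t = fluid.DistributeTranspiler()
    t.transpile(
        optimize_ops, params_grads, pservers="127.0.0.1:6174", trainers=1)

    pserver_endpoint = os.getenv("PSERVER")
    if pserver_endpoint:
        # Parameter-server role: run the transpiled pserver program.
        pserver_prog = t.get_pserver_program(pserver_endpoint, optimize_ops)
        exe.run(fluid.default_startup_program())
        exe.run(pserver_prog)
    else:
        # Trainer role: feed minibatches into the transpiled main program.
        feeder = fluid.DataFeeder(feed_list=[images, label], place=place)
        exe.run(fluid.default_startup_program())
        train_reader = paddle.batch(
            paddle.dataset.mnist.train(), batch_size=128)
        for pass_id in range(PASS_NUM):
            for data in train_reader():
                exe.run(fluid.default_main_program(),
                        feed=feeder.feed(data),
                        fetch_list=[avg_cost])

The same script would then be launched once with the PSERVER environment variable set (parameter-server role) and once without it (trainer role), mirroring the dispatch shown in the docstring example.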