From 8ee17e965f8ff488c6ea4a0437653b504ccab9a2 Mon Sep 17 00:00:00 2001
From: Yang Yang
Date: Wed, 3 Jan 2018 10:46:38 +0000
Subject: [PATCH] pass sgd at first iter

---
 paddle/operators/parallel_do_op.cc            | 23 +++++++++++++------
 .../paddle/v2/fluid/tests/test_parallel_op.py |  3 ++-
 2 files changed, 18 insertions(+), 8 deletions(-)

diff --git a/paddle/operators/parallel_do_op.cc b/paddle/operators/parallel_do_op.cc
index b067e3bda4..a81ddb25c4 100644
--- a/paddle/operators/parallel_do_op.cc
+++ b/paddle/operators/parallel_do_op.cc
@@ -185,18 +185,27 @@ class ParallelDoGradOp : public OperatorBase {
     // merge grad
     for (auto &s : Outputs(framework::GradVarName(kParameters))) {
       LOG(INFO) << s;
-      // std::string s_buf = s + "@BUF";
-      // auto *t_buf = sub_scopes[0]->Var(s_buf)->GetMutable<LoDTensor>();
+
+      auto &t = sub_scopes[0]->FindVar(s)->Get<LoDTensor>();
+      LOG(INFO) << t;
+
+      std::string s_buf = s + "@BUF";
+      auto *t_buf = sub_scopes[0]->Var(s_buf)->GetMutable<LoDTensor>();
+
       for (size_t place_idx = 1; place_idx < places.size(); ++place_idx) {
+        auto &tt = sub_scopes[place_idx]->FindVar(s)->Get<LoDTensor>();
         LOG(INFO) << place_idx;
-        LOG(INFO) << sub_scopes[place_idx]->FindVar(s)->Get<LoDTensor>();
-        // Copy grad[i] to grad_buf[0]
+        LOG(INFO) << tt;
+        framework::CopyFrom(tt, places[0], t_buf);
 
-        // sum_op
+        auto sum_op = framework::OpRegistry::CreateOp(
+            "sum", {{"X", {s, s_buf}}}, {{"Out", {s}}},
+            framework::AttributeMap{});
+        sum_op->Run(*sub_scopes[0], place);
       }
 
-      // Copy grad[0] to grad
-      // auto *t = scope.FindVar(s)->GetMutable<LoDTensor>();
+      LOG(INFO) << t;
+      framework::CopyFrom(t, place, scope.FindVar(s)->GetMutable<LoDTensor>());
     }
   }
 };
diff --git a/python/paddle/v2/fluid/tests/test_parallel_op.py b/python/paddle/v2/fluid/tests/test_parallel_op.py
index eec546107f..c39040869d 100644
--- a/python/paddle/v2/fluid/tests/test_parallel_op.py
+++ b/python/paddle/v2/fluid/tests/test_parallel_op.py
@@ -27,7 +27,8 @@ class ParallelOpTest(unittest.TestCase):
             pd.write_output(hidden)
         data = pd()
         loss = layers.mean(x=data)
-        append_backward(loss)
+        sgd_optimizer = fluid.optimizer.SGD(learning_rate=0.001)
+        sgd_optimizer.minimize(loss)
 
         exe = fluid.Executor(fluid.CPUPlace())
         exe.run(fluid.default_startup_program())
-- 
GitLab
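
Note on the gradient-merge hunk: ParallelDoGradOp runs one sub-scope per device, so after the backward pass each sub-scope holds its own replica of every parameter gradient. The hunk merges them by copying each replica's gradient into a staging buffer (s + "@BUF") on place 0 and running a "sum" op, so place 0's gradient accumulates the total before being copied back to the parent scope. The test change swaps the bare append_backward(loss) for fluid.optimizer.SGD(...).minimize(loss), which appends both the backward pass and the SGD update ops, so the merged gradient is actually consumed. Below is a minimal sketch of that accumulation pattern in plain C++; the Tensor alias and SumInto helper are illustrative stand-ins, not Paddle APIs.

#include <cassert>
#include <iostream>
#include <vector>

using Tensor = std::vector<float>;  // stand-in for framework::LoDTensor

// Element-wise accumulate, mirroring what the "sum" op does in the patch:
// out[i] += src[i].
void SumInto(const Tensor &src, Tensor *out) {
  assert(src.size() == out->size());
  for (size_t i = 0; i < src.size(); ++i) (*out)[i] += src[i];
}

int main() {
  // One gradient replica per device (sub-scope); index 0 accumulates.
  std::vector<Tensor> grads = {{1.f, 2.f}, {3.f, 4.f}, {5.f, 6.f}};

  Tensor buf(grads[0].size());  // plays the role of the s + "@BUF" tensor
  for (size_t place_idx = 1; place_idx < grads.size(); ++place_idx) {
    buf = grads[place_idx];   // CopyFrom: replica grad -> buffer on place 0
    SumInto(buf, &grads[0]);  // sum op: grad[0] = grad[0] + buffer
  }

  for (float v : grads[0]) std::cout << v << ' ';  // prints: 9 12
  std::cout << '\n';
}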