diff --git a/paddle/fluid/operators/async_listen_and_serv_op.cc b/paddle/fluid/operators/async_listen_and_serv_op.cc
index 14d9121eff06250c5f619fd0ab136c9f35f125ad..093d44e2d1803a9ea4d33fca84f78c49451c2b11 100644
--- a/paddle/fluid/operators/async_listen_and_serv_op.cc
+++ b/paddle/fluid/operators/async_listen_and_serv_op.cc
@@ -89,7 +89,7 @@ void AsyncListenAndServOp::RunImpl(const framework::Scope &scope,
   std::unordered_map<std::string, int32_t> grad_to_id;
   std::unordered_map<int32_t, std::string> id_to_grad;
 
-  auto grad_map_str = Attr<std::vector<std::string>>("grad_map");
+  auto grad_map_str = Attr<std::vector<std::string>>("grad_to_id");
   for (auto &grad_and_id : grad_map_str) {
     std::vector<std::string> pieces;
     split(grad_and_id, ' ', &pieces);
@@ -193,7 +193,7 @@ from send_op and send back variables to recv_op.
       .SetDefault("127.0.0.1:6164")
       .AddCustomChecker([](const std::string &ip) { return !ip.empty(); });
   AddAttr<std::vector<std::string>>(
-      "grad_map(['param1@GRAD.block0:1', 'param2@GRAD.blockn:2'])",
+      "grad_to_id(['param1@GRAD.block0:1', 'param2@GRAD.blockn:2'])",
       "a map from grad name to it's optimize block id")
       .SetDefault({});
   AddAttr<framework::BlockDesc *>(kOptimizeBlock,
diff --git a/paddle/fluid/operators/listen_and_serv_op.cc b/paddle/fluid/operators/listen_and_serv_op.cc
index db8a0cd631b270bf89b49c9d7326e48669a0378c..d5cb1653513b7623cf25f227d72f148cc96d2a93 100644
--- a/paddle/fluid/operators/listen_and_serv_op.cc
+++ b/paddle/fluid/operators/listen_and_serv_op.cc
@@ -207,8 +207,8 @@ void ListenAndServOp::RunAsyncLoop(framework::Executor *executor,
   std::unordered_map<std::string, int32_t> grad_to_id;
   std::unordered_map<int32_t, std::string> id_to_grad;
 
-  auto grad_map_str = Attr<std::vector<std::string>>("grad_map");
-  for (auto &grad_and_id : grad_map_str) {
+  auto grad_to_id_str = Attr<std::vector<std::string>>("grad_to_id");
+  for (auto &grad_and_id : grad_to_id_str) {
     std::vector<std::string> pieces;
     split(grad_and_id, ' ', &pieces);
     PADDLE_ENFORCE_EQ(pieces.size(), 2);
@@ -227,7 +227,7 @@ void ListenAndServOp::RunAsyncLoop(framework::Executor *executor,
       block_list.push_back(blkid);
     }
   }
-  PADDLE_ENFORCE_EQ(grad_map_str.size(), block_list.size(),
+  PADDLE_ENFORCE_EQ(grad_to_id_str.size(), block_list.size(),
                     "grad num should be equal to optimize block num");
   auto optimize_prepared = executor->Prepare(*program, block_list);
 
@@ -328,7 +328,7 @@ from send_op and send back variables to recv_op.
       .SetDefault("127.0.0.1:6164")
       .AddCustomChecker([](const std::string &ip) { return !ip.empty(); });
   AddAttr<std::vector<std::string>>(
-      "grad_map(['param1@GRAD.block0:1', 'param2@GRAD.blockn:2'])",
+      "grad_to_id(['param1@GRAD.block0:1', 'param2@GRAD.blockn:2'])",
       "a map from grad name to it's optimize block id")
       .SetDefault({});
   AddAttr<bool>("sync_mode", "if works at sync_mode or not")
diff --git a/paddle/fluid/operators/send_recv_op_test.cc b/paddle/fluid/operators/send_recv_op_test.cc
index f247583ce64d0bae77d588ed74cc1d7e412477ea..2b440fe2deeaf25ee5348e5441481c532b6cedf2 100644
--- a/paddle/fluid/operators/send_recv_op_test.cc
+++ b/paddle/fluid/operators/send_recv_op_test.cc
@@ -137,7 +137,7 @@ void StartServerNet(bool is_sparse) {
   attrs.insert({"GradList", std::vector<std::string>({"x1"})});
   attrs.insert({"OptimizeBlock", optimize_block});
   attrs.insert({"PrefetchBlock", prefetch_block});
-  attrs.insert({"grad_map", {}});
+  attrs.insert({"grad_to_id", {}});
   attrs.insert({"sync_mode", true});
   listen_and_serv_op =
       f::OpRegistry::CreateOp("listen_and_serv", {{"X", {"x1"}}}, {}, attrs);
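
For readers skimming the diff: the renamed `grad_to_id` attribute carries entries of the form `"<grad_name> <block_id>"`, which the async loop splits on a space and loads into the two lookup maps that the `PADDLE_ENFORCE_EQ` checks validate. Below is a minimal, self-contained sketch of that split-and-map step; the sample entries are hypothetical, and `std::istringstream` stands in for Paddle's internal `split` helper.

```cpp
#include <cassert>
#include <cstdint>
#include <sstream>
#include <string>
#include <unordered_map>
#include <vector>

int main() {
  // Hypothetical attribute value, mirroring Attr<std::vector<std::string>>("grad_to_id").
  std::vector<std::string> grad_to_id_str = {"param1@GRAD 1", "param2@GRAD 2"};

  std::unordered_map<std::string, int32_t> grad_to_id;
  std::unordered_map<int32_t, std::string> id_to_grad;

  for (const auto &grad_and_id : grad_to_id_str) {
    // Stand-in for split(grad_and_id, ' ', &pieces): read the two
    // space-separated fields, the gradient name and its block id.
    std::istringstream iss(grad_and_id);
    std::string grad_name;
    int32_t block_id = 0;
    iss >> grad_name >> block_id;

    grad_to_id[grad_name] = block_id;  // grad name -> optimize block id
    id_to_grad[block_id] = grad_name;  // optimize block id -> grad name
  }

  assert(grad_to_id["param1@GRAD"] == 1);
  assert(id_to_grad[2] == "param2@GRAD");
  return 0;
}
```

Keeping both directions of the mapping lets the server resolve an incoming gradient name to its optimize block, and a finished block back to its gradient, without a linear scan in either case.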