From 754e96a30c1812a658c5ccf1281a1c39fef9416b Mon Sep 17 00:00:00 2001 From: qiaolongfei Date: Wed, 25 Jul 2018 14:04:58 +0800 Subject: [PATCH] distribute lookup table work with parallel executor --- paddle/fluid/framework/details/multi_devices_graph_builder.cc | 2 +- python/paddle/fluid/transpiler/distribute_transpiler.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/paddle/fluid/framework/details/multi_devices_graph_builder.cc b/paddle/fluid/framework/details/multi_devices_graph_builder.cc index f1f8674caf..e0fb92cc3c 100644 --- a/paddle/fluid/framework/details/multi_devices_graph_builder.cc +++ b/paddle/fluid/framework/details/multi_devices_graph_builder.cc @@ -207,7 +207,7 @@ std::unique_ptr MultiDevSSAGraphBuilder::Apply( result.Set("ops", new GraphOps); // find send/recv vars so that we can place the distributed training - // realted op in the place 0 + // related op in the place 0 auto send_vars = FindDistTrainSendVars(nodes); auto recv_vars = FindDistTrainRecvVars(nodes); diff --git a/python/paddle/fluid/transpiler/distribute_transpiler.py b/python/paddle/fluid/transpiler/distribute_transpiler.py index e7698d8c52..4a9ea6af74 100644 --- a/python/paddle/fluid/transpiler/distribute_transpiler.py +++ b/python/paddle/fluid/transpiler/distribute_transpiler.py @@ -779,7 +779,9 @@ class DistributeTranspiler(object): outputs={"Out": prefetch_output_vars}, attrs={ "epmap": pserver_endpoints, + # FIXME(qiao) temporarily disable this config because prefetch + # does not act like other rpc ops; it's more like a forward op + # RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE }) # insert concat_op -- GitLab