Commit 754e96a3 authored by qiaolongfei

Make distributed lookup table work with ParallelExecutor

Parent b73806b2
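For context, the commit title refers to running a trainer program produced by DistributeTranspiler, in which a large embedding table is served by parameter servers and fetched through prefetch ops, under ParallelExecutor rather than only the single-device Executor. The sketch below is not part of this commit; it assumes the paddle.fluid Python API of this period, and the endpoints, layer sizes, and variable names are illustrative placeholders.

    import paddle.fluid as fluid

    # Small model whose embedding is marked as a distributed lookup table.
    ids = fluid.layers.data(name='ids', shape=[1], dtype='int64')
    label = fluid.layers.data(name='label', shape=[1], dtype='float32')
    emb = fluid.layers.embedding(
        input=ids, size=[100000, 8], is_distributed=True,
        param_attr=fluid.ParamAttr(name='emb'))
    pred = fluid.layers.fc(input=emb, size=1)
    avg_cost = fluid.layers.mean(
        fluid.layers.square_error_cost(input=pred, label=label))
    fluid.optimizer.SGD(learning_rate=0.01).minimize(avg_cost)

    # Transpile for distributed training (hypothetical endpoints).
    t = fluid.DistributeTranspiler()
    t.transpile(trainer_id=0,
                pservers="127.0.0.1:6170,127.0.0.1:6171",
                trainers=2)
    trainer_prog = t.get_trainer_program()

    exe = fluid.Executor(fluid.CPUPlace())
    exe.run(fluid.default_startup_program())

    # The point of the commit: the transpiled program, which contains
    # prefetch/RPC ops for the distributed lookup table, should now also
    # run under ParallelExecutor, not only the plain Executor.
    pe = fluid.ParallelExecutor(use_cuda=False,
                                loss_name=avg_cost.name,
                                main_program=trainer_prog)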
@@ -207,7 +207,7 @@ std::unique_ptr<Graph> MultiDevSSAGraphBuilder::Apply(
   result.Set("ops", new GraphOps);
   // find send/recv vars so that we can place the distributed training
-  // realted op in the place 0
+  // related op in the place 0
   auto send_vars = FindDistTrainSendVars(nodes);
   auto recv_vars = FindDistTrainRecvVars(nodes);
...
@@ -779,7 +779,9 @@ class DistributeTranspiler(object):
             outputs={"Out": prefetch_output_vars},
             attrs={
                 "epmap": pserver_endpoints,
-                RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE
+                # FIXME(qiao) temporarily disable this config because prefetch
+                # is not act as other rpc op, it's more like a forward op
+                # RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE
             })
         # insert concat_op
...
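For readers unfamiliar with the attribute being disabled here: RPC_OP_ROLE_ATTR_NAME and RPC_OP_ROLE_ATTR_VALUE are the transpiler's aliases for the per-operator role attribute and the RPC role value exposed by paddle.fluid.core. The short sketch below is illustrative only, not part of the commit, and assumes the Fluid API of this period; with the attribute left off, the prefetch op keeps the default Forward role, which matches the FIXME comment's intent that prefetch behave like a forward op rather than an RPC op.

    from paddle.fluid import core

    # Name of the per-op role attribute (typically "op_role") and the role
    # values the transpiler normally assigns. Leaving the RPC role off the
    # prefetch op means graph builders such as MultiDevSSAGraphBuilder treat
    # it like an ordinary forward op.
    OP_ROLE_ATTR_NAME = core.op_proto_and_checker_maker.kOpRoleAttrName()
    RPC_ROLE = core.op_proto_and_checker_maker.OpRole.RPC
    FORWARD_ROLE = core.op_proto_and_checker_maker.OpRole.Forward

    print(OP_ROLE_ATTR_NAME, int(RPC_ROLE), int(FORWARD_ROLE))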