Unverified · Commit 42909238 · Authored by: Chengmo · Committed by: GitHub

[cherry-pick][release-1.6]Fix transpiler en doc (#20149) (#20371)

* Fix transpiler en doc (#20149)

* test=develop,test=document_fix,fix transpiler doc,add API.spec

* test=develop,test=document_fix,fix transpiler doc,add API.spec
Parent 1fda6e1c
@@ -37,8 +37,8 @@ paddle.fluid.global_scope (ArgSpec(args=[], varargs=None, keywords=None, default
 paddle.fluid.scope_guard (ArgSpec(args=['scope'], varargs=None, keywords=None, defaults=None), ('document', '02fcfc1eda07c03a84ed62422366239c'))
 paddle.fluid.DistributeTranspiler ('paddle.fluid.transpiler.distribute_transpiler.DistributeTranspiler', ('document', '5152a3ed0544d4d9600fb53a73f15c38'))
 paddle.fluid.DistributeTranspiler.__init__ (ArgSpec(args=['self', 'config'], varargs=None, keywords=None, defaults=(None,)), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
-paddle.fluid.DistributeTranspiler.get_pserver_program (ArgSpec(args=['self', 'endpoint'], varargs=None, keywords=None, defaults=None), ('document', 'b1951949c6d21698290aa8ac69afee32'))
-paddle.fluid.DistributeTranspiler.get_pserver_programs (ArgSpec(args=['self', 'endpoint'], varargs=None, keywords=None, defaults=None), ('document', 'c89fc350f975ef827f5448d68af388cf'))
+paddle.fluid.DistributeTranspiler.get_pserver_program (ArgSpec(args=['self', 'endpoint'], varargs=None, keywords=None, defaults=None), ('document', '03ad12ad1c36dea4f3595a4e8c592f95'))
+paddle.fluid.DistributeTranspiler.get_pserver_programs (ArgSpec(args=['self', 'endpoint'], varargs=None, keywords=None, defaults=None), ('document', 'a02723c79325ec4cac352b6d2eea7b52'))
 paddle.fluid.DistributeTranspiler.get_startup_program (ArgSpec(args=['self', 'endpoint', 'pserver_program', 'startup_program'], varargs=None, keywords=None, defaults=(None, None)), ('document', '90a40b80e0106f69262cc08b861c3e39'))
 paddle.fluid.DistributeTranspiler.get_trainer_program (ArgSpec(args=['self', 'wait_port'], varargs=None, keywords=None, defaults=(True,)), ('document', '27ed6451913b6ab341bd0d1a36b3f8c1'))
 paddle.fluid.DistributeTranspiler.transpile (ArgSpec(args=['self', 'trainer_id', 'program', 'pservers', 'trainers', 'sync_mode', 'startup_program', 'current_endpoint'], varargs=None, keywords=None, defaults=(None, '127.0.0.1:6174', 1, True, None, '127.0.0.1:6174')), ('document', '13c09537f7a5660c528242eb370bb4c7'))
@@ -902,8 +902,8 @@ paddle.fluid.dygraph.BackwardStrategy ('paddle.fluid.core_avx.BackwardStrategy',
 paddle.fluid.dygraph.BackwardStrategy.__init__ __init__(self: paddle.fluid.core_avx.BackwardStrategy) -> None
 paddle.fluid.transpiler.DistributeTranspiler ('paddle.fluid.transpiler.distribute_transpiler.DistributeTranspiler', ('document', '5152a3ed0544d4d9600fb53a73f15c38'))
 paddle.fluid.transpiler.DistributeTranspiler.__init__ (ArgSpec(args=['self', 'config'], varargs=None, keywords=None, defaults=(None,)), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
-paddle.fluid.transpiler.DistributeTranspiler.get_pserver_program (ArgSpec(args=['self', 'endpoint'], varargs=None, keywords=None, defaults=None), ('document', 'b1951949c6d21698290aa8ac69afee32'))
-paddle.fluid.transpiler.DistributeTranspiler.get_pserver_programs (ArgSpec(args=['self', 'endpoint'], varargs=None, keywords=None, defaults=None), ('document', 'c89fc350f975ef827f5448d68af388cf'))
+paddle.fluid.transpiler.DistributeTranspiler.get_pserver_program (ArgSpec(args=['self', 'endpoint'], varargs=None, keywords=None, defaults=None), ('document', '03ad12ad1c36dea4f3595a4e8c592f95'))
+paddle.fluid.transpiler.DistributeTranspiler.get_pserver_programs (ArgSpec(args=['self', 'endpoint'], varargs=None, keywords=None, defaults=None), ('document', 'a02723c79325ec4cac352b6d2eea7b52'))
 paddle.fluid.transpiler.DistributeTranspiler.get_startup_program (ArgSpec(args=['self', 'endpoint', 'pserver_program', 'startup_program'], varargs=None, keywords=None, defaults=(None, None)), ('document', '90a40b80e0106f69262cc08b861c3e39'))
 paddle.fluid.transpiler.DistributeTranspiler.get_trainer_program (ArgSpec(args=['self', 'wait_port'], varargs=None, keywords=None, defaults=(True,)), ('document', '27ed6451913b6ab341bd0d1a36b3f8c1'))
 paddle.fluid.transpiler.DistributeTranspiler.transpile (ArgSpec(args=['self', 'trainer_id', 'program', 'pservers', 'trainers', 'sync_mode', 'startup_program', 'current_endpoint'], varargs=None, keywords=None, defaults=(None, '127.0.0.1:6174', 1, True, None, '127.0.0.1:6174')), ('document', '13c09537f7a5660c528242eb370bb4c7'))
......
@@ -936,6 +936,7 @@ void GeoSgdCommunicator::RpcRecv(const std::string &var_name,
   auto &cpu_ctx_recv = *pool.Get(platform::CPUPlace());
   distributed::RPCClient *rpc_client =
       distributed::RPCClient::GetInstance<RPCCLIENT_T>(train_id);
+  pserver_scope_->Var(splited_var_name);
   rpc_client->AsyncGetVar(endpoint, cpu_ctx_recv, *pserver_scope_.get(),
                           splited_var_name, splited_var_name, splited_var_name);
......
@@ -831,7 +831,15 @@ class DistributeTranspiler(object):
     def get_trainer_program(self, wait_port=True):
         """
-        Get transpiled trainer side program.
+        Get transpiled trainer side program. Compared with the origin program,
+        the trainer side program has the following differences:
+
+        - Optimizer-related ops are removed, since the parameters are updated on the pserver.
+        - A ``Send_op`` and a ``Recv_op`` are added after each op that computes the gradient of a parameter.
+
+        Args:
+            wait_port (bool): Whether to wait for the parameter server to be ready
+                before returning the program. Default is True.

         Returns:
             Program: trainer side program.
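The docstring above describes what the transpiled trainer program looks like; a minimal trainer-side usage sketch follows, based on the signatures in the API.spec hunk above. The endpoint value and `trainer_id` are placeholders for illustration, not values from this commit:

```python
import paddle.fluid as fluid

# Build the usual single-process network, loss, and optimizer first,
# then transpile the default main program for distributed training.
# All endpoint values below are illustrative placeholders.
t = fluid.DistributeTranspiler()
t.transpile(
    trainer_id=0,               # index of this trainer process
    pservers="127.0.0.1:6174",  # comma-separated pserver endpoint list
    trainers=1,                 # total number of trainers
    sync_mode=True)

# Fetch the transpiled trainer-side program; with the default
# wait_port=True this blocks until the pserver ports are reachable.
trainer_prog = t.get_trainer_program()
```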
@@ -965,7 +973,12 @@ class DistributeTranspiler(object):
     def get_pserver_program(self, endpoint):
         """
-        Get parameter server side program.
+        Get parameter server side program. Compared with the origin program,
+        the pserver side program has the following differences:
+
+        - Only optimize-related and communication-related ops are included.
+        - Block 0 contains only variable definitions and ``listen_and_serv_op``.
+        - Each variable that needs to be updated has its own block.

         Args:
             endpoint (str): current parameter server endpoint.
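For the pserver side, a matching sketch under the same placeholder assumptions (the endpoint passed to `get_pserver_program` must be one of the endpoints given to `transpile`):

```python
import paddle.fluid as fluid

current_endpoint = "127.0.0.1:6174"  # placeholder; this pserver's own endpoint

t = fluid.DistributeTranspiler()
t.transpile(trainer_id=0, pservers=current_endpoint, trainers=1)

# Program holding the optimize blocks and listen_and_serv_op.
pserver_prog = t.get_pserver_program(current_endpoint)
# Matching startup program that creates and initializes this pserver's variables.
pserver_startup = t.get_startup_program(current_endpoint, pserver_prog)

exe = fluid.Executor(fluid.CPUPlace())
exe.run(pserver_startup)
exe.run(pserver_prog)  # blocks here, serving parameter updates to trainers
```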
@@ -1224,6 +1237,8 @@ class DistributeTranspiler(object):
     def get_pserver_programs(self, endpoint):
         """
         Get pserver side main program and startup program for distributed training.
+        The ``main_program`` returned by this function is consistent with the
+        return value of the function ``get_pserver_program``.

         Args:
             endpoint (str): current pserver endpoint.
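For comparison, a sketch of the combined accessor, which returns both programs in one call (same placeholder endpoint as above; the unpacking assumes the documented pair of main and startup programs):

```python
import paddle.fluid as fluid

current_endpoint = "127.0.0.1:6174"  # placeholder endpoint

t = fluid.DistributeTranspiler()
t.transpile(trainer_id=0, pservers=current_endpoint, trainers=1)

# Equivalent to calling get_pserver_program() and get_startup_program()
# separately: returns the main program and its startup program together.
pserver_prog, pserver_startup = t.get_pserver_programs(current_endpoint)
```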
......