Unverified commit 0b250a79, authored by Z zmx, committed by GitHub

[heterps]remove api for heter pipeline ps (#37396)

* fix api. test=develop

* fix api. test=develop
Parent a258badb
@@ -68,14 +68,11 @@ server_num = fleet.server_num
 server_index = fleet.server_index
 server_endpoints = fleet.server_endpoints
 is_server = fleet.is_server
-is_heter_worker = fleet.is_heter_worker
 util = UtilBase()
 barrier_worker = fleet.barrier_worker
 init_worker = fleet.init_worker
-init_heter_worker = fleet.init_heter_worker
 init_server = fleet.init_server
 run_server = fleet.run_server
-run_heter_worker = fleet.run_heter_worker
 stop_worker = fleet.stop_worker
 distributed_optimizer = fleet.distributed_optimizer
 save_inference_model = fleet.save_inference_model
......
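For context, the module-level aliases that survive this hunk cover the plain parameter-server workflow. Below is a minimal sketch of how a training script can dispatch on role using only the remaining API; the net-building and optimizer lines are placeholders echoing the docstring examples in this file, not part of the commit itself.

import paddle.distributed.fleet as fleet

fleet.init()

# build net
# optimizer = fleet.distributed_optimizer(optimizer)
# optimizer.minimize(loss)

if fleet.is_server():
    fleet.init_server()
    fleet.run_server()
else:
    fleet.init_worker()
    # run training ...
    fleet.stop_worker()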
@@ -565,24 +565,6 @@ class Fleet(object):
         """
         return self._role_maker._is_server()
 
-    def is_heter_worker(self):
-        """
-        Check whether the node is an instance of heter worker.
-
-        Returns:
-            bool: True if this is a node of heter worker,
-                  False if not.
-
-        Examples:
-
-            .. code-block:: python
-
-                import paddle.distributed.fleet as fleet
-                fleet.init()
-                fleet.is_heter_worker()
-
-        """
-        return self._role_maker._is_heter_worker()
-
     def barrier_worker(self):
         """
         barrier all workers
@@ -617,30 +599,6 @@ class Fleet(object):
         """
         self._runtime_handle._init_worker()
 
-    @is_non_distributed_check
-    @inited_runtime_handler
-    def init_heter_worker(self):
-        """
-        init_heter_worker executor to initialize startup program,
-
-        Returns:
-            None
-
-        Examples:
-
-            .. code-block:: python
-
-                import paddle.distributed.fleet as fleet
-                fleet.init()
-
-                # build net
-                # fleet.distributed_optimizer(...)
-
-                fleet.init_heter_worker()
-
-        """
-        self._runtime_handle._init_heter_worker()
-
     @is_non_distributed_check
     @inited_runtime_handler
     def init_server(self, *args, **kwargs):
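Note that the removed init_heter_worker was a thin wrapper: as the runtime hunk at the bottom of this diff shows, it only ran the default startup program with an executor and then called the ordinary worker initialization. A hedged sketch of that sequence using the public API, assuming a fluid Executor on CPUPlace (the real executor and place were chosen internally by _get_executor):

import paddle.fluid as fluid
import paddle.distributed.fleet as fleet

fleet.init()
# build net
# fleet.distributed_optimizer(...)

exe = fluid.Executor(fluid.CPUPlace())    # assumed place, for illustration only
exe.run(fluid.default_startup_program())  # what _init_heter_worker ran first
fleet.init_worker()                       # followed by the regular worker init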
@@ -690,31 +648,6 @@ class Fleet(object):
         """
         self._runtime_handle.load_model(path, mode)
 
-    @is_non_distributed_check
-    @inited_runtime_handler
-    def run_heter_worker(self, dataset):
-        """
-        run_heter_worker will run heter trainer main program with executor.
-
-        Returns:
-            None
-
-        Examples:
-
-            .. code-block:: python
-
-                import paddle.distributed.fleet as fleet
-                fleet.init()
-
-                # build net
-                # fleet.distributed_optimizer(...)
-
-                dataset = ""
-                if fleet.is_heter_worker():
-                    fleet.run_heter_worker(dataset)
-
-        """
-        self._runtime_handle._run_heter_worker(dataset)
-
     @is_non_distributed_check
     @inited_runtime_handler
     def run_server(self):
......
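Similarly, run_heter_worker reduced to a single Executor.train_from_dataset call on the default main program, as the runtime hunk below shows (the removed helper actually passed dataset=None, since a heter worker did not need one). A hedged sketch of issuing an equivalent call directly; the executor, place, and InMemoryDataset setup are illustrative assumptions, not taken from this commit:

import paddle.fluid as fluid

exe = fluid.Executor(fluid.CPUPlace())                               # assumed executor/place
dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")  # illustrative dataset
# dataset.set_use_var([...]); dataset.set_batch_size(...); dataset.load_into_memory()

exe.train_from_dataset(
    program=fluid.default_main_program(),
    dataset=dataset,
    debug=False,
    fetch_list=None,
    fetch_info=None,
    print_period=100)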
@@ -884,33 +884,6 @@ class TheOnePSRuntime(RuntimeBase):
         host, port = ep.split(":")
         self._server.run_server(host, int(port))
 
-    def _init_heter_worker(self):
-        executor = self._get_executor()
-        startup_program = fluid.default_startup_program()
-        #real_startup_program = startup_program._heter_pipeline_opt[
-        #    "startup_program"]
-        executor.run(startup_program)
-        self._init_worker()
-
-    def _run_heter_worker(self,
-                          dataset=None,
-                          scope=None,
-                          thread=0,
-                          debug=False,
-                          fetch_list=None,
-                          fetch_info=None,
-                          print_period=100,
-                          fetch_handler=None):
-        executor = self._get_executor()
-        # dataset is not needed for heter worker
-        executor.train_from_dataset(
-            program=fluid.default_main_program(),
-            dataset=None,
-            debug=debug,
-            fetch_list=fetch_list,
-            fetch_info=fetch_info,
-            print_period=print_period)
-
     def _stop_worker(self):
         self._communicator.stop()
         if self.role_maker._is_heter_parameter_server_mode:
......