From 0b250a79fa91bfb65cfa45840aa8d4a6197710ae Mon Sep 17 00:00:00 2001
From: zmx
Date: Mon, 22 Nov 2021 15:01:22 +0800
Subject: [PATCH] [heterps]remove api for heter pipeline ps (#37396)

* fix api. test=develop

* fix api. test=develop
---
 python/paddle/distributed/fleet/__init__.py  |  3 -
 .../distributed/fleet/base/fleet_base.py     | 67 -------------------
 .../distributed/fleet/runtime/the_one_ps.py  | 27 --------
 3 files changed, 97 deletions(-)

diff --git a/python/paddle/distributed/fleet/__init__.py b/python/paddle/distributed/fleet/__init__.py
index 894ada4ba4f..3186df7db58 100644
--- a/python/paddle/distributed/fleet/__init__.py
+++ b/python/paddle/distributed/fleet/__init__.py
@@ -68,14 +68,11 @@ server_num = fleet.server_num
 server_index = fleet.server_index
 server_endpoints = fleet.server_endpoints
 is_server = fleet.is_server
-is_heter_worker = fleet.is_heter_worker
 util = UtilBase()
 barrier_worker = fleet.barrier_worker
 init_worker = fleet.init_worker
-init_heter_worker = fleet.init_heter_worker
 init_server = fleet.init_server
 run_server = fleet.run_server
-run_heter_worker = fleet.run_heter_worker
 stop_worker = fleet.stop_worker
 distributed_optimizer = fleet.distributed_optimizer
 save_inference_model = fleet.save_inference_model
diff --git a/python/paddle/distributed/fleet/base/fleet_base.py b/python/paddle/distributed/fleet/base/fleet_base.py
index 6006aa7b2ab..57199b8a1e8 100755
--- a/python/paddle/distributed/fleet/base/fleet_base.py
+++ b/python/paddle/distributed/fleet/base/fleet_base.py
@@ -565,24 +565,6 @@ class Fleet(object):
         """
         return self._role_maker._is_server()
 
-    def is_heter_worker(self):
-        """
-        Check whether the node is an instance of heter worker.
-
-        Returns:
-            bool: True if this is a node of heter worker,
-            False if not.
-
-        Examples:
-
-            .. code-block:: python
-
-                import paddle.distributed.fleet as fleet
-                fleet.init()
-                fleet.is_heter_worker()
-        """
-        return self._role_maker._is_heter_worker()
-
     def barrier_worker(self):
         """
         barrier all workers
@@ -617,30 +599,6 @@ class Fleet(object):
         """
         self._runtime_handle._init_worker()
 
-    @is_non_distributed_check
-    @inited_runtime_handler
-    def init_heter_worker(self):
-        """
-        init_heter_worker executor to initialize startup program,
-
-        Returns:
-            None
-
-        Examples:
-
-            .. code-block:: python
-
-                import paddle.distributed.fleet as fleet
-                fleet.init()
-
-                # build net
-                # fleet.distributed_optimizer(...)
-
-                fleet.init_heter_worker()
-
-        """
-        self._runtime_handle._init_heter_worker()
-
     @is_non_distributed_check
     @inited_runtime_handler
     def init_server(self, *args, **kwargs):
@@ -690,31 +648,6 @@ class Fleet(object):
         """
         self._runtime_handle.load_model(path, mode)
 
-    @is_non_distributed_check
-    @inited_runtime_handler
-    def run_heter_worker(self, dataset):
-        """
-        run_heter_worker will run heter trainer main program with executor.
-
-        Returns:
-            None
-
-        Examples:
-
-            .. code-block:: python
-
-                import paddle.distributed.fleet as fleet
-                fleet.init()
-
-                # build net
-                # fleet.distributed_optimizer(...)
-                dataset = ""
-                if fleet.is_heter_worker():
-                    fleet.run_heter_worker(dataset)
-
-        """
-        self._runtime_handle._run_heter_worker(dataset)
-
     @is_non_distributed_check
     @inited_runtime_handler
     def run_server(self):
diff --git a/python/paddle/distributed/fleet/runtime/the_one_ps.py b/python/paddle/distributed/fleet/runtime/the_one_ps.py
index 76ed8c9d5d7..dc555b5ae23 100644
--- a/python/paddle/distributed/fleet/runtime/the_one_ps.py
+++ b/python/paddle/distributed/fleet/runtime/the_one_ps.py
@@ -884,33 +884,6 @@ class TheOnePSRuntime(RuntimeBase):
             host, port = ep.split(":")
         self._server.run_server(host, int(port))
 
-    def _init_heter_worker(self):
-        executor = self._get_executor()
-        startup_program = fluid.default_startup_program()
-        #real_startup_program = startup_program._heter_pipeline_opt[
-        #    "startup_program"]
-        executor.run(startup_program)
-        self._init_worker()
-
-    def _run_heter_worker(self,
-                          dataset=None,
-                          scope=None,
-                          thread=0,
-                          debug=False,
-                          fetch_list=None,
-                          fetch_info=None,
-                          print_period=100,
-                          fetch_handler=None):
-        executor = self._get_executor()
-        # dataset is not needed for heter worker
-        executor.train_from_dataset(
-            program=fluid.default_main_program(),
-            dataset=None,
-            debug=debug,
-            fetch_list=fetch_list,
-            fetch_info=fetch_info,
-            print_period=print_period)
-
     def _stop_worker(self):
         self._communicator.stop()
         if self.role_maker._is_heter_parameter_server_mode:
-- 
GitLab
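For reference, a minimal sketch of the parameter-server flow that remains
after this patch, using only entry points still exported in
python/paddle/distributed/fleet/__init__.py. This is an illustrative sketch,
not part of the patch; the role-dispatch structure is assumed from the
docstrings shown in the diff above.

    import paddle.distributed.fleet as fleet

    fleet.init()  # in PS mode the role is read from the environment

    # build net
    # optimizer = fleet.distributed_optimizer(optimizer)
    # optimizer.minimize(loss)

    if fleet.is_server():
        fleet.init_server()   # initialize / load parameters
        fleet.run_server()    # blocks, serving parameters to workers
    else:
        fleet.init_worker()   # start the communicator
        # ... run training, e.g. exe.train_from_dataset(...)
        fleet.stop_worker()   # sketch: tear down the communicator

With init_heter_worker / run_heter_worker / is_heter_worker removed, the
heter trainer no longer has a separate public entry point; the exported
worker and server APIs above are the surviving surface.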