From 5d322ced9c3e554dc1da7947f3faad90764b9aae Mon Sep 17 00:00:00 2001
From: wangzhen38 <41941775+wangzhen38@users.noreply.github.com>
Date: Mon, 27 Feb 2023 10:13:02 +0800
Subject: [PATCH] [mv fleet] mv fleet to distributed (#50834)

* [mv fleet] mv fleet to distributed
* [mv fleet] for ci
* [mv fleet] for ci
* [mv fleet] solve ci of version
---
 paddle/fluid/distributed/CMakeLists.txt | 2 +-
 .../parameter_server_optimizer.py | 22 +++++++++++--------
 .../fleet/runtime/parameter_server_runtime.py | 18 +++++++--------
 .../distributed/fleet/runtime/the_one_ps.py | 20 ++++++++---------
 python/paddle/fluid/dataset.py | 12 +++++-----
 python/paddle/fluid/device_worker.py | 4 ++--
 .../fleet/test_distributed_strategy.py | 6 +++--
 .../collective/fleet/test_recv_save_op.py | 4 +++-
 .../tests/unittests/dist_fleet_debug_gloo.py | 4 +++-
 .../tests/unittests/fleet_ps_training.py | 4 +++-
 .../tests/unittests/ps/ps_dnn_trainer.py | 6 ++---
 .../tests/unittests/test_communicator_geo.py | 2 +-
 .../fluid/tests/unittests/test_dataset.py | 10 ++++++---
 .../fluid/tests/unittests/test_downpoursgd.py | 4 ++--
 .../fluid/tests/unittests/test_fleet.py | 4 +++-
 .../tests/unittests/test_fleet_api_input.py | 8 ++++---
 .../tests/unittests/test_fleet_nocvm_1.py | 4 +++-
 .../fluid/tests/unittests/test_fleet_ps.py | 4 ++--
 .../unittests/test_fleet_pyramid_hash.py | 6 +++--
 .../tests/unittests/test_fleet_rolemaker.py | 4 +++-
 .../tests/unittests/test_fleet_rolemaker_2.py | 6 ++---
 .../tests/unittests/test_fleet_rolemaker_3.py | 4 +++-
 .../unittests/test_fleet_unitaccessor.py | 4 +++-
 .../tests/unittests/test_ps_dispatcher.py | 2 +-
 .../fluid/transpiler/geo_sgd_transpiler.py | 4 +++-
 .../incubate/distributed/fleet/fleet_util.py | 4 ++--
 .../fleet/parameter_server/__init__.py | 0
 .../distribute_transpiler/__init__.py | 22 +++++++++++--------
 .../distributed_strategy.py | 4 +++-
 .../fleet/parameter_server/ir/__init__.py | 0
 .../parameter_server/ir/heter_trainer_pass.py | 2 +-
 .../parameter_server/ir/ps_dispatcher.py | 0
 .../fleet/parameter_server/ir/pserver_pass.py | 2 +-
 .../fleet/parameter_server/ir/public.py | 10 ++++++---
 .../fleet/parameter_server/ir/trainer_pass.py | 6 +++--
 .../fleet/parameter_server/ir/ufind.py | 0
 .../parameter_server/ir/vars_metatools.py | 0
 .../fleet/parameter_server/mode.py | 0
 .../fleet/parameter_server/pslib/.gitignore | 0
 .../fleet/parameter_server/pslib/__init__.py | 0
 .../fleet/parameter_server/pslib/node.py | 0
 .../pslib/optimizer_factory.py | 0
 python/setup.py.in | 14 ++++++------
 setup.py | 14 ++++++------
 44 files changed, 145 insertions(+), 101 deletions(-)
 rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/__init__.py (100%)
 rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/distribute_transpiler/__init__.py (97%)
 rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/distribute_transpiler/distributed_strategy.py (99%)
 rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/ir/__init__.py (100%)
 rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/ir/heter_trainer_pass.py (97%)
 rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/ir/ps_dispatcher.py (100%)
 rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/ir/pserver_pass.py (99%)
 rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/ir/public.py (99%)
 rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/ir/trainer_pass.py (99%)
 rename python/paddle/incubate/{ =>
distributed}/fleet/parameter_server/ir/ufind.py (100%) rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/ir/vars_metatools.py (100%) rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/mode.py (100%) rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/pslib/.gitignore (100%) rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/pslib/__init__.py (100%) rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/pslib/node.py (100%) rename python/paddle/incubate/{ => distributed}/fleet/parameter_server/pslib/optimizer_factory.py (100%) diff --git a/paddle/fluid/distributed/CMakeLists.txt b/paddle/fluid/distributed/CMakeLists.txt index 5eb5ae79ad6..5f4bef5a2b1 100755 --- a/paddle/fluid/distributed/CMakeLists.txt +++ b/paddle/fluid/distributed/CMakeLists.txt @@ -7,7 +7,7 @@ if(WITH_PYTHON) file(MAKE_DIRECTORY ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto) set(PSLIB_PROTO_DSTPATH - "${PADDLE_SOURCE_DIR}/python/paddle/incubate/fleet/parameter_server/pslib/" + "${PADDLE_SOURCE_DIR}/python/paddle/incubate/distributed/fleet/parameter_server/pslib/" ) if(NOT WIN32) add_custom_command( diff --git a/python/paddle/distributed/fleet/meta_optimizers/parameter_server_optimizer.py b/python/paddle/distributed/fleet/meta_optimizers/parameter_server_optimizer.py index dba51efa768..7f4cf25947f 100644 --- a/python/paddle/distributed/fleet/meta_optimizers/parameter_server_optimizer.py +++ b/python/paddle/distributed/fleet/meta_optimizers/parameter_server_optimizer.py @@ -74,7 +74,7 @@ class ParameterServerOptimizer(MetaOptimizerBase): } def _get_distributed_strategy(self): - from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( + from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( StrategyFactory, ) @@ -96,7 +96,7 @@ class ParameterServerOptimizer(MetaOptimizerBase): return strategy def _build_trainer_programs(self, compiled_config): - from paddle.incubate.fleet.parameter_server.ir import ( + from paddle.incubate.distributed.fleet.parameter_server.ir import ( trainer_pass as worker, ) @@ -106,7 +106,7 @@ class ParameterServerOptimizer(MetaOptimizerBase): use_ps_gpu = self.user_defined_strategy.a_sync_configs["use_ps_gpu"] if not compiled_config.is_geo_mode(): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _add_lr_decay_table_pass, ) @@ -150,7 +150,7 @@ class ParameterServerOptimizer(MetaOptimizerBase): compiled_config.set_origin_ps_startup_program(_startup) # for heter program if self.role_maker._is_heter_parameter_server_mode: - from paddle.incubate.fleet.parameter_server.ir import ( + from paddle.incubate.distributed.fleet.parameter_server.ir import ( heter_trainer_pass as heter_worker, ) @@ -191,13 +191,13 @@ class ParameterServerOptimizer(MetaOptimizerBase): _main = paddle.static.Program() _startup = paddle.static.Program() - from paddle.incubate.fleet.parameter_server.ir import ( + from paddle.incubate.distributed.fleet.parameter_server.ir import ( pserver_pass as server, ) if not compiled_config.is_geo_mode(): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _get_optimize_ops, ) @@ -209,7 +209,7 @@ class ParameterServerOptimizer(MetaOptimizerBase): if len(ops) == 0: return _main, _startup - from 
paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _add_lr_decay_table_pass, ) @@ -299,7 +299,9 @@ class ParameterServerOptimizer(MetaOptimizerBase): free = get_sys_free_mem() - from paddle.incubate.fleet.parameter_server.ir import vars_metatools + from paddle.incubate.distributed.fleet.parameter_server.ir import ( + vars_metatools, + ) processed_var_names = set(["@EMPTY@"]) param_memory_size = 0 @@ -369,7 +371,9 @@ class ParameterServerOptimizer(MetaOptimizerBase): _origin_main_program = loss.block.program _origin_startup_program = startup_program - from paddle.incubate.fleet.parameter_server.ir import public as public + from paddle.incubate.distributed.fleet.parameter_server.ir import ( + public as public, + ) compiled_config = public.CompileTimeStrategy( _origin_main_program, diff --git a/python/paddle/distributed/fleet/runtime/parameter_server_runtime.py b/python/paddle/distributed/fleet/runtime/parameter_server_runtime.py index c05b0c63464..0d4c3944c72 100644 --- a/python/paddle/distributed/fleet/runtime/parameter_server_runtime.py +++ b/python/paddle/distributed/fleet/runtime/parameter_server_runtime.py @@ -50,7 +50,7 @@ class ParameterServerRuntime(RuntimeBase): def _get_distributed_strategy(self): strategy = None - from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( + from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( StrategyFactory, ) @@ -72,7 +72,7 @@ class ParameterServerRuntime(RuntimeBase): return strategy def build_compiled_startegy(self): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( CompileTimeStrategy, ) @@ -101,7 +101,7 @@ class ParameterServerRuntime(RuntimeBase): if main_program is None: main_program = self.origin_main_program - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _get_varname_parts, ) @@ -137,7 +137,7 @@ class ParameterServerRuntime(RuntimeBase): def _load_distributed_params(self, dirname, varnames): from paddle.distributed.communicator import LargeScaleKV - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _get_varname_parts, ) @@ -153,7 +153,7 @@ class ParameterServerRuntime(RuntimeBase): if var.name in exclude_var_names: return False - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _get_varname_parts, ) @@ -184,7 +184,7 @@ class ParameterServerRuntime(RuntimeBase): return kwargs def geo_strategy_envs(): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( get_sparse_tablenames, ) @@ -238,11 +238,11 @@ class ParameterServerRuntime(RuntimeBase): kwargs["sparse_attrs"] = get_sparse_attrs() return kwargs - from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( + from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( GeoStrategy, SyncStrategy, ) - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _get_lr_ops, _has_global_step, ) @@ -474,7 +474,7 
@@ class ParameterServerRuntime(RuntimeBase): return reshaped_names, origin_names def _get_optimizer_op(self, param_name): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _get_optimize_ops, ) diff --git a/python/paddle/distributed/fleet/runtime/the_one_ps.py b/python/paddle/distributed/fleet/runtime/the_one_ps.py index c54a1a74901..51e8cd2bfc1 100644 --- a/python/paddle/distributed/fleet/runtime/the_one_ps.py +++ b/python/paddle/distributed/fleet/runtime/the_one_ps.py @@ -36,7 +36,7 @@ PSERVER_SAVE_SUFFIX = ".shard" def parse_table_class(varname, o_main_program): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( is_distributed_sparse_op, is_sparse_op, ) @@ -247,7 +247,7 @@ class CommonAccessor: self.opt_init_map = opt_init_map def parse_entry(self, varname, o_main_program): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( is_distributed_sparse_op, is_sparse_op, ) @@ -304,7 +304,7 @@ class CommonAccessor: compiled_strategy, adam_d2sum, ): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _get_optimize_ops, ) @@ -692,7 +692,7 @@ class TheOnePSRuntime(RuntimeBase): def _get_distributed_strategy(self): strategy = None - from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( + from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( StrategyFactory, ) @@ -716,7 +716,7 @@ class TheOnePSRuntime(RuntimeBase): return strategy def build_compiled_startegy(self): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( CompileTimeStrategy, ) @@ -731,7 +731,7 @@ class TheOnePSRuntime(RuntimeBase): return compiled_config def _init_worker(self): - from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( + from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( SyncStrategy, ) @@ -1191,7 +1191,7 @@ class TheOnePSRuntime(RuntimeBase): proto_txt, string_hosts, role_id, trainers, self._server_sub_program ) - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( get_sparse_tablenames, ) @@ -1252,7 +1252,7 @@ class TheOnePSRuntime(RuntimeBase): if var.name in exclude_var_names: return False - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _get_varname_parts, ) @@ -1283,7 +1283,7 @@ class TheOnePSRuntime(RuntimeBase): def _save_sparse_params( self, executor, dirname, context, main_program, mode ): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( get_sparse_tablenames, ) @@ -1479,7 +1479,7 @@ class TheOnePSRuntime(RuntimeBase): self._ps_inference_save_persistables(*args, **kwargs) def _load_sparse_params(self, dirname, context, main_program, mode): - from paddle.incubate.fleet.parameter_server.ir.public import ( + from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( get_sparse_tablenames, ) diff 
--git a/python/paddle/fluid/dataset.py b/python/paddle/fluid/dataset.py index 563027caed1..6f1ef89c504 100644 --- a/python/paddle/fluid/dataset.py +++ b/python/paddle/fluid/dataset.py @@ -867,7 +867,7 @@ class InMemoryDataset(DatasetBase): # required: skiptest import paddle.fluid as fluid - from paddle.incubate.fleet.parameter_server.pslib import fleet + from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset") filelist = ["a.txt", "b.txt"] dataset.set_filelist(filelist) @@ -929,7 +929,7 @@ class InMemoryDataset(DatasetBase): # required: skiptest import paddle.fluid as fluid - from paddle.incubate.fleet.parameter_server.pslib import fleet + from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset") filelist = ["a.txt", "b.txt"] dataset.set_filelist(filelist) @@ -993,7 +993,7 @@ class InMemoryDataset(DatasetBase): # required: skiptest import paddle.fluid as fluid - from paddle.incubate.fleet.parameter_server.pslib import fleet + from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset") filelist = ["a.txt", "b.txt"] dataset.set_filelist(filelist) @@ -1037,7 +1037,7 @@ class InMemoryDataset(DatasetBase): # required: skiptest import paddle.fluid as fluid - from paddle.incubate.fleet.parameter_server.pslib import fleet + from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset") filelist = ["a.txt", "b.txt"] dataset.set_filelist(filelist) @@ -1084,7 +1084,7 @@ class InMemoryDataset(DatasetBase): # required: skiptest import paddle.fluid as fluid - from paddle.incubate.fleet.parameter_server.pslib import fleet + from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset") graph_config = {"walk_len": 24, "walk_degree": 10, @@ -1251,7 +1251,7 @@ class QueueDataset(DatasetBase): .. 
code-block:: python import paddle.fluid as fluid - from paddle.incubate.fleet.parameter_server.pslib import fleet + from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet dataset = fluid.DatasetFactory().create_dataset("QueueDataset") #dataset.global_shuffle(fleet) diff --git a/python/paddle/fluid/device_worker.py b/python/paddle/fluid/device_worker.py index b2e01e3b9d3..6836ee8fad0 100644 --- a/python/paddle/fluid/device_worker.py +++ b/python/paddle/fluid/device_worker.py @@ -123,7 +123,7 @@ class Hogwild(DeviceWorker): hogwild.stat_var_names.extend([i]) downpour.stat_var_names.extend([i]) - from paddle.incubate.fleet.parameter_server import version + from paddle.incubate.distributed.fleet.parameter_server import version if ( version.is_transpiler() @@ -271,7 +271,7 @@ class DownpourLite(DeviceWorker): for i in opt_info["stat_var_names"]: downpour.stat_var_names.extend([i]) - from paddle.incubate.fleet.parameter_server import version + from paddle.incubate.distributed.fleet.parameter_server import version if ( version.is_transpiler() diff --git a/python/paddle/fluid/tests/unittests/collective/fleet/test_distributed_strategy.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_distributed_strategy.py index 5e7cf29afc8..f9148365ffc 100644 --- a/python/paddle/fluid/tests/unittests/collective/fleet/test_distributed_strategy.py +++ b/python/paddle/fluid/tests/unittests/collective/fleet/test_distributed_strategy.py @@ -22,8 +22,10 @@ from paddle.fluid.transpiler.distribute_transpiler import ( DistributeTranspilerConfig, ServerRuntimeConfig, ) -from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet -from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( +from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import ( + fleet, +) +from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( StrategyFactory, ) diff --git a/python/paddle/fluid/tests/unittests/collective/fleet/test_recv_save_op.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_recv_save_op.py index 39487aa6212..ca95ac4abed 100644 --- a/python/paddle/fluid/tests/unittests/collective/fleet/test_recv_save_op.py +++ b/python/paddle/fluid/tests/unittests/collective/fleet/test_recv_save_op.py @@ -26,7 +26,9 @@ import paddle.fluid as fluid import paddle.fluid.core as core from paddle.fluid.framework import Program, program_guard from paddle.fluid.op import Operator -from paddle.incubate.fleet.parameter_server.mode import DistributedMode +from paddle.incubate.distributed.fleet.parameter_server.mode import ( + DistributedMode, +) def run_pserver(pserver_id): diff --git a/python/paddle/fluid/tests/unittests/dist_fleet_debug_gloo.py b/python/paddle/fluid/tests/unittests/dist_fleet_debug_gloo.py index eae12cbf371..83c566feb2d 100644 --- a/python/paddle/fluid/tests/unittests/dist_fleet_debug_gloo.py +++ b/python/paddle/fluid/tests/unittests/dist_fleet_debug_gloo.py @@ -16,7 +16,9 @@ import logging # import paddle.incubate.distributed.fleet.role_maker as role_maker import paddle.distributed.fleet.base.role_maker as role_maker -from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet +from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import ( + fleet, +) logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s") logger = logging.getLogger("fluid") diff --git 
a/python/paddle/fluid/tests/unittests/fleet_ps_training.py b/python/paddle/fluid/tests/unittests/fleet_ps_training.py index e4092e6eef8..fb2e23bfec1 100644 --- a/python/paddle/fluid/tests/unittests/fleet_ps_training.py +++ b/python/paddle/fluid/tests/unittests/fleet_ps_training.py @@ -18,7 +18,9 @@ from utils import gen_data import paddle import paddle.fluid as fluid from paddle.incubate.distributed.fleet import role_maker -from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet +from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import ( + fleet, +) input_x = paddle.static.data(name="x", shape=[-1, 32], dtype='float32') input_y = paddle.static.data(name="y", shape=[-1, 1], dtype='int64') diff --git a/python/paddle/fluid/tests/unittests/ps/ps_dnn_trainer.py b/python/paddle/fluid/tests/unittests/ps/ps_dnn_trainer.py index 0133516a52f..ffa0bbe5def 100755 --- a/python/paddle/fluid/tests/unittests/ps/ps_dnn_trainer.py +++ b/python/paddle/fluid/tests/unittests/ps/ps_dnn_trainer.py @@ -233,7 +233,7 @@ def get_user_defined_strategy(config): def get_distributed_strategy(user_defined_strategy): # pslib - from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( + from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( StrategyFactory, ) @@ -444,7 +444,7 @@ class DnnTrainer: print( "entering run {} - old".format(str(config["applied_pass_name"])) ) - from paddle.incubate.fleet.parameter_server.ir import ( + from paddle.incubate.distributed.fleet.parameter_server.ir import ( public as public, ) @@ -458,7 +458,7 @@ class DnnTrainer: _main = compiled_config.origin_main_program.clone() _startup = compiled_config.origin_startup_program.clone() - from paddle.incubate.fleet.parameter_server.ir import ( + from paddle.incubate.distributed.fleet.parameter_server.ir import ( trainer_pass as worker, ) diff --git a/python/paddle/fluid/tests/unittests/test_communicator_geo.py b/python/paddle/fluid/tests/unittests/test_communicator_geo.py index 580343443dd..7dcf9251fb4 100644 --- a/python/paddle/fluid/tests/unittests/test_communicator_geo.py +++ b/python/paddle/fluid/tests/unittests/test_communicator_geo.py @@ -140,7 +140,7 @@ import paddle.fluid as fluid from paddle.distributed.communicator import Communicator import paddle.incubate.distributed.fleet.role_maker as role_maker -from paddle.incubate.fleet.parameter_server.mode import DistributedMode +from paddle.incubate.distributed.fleet.parameter_server.mode import DistributedMode import paddle.distributed.fleet as fleet from test_communicator_geo import TestCommunicatorGeoEnd2End diff --git a/python/paddle/fluid/tests/unittests/test_dataset.py b/python/paddle/fluid/tests/unittests/test_dataset.py index 803c33ddd5c..d04cb0965b3 100644 --- a/python/paddle/fluid/tests/unittests/test_dataset.py +++ b/python/paddle/fluid/tests/unittests/test_dataset.py @@ -1113,7 +1113,7 @@ class TestDataset2(unittest.TestCase): train_program = fluid.Program() startup_program = fluid.Program() scope = fluid.Scope() - from paddle.incubate.fleet.parameter_server.distribute_transpiler import ( + from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import ( fleet, ) @@ -1185,7 +1185,9 @@ class TestDataset2(unittest.TestCase): train_program = fluid.Program() startup_program = fluid.Program() scope = fluid.Scope() - from paddle.incubate.fleet.parameter_server.pslib import fleet + from 
paddle.incubate.distributed.fleet.parameter_server.pslib import ( + fleet, + ) with fluid.program_guard(train_program, startup_program): slots = ["slot1_ff", "slot2_ff", "slot3_ff", "slot4_ff"] @@ -1316,7 +1318,9 @@ class TestDataset2(unittest.TestCase): train_program = fluid.Program() startup_program = fluid.Program() scope = fluid.Scope() - from paddle.incubate.fleet.parameter_server.pslib import fleet + from paddle.incubate.distributed.fleet.parameter_server.pslib import ( + fleet, + ) with fluid.program_guard(train_program, startup_program): slots = ["slot1_ff", "slot2_ff", "slot3_ff", "slot4_ff"] diff --git a/python/paddle/fluid/tests/unittests/test_downpoursgd.py b/python/paddle/fluid/tests/unittests/test_downpoursgd.py index 809ac04f436..bee753309fe 100644 --- a/python/paddle/fluid/tests/unittests/test_downpoursgd.py +++ b/python/paddle/fluid/tests/unittests/test_downpoursgd.py @@ -21,9 +21,9 @@ from google.protobuf import text_format import paddle import paddle.fluid as fluid -import paddle.incubate.fleet.parameter_server.pslib.ps_pb2 as pslib +import paddle.incubate.distributed.fleet.parameter_server.pslib.ps_pb2 as pslib from paddle.fluid.trainer_factory import TrainerFactory -from paddle.incubate.fleet.parameter_server.pslib.node import ( +from paddle.incubate.distributed.fleet.parameter_server.pslib.node import ( DownpourServer, DownpourWorker, ) diff --git a/python/paddle/fluid/tests/unittests/test_fleet.py b/python/paddle/fluid/tests/unittests/test_fleet.py index 04ef33a5108..3af706876e0 100644 --- a/python/paddle/fluid/tests/unittests/test_fleet.py +++ b/python/paddle/fluid/tests/unittests/test_fleet.py @@ -34,10 +34,12 @@ class TestFleet1(unittest.TestCase): """Test cases for pslib.""" import paddle import paddle.fluid as fluid + from paddle.incubate.distributed.fleet.parameter_server.pslib import ( + fleet, + ) from paddle.incubate.distributed.fleet.role_maker import ( GeneralRoleMaker, ) - from paddle.incubate.fleet.parameter_server.pslib import fleet os.environ["POD_IP"] = "127.0.0.1" os.environ["PADDLE_PORT"] = "36001" diff --git a/python/paddle/fluid/tests/unittests/test_fleet_api_input.py b/python/paddle/fluid/tests/unittests/test_fleet_api_input.py index faae7b301bf..596af335b6e 100644 --- a/python/paddle/fluid/tests/unittests/test_fleet_api_input.py +++ b/python/paddle/fluid/tests/unittests/test_fleet_api_input.py @@ -23,15 +23,17 @@ from paddle.fluid.transpiler.distribute_transpiler import ( DistributeTranspilerConfig, ) from paddle.incubate.distributed.fleet.collective import CollectiveOptimizer + +# from paddle.incubate.distributed.fleet.parameter_server import TranspilerOptimizer +from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import ( + fleet, +) from paddle.incubate.distributed.fleet.role_maker import ( Role, UserDefinedCollectiveRoleMaker, UserDefinedRoleMaker, ) -# from paddle.incubate.fleet.parameter_server import TranspilerOptimizer -from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet - class DistributeTranspilerConfigTest(unittest.TestCase): def set_runtime_split_send_recv(self, config, value): diff --git a/python/paddle/fluid/tests/unittests/test_fleet_nocvm_1.py b/python/paddle/fluid/tests/unittests/test_fleet_nocvm_1.py index e6071c43070..2959043482c 100644 --- a/python/paddle/fluid/tests/unittests/test_fleet_nocvm_1.py +++ b/python/paddle/fluid/tests/unittests/test_fleet_nocvm_1.py @@ -34,10 +34,12 @@ class TestFleet1(unittest.TestCase): def test_pslib_1(self): """Test cases for pslib.""" 
import paddle.fluid as fluid + from paddle.incubate.distributed.fleet.parameter_server.pslib import ( + fleet, + ) from paddle.incubate.distributed.fleet.role_maker import ( GeneralRoleMaker, ) - from paddle.incubate.fleet.parameter_server.pslib import fleet os.environ["POD_IP"] = "127.0.0.1" os.environ["PADDLE_PORT"] = "36001" diff --git a/python/paddle/fluid/tests/unittests/test_fleet_ps.py b/python/paddle/fluid/tests/unittests/test_fleet_ps.py index 0f55f6a94d9..75cb4b595b3 100644 --- a/python/paddle/fluid/tests/unittests/test_fleet_ps.py +++ b/python/paddle/fluid/tests/unittests/test_fleet_ps.py @@ -15,7 +15,7 @@ import unittest from paddle.fluid.framework import default_main_program -from paddle.incubate.fleet.parameter_server.ir.pserver_pass import ( +from paddle.incubate.distributed.fleet.parameter_server.ir.pserver_pass import ( _get_optimizer_input_shape, ) @@ -24,7 +24,7 @@ main_program = default_main_program() class TestFleetPS(unittest.TestCase): def test_version(self): - from paddle.incubate.fleet.parameter_server import version + from paddle.incubate.distributed.fleet.parameter_server import version transpiler = version.is_transpiler() self.assertEqual(transpiler, True) diff --git a/python/paddle/fluid/tests/unittests/test_fleet_pyramid_hash.py b/python/paddle/fluid/tests/unittests/test_fleet_pyramid_hash.py index bbd312d9f3a..e34b98747f7 100644 --- a/python/paddle/fluid/tests/unittests/test_fleet_pyramid_hash.py +++ b/python/paddle/fluid/tests/unittests/test_fleet_pyramid_hash.py @@ -17,8 +17,10 @@ import unittest import paddle import paddle.fluid as fluid import paddle.incubate.distributed.fleet.role_maker as role_maker -from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet -from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( +from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import ( + fleet, +) +from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( StrategyFactory, ) diff --git a/python/paddle/fluid/tests/unittests/test_fleet_rolemaker.py b/python/paddle/fluid/tests/unittests/test_fleet_rolemaker.py index 675adf5a1e7..feea3d413a4 100644 --- a/python/paddle/fluid/tests/unittests/test_fleet_rolemaker.py +++ b/python/paddle/fluid/tests/unittests/test_fleet_rolemaker.py @@ -63,10 +63,12 @@ class TestCloudRoleMaker(unittest.TestCase): def test_pslib_1(self): """Test cases for pslib.""" import paddle.fluid as fluid + from paddle.incubate.distributed.fleet.parameter_server.pslib import ( + fleet, + ) from paddle.incubate.distributed.fleet.role_maker import ( GeneralRoleMaker, ) - from paddle.incubate.fleet.parameter_server.pslib import fleet os.environ["POD_IP"] = "127.0.0.1" os.environ["PADDLE_PORT"] = "36001" diff --git a/python/paddle/fluid/tests/unittests/test_fleet_rolemaker_2.py b/python/paddle/fluid/tests/unittests/test_fleet_rolemaker_2.py index ebe5ed2ac28..8be8bab7466 100644 --- a/python/paddle/fluid/tests/unittests/test_fleet_rolemaker_2.py +++ b/python/paddle/fluid/tests/unittests/test_fleet_rolemaker_2.py @@ -35,13 +35,13 @@ class TestCloudRoleMaker2(unittest.TestCase): def test_pslib_2(self): """Test cases for pslib.""" import paddle.fluid as fluid + from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import ( + fleet, + ) from paddle.incubate.distributed.fleet.role_maker import ( GeneralRoleMaker, RoleMakerBase, ) - from paddle.incubate.fleet.parameter_server.distribute_transpiler import ( 
- fleet, - ) paddle.enable_static() diff --git a/python/paddle/fluid/tests/unittests/test_fleet_rolemaker_3.py b/python/paddle/fluid/tests/unittests/test_fleet_rolemaker_3.py index 5833fd74e84..b17f877dc15 100644 --- a/python/paddle/fluid/tests/unittests/test_fleet_rolemaker_3.py +++ b/python/paddle/fluid/tests/unittests/test_fleet_rolemaker_3.py @@ -34,10 +34,12 @@ class TestCloudRoleMaker(unittest.TestCase): def test_pslib_1(self): """Test cases for pslib.""" import paddle.fluid as fluid + from paddle.incubate.distributed.fleet.parameter_server.pslib import ( + fleet, + ) from paddle.incubate.distributed.fleet.role_maker import ( GeneralRoleMaker, ) - from paddle.incubate.fleet.parameter_server.pslib import fleet os.environ["POD_IP"] = "127.0.0.1" os.environ["PADDLE_PORT"] = "36001" diff --git a/python/paddle/fluid/tests/unittests/test_fleet_unitaccessor.py b/python/paddle/fluid/tests/unittests/test_fleet_unitaccessor.py index 86e75a43f4e..40e7851c350 100644 --- a/python/paddle/fluid/tests/unittests/test_fleet_unitaccessor.py +++ b/python/paddle/fluid/tests/unittests/test_fleet_unitaccessor.py @@ -34,10 +34,12 @@ class TestFleet1(unittest.TestCase): def test_pslib_1(self): """Test cases for pslib.""" import paddle.fluid as fluid + from paddle.incubate.distributed.fleet.parameter_server.pslib import ( + fleet, + ) from paddle.incubate.distributed.fleet.role_maker import ( GeneralRoleMaker, ) - from paddle.incubate.fleet.parameter_server.pslib import fleet os.environ["POD_IP"] = "127.0.0.1" os.environ["PADDLE_PORT"] = "36001" diff --git a/python/paddle/fluid/tests/unittests/test_ps_dispatcher.py b/python/paddle/fluid/tests/unittests/test_ps_dispatcher.py index 33709165747..6308307c870 100644 --- a/python/paddle/fluid/tests/unittests/test_ps_dispatcher.py +++ b/python/paddle/fluid/tests/unittests/test_ps_dispatcher.py @@ -14,7 +14,7 @@ import unittest -from paddle.incubate.fleet.parameter_server.ir.ps_dispatcher import ( +from paddle.incubate.distributed.fleet.parameter_server.ir.ps_dispatcher import ( HashName, PSDispatcher, RoundRobin, diff --git a/python/paddle/fluid/transpiler/geo_sgd_transpiler.py b/python/paddle/fluid/transpiler/geo_sgd_transpiler.py index 8b6cbd8055a..82921781145 100644 --- a/python/paddle/fluid/transpiler/geo_sgd_transpiler.py +++ b/python/paddle/fluid/transpiler/geo_sgd_transpiler.py @@ -49,7 +49,9 @@ from .distribute_transpiler import ( same_or_split_var, ServerRuntimeConfig, ) -from paddle.incubate.fleet.parameter_server.mode import DistributedMode +from paddle.incubate.distributed.fleet.parameter_server.mode import ( + DistributedMode, +) from paddle.distributed.distribute_lookup_table import ( find_distributed_lookup_table, ) diff --git a/python/paddle/incubate/distributed/fleet/fleet_util.py b/python/paddle/incubate/distributed/fleet/fleet_util.py index fc12d5eea27..ce1cdd74de1 100644 --- a/python/paddle/incubate/distributed/fleet/fleet_util.py +++ b/python/paddle/incubate/distributed/fleet/fleet_util.py @@ -54,13 +54,13 @@ class FleetUtil: def __init__(self, mode="pslib"): global fleet if mode == "pslib": - from paddle.incubate.fleet.parameter_server.pslib import ( + from paddle.incubate.distributed.fleet.parameter_server.pslib import ( fleet as fleet_pslib, ) fleet = fleet_pslib elif mode == "transpiler": - from paddle.incubate.fleet.parameter_server.distribute_transpiler import ( + from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import ( fleet as fleet_transpiler, ) diff --git 
a/python/paddle/incubate/fleet/parameter_server/__init__.py b/python/paddle/incubate/distributed/fleet/parameter_server/__init__.py similarity index 100% rename from python/paddle/incubate/fleet/parameter_server/__init__.py rename to python/paddle/incubate/distributed/fleet/parameter_server/__init__.py diff --git a/python/paddle/incubate/fleet/parameter_server/distribute_transpiler/__init__.py b/python/paddle/incubate/distributed/fleet/parameter_server/distribute_transpiler/__init__.py similarity index 97% rename from python/paddle/incubate/fleet/parameter_server/distribute_transpiler/__init__.py rename to python/paddle/incubate/distributed/fleet/parameter_server/distribute_transpiler/__init__.py index 46c95ac3d72..f60ef7fd54f 100644 --- a/python/paddle/incubate/fleet/parameter_server/distribute_transpiler/__init__.py +++ b/python/paddle/incubate/distributed/fleet/parameter_server/distribute_transpiler/__init__.py @@ -39,15 +39,17 @@ from paddle.incubate.distributed.fleet.base import Fleet from paddle.incubate.distributed.fleet.base import Mode from paddle.incubate.distributed.fleet.role_maker import MPISymetricRoleMaker -from paddle.incubate.fleet.parameter_server import version -from paddle.incubate.fleet.parameter_server.ir.public import ( +from paddle.incubate.distributed.fleet.parameter_server import version +from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( get_sparse_tablenames, ) -from paddle.incubate.fleet.parameter_server.ir.public import _get_lr_ops -from paddle.incubate.fleet.parameter_server.ir.public import ( +from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( + _get_lr_ops, +) +from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _has_global_step, ) -from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( +from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import ( TrainerRuntimeConfig, DistributedStrategy, SyncStrategy, @@ -61,15 +63,17 @@ from paddle.distributed.fleet.base.private_helper_function import ( wait_server_ready, ) from paddle.incubate.distributed.fleet.base import DistributedOptimizer -from paddle.incubate.fleet.parameter_server.mode import PSMode +from paddle.incubate.distributed.fleet.parameter_server.mode import PSMode -from paddle.incubate.fleet.parameter_server.ir import ( +from paddle.incubate.distributed.fleet.parameter_server.ir import ( trainer_pass as worker, ) -from paddle.incubate.fleet.parameter_server.ir import ( +from paddle.incubate.distributed.fleet.parameter_server.ir import ( pserver_pass as server, ) -from paddle.incubate.fleet.parameter_server.ir import public as public +from paddle.incubate.distributed.fleet.parameter_server.ir import ( + public as public, +) class FleetTranspiler(Fleet): diff --git a/python/paddle/incubate/fleet/parameter_server/distribute_transpiler/distributed_strategy.py b/python/paddle/incubate/distributed/fleet/parameter_server/distribute_transpiler/distributed_strategy.py similarity index 99% rename from python/paddle/incubate/fleet/parameter_server/distribute_transpiler/distributed_strategy.py rename to python/paddle/incubate/distributed/fleet/parameter_server/distribute_transpiler/distributed_strategy.py index a5ae6ab02b2..799755a2957 100644 --- a/python/paddle/incubate/fleet/parameter_server/distribute_transpiler/distributed_strategy.py +++ b/python/paddle/incubate/distributed/fleet/parameter_server/distribute_transpiler/distributed_strategy.py @@ -29,7 
+29,9 @@ from paddle.fluid.transpiler.distribute_transpiler import ( DistributeTranspilerConfig, ServerRuntimeConfig, ) -from paddle.incubate.fleet.parameter_server.mode import DistributedMode +from paddle.incubate.distributed.fleet.parameter_server.mode import ( + DistributedMode, +) class TrainerRuntimeConfig: diff --git a/python/paddle/incubate/fleet/parameter_server/ir/__init__.py b/python/paddle/incubate/distributed/fleet/parameter_server/ir/__init__.py similarity index 100% rename from python/paddle/incubate/fleet/parameter_server/ir/__init__.py rename to python/paddle/incubate/distributed/fleet/parameter_server/ir/__init__.py diff --git a/python/paddle/incubate/fleet/parameter_server/ir/heter_trainer_pass.py b/python/paddle/incubate/distributed/fleet/parameter_server/ir/heter_trainer_pass.py similarity index 97% rename from python/paddle/incubate/fleet/parameter_server/ir/heter_trainer_pass.py rename to python/paddle/incubate/distributed/fleet/parameter_server/ir/heter_trainer_pass.py index 56c2e7fa200..36ad45be3eb 100644 --- a/python/paddle/incubate/fleet/parameter_server/ir/heter_trainer_pass.py +++ b/python/paddle/incubate/distributed/fleet/parameter_server/ir/heter_trainer_pass.py @@ -15,7 +15,7 @@ import warnings import paddle -from paddle.incubate.fleet.parameter_server.ir.trainer_pass import ( +from paddle.incubate.distributed.fleet.parameter_server.ir.trainer_pass import ( create_heter_program, create_trainer_program, find_block_joints, diff --git a/python/paddle/incubate/fleet/parameter_server/ir/ps_dispatcher.py b/python/paddle/incubate/distributed/fleet/parameter_server/ir/ps_dispatcher.py similarity index 100% rename from python/paddle/incubate/fleet/parameter_server/ir/ps_dispatcher.py rename to python/paddle/incubate/distributed/fleet/parameter_server/ir/ps_dispatcher.py diff --git a/python/paddle/incubate/fleet/parameter_server/ir/pserver_pass.py b/python/paddle/incubate/distributed/fleet/parameter_server/ir/pserver_pass.py similarity index 99% rename from python/paddle/incubate/fleet/parameter_server/ir/pserver_pass.py rename to python/paddle/incubate/distributed/fleet/parameter_server/ir/pserver_pass.py index 86db0764b35..8eb24587e16 100644 --- a/python/paddle/incubate/fleet/parameter_server/ir/pserver_pass.py +++ b/python/paddle/incubate/distributed/fleet/parameter_server/ir/pserver_pass.py @@ -15,7 +15,7 @@ import collections from paddle.framework import core -from paddle.incubate.fleet.parameter_server.ir.public import ( +from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _get_lr_ops, _get_optimize_ops, _get_varname_parts, diff --git a/python/paddle/incubate/fleet/parameter_server/ir/public.py b/python/paddle/incubate/distributed/fleet/parameter_server/ir/public.py similarity index 99% rename from python/paddle/incubate/fleet/parameter_server/ir/public.py rename to python/paddle/incubate/distributed/fleet/parameter_server/ir/public.py index 9b58396d74e..9860cc24473 100755 --- a/python/paddle/incubate/fleet/parameter_server/ir/public.py +++ b/python/paddle/incubate/distributed/fleet/parameter_server/ir/public.py @@ -20,9 +20,13 @@ from functools import reduce import paddle from paddle.framework import core -from paddle.incubate.fleet.parameter_server.ir import vars_metatools -from paddle.incubate.fleet.parameter_server.ir.ps_dispatcher import RoundRobin -from paddle.incubate.fleet.parameter_server.mode import DistributedMode +from paddle.incubate.distributed.fleet.parameter_server.ir import vars_metatools +from 
paddle.incubate.distributed.fleet.parameter_server.ir.ps_dispatcher import ( + RoundRobin, +) +from paddle.incubate.distributed.fleet.parameter_server.mode import ( + DistributedMode, +) OP_NAME_SCOPE = "op_namescope" CLIP_OP_NAME_SCOPE = "gradient_clip" diff --git a/python/paddle/incubate/fleet/parameter_server/ir/trainer_pass.py b/python/paddle/incubate/distributed/fleet/parameter_server/ir/trainer_pass.py similarity index 99% rename from python/paddle/incubate/fleet/parameter_server/ir/trainer_pass.py rename to python/paddle/incubate/distributed/fleet/parameter_server/ir/trainer_pass.py index faab929cf47..8060a7b311e 100644 --- a/python/paddle/incubate/fleet/parameter_server/ir/trainer_pass.py +++ b/python/paddle/incubate/distributed/fleet/parameter_server/ir/trainer_pass.py @@ -22,12 +22,14 @@ import paddle import paddle.framework as framework from paddle.distributed.transpiler.details.program_utils import delete_ops from paddle.framework import core -from paddle.incubate.fleet.parameter_server.ir.public import ( +from paddle.incubate.distributed.fleet.parameter_server.ir.public import ( _get_lr_ops, _get_optimize_ops, get_sparse_tablenames, ) -from paddle.incubate.fleet.parameter_server.mode import DistributedMode +from paddle.incubate.distributed.fleet.parameter_server.mode import ( + DistributedMode, +) OP_NAME_SCOPE = "op_namescope" CLIP_OP_NAME_SCOPE = "gradient_clip" diff --git a/python/paddle/incubate/fleet/parameter_server/ir/ufind.py b/python/paddle/incubate/distributed/fleet/parameter_server/ir/ufind.py similarity index 100% rename from python/paddle/incubate/fleet/parameter_server/ir/ufind.py rename to python/paddle/incubate/distributed/fleet/parameter_server/ir/ufind.py diff --git a/python/paddle/incubate/fleet/parameter_server/ir/vars_metatools.py b/python/paddle/incubate/distributed/fleet/parameter_server/ir/vars_metatools.py similarity index 100% rename from python/paddle/incubate/fleet/parameter_server/ir/vars_metatools.py rename to python/paddle/incubate/distributed/fleet/parameter_server/ir/vars_metatools.py diff --git a/python/paddle/incubate/fleet/parameter_server/mode.py b/python/paddle/incubate/distributed/fleet/parameter_server/mode.py similarity index 100% rename from python/paddle/incubate/fleet/parameter_server/mode.py rename to python/paddle/incubate/distributed/fleet/parameter_server/mode.py diff --git a/python/paddle/incubate/fleet/parameter_server/pslib/.gitignore b/python/paddle/incubate/distributed/fleet/parameter_server/pslib/.gitignore similarity index 100% rename from python/paddle/incubate/fleet/parameter_server/pslib/.gitignore rename to python/paddle/incubate/distributed/fleet/parameter_server/pslib/.gitignore diff --git a/python/paddle/incubate/fleet/parameter_server/pslib/__init__.py b/python/paddle/incubate/distributed/fleet/parameter_server/pslib/__init__.py similarity index 100% rename from python/paddle/incubate/fleet/parameter_server/pslib/__init__.py rename to python/paddle/incubate/distributed/fleet/parameter_server/pslib/__init__.py diff --git a/python/paddle/incubate/fleet/parameter_server/pslib/node.py b/python/paddle/incubate/distributed/fleet/parameter_server/pslib/node.py similarity index 100% rename from python/paddle/incubate/fleet/parameter_server/pslib/node.py rename to python/paddle/incubate/distributed/fleet/parameter_server/pslib/node.py diff --git a/python/paddle/incubate/fleet/parameter_server/pslib/optimizer_factory.py b/python/paddle/incubate/distributed/fleet/parameter_server/pslib/optimizer_factory.py similarity index 
100% rename from python/paddle/incubate/fleet/parameter_server/pslib/optimizer_factory.py rename to python/paddle/incubate/distributed/fleet/parameter_server/pslib/optimizer_factory.py diff --git a/python/setup.py.in b/python/setup.py.in index dbcd4d4d5fc..d5a4d20397d 100644 --- a/python/setup.py.in +++ b/python/setup.py.in @@ -293,7 +293,7 @@ os.environ['CUDA_CACHE_MAXSIZE'] = '805306368' write_cuda_env_config_py(filename='@PADDLE_BINARY_DIR@/python/paddle/cuda_env.py') -def write_distributed_training_mode_py(filename='paddle/incubate/fleet/parameter_server/version.py'): +def write_distributed_training_mode_py(filename='paddle/incubate/distributed/fleet/parameter_server/version.py'): cnt = ''' # THIS FILE IS GENERATED FROM PADDLEPADDLE SETUP.PY @@ -320,7 +320,7 @@ def is_transpiler(): 'mode': 'PSLIB' if '${WITH_PSLIB}' == 'ON' else 'TRANSPILER' }) -write_distributed_training_mode_py(filename='@PADDLE_BINARY_DIR@/python/paddle/incubate/fleet/parameter_server/version.py') +write_distributed_training_mode_py(filename='@PADDLE_BINARY_DIR@/python/paddle/incubate/distributed/fleet/parameter_server/version.py') packages=['paddle', @@ -432,10 +432,10 @@ packages=['paddle', 'paddle.incubate.distributed.models', 'paddle.incubate.distributed.models.moe', 'paddle.incubate.distributed.models.moe.gate', - 'paddle.incubate.fleet.parameter_server', - 'paddle.incubate.fleet.parameter_server.distribute_transpiler', - 'paddle.incubate.fleet.parameter_server.pslib', - 'paddle.incubate.fleet.parameter_server.ir', + 'paddle.incubate.distributed.fleet.parameter_server', + 'paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler', + 'paddle.incubate.distributed.fleet.parameter_server.pslib', + 'paddle.incubate.distributed.fleet.parameter_server.ir', 'paddle.quantization', 'paddle.quantization.quanters', 'paddle.quantization.observers', @@ -592,7 +592,7 @@ if '${WITH_CINN}' == 'ON': if '${WITH_PSLIB}' == 'ON': shutil.copy('${PSLIB_LIB}', libs_path) if os.path.exists('${PSLIB_VERSION_PY}'): - shutil.copy('${PSLIB_VERSION_PY}', '${PADDLE_BINARY_DIR}/python/paddle/incubate/fleet/parameter_server/pslib/') + shutil.copy('${PSLIB_VERSION_PY}', '${PADDLE_BINARY_DIR}/python/paddle/incubate/distributed/fleet/parameter_server/pslib/') package_data['paddle.libs'] += ['libps' + ext_name] if '${WITH_MKLDNN}' == 'ON': diff --git a/setup.py b/setup.py index 275fe3b6bc9..aadac3d1d1e 100644 --- a/setup.py +++ b/setup.py @@ -579,7 +579,7 @@ os.environ['CUDA_CACHE_MAXSIZE'] = '805306368' def write_parameter_server_version_py( - filename='paddle/incubate/fleet/parameter_server/version.py', + filename='paddle/incubate/distributed/fleet/parameter_server/version.py', ): cnt = ''' @@ -973,7 +973,7 @@ def get_package_data_and_package_dir(): shutil.copy( env_dict.get("PSLIB_VERSION_PY"), paddle_binary_dir - + '/python/paddle/incubate/fleet/parameter_server/pslib/', + + '/python/paddle/incubate/distributed/fleet/parameter_server/pslib/', ) package_data['paddle.libs'] += ['libps' + ext_suffix] if env_dict.get("WITH_MKLDNN") == 'ON': @@ -1332,10 +1332,10 @@ def get_setup_parameters(): 'paddle.incubate.distributed.models', 'paddle.incubate.distributed.models.moe', 'paddle.incubate.distributed.models.moe.gate', - 'paddle.incubate.fleet.parameter_server', - 'paddle.incubate.fleet.parameter_server.distribute_transpiler', - 'paddle.incubate.fleet.parameter_server.ir', - 'paddle.incubate.fleet.parameter_server.pslib', + 'paddle.incubate.distributed.fleet.parameter_server', + 
'paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler', + 'paddle.incubate.distributed.fleet.parameter_server.ir', + 'paddle.incubate.distributed.fleet.parameter_server.pslib', 'paddle.quantization', 'paddle.quantization.quanters', 'paddle.quantization.observers', @@ -1461,7 +1461,7 @@ def main(): filename='{}/python/paddle/cuda_env.py'.format(paddle_binary_dir) ) write_parameter_server_version_py( - filename='{}/python/paddle/incubate/fleet/parameter_server/version.py'.format( + filename='{}/python/paddle/incubate/distributed/fleet/parameter_server/version.py'.format( paddle_binary_dir ) ) -- GitLab
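
Migration sketch (illustrative, assuming a Paddle build that includes this patch): the renames above only move the parameter_server package from paddle.incubate.fleet to paddle.incubate.distributed.fleet; the submodules and their symbols keep their names, so downstream code only updates the package prefix. The paths below are taken directly from the renames in this diff.

# Old locations (removed by this patch):
#   from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet
#   from paddle.incubate.fleet.parameter_server.mode import DistributedMode

# New locations (introduced by this patch):
from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import (
    fleet,
)
from paddle.incubate.distributed.fleet.parameter_server.mode import (
    DistributedMode,
)

The same prefix substitution applies to the pslib and ir submodules touched in this patch.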