Unverified commit 5d322ced, authored by W wangzhen38, committed by GitHub

[mv fleet] mv fleet to distributed (#50834)

* [mv fleet] mv fleet to distributed

* [mv fleet] for ci

* [mv fleet] for ci

* [mv fleet] solve ci of version
Parent 76c495d7
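
This commit is a pure package relocation: every module under paddle.incubate.fleet moves to paddle.incubate.distributed.fleet, and the diff below updates all import sites, the setup packaging lists, and the generated version.py path to match. A minimal sketch of the migration for downstream code (import paths are taken from this diff; the surrounding snippet is illustrative only):

    # Old location, removed by this commit:
    #   from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet
    # New location:
    from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import (
        fleet,
    )

    # The generated version module moves the same way; per the test in this diff,
    # version.is_transpiler() returns True for a TRANSPILER-mode build.
    from paddle.incubate.distributed.fleet.parameter_server import version
    print(version.is_transpiler())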
@@ -7,7 +7,7 @@ if(WITH_PYTHON)
file(MAKE_DIRECTORY
${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto)
set(PSLIB_PROTO_DSTPATH
-"${PADDLE_SOURCE_DIR}/python/paddle/incubate/fleet/parameter_server/pslib/"
+"${PADDLE_SOURCE_DIR}/python/paddle/incubate/distributed/fleet/parameter_server/pslib/"
)
if(NOT WIN32)
add_custom_command(
......
@@ -74,7 +74,7 @@ class ParameterServerOptimizer(MetaOptimizerBase):
}
def _get_distributed_strategy(self):
-from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
StrategyFactory,
)
@@ -96,7 +96,7 @@ class ParameterServerOptimizer(MetaOptimizerBase):
return strategy
def _build_trainer_programs(self, compiled_config):
-from paddle.incubate.fleet.parameter_server.ir import (
+from paddle.incubate.distributed.fleet.parameter_server.ir import (
trainer_pass as worker,
)
@@ -106,7 +106,7 @@ class ParameterServerOptimizer(MetaOptimizerBase):
use_ps_gpu = self.user_defined_strategy.a_sync_configs["use_ps_gpu"]
if not compiled_config.is_geo_mode():
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_add_lr_decay_table_pass,
)
@@ -150,7 +150,7 @@ class ParameterServerOptimizer(MetaOptimizerBase):
compiled_config.set_origin_ps_startup_program(_startup)
# for heter program
if self.role_maker._is_heter_parameter_server_mode:
-from paddle.incubate.fleet.parameter_server.ir import (
+from paddle.incubate.distributed.fleet.parameter_server.ir import (
heter_trainer_pass as heter_worker,
)
@@ -191,13 +191,13 @@ class ParameterServerOptimizer(MetaOptimizerBase):
_main = paddle.static.Program()
_startup = paddle.static.Program()
-from paddle.incubate.fleet.parameter_server.ir import (
+from paddle.incubate.distributed.fleet.parameter_server.ir import (
pserver_pass as server,
)
if not compiled_config.is_geo_mode():
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_optimize_ops,
)
@@ -209,7 +209,7 @@ class ParameterServerOptimizer(MetaOptimizerBase):
if len(ops) == 0:
return _main, _startup
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_add_lr_decay_table_pass,
)
@@ -299,7 +299,9 @@ class ParameterServerOptimizer(MetaOptimizerBase):
free = get_sys_free_mem()
-from paddle.incubate.fleet.parameter_server.ir import vars_metatools
+from paddle.incubate.distributed.fleet.parameter_server.ir import (
+    vars_metatools,
+)
processed_var_names = set(["@EMPTY@"])
param_memory_size = 0
@@ -369,7 +371,9 @@ class ParameterServerOptimizer(MetaOptimizerBase):
_origin_main_program = loss.block.program
_origin_startup_program = startup_program
-from paddle.incubate.fleet.parameter_server.ir import public as public
+from paddle.incubate.distributed.fleet.parameter_server.ir import (
+    public as public,
+)
compiled_config = public.CompileTimeStrategy(
_origin_main_program,
......
@@ -50,7 +50,7 @@ class ParameterServerRuntime(RuntimeBase):
def _get_distributed_strategy(self):
strategy = None
-from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
StrategyFactory,
)
@@ -72,7 +72,7 @@ class ParameterServerRuntime(RuntimeBase):
return strategy
def build_compiled_startegy(self):
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
CompileTimeStrategy,
)
@@ -101,7 +101,7 @@ class ParameterServerRuntime(RuntimeBase):
if main_program is None:
main_program = self.origin_main_program
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_varname_parts,
)
@@ -137,7 +137,7 @@ class ParameterServerRuntime(RuntimeBase):
def _load_distributed_params(self, dirname, varnames):
from paddle.distributed.communicator import LargeScaleKV
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_varname_parts,
)
@@ -153,7 +153,7 @@ class ParameterServerRuntime(RuntimeBase):
if var.name in exclude_var_names:
return False
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_varname_parts,
)
@@ -184,7 +184,7 @@ class ParameterServerRuntime(RuntimeBase):
return kwargs
def geo_strategy_envs():
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
get_sparse_tablenames,
)
@@ -238,11 +238,11 @@ class ParameterServerRuntime(RuntimeBase):
kwargs["sparse_attrs"] = get_sparse_attrs()
return kwargs
-from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
GeoStrategy,
SyncStrategy,
)
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_lr_ops,
_has_global_step,
)
@@ -474,7 +474,7 @@ class ParameterServerRuntime(RuntimeBase):
return reshaped_names, origin_names
def _get_optimizer_op(self, param_name):
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_optimize_ops,
)
......
@@ -36,7 +36,7 @@ PSERVER_SAVE_SUFFIX = ".shard"
def parse_table_class(varname, o_main_program):
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
is_distributed_sparse_op,
is_sparse_op,
)
@@ -247,7 +247,7 @@ class CommonAccessor:
self.opt_init_map = opt_init_map
def parse_entry(self, varname, o_main_program):
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
is_distributed_sparse_op,
is_sparse_op,
)
@@ -304,7 +304,7 @@ class CommonAccessor:
compiled_strategy,
adam_d2sum,
):
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_optimize_ops,
)
@@ -692,7 +692,7 @@ class TheOnePSRuntime(RuntimeBase):
def _get_distributed_strategy(self):
strategy = None
-from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
StrategyFactory,
)
@@ -716,7 +716,7 @@ class TheOnePSRuntime(RuntimeBase):
return strategy
def build_compiled_startegy(self):
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
CompileTimeStrategy,
)
@@ -731,7 +731,7 @@ class TheOnePSRuntime(RuntimeBase):
return compiled_config
def _init_worker(self):
-from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
SyncStrategy,
)
@@ -1191,7 +1191,7 @@ class TheOnePSRuntime(RuntimeBase):
proto_txt, string_hosts, role_id, trainers, self._server_sub_program
)
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
get_sparse_tablenames,
)
@@ -1252,7 +1252,7 @@ class TheOnePSRuntime(RuntimeBase):
if var.name in exclude_var_names:
return False
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_varname_parts,
)
@@ -1283,7 +1283,7 @@ class TheOnePSRuntime(RuntimeBase):
def _save_sparse_params(
self, executor, dirname, context, main_program, mode
):
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
get_sparse_tablenames,
)
@@ -1479,7 +1479,7 @@ class TheOnePSRuntime(RuntimeBase):
self._ps_inference_save_persistables(*args, **kwargs)
def _load_sparse_params(self, dirname, context, main_program, mode):
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
get_sparse_tablenames,
)
......
@@ -867,7 +867,7 @@ class InMemoryDataset(DatasetBase):
# required: skiptest
import paddle.fluid as fluid
-from paddle.incubate.fleet.parameter_server.pslib import fleet
+from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet
dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
filelist = ["a.txt", "b.txt"]
dataset.set_filelist(filelist)
@@ -929,7 +929,7 @@ class InMemoryDataset(DatasetBase):
# required: skiptest
import paddle.fluid as fluid
-from paddle.incubate.fleet.parameter_server.pslib import fleet
+from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet
dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
filelist = ["a.txt", "b.txt"]
dataset.set_filelist(filelist)
@@ -993,7 +993,7 @@ class InMemoryDataset(DatasetBase):
# required: skiptest
import paddle.fluid as fluid
-from paddle.incubate.fleet.parameter_server.pslib import fleet
+from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet
dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
filelist = ["a.txt", "b.txt"]
dataset.set_filelist(filelist)
@@ -1037,7 +1037,7 @@ class InMemoryDataset(DatasetBase):
# required: skiptest
import paddle.fluid as fluid
-from paddle.incubate.fleet.parameter_server.pslib import fleet
+from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet
dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
filelist = ["a.txt", "b.txt"]
dataset.set_filelist(filelist)
@@ -1084,7 +1084,7 @@ class InMemoryDataset(DatasetBase):
# required: skiptest
import paddle.fluid as fluid
-from paddle.incubate.fleet.parameter_server.pslib import fleet
+from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet
dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
graph_config = {"walk_len": 24,
"walk_degree": 10,
@@ -1251,7 +1251,7 @@ class QueueDataset(DatasetBase):
.. code-block:: python
import paddle.fluid as fluid
-from paddle.incubate.fleet.parameter_server.pslib import fleet
+from paddle.incubate.distributed.fleet.parameter_server.pslib import fleet
dataset = fluid.DatasetFactory().create_dataset("QueueDataset")
#dataset.global_shuffle(fleet)
......
@@ -123,7 +123,7 @@ class Hogwild(DeviceWorker):
hogwild.stat_var_names.extend([i])
downpour.stat_var_names.extend([i])
-from paddle.incubate.fleet.parameter_server import version
+from paddle.incubate.distributed.fleet.parameter_server import version
if (
version.is_transpiler()
@@ -271,7 +271,7 @@ class DownpourLite(DeviceWorker):
for i in opt_info["stat_var_names"]:
downpour.stat_var_names.extend([i])
-from paddle.incubate.fleet.parameter_server import version
+from paddle.incubate.distributed.fleet.parameter_server import version
if (
version.is_transpiler()
......
@@ -22,8 +22,10 @@ from paddle.fluid.transpiler.distribute_transpiler import (
DistributeTranspilerConfig,
ServerRuntimeConfig,
)
-from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet
-from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import (
+    fleet,
+)
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
StrategyFactory,
)
......
@@ -26,7 +26,9 @@ import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.framework import Program, program_guard
from paddle.fluid.op import Operator
-from paddle.incubate.fleet.parameter_server.mode import DistributedMode
+from paddle.incubate.distributed.fleet.parameter_server.mode import (
+    DistributedMode,
+)
def run_pserver(pserver_id):
......
@@ -16,7 +16,9 @@ import logging
# import paddle.incubate.distributed.fleet.role_maker as role_maker
import paddle.distributed.fleet.base.role_maker as role_maker
-from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import (
+    fleet,
+)
logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger("fluid")
......
@@ -18,7 +18,9 @@ from utils import gen_data
import paddle
import paddle.fluid as fluid
from paddle.incubate.distributed.fleet import role_maker
-from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import (
+    fleet,
+)
input_x = paddle.static.data(name="x", shape=[-1, 32], dtype='float32')
input_y = paddle.static.data(name="y", shape=[-1, 1], dtype='int64')
......
@@ -233,7 +233,7 @@ def get_user_defined_strategy(config):
def get_distributed_strategy(user_defined_strategy): # pslib
-from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
StrategyFactory,
)
@@ -444,7 +444,7 @@ class DnnTrainer:
print(
"entering run {} - old".format(str(config["applied_pass_name"]))
)
-from paddle.incubate.fleet.parameter_server.ir import (
+from paddle.incubate.distributed.fleet.parameter_server.ir import (
public as public,
)
@@ -458,7 +458,7 @@ class DnnTrainer:
_main = compiled_config.origin_main_program.clone()
_startup = compiled_config.origin_startup_program.clone()
-from paddle.incubate.fleet.parameter_server.ir import (
+from paddle.incubate.distributed.fleet.parameter_server.ir import (
trainer_pass as worker,
)
......
@@ -140,7 +140,7 @@ import paddle.fluid as fluid
from paddle.distributed.communicator import Communicator
import paddle.incubate.distributed.fleet.role_maker as role_maker
-from paddle.incubate.fleet.parameter_server.mode import DistributedMode
+from paddle.incubate.distributed.fleet.parameter_server.mode import DistributedMode
import paddle.distributed.fleet as fleet
from test_communicator_geo import TestCommunicatorGeoEnd2End
......
@@ -1113,7 +1113,7 @@ class TestDataset2(unittest.TestCase):
train_program = fluid.Program()
startup_program = fluid.Program()
scope = fluid.Scope()
-from paddle.incubate.fleet.parameter_server.distribute_transpiler import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import (
fleet,
)
@@ -1185,7 +1185,9 @@ class TestDataset2(unittest.TestCase):
train_program = fluid.Program()
startup_program = fluid.Program()
scope = fluid.Scope()
-from paddle.incubate.fleet.parameter_server.pslib import fleet
+from paddle.incubate.distributed.fleet.parameter_server.pslib import (
+    fleet,
+)
with fluid.program_guard(train_program, startup_program):
slots = ["slot1_ff", "slot2_ff", "slot3_ff", "slot4_ff"]
@@ -1316,7 +1318,9 @@ class TestDataset2(unittest.TestCase):
train_program = fluid.Program()
startup_program = fluid.Program()
scope = fluid.Scope()
-from paddle.incubate.fleet.parameter_server.pslib import fleet
+from paddle.incubate.distributed.fleet.parameter_server.pslib import (
+    fleet,
+)
with fluid.program_guard(train_program, startup_program):
slots = ["slot1_ff", "slot2_ff", "slot3_ff", "slot4_ff"]
......
@@ -21,9 +21,9 @@ from google.protobuf import text_format
import paddle
import paddle.fluid as fluid
-import paddle.incubate.fleet.parameter_server.pslib.ps_pb2 as pslib
+import paddle.incubate.distributed.fleet.parameter_server.pslib.ps_pb2 as pslib
from paddle.fluid.trainer_factory import TrainerFactory
-from paddle.incubate.fleet.parameter_server.pslib.node import (
+from paddle.incubate.distributed.fleet.parameter_server.pslib.node import (
DownpourServer,
DownpourWorker,
)
......
@@ -34,10 +34,12 @@ class TestFleet1(unittest.TestCase):
"""Test cases for pslib."""
import paddle
import paddle.fluid as fluid
+from paddle.incubate.distributed.fleet.parameter_server.pslib import (
+    fleet,
+)
from paddle.incubate.distributed.fleet.role_maker import (
GeneralRoleMaker,
)
-from paddle.incubate.fleet.parameter_server.pslib import fleet
os.environ["POD_IP"] = "127.0.0.1"
os.environ["PADDLE_PORT"] = "36001"
......
@@ -23,15 +23,17 @@ from paddle.fluid.transpiler.distribute_transpiler import (
DistributeTranspilerConfig,
)
from paddle.incubate.distributed.fleet.collective import CollectiveOptimizer
+# from paddle.incubate.distributed.fleet.parameter_server import TranspilerOptimizer
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import (
+    fleet,
+)
from paddle.incubate.distributed.fleet.role_maker import (
Role,
UserDefinedCollectiveRoleMaker,
UserDefinedRoleMaker,
)
-# from paddle.incubate.fleet.parameter_server import TranspilerOptimizer
-from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet
class DistributeTranspilerConfigTest(unittest.TestCase):
def set_runtime_split_send_recv(self, config, value):
......
@@ -34,10 +34,12 @@ class TestFleet1(unittest.TestCase):
def test_pslib_1(self):
"""Test cases for pslib."""
import paddle.fluid as fluid
+from paddle.incubate.distributed.fleet.parameter_server.pslib import (
+    fleet,
+)
from paddle.incubate.distributed.fleet.role_maker import (
GeneralRoleMaker,
)
-from paddle.incubate.fleet.parameter_server.pslib import fleet
os.environ["POD_IP"] = "127.0.0.1"
os.environ["PADDLE_PORT"] = "36001"
......
@@ -15,7 +15,7 @@
import unittest
from paddle.fluid.framework import default_main_program
-from paddle.incubate.fleet.parameter_server.ir.pserver_pass import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.pserver_pass import (
_get_optimizer_input_shape,
)
@@ -24,7 +24,7 @@ main_program = default_main_program()
class TestFleetPS(unittest.TestCase):
def test_version(self):
-from paddle.incubate.fleet.parameter_server import version
+from paddle.incubate.distributed.fleet.parameter_server import version
transpiler = version.is_transpiler()
self.assertEqual(transpiler, True)
......
@@ -17,8 +17,10 @@ import unittest
import paddle
import paddle.fluid as fluid
import paddle.incubate.distributed.fleet.role_maker as role_maker
-from paddle.incubate.fleet.parameter_server.distribute_transpiler import fleet
-from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import (
+    fleet,
+)
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
StrategyFactory,
)
......
@@ -63,10 +63,12 @@ class TestCloudRoleMaker(unittest.TestCase):
def test_pslib_1(self):
"""Test cases for pslib."""
import paddle.fluid as fluid
+from paddle.incubate.distributed.fleet.parameter_server.pslib import (
+    fleet,
+)
from paddle.incubate.distributed.fleet.role_maker import (
GeneralRoleMaker,
)
-from paddle.incubate.fleet.parameter_server.pslib import fleet
os.environ["POD_IP"] = "127.0.0.1"
os.environ["PADDLE_PORT"] = "36001"
......
@@ -35,13 +35,13 @@ class TestCloudRoleMaker2(unittest.TestCase):
def test_pslib_2(self):
"""Test cases for pslib."""
import paddle.fluid as fluid
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import (
+    fleet,
+)
from paddle.incubate.distributed.fleet.role_maker import (
GeneralRoleMaker,
RoleMakerBase,
)
-from paddle.incubate.fleet.parameter_server.distribute_transpiler import (
-    fleet,
-)
paddle.enable_static()
......
@@ -34,10 +34,12 @@ class TestCloudRoleMaker(unittest.TestCase):
def test_pslib_1(self):
"""Test cases for pslib."""
import paddle.fluid as fluid
+from paddle.incubate.distributed.fleet.parameter_server.pslib import (
+    fleet,
+)
from paddle.incubate.distributed.fleet.role_maker import (
GeneralRoleMaker,
)
-from paddle.incubate.fleet.parameter_server.pslib import fleet
os.environ["POD_IP"] = "127.0.0.1"
os.environ["PADDLE_PORT"] = "36001"
......
@@ -34,10 +34,12 @@ class TestFleet1(unittest.TestCase):
def test_pslib_1(self):
"""Test cases for pslib."""
import paddle.fluid as fluid
+from paddle.incubate.distributed.fleet.parameter_server.pslib import (
+    fleet,
+)
from paddle.incubate.distributed.fleet.role_maker import (
GeneralRoleMaker,
)
-from paddle.incubate.fleet.parameter_server.pslib import fleet
os.environ["POD_IP"] = "127.0.0.1"
os.environ["PADDLE_PORT"] = "36001"
......
@@ -14,7 +14,7 @@
import unittest
-from paddle.incubate.fleet.parameter_server.ir.ps_dispatcher import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.ps_dispatcher import (
HashName,
PSDispatcher,
RoundRobin,
......
@@ -49,7 +49,9 @@ from .distribute_transpiler import (
same_or_split_var,
ServerRuntimeConfig,
)
-from paddle.incubate.fleet.parameter_server.mode import DistributedMode
+from paddle.incubate.distributed.fleet.parameter_server.mode import (
+    DistributedMode,
+)
from paddle.distributed.distribute_lookup_table import (
find_distributed_lookup_table,
)
......
@@ -54,13 +54,13 @@ class FleetUtil:
def __init__(self, mode="pslib"):
global fleet
if mode == "pslib":
-from paddle.incubate.fleet.parameter_server.pslib import (
+from paddle.incubate.distributed.fleet.parameter_server.pslib import (
fleet as fleet_pslib,
)
fleet = fleet_pslib
elif mode == "transpiler":
-from paddle.incubate.fleet.parameter_server.distribute_transpiler import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler import (
fleet as fleet_transpiler,
)
......
@@ -39,15 +39,17 @@ from paddle.incubate.distributed.fleet.base import Fleet
from paddle.incubate.distributed.fleet.base import Mode
from paddle.incubate.distributed.fleet.role_maker import MPISymetricRoleMaker
-from paddle.incubate.fleet.parameter_server import version
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server import version
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
get_sparse_tablenames,
)
-from paddle.incubate.fleet.parameter_server.ir.public import _get_lr_ops
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
+    _get_lr_ops,
+)
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_has_global_step,
)
-from paddle.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
+from paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler.distributed_strategy import (
TrainerRuntimeConfig,
DistributedStrategy,
SyncStrategy,
@@ -61,15 +63,17 @@ from paddle.distributed.fleet.base.private_helper_function import (
wait_server_ready,
)
from paddle.incubate.distributed.fleet.base import DistributedOptimizer
-from paddle.incubate.fleet.parameter_server.mode import PSMode
+from paddle.incubate.distributed.fleet.parameter_server.mode import PSMode
-from paddle.incubate.fleet.parameter_server.ir import (
+from paddle.incubate.distributed.fleet.parameter_server.ir import (
trainer_pass as worker,
)
-from paddle.incubate.fleet.parameter_server.ir import (
+from paddle.incubate.distributed.fleet.parameter_server.ir import (
pserver_pass as server,
)
-from paddle.incubate.fleet.parameter_server.ir import public as public
+from paddle.incubate.distributed.fleet.parameter_server.ir import (
+    public as public,
+)
class FleetTranspiler(Fleet):
......
@@ -29,7 +29,9 @@ from paddle.fluid.transpiler.distribute_transpiler import (
DistributeTranspilerConfig,
ServerRuntimeConfig,
)
-from paddle.incubate.fleet.parameter_server.mode import DistributedMode
+from paddle.incubate.distributed.fleet.parameter_server.mode import (
+    DistributedMode,
+)
class TrainerRuntimeConfig:
......
@@ -15,7 +15,7 @@
import warnings
import paddle
-from paddle.incubate.fleet.parameter_server.ir.trainer_pass import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.trainer_pass import (
create_heter_program,
create_trainer_program,
find_block_joints,
......
@@ -15,7 +15,7 @@
import collections
from paddle.framework import core
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_lr_ops,
_get_optimize_ops,
_get_varname_parts,
......
@@ -20,9 +20,13 @@ from functools import reduce
import paddle
from paddle.framework import core
-from paddle.incubate.fleet.parameter_server.ir import vars_metatools
-from paddle.incubate.fleet.parameter_server.ir.ps_dispatcher import RoundRobin
-from paddle.incubate.fleet.parameter_server.mode import DistributedMode
+from paddle.incubate.distributed.fleet.parameter_server.ir import vars_metatools
+from paddle.incubate.distributed.fleet.parameter_server.ir.ps_dispatcher import (
+    RoundRobin,
+)
+from paddle.incubate.distributed.fleet.parameter_server.mode import (
+    DistributedMode,
+)
OP_NAME_SCOPE = "op_namescope"
CLIP_OP_NAME_SCOPE = "gradient_clip"
......
@@ -22,12 +22,14 @@ import paddle
import paddle.framework as framework
from paddle.distributed.transpiler.details.program_utils import delete_ops
from paddle.framework import core
-from paddle.incubate.fleet.parameter_server.ir.public import (
+from paddle.incubate.distributed.fleet.parameter_server.ir.public import (
_get_lr_ops,
_get_optimize_ops,
get_sparse_tablenames,
)
-from paddle.incubate.fleet.parameter_server.mode import DistributedMode
+from paddle.incubate.distributed.fleet.parameter_server.mode import (
+    DistributedMode,
+)
OP_NAME_SCOPE = "op_namescope"
CLIP_OP_NAME_SCOPE = "gradient_clip"
......
@@ -293,7 +293,7 @@ os.environ['CUDA_CACHE_MAXSIZE'] = '805306368'
write_cuda_env_config_py(filename='@PADDLE_BINARY_DIR@/python/paddle/cuda_env.py')
-def write_distributed_training_mode_py(filename='paddle/incubate/fleet/parameter_server/version.py'):
+def write_distributed_training_mode_py(filename='paddle/incubate/distributed/fleet/parameter_server/version.py'):
cnt = '''
# THIS FILE IS GENERATED FROM PADDLEPADDLE SETUP.PY
@@ -320,7 +320,7 @@ def is_transpiler():
'mode': 'PSLIB' if '${WITH_PSLIB}' == 'ON' else 'TRANSPILER'
})
-write_distributed_training_mode_py(filename='@PADDLE_BINARY_DIR@/python/paddle/incubate/fleet/parameter_server/version.py')
+write_distributed_training_mode_py(filename='@PADDLE_BINARY_DIR@/python/paddle/incubate/distributed/fleet/parameter_server/version.py')
packages=['paddle',
@@ -432,10 +432,10 @@ packages=['paddle',
'paddle.incubate.distributed.models',
'paddle.incubate.distributed.models.moe',
'paddle.incubate.distributed.models.moe.gate',
-'paddle.incubate.fleet.parameter_server',
-'paddle.incubate.fleet.parameter_server.distribute_transpiler',
-'paddle.incubate.fleet.parameter_server.pslib',
-'paddle.incubate.fleet.parameter_server.ir',
+'paddle.incubate.distributed.fleet.parameter_server',
+'paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler',
+'paddle.incubate.distributed.fleet.parameter_server.pslib',
+'paddle.incubate.distributed.fleet.parameter_server.ir',
'paddle.quantization',
'paddle.quantization.quanters',
'paddle.quantization.observers',
@@ -592,7 +592,7 @@ if '${WITH_CINN}' == 'ON':
if '${WITH_PSLIB}' == 'ON':
shutil.copy('${PSLIB_LIB}', libs_path)
if os.path.exists('${PSLIB_VERSION_PY}'):
-shutil.copy('${PSLIB_VERSION_PY}', '${PADDLE_BINARY_DIR}/python/paddle/incubate/fleet/parameter_server/pslib/')
+shutil.copy('${PSLIB_VERSION_PY}', '${PADDLE_BINARY_DIR}/python/paddle/incubate/distributed/fleet/parameter_server/pslib/')
package_data['paddle.libs'] += ['libps' + ext_name]
if '${WITH_MKLDNN}' == 'ON':
......
@@ -579,7 +579,7 @@ os.environ['CUDA_CACHE_MAXSIZE'] = '805306368'
def write_parameter_server_version_py(
-filename='paddle/incubate/fleet/parameter_server/version.py',
+filename='paddle/incubate/distributed/fleet/parameter_server/version.py',
):
cnt = '''
@@ -973,7 +973,7 @@ def get_package_data_and_package_dir():
shutil.copy(
env_dict.get("PSLIB_VERSION_PY"),
paddle_binary_dir
-    + '/python/paddle/incubate/fleet/parameter_server/pslib/',
+    + '/python/paddle/incubate/distributed/fleet/parameter_server/pslib/',
)
package_data['paddle.libs'] += ['libps' + ext_suffix]
if env_dict.get("WITH_MKLDNN") == 'ON':
@@ -1332,10 +1332,10 @@ def get_setup_parameters():
'paddle.incubate.distributed.models',
'paddle.incubate.distributed.models.moe',
'paddle.incubate.distributed.models.moe.gate',
-'paddle.incubate.fleet.parameter_server',
-'paddle.incubate.fleet.parameter_server.distribute_transpiler',
-'paddle.incubate.fleet.parameter_server.ir',
-'paddle.incubate.fleet.parameter_server.pslib',
+'paddle.incubate.distributed.fleet.parameter_server',
+'paddle.incubate.distributed.fleet.parameter_server.distribute_transpiler',
+'paddle.incubate.distributed.fleet.parameter_server.ir',
+'paddle.incubate.distributed.fleet.parameter_server.pslib',
'paddle.quantization',
'paddle.quantization.quanters',
'paddle.quantization.observers',
@@ -1461,7 +1461,7 @@ def main():
filename='{}/python/paddle/cuda_env.py'.format(paddle_binary_dir)
)
write_parameter_server_version_py(
-filename='{}/python/paddle/incubate/fleet/parameter_server/version.py'.format(
+filename='{}/python/paddle/incubate/distributed/fleet/parameter_server/version.py'.format(
paddle_binary_dir
)
)
......