#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# TODO: define the distributed API under this directory.
from .base.role_maker import Role  # noqa: F401
from .base.role_maker import UserDefinedRoleMaker  # noqa: F401
from .base.role_maker import PaddleCloudRoleMaker  # noqa: F401
from .base.distributed_strategy import DistributedStrategy  # noqa: F401
from .base.util_factory import UtilBase  # noqa: F401
from .dataset import DatasetBase  # noqa: F401
from .dataset import InMemoryDataset  # noqa: F401
from .dataset import QueueDataset  # noqa: F401
from .dataset import FileInstantDataset  # noqa: F401
from .dataset import BoxPSDataset  # noqa: F401
from .data_generator.data_generator import MultiSlotDataGenerator  # noqa: F401
from .data_generator.data_generator import MultiSlotStringDataGenerator  # noqa: F401
from . import metrics  # noqa: F401
from .base.topology import CommunicateTopology  # noqa: F401
from .base.topology import HybridCommunicateGroup  # noqa: F401
from .fleet import Fleet
from .model import distributed_model
from .optimizer import distributed_optimizer
from .scaler import distributed_scaler

__all__ = [  # noqa
    "CommunicateTopology", "UtilBase", "HybridCommunicateGroup",
    "MultiSlotStringDataGenerator", "UserDefinedRoleMaker",
    "DistributedStrategy", "Role", "MultiSlotDataGenerator",
    "PaddleCloudRoleMaker", "Fleet"
]

fleet = Fleet()
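# ``fleet`` is the module-level singleton that backs the functional API
# below: every public name bound in the rest of this file is a method or
# attribute of this single ``Fleet`` instance.  A minimal collective
# usage sketch (assumes an existing ``model``):
#
#     import paddle
#     import paddle.distributed.fleet as fleet
#
#     fleet.init(is_collective=True)
#     optimizer = paddle.optimizer.SGD(0.01, parameters=model.parameters())
#     optimizer = fleet.distributed_optimizer(optimizer)
#     model = fleet.distributed_model(model)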
_final_strategy = fleet._final_strategy
_get_applied_meta_list = fleet._get_applied_meta_list
_get_applied_graph_list = fleet._get_applied_graph_list
init = fleet.init
is_first_worker = fleet.is_first_worker
worker_index = fleet.worker_index
worker_num = fleet.worker_num
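# generic collective-training aliases: ``rank``/``nranks``/``world_size``
# below are alternative names for fleet's worker index and worker count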
node_num = fleet.node_num
rank = fleet.worker_index
nranks = fleet.worker_num
world_size = fleet.worker_num
# device ids in the current trainer
local_device_ids = fleet.local_device_ids
# device ids across all trainers (the world)
world_device_ids = fleet.world_device_ids
# rank within the current node
local_rank = fleet.local_rank
rank_in_node = local_rank
is_worker = fleet.is_worker
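# coordinator / federated-learning (FL) entry points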
is_coordinator = fleet.is_coordinator
init_coordinator = fleet.init_coordinator
make_fl_strategy = fleet.make_fl_strategy
get_fl_client = fleet.get_fl_client
worker_endpoints = fleet.worker_endpoints
server_num = fleet.server_num
server_index = fleet.server_index
server_endpoints = fleet.server_endpoints
is_server = fleet.is_server
util = UtilBase()
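# ``util`` bundles collective helper utilities (e.g. barrier / all_reduce
# across workers).  The lifecycle hooks below apply to parameter-server
# training: workers call init_worker(), servers call init_server() and
# then run_server().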
barrier_worker = fleet.barrier_worker
init_worker = fleet.init_worker
init_server = fleet.init_server
run_server = fleet.run_server
stop_worker = fleet.stop_worker
distributed_optimizer = distributed_optimizer
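# NOTE: ``distributed_optimizer`` (and ``distributed_model`` /
# ``distributed_scaler`` further down) are the standalone helpers imported
# at the top of this file; the self-assignments simply keep them grouped
# with the other module-level APIs.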
save_inference_model = fleet.save_inference_model
save_persistables = fleet.save_persistables
save_cache_model = fleet.save_cache_model
check_save_pre_patch_done = fleet.check_save_pre_patch_done
save_one_table = fleet.save_one_table
save_dense_params = fleet.save_dense_params
load_model = fleet.load_model
load_inference_model = fleet.load_inference_model
load_one_table = fleet.load_one_table
minimize = fleet.minimize
distributed_model = distributed_model
shrink = fleet.shrink
get_hybrid_communicate_group = fleet.get_hybrid_communicate_group
distributed_scaler = distributed_scaler
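# a minimal AMP sketch using the scaler wrapper above (assumes ``model``
# and ``optimizer`` were already wrapped by distributed_model /
# distributed_optimizer):
#
#     scaler = paddle.amp.GradScaler(init_loss_scaling=1024)
#     scaler = fleet.distributed_scaler(scaler)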

from .. import auto_parallel as auto
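# NOTE: imported at the very end, after the ``fleet`` singleton is set up,
# presumably to avoid a circular import with the auto_parallel package.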