#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# TODO: define distributed api under this directory
from .base.role_maker import Role  # noqa: F401
from .base.role_maker import UserDefinedRoleMaker  # noqa: F401
from .base.role_maker import PaddleCloudRoleMaker  # noqa: F401
from .base.distributed_strategy import DistributedStrategy  # noqa: F401
from .base.util_factory import UtilBase  # noqa: F401
from .dataset import DatasetBase  # noqa: F401
from .dataset import InMemoryDataset  # noqa: F401
from .dataset import QueueDataset  # noqa: F401
from .dataset import FileInstantDataset  # noqa: F401
from .dataset import BoxPSDataset  # noqa: F401
from .data_generator.data_generator import MultiSlotDataGenerator  # noqa: F401
from .data_generator.data_generator import MultiSlotStringDataGenerator  # noqa: F401
from . import metrics  # noqa: F401
from .base.topology import CommunicateTopology  # noqa: F401
from .base.topology import HybridCommunicateGroup  # noqa: F401
from .fleet import Fleet
from .model import distributed_model
from .optimizer import distributed_optimizer
from .scaler import distributed_scaler
from .utils import log_util

__all__ = [  # noqa
    "CommunicateTopology", "UtilBase", "HybridCommunicateGroup",
    "MultiSlotStringDataGenerator", "UserDefinedRoleMaker",
    "DistributedStrategy", "Role", "MultiSlotDataGenerator",
    "PaddleCloudRoleMaker", "Fleet"
]

fleet = Fleet()
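# The assignments below re-export bound methods of the module-level ``fleet``
# singleton as plain functions, so ``paddle.distributed.fleet.init(...)`` and
# friends work without constructing a ``Fleet`` instance by hand.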
_final_strategy = fleet._final_strategy
_get_applied_meta_list = fleet._get_applied_meta_list
_get_applied_graph_list = fleet._get_applied_graph_list
init = fleet.init
is_first_worker = fleet.is_first_worker
worker_index = fleet.worker_index
worker_num = fleet.worker_num
node_num = fleet.node_num
rank = fleet.worker_index
nranks = fleet.worker_num
world_size = fleet.worker_num
# device id in current trainer
local_device_ids = fleet.local_device_ids
# device ids in world
world_device_ids = fleet.world_device_ids
# rank in node
local_rank = fleet.local_rank
rank_in_node = local_rank
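# A minimal collective-training sketch (an illustration, assuming the process
# was started with ``python -m paddle.distributed.launch``):
#
#   import paddle.distributed.fleet as fleet
#   fleet.init(is_collective=True)
#   print("rank", fleet.worker_index(), "of", fleet.worker_num())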
is_worker = fleet.is_worker
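# Helpers for the coordinator-based federated-learning mode (see
# ``fleet.init_coordinator`` and ``fleet.get_fl_client``).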
is_coordinator = fleet.is_coordinator
init_coordinator = fleet.init_coordinator
make_fl_strategy = fleet.make_fl_strategy
get_fl_client = fleet.get_fl_client
worker_endpoints = fleet.worker_endpoints
server_num = fleet.server_num
server_index = fleet.server_index
server_endpoints = fleet.server_endpoints
is_server = fleet.is_server
util = UtilBase()
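# ``fleet.util`` exposes cross-worker utilities such as ``barrier()`` and
# ``all_reduce()`` (names as documented for ``UtilBase``).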
barrier_worker = fleet.barrier_worker
init_worker = fleet.init_worker
init_server = fleet.init_server
run_server = fleet.run_server
stop_worker = fleet.stop_worker
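# Parameter-server lifecycle sketch (an illustration; the actual entry point
# depends on how the job is launched):
#
#   if fleet.is_server():
#       fleet.init_server()
#       fleet.run_server()
#   else:
#       fleet.init_worker()
#       # ... run training ...
#       fleet.stop_worker()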
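# ``distributed_optimizer``, ``distributed_model`` and ``distributed_scaler``
# below are rebound to themselves: a runtime no-op, presumably to flag the
# imported helpers as deliberate module-level exports next to the aliases.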
distributed_optimizer = distributed_optimizer
save_inference_model = fleet.save_inference_model
save_persistables = fleet.save_persistables
save_cache_model = fleet.save_cache_model
check_save_pre_patch_done = fleet.check_save_pre_patch_done
save_one_table = fleet.save_one_table
save_dense_params = fleet.save_dense_params
load_model = fleet.load_model
load_inference_model = fleet.load_inference_model
load_one_table = fleet.load_one_table
minimize = fleet.minimize
distributed_model = distributed_model
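# Typical dynamic-graph wiring (an illustration using the public fleet APIs):
#
#   optimizer = fleet.distributed_optimizer(optimizer)
#   model = fleet.distributed_model(model)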
shrink = fleet.shrink
get_hybrid_communicate_group = fleet.get_hybrid_communicate_group
distributed_scaler = distributed_scaler
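# AMP sketch (an illustration; ``init_loss_scaling`` is one of the documented
# ``paddle.amp.GradScaler`` arguments):
#
#   scaler = paddle.amp.GradScaler(init_loss_scaling=1024)
#   scaler = fleet.distributed_scaler(scaler)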
set_log_level = log_util.set_log_level
get_log_level_code = log_util.get_log_level_code
get_log_level_name = log_util.get_log_level_name
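# Logging sketch (an illustration; ``set_log_level`` accepts a standard level
# name or its numeric code):
#
#   fleet.set_log_level("DEBUG")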
from .. import auto_parallel as auto