# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

__all__ = []


# print the configuration once the args are fully populated in controller init
def log(ctx):
    ctx.logger.info("-----------  Configuration  ----------------------")
    for arg, value in sorted(vars(ctx.args).items()):
        ctx.logger.info("%s: %s" % (arg, value))
    ctx.logger.info("--------------------------------------------------")


def process_args(ctx):
    # check the devices selected via args against those available on the node
    # argdev = ctx.args.gpus or ctx.args.xpus or ctx.args.npus
    argdev = ctx.args.devices
    if argdev:
        for d in argdev.split(','):
            assert d in ctx.node.device.labels, 'Device not found {}'.format(d)
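    # a usage note with assumed example values: `--devices=0,1` passes only if
    # both "0" and "1" are labels detected on the node, so a typo such as
    # `--devices=9` on a two-GPU box aborts the launch at the assert above.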


def collective_compatible(ctx):
    if 'PADDLE_TRAINER_ENDPOINTS' in ctx.envs:
        eps = ctx.envs['PADDLE_TRAINER_ENDPOINTS'].split(',')
        hosts = set([h.split(':')[0] for h in eps])
        ctx.args.master = eps[0] if ':' in eps[0] else '{}:6768'.format(eps[0])
        ctx.args.nnodes = len(hosts)
        ctx.logger.info(
            'args reset by env PADDLE_TRAINER_ENDPOINTS\n{}'.format(eps))
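    # NOTE: the DISTRIBUTED_TRAINER_ENDPOINTS variant below is kept for
    # reference but left disabled.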
    '''
    if 'DISTRIBUTED_TRAINER_ENDPOINTS' in ctx.envs:
        eps = ctx.envs['DISTRIBUTED_TRAINER_ENDPOINTS'].split(',')
        hosts = set([h.split(':')[0] for h in eps])
        ctx.args.master = eps[0]
        ctx.args.nnodes = len(hosts)
        ctx.logger.info(
            'args reset by env DISTRIBUTED_TRAINER_ENDPOINTS\n{}'.format(eps))
    '''
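    # the live branch above, traced with assumed example values:
    #   PADDLE_TRAINER_ENDPOINTS="10.0.0.1:6768,10.0.0.2:6768"
    #   -> ctx.args.master = "10.0.0.1:6768", ctx.args.nnodes = 2
    # a port-less endpoint list falls back to port 6768 for the master.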


def rewrite_host_ip(ctx):
    if ctx.args.host is not None and "." in ctx.args.host:
        ctx.logger.warning('Host ip reset to {}'.format(ctx.args.host))
        ctx.node.ip = ctx.args.host
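    # the "." test is only a crude check that --host looks like a dotted ip
    # or fqdn before it overrides the ip the node detected for itself.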


def test_mode(ctx):
    if ctx.args.training_script == 'run_check':
        ctx.logger.info('Paddle Distributed Test begin...')
        if int(ctx.args.nnodes) < 2:
            ctx.args.nnodes = 2
        ctx.args.training_script = '{}/test.py'.format(
            os.path.dirname(__file__))
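    # e.g. an invocation like `python -m paddle.distributed.launch run_check`
    # (module path assumed) swaps in the bundled test.py and forces at least
    # two nodes so the collective path is actually exercised.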


enabled_plugins = [
    test_mode, collective_compatible, rewrite_host_ip, process_args
]
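

# A minimal, self-contained sketch of how a controller might drive these
# plugins, in list order. FakeNode/FakeContext and all field names below are
# assumptions for illustration; the real Context comes from the controller.
if __name__ == '__main__':
    import argparse
    import logging

    logging.basicConfig(level=logging.INFO)

    class FakeNode:
        ip = '127.0.0.1'  # rewrite_host_ip overwrites this when --host is set

    class FakeContext:
        def __init__(self):
            self.logger = logging.getLogger('launch.demo')
            self.envs = dict(os.environ)
            self.node = FakeNode()
            self.args = argparse.Namespace(
                devices=None,
                master=None,
                nnodes=1,
                host=None,
                training_script='train.py')

    ctx = FakeContext()
    for plugin in enabled_plugins:
        plugin(ctx)  # each plugin mutates ctx in place
    log(ctx)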