# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import os
import sys
__dir__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__)
sys.path.append(os.path.abspath(os.path.join(__dir__, '..')))

import paddle

from ppcls.data import Reader
from ppcls.utils.config import get_config
from ppcls.utils.save_load import init_model, save_model
from ppcls.utils import logger
import program


def parse_args():
    parser = argparse.ArgumentParser("PaddleClas train script")
    parser.add_argument(
        '-c',
        '--config',
        type=str,
        default='configs/ResNet/ResNet50.yaml',
        help='config file path')
    parser.add_argument(
        '-o',
        '--override',
        action='append',
        default=[],
        help='config options to be overridden')
    args = parser.parse_args()
    return args


def main(args):
    paddle.seed(12345)

    config = get_config(args.config, overrides=args.override, show=True)
    # assign the place
    use_gpu = config.get("use_gpu", True)
    place = paddle.set_device('gpu' if use_gpu else 'cpu')

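    # enable data-parallel training when the script is launched with more than one trainer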
    trainer_num = paddle.distributed.get_world_size()
    use_data_parallel = trainer_num != 1
    config["use_data_parallel"] = use_data_parallel

    if config["use_data_parallel"]:
        paddle.distributed.init_parallel_env()

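    # build the network and the optimizer/lr scheduler defined in the config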
    net = program.create_model(config.ARCHITECTURE, config.classes_num)
    optimizer, lr_scheduler = program.create_optimizer(
        config, parameter_list=net.parameters())

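    # wrap the network with DataParallel for multi-card gradient synchronization when needed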
    dp_net = net
    if config["use_data_parallel"]:
        find_unused_parameters = config.get("find_unused_parameters", False)
        dp_net = paddle.DataParallel(
            net, find_unused_parameters=find_unused_parameters)

    # load model from checkpoint or pretrained model
    init_model(config, net, optimizer)

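    # build the training (and, if enabled, validation) dataloaders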
    train_dataloader = Reader(config, 'train', places=place)()

    if config.validate:
        valid_dataloader = Reader(config, 'valid', places=place)()

    last_epoch_id = config.get("last_epoch", -1)
    best_top1_acc = 0.0  # best top1 acc record
    best_top1_epoch = last_epoch_id

    vdl_writer_path = config.get("vdl_dir", None)
    vdl_writer = None
    if vdl_writer_path:
        from visualdl import LogWriter
        vdl_writer = LogWriter(vdl_writer_path)
    # Ensure that the vdl log file can be closed normally
    try:
        for epoch_id in range(last_epoch_id + 1, config.epochs):
            net.train()
            # 1. train with train dataset
            program.run(train_dataloader, config, dp_net, optimizer,
                        lr_scheduler, epoch_id, 'train', vdl_writer)

            # 2. validate with validate dataset
            if config.validate and epoch_id % config.valid_interval == 0:
                net.eval()
                with paddle.no_grad():
                    top1_acc = program.run(valid_dataloader, config, net, None,
                                           None, epoch_id, 'valid', vdl_writer)
                if top1_acc > best_top1_acc:
                    best_top1_acc = top1_acc
                    best_top1_epoch = epoch_id
                    model_path = os.path.join(config.model_save_dir,
                                              config.ARCHITECTURE["name"])
                    save_model(net, optimizer, model_path, "best_model")
                message = "The best top1 acc {:.5f}, in epoch: {:d}".format(
                    best_top1_acc, best_top1_epoch)
                logger.info(message)

            # 3. save the persistable model
            if epoch_id % config.save_interval == 0:
                model_path = os.path.join(config.model_save_dir,
                                          config.ARCHITECTURE["name"])
                save_model(net, optimizer, model_path, epoch_id)
    except Exception as e:
        logger.error(e)
    finally:
        if vdl_writer:
            vdl_writer.close()


if __name__ == '__main__':
    args = parse_args()
    main(args)