# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Training with Fluid on a single node only.
"""

from __future__ import print_function

import time
import logging

import paddle.fluid as fluid

from paddlerec.core.trainers.transpiler_trainer import TranspileTrainer
from paddlerec.core.utils import envs

logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger("fluid")
logger.setLevel(logging.INFO)


class SingleTrainer(TranspileTrainer):
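    """Single-node trainer: runs the whole train/infer flow in one process (no fleet)."""
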
    def processor_register(self):
        self.regist_context_processor('uninit', self.instance)
        self.regist_context_processor('init_pass', self.init)
        self.regist_context_processor('startup_pass', self.startup)
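        # On Linux with a non-DataLoader reader, train through the Dataset
        # path (train_from_dataset); otherwise fall back to the DataLoader path.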
        if envs.get_platform() == "LINUX" and envs.get_global_env(
                "dataset_class", None, "train.reader") != "DataLoader":
            self.regist_context_processor('train_pass', self.dataset_train)
        else:
            self.regist_context_processor('train_pass', self.dataloader_train)

        self.regist_context_processor('infer_pass', self.infer)
        self.regist_context_processor('terminal_pass', self.terminal)

    def init(self, context):
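        # Build the training network, apply the optimizer, and collect the
        # metric variables/aliases to fetch while training.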
        self.model.train_net()
        optimizer = self.model.optimizer()
        optimizer.minimize(self.model.get_cost_op())

        self.fetch_vars = []
        self.fetch_alias = []
        self.fetch_period = self.model.get_fetch_period()

        metrics = self.model.get_metrics()
        if metrics:
            # Materialize the dict views so they can be passed around as
            # fetch lists under Python 3 as well.
            self.fetch_vars = list(metrics.values())
            self.fetch_alias = list(metrics.keys())
        evaluate_only = envs.get_global_env(
            'evaluate_only', False, namespace='evaluate')
        if evaluate_only:
            context['status'] = 'infer_pass'
        else:
            context['status'] = 'startup_pass'

    def startup(self, context):
        self._exe.run(fluid.default_startup_program())
        context['status'] = 'train_pass'

    def dataloader_train(self, context):
        reader = self._get_dataloader("TRAIN")
        epochs = envs.get_global_env("train.epochs")

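        # Compile the default main program with data parallelism, using the
        # cost op as the loss for gradient aggregation.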
        program = fluid.compiler.CompiledProgram(
            fluid.default_main_program()).with_data_parallel(
            loss_name=self.model.get_cost_op().name)

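        # Build a "name: value" print format covering epoch, batch id and
        # every metric returned by the model.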
        metrics_varnames = []
        metrics_format = []

        metrics_format.append("{}: {{}}".format("epoch"))
        metrics_format.append("{}: {{}}".format("batch"))

        for name, var in self.model.get_metrics().items():
            metrics_varnames.append(var.name)
            metrics_format.append("{}: {{}}".format(name))

        metrics_format = ", ".join(metrics_format)

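        # One pass per epoch: start the reader, run batches until it raises
        # EOFException, then reset it and save the epoch's model.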
        for epoch in range(epochs):
            reader.start()
            batch_id = 0
            try:
                while True:
                    metrics_rets = self._exe.run(
                        program=program,
                        fetch_list=metrics_varnames)

                    metrics = [epoch, batch_id]
                    metrics.extend(metrics_rets)

                    if batch_id % self.fetch_period == 0 and batch_id != 0:
                        print(metrics_format.format(*metrics))
                    batch_id += 1
            except fluid.core.EOFException:
                reader.reset()
            self.save(epoch, "train", is_fleet=False)

        context['status'] = 'infer_pass'

    def dataset_train(self, context):
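        # Dataset-based training: data feeding happens inside the Dataset
        # engine, so each epoch is a single train_from_dataset call; `ins`
        # is the instance count used for the lines/s report below.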
        dataset = self._get_dataset("TRAIN")
        ins = self._get_dataset_ins()

        epochs = envs.get_global_env("train.epochs")
        for i in range(epochs):
            begin_time = time.time()
            self._exe.train_from_dataset(program=fluid.default_main_program(),
                                         dataset=dataset,
                                         fetch_list=self.fetch_vars,
                                         fetch_info=self.fetch_alias,
                                         print_period=self.fetch_period)
            end_time = time.time()
            times = end_time - begin_time
            print("epoch {} using time {}, speed {:.2f} lines/s".format(
                i, times, ins / times))

            self.save(i, "train", is_fleet=False)
        context['status'] = 'infer_pass'

    def terminal(self, context):
        for model in self.increment_models:
            print("epoch :{}, dir: {}".format(model[0], model[1]))
        context['is_exit'] = True