# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from ..log_helper import get_logger
from collections.abc import Sequence, Mapping

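# Flag guarding the single-field return-format warning below; it is set to
# False after the first warning so the message is logged only once.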
_WARNING_TO_LOG = True


class _DatasetFetcher(object):
    def __init__(self, dataset, auto_collate_batch, collate_fn, drop_last):
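        # dataset: the dataset to read samples from
        # auto_collate_batch: if True, fetch() reads one sample per index in
        #     batch_indices and batches them; if False, a single dataset
        #     access is expected to yield the whole batch
        # collate_fn: optional function applied to the fetched data
        # drop_last: whether an incomplete trailing batch is dropped (only
        #     checked by the iterable-dataset fetcher below)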
        self.dataset = dataset
        self.auto_collate_batch = auto_collate_batch
        self.collate_fn = collate_fn
        self.drop_last = drop_last

    # NOTE: The fetch function performs the whole pipeline of dataset
    #       reading and data transforms for a batch on each call, which
    #       may take a long time. If the DataLoader exits in the meantime,
    #       fetch needs to perceive the exit, so we pass done_event here
    #       for fetch to check the exit status.
    # NOTE: If the DataLoader exits via `break`, performing GPU tensor
    #       operations (e.g. to_tensor) in the thread may cause SIGSEGV,
    #       so we pass the done_event argument to check the DataLoader
    #       exit status between each sample processed in the batch.
    def fetch(self, batch_indices, done_event=None):
        raise NotImplementedError(
            "'fetch' is not implemented for class {}".format(
                self.__class__.__name__))
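
    # A sketch of the caller-side `done_event` pattern (illustrative only;
    # `fetcher` and `indices` are hypothetical names):
    #
    #     done_event = threading.Event()
    #     batch = fetcher.fetch(indices, done_event)
    #     # on DataLoader exit the owner sets done_event, and an in-flight
    #     # fetch returns None instead of finishing the batch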

    def _log_warning(self):
        # only log the warning on rank 0 under distributed launch
        from ...distributed import get_world_size, get_rank
        if get_world_size() >= 2 and get_rank() != 0:
            return

        warn_str = "Detect dataset only contains single fileds, return format " \
                   "changed since Paddle 2.1. In Paddle <= 2.0, DataLoader add " \
                   "a list surround output data(e.g. return [data]), and in " \
                   "Paddle >= 2.1, DataLoader return the single filed directly " \
                   "(e.g. return data). For example, in following code: \n\n"
        warn_str += \
                "import numpy as np\n" \
                "from paddle.io import DataLoader, Dataset\n\n" \
                "class RandomDataset(Dataset):\n" \
                "    def __getitem__(self, idx):\n" \
                "        data = np.random.random((2, 3)).astype('float32')\n\n" \
                "        return data\n\n" \
                "    def __len__(self):\n" \
                "        return 10\n\n" \
                "dataset = RandomDataset()\n" \
                "loader = DataLoader(dataset, batch_size=1)\n" \
                "data = next(loader())\n\n"

        warn_str += "In Paddle <= 2.0, data is in format '[Tensor(shape=(1, 2, 3), " \
                    "dtype=float32)]', and in Paddle >= 2.1, data is in format" \
                    " 'Tensor(shape=(1, 2, 3), dtype=float32)'\n"

        logger = get_logger(
            "DataLoader", logging.INFO, fmt='%(levelname)s: %(message)s')
        logger.warning(warn_str)


class _IterableDatasetFetcher(_DatasetFetcher):
    def __init__(self, dataset, auto_collate_batch, collate_fn, drop_last):
        super(_IterableDatasetFetcher, self).__init__(
            dataset, auto_collate_batch, collate_fn, drop_last)
        self.dataset_iter = iter(dataset)

    def fetch(self, batch_indices, done_event=None):
        if self.auto_collate_batch:
            data = []
            for _ in batch_indices:
                if done_event is None or not done_event.is_set():
                    try:
                        data.append(next(self.dataset_iter))
                    except StopIteration:
                        break
                else:
                    return None

            if len(data) == 0 or (self.drop_last and
                                  len(data) < len(batch_indices)):
                raise StopIteration

            global _WARNING_TO_LOG
            if not isinstance(data[0], (Sequence, Mapping)) \
                    and _WARNING_TO_LOG:
                self._log_warning()
                _WARNING_TO_LOG = False
        else:
            data = next(self.dataset_iter)

        if self.collate_fn:
            data = self.collate_fn(data)
        return data


class _MapDatasetFetcher(_DatasetFetcher):
    def __init__(self, dataset, auto_collate_batch, collate_fn, drop_last):
        super(_MapDatasetFetcher, self).__init__(dataset, auto_collate_batch,
                                                 collate_fn, drop_last)

    def fetch(self, batch_indices, done_event=None):
        if self.auto_collate_batch:
            data = []
            for idx in batch_indices:
                if done_event is None or not done_event.is_set():
                    data.append(self.dataset[idx])
                else:
                    return None

            global _WARNING_TO_LOG
            if not isinstance(data[0], (Sequence, Mapping)) \
                    and _WARNING_TO_LOG:
                self._log_warning()
                _WARNING_TO_LOG = False
        else:
            data = self.dataset[batch_indices]

        if self.collate_fn:
            data = self.collate_fn(data)
        return data
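

# A minimal usage sketch (illustrative only; fetchers are created internally
# by DataLoader, and `my_collate_fn` is a hypothetical stand-in for the
# collate function DataLoader would pass in):
#
#     fetcher = _MapDatasetFetcher(dataset, auto_collate_batch=True,
#                                  collate_fn=my_collate_fn, drop_last=False)
#     batch = fetcher.fetch([0, 1, 2, 3])  # reads 4 samples, collates them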