decorator.py 20.5 KB
Newer Older
1 2 3 4 5 6 7 8 9 10 11 12 13 14
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

T
tangwei12 已提交
15 16
from threading import Thread
import subprocess
Q
Qiao Longfei 已提交
17
import multiprocessing
18
import six
Q
Qiao Longfei 已提交
19
import sys
20
import warnings
T
tangwei12 已提交
21

22
from six.moves.queue import Queue
23
from six.moves import zip_longest
24 25
from six.moves import map
from six.moves import zip
26 27
import itertools
import random
T
tangwei12 已提交
28
import zlib
29

M
minqiyang 已提交
30
import paddle.compat as cpt
31
from paddle.fluid.reader import QUEUE_GET_TIMEOUT
32

33 34
__all__ = []

35 36 37 38 39 40 41 42 43 44
# On macOS, the 'spawn' start method is now the default in Python3.8 multiprocessing,
# Paddle is currently unable to solve this, so forces the process to start using 
# the 'fork' start method.
#
# TODO: This solution is not good, because the fork start method could lead to 
# crashes of the subprocess. Figure out how to make 'spawn' work.
#
# For more details, please refer to
# https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
# https://bugs.python.org/issue33725
45
if sys.version_info >= (3, 8) and sys.platform == 'darwin':
46 47 48 49
    fork_context = multiprocessing.get_context('fork')
else:
    fork_context = multiprocessing

50

S
sneaxiy 已提交
51 52 53 54 55 56 57 58 59 60 61 62 63
def cache(reader):
    """
    Cache the reader data into memory. 

    Be careful that this method may take long time to process, 
    and consume lots of memory. :code:`reader()` would only 
    call once. 

    Args:
        reader (generator): a reader object which yields 
            data each time.

    Returns:
S
sneaxiy 已提交
64
        generator: a decorated reader object which yields data from cached memory.
65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80
    
    Examples:
        .. code-block:: python

            import paddle
            
            def reader():
                for i in range(3):
                    yield i
            
            # All data is cached into memory
            cached_reader = paddle.io.cache(reader)
            
            # Output: 0 1 2
            for i in cached_reader():
                print(i)
S
sneaxiy 已提交
81 82 83 84 85 86 87 88 89 90
    """
    all_data = tuple(reader())

    def __impl__():
        for item in all_data:
            yield item

    return __impl__


H
Helin Wang 已提交
91 92 93
def map_readers(func, *readers):
    """
    Creates a data reader that outputs return value of function using
94
    output of each data reader as arguments.
H
Helin Wang 已提交
95

96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120
    If input readers output the following data entries: 2 3,
    and the input func is mul(x, y),
    the output of the resulted reader will be 6.


    Args:
        func: a function to read data and compute result, the output of this function 
              will be set as the output of the resulted data reader.
        readers (Reader|list of Reader): list of readers whose outputs will be used as arguments of func.
 
    Returns:
        the resulted data reader (Reader)

    Examples:

        .. code-block:: python

         import paddle.reader
         d = {"h": 0, "i": 1}
         def func(x):
             return d[x]
         def reader():
             yield "h"
             yield "i"
         map_reader_result = paddle.reader.map_readers(func, reader)
H
Helin Wang 已提交
121 122 123 124 125 126
    """

    def reader():
        rs = []
        for r in readers:
            rs.append(r())
127
        for e in map(func, *rs):
H
Helin Wang 已提交
128 129 130 131 132
            yield e

    return reader


H
Helin Wang 已提交
133
def shuffle(reader, buf_size):
134
    """
135 136
    paddle.fluid.io.shuffle ( :ref:`api_fluid_io_shuffle` ) is recommended to use,
    and paddle.reader.shuffle is an alias.
137

138
    This API creates a decorated reader that outputs the shuffled data.
139

140 141 142 143 144 145
    The output data from the origin reader will be saved into a buffer, 
    and then shuffle the data. The size of buffer is determined by argument buf_size.
 
    Args:
        reader(callable): the original reader whose data will be shuffled.
        buf_size(int): the size of shuffled buffer.
146

147 148 149 150 151 152 153 154 155 156 157 158 159 160 161
    Returns:
        callable: a decorated reader.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            def reader():
                for i in range(5):
                    yield i
            shuffled_reader = fluid.io.shuffle(reader, 3)
            for e in shuffled_reader():
                print(e)
            # outputs are 0~4 unordered arrangement
162 163
    """

H
Helin Wang 已提交
164
    def data_reader():
165
        buf = []
H
Helin Wang 已提交
166
        for e in reader():
167 168 169 170 171 172 173 174 175 176 177 178
            buf.append(e)
            if len(buf) >= buf_size:
                random.shuffle(buf)
                for b in buf:
                    yield b
                buf = []

        if len(buf) > 0:
            random.shuffle(buf)
            for b in buf:
                yield b

H
Helin Wang 已提交
179
    return data_reader
180 181


H
Helin Wang 已提交
182
def chain(*readers):
183
    """
184
    Use the input data readers to create a chained data reader. The new created reader
185 186
    chains the outputs of input readers together as its output, and it do not change
    the format of the outputs.
187

188 189 190 191 192 193 194 195
    **Note**:
        ``paddle.reader.chain`` is the alias of ``paddle.fluid.io.chain``, and
        ``paddle.fluid.io.chain`` is recommended to use.

    For example, if three input readers' outputs are as follows:
    [0, 0, 0],
    [10, 10, 10],
    [20, 20, 20].
H
Helin Wang 已提交
196
    The chained reader will output:
197
    [0, 0, 0], [10, 10, 10], [20, 20, 20].
198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228

    Args:
        readers(list): input data readers.

    Returns:
        callable: the new chained data reader.

    Examples:
        ..  code-block:: python

            import paddle

            def reader_creator_3(start):
                def reader():
                    for i in range(start, start + 3):
                        yield [i, i, i]
                return reader

            c = paddle.reader.chain(reader_creator_3(0), reader_creator_3(10), reader_creator_3(20))
            for e in c():
                print(e)
            # Output:
            # [0, 0, 0]
            # [1, 1, 1]
            # [2, 2, 2]
            # [10, 10, 10]
            # [11, 11, 11]
            # [12, 12, 12]
            # [20, 20, 20]
            # [21, 21, 21]
            # [22, 22, 22]
229 230 231

    """

H
Helin Wang 已提交
232
    def reader():
233
        rs = []
H
Helin Wang 已提交
234
        for r in readers:
235 236 237 238 239
            rs.append(r())

        for e in itertools.chain(*rs):
            yield e

H
Helin Wang 已提交
240
    return reader
241 242


H
Helin Wang 已提交
243
class ComposeNotAligned(ValueError):
244 245 246
    pass


H
Helin Wang 已提交
247
def compose(*readers, **kwargs):
248 249
    """
    Creates a data reader whose output is the combination of input readers.
250

H
Helin Wang 已提交
251
    If input readers output following data entries:
252
    (1, 2)    3    (4, 5)
H
Helin Wang 已提交
253
    The composed reader will output:
254 255
    (1, 2, 3, 4, 5)

H
huzhiqiang 已提交
256 257 258 259 260 261 262 263 264 265 266 267 268 269 270
    Args:
        readers (Reader|list of Reader): readers that will be composed together. 
        check_alignment(bool, optional): Indicates whether the input readers are checked for
                              alignment. If True, whether input readers are aligned
                              correctly will be checked, else alignment will not be checkout and trailing outputs
                              will be discarded. Defaults to True.

    Returns: 
        the new data reader (Reader).

    Raises:
        ComposeNotAligned: outputs of readers are not aligned. This will not raise if check_alignment is set to False.
  
    Examples:
        .. code-block:: python
271

H
huzhiqiang 已提交
272 273 274 275 276 277 278
          import paddle.fluid as fluid
          def reader_creator_10(dur):
              def reader():
                 for i in range(10):
                     yield i
              return reader
          reader = fluid.io.compose(reader_creator_10(0), reader_creator_10(0))
279 280 281 282 283 284 285 286 287
    """
    check_alignment = kwargs.pop('check_alignment', True)

    def make_tuple(x):
        if isinstance(x, tuple):
            return x
        else:
            return (x, )

H
Helin Wang 已提交
288
    def reader():
289
        rs = []
H
Helin Wang 已提交
290
        for r in readers:
291 292
            rs.append(r())
        if not check_alignment:
293 294
            for outputs in zip(*rs):
                yield sum(list(map(make_tuple, outputs)), ())
295
        else:
296
            for outputs in zip_longest(*rs):
297 298 299
                for o in outputs:
                    if o is None:
                        # None will be not be present if compose is aligned
H
Helin Wang 已提交
300 301
                        raise ComposeNotAligned(
                            "outputs of readers are not aligned.")
302
                yield sum(list(map(make_tuple, outputs)), ())
303

H
Helin Wang 已提交
304
    return reader
305 306


H
Helin Wang 已提交
307
def buffered(reader, size):
308 309
    """
    Creates a buffered data reader.
310

H
Helin Wang 已提交
311 312
    The buffered data reader will read and save data entries into a
    buffer. Reading from the buffered data reader will proceed as long
313
    as the buffer is not empty.
314

315 316 317 318 319 320 321 322 323
    Args:
        reader(generator): the data reader to read from.
        size(int): max buffer size.

    Returns:
        generator: the buffered data reader.
    
    Examples:
        .. code-block:: python
324

325 326 327 328 329 330 331 332 333 334 335 336
            import paddle
            
            def reader():
                for i in range(3):
                    yield i
            
            # Create a buffered reader, and the buffer size is 2.
            buffered_reader = paddle.io.buffered(reader, 2)
            
            # Output: 0 1 2
            for i in buffered_reader():
                print(i)
337 338 339 340 341 342 343 344 345 346 347 348
    """

    class EndSignal():
        pass

    end = EndSignal()

    def read_worker(r, q):
        for d in r:
            q.put(d)
        q.put(end)

H
Helin Wang 已提交
349 350
    def data_reader():
        r = reader()
351
        q = Queue(maxsize=size)
352 353 354 355 356 357 358 359 360 361 362
        t = Thread(
            target=read_worker, args=(
                r,
                q, ))
        t.daemon = True
        t.start()
        e = q.get()
        while e != end:
            yield e
            e = q.get()

H
Helin Wang 已提交
363
    return data_reader
Y
Yu Yang 已提交
364 365


Y
Yu Yang 已提交
366
def firstn(reader, n):
Y
Yu Yang 已提交
367
    """
368 369 370 371 372
    paddle.fluid.io.firstn ( :ref:`api_fluid_io_firstn` ) is recommended to use,
    and paddle.reader.firstn is an alias.
    
    This API creates a decorated reader, and limits the max number of 
    samples that reader could return.
Y
Yu Yang 已提交
373

374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392
    Args:
        reader(callable): the input reader.
        n(int): the max number of samples in the reader.

    Returns:
        callable: the decorated reader.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            def reader():
                for i in range(100):
                    yield i
            firstn_reader = fluid.io.firstn(reader, 5)
            for e in firstn_reader():
                print(e)
            # the outputs are: 0 1 2 3 4  
Y
Yu Yang 已提交
393 394
    """

Y
Yu Yang 已提交
395 396 397 398
    # TODO(yuyang18): Check if just drop the reader, could clean the opened
    # resource or not?

    def firstn_reader():
Y
Yu Yang 已提交
399
        for i, item in enumerate(reader()):
Y
Yu Yang 已提交
400
            if i == n:
Y
Yu Yang 已提交
401 402 403
                break
            yield item

Y
Yu Yang 已提交
404
    return firstn_reader
405 406 407 408 409 410


class XmapEndSignal():
    pass


411
def xmap_readers(mapper, reader, process_num, buffer_size, order=False):
412
    """
Z
Zeng Jinle 已提交
413 414 415 416 417 418 419 420 421 422 423 424
    Use multi-threads to map samples from reader by a mapper defined by user.

    Args:
        mapper (callable): a function to map the data from reader.
        reader (callable): a data reader which yields the data. 
        process_num (int): thread number to handle original sample.
        buffer_size (int): size of the queue to read data in. 
        order (bool): whether to keep the data order from original reader. 
            Default False.

    Returns:
        callable: a decorated reader with data mapping. 
425 426
    """
    end = XmapEndSignal()
W
wanghaoshuang 已提交
427

428 429 430 431 432
    # define a worker to read samples from reader to in_queue
    def read_worker(reader, in_queue):
        for i in reader():
            in_queue.put(i)
        in_queue.put(end)
W
wanghaoshuang 已提交
433

434 435 436 437
    # define a worker to read samples from reader to in_queue with order flag
    def order_read_worker(reader, in_queue):
        in_order = 0
        for i in reader():
W
wanghaoshuang 已提交
438 439
            in_queue.put((in_order, i))
            in_order += 1
440
        in_queue.put(end)
441 442 443 444 445 446 447 448 449 450 451

    # define a worker to handle samples from in_queue by mapper
    # and put mapped samples into out_queue
    def handle_worker(in_queue, out_queue, mapper):
        sample = in_queue.get()
        while not isinstance(sample, XmapEndSignal):
            r = mapper(sample)
            out_queue.put(r)
            sample = in_queue.get()
        in_queue.put(end)
        out_queue.put(end)
W
wanghaoshuang 已提交
452

453 454 455 456 457 458 459 460 461 462
    # define a worker to handle samples from in_queue by mapper
    # and put mapped samples into out_queue by order
    def order_handle_worker(in_queue, out_queue, mapper, out_order):
        ins = in_queue.get()
        while not isinstance(ins, XmapEndSignal):
            order, sample = ins
            r = mapper(sample)
            while order != out_order[0]:
                pass
            out_queue.put(r)
W
wanghaoshuang 已提交
463
            out_order[0] += 1
464 465 466
            ins = in_queue.get()
        in_queue.put(end)
        out_queue.put(end)
467 468

    def xreader():
469 470
        in_queue = Queue(buffer_size)
        out_queue = Queue(buffer_size)
471 472 473 474 475 476 477 478 479 480 481
        out_order = [0]
        # start a read worker in a thread
        target = order_read_worker if order else read_worker
        t = Thread(target=target, args=(reader, in_queue))
        t.daemon = True
        t.start()
        # start several handle_workers
        target = order_handle_worker if order else handle_worker
        args = (in_queue, out_queue, mapper, out_order) if order else (
            in_queue, out_queue, mapper)
        workers = []
482
        for i in range(process_num):
483 484 485 486 487 488
            worker = Thread(target=target, args=args)
            worker.daemon = True
            workers.append(worker)
        for w in workers:
            w.start()

489 490 491 492 493 494 495 496 497 498 499 500 501
        sample = out_queue.get()
        while not isinstance(sample, XmapEndSignal):
            yield sample
            sample = out_queue.get()
        finish = 1
        while finish < process_num:
            sample = out_queue.get()
            if isinstance(sample, XmapEndSignal):
                finish += 1
            else:
                yield sample

    return xreader
502 503


Q
Qiao Longfei 已提交
504 505
def multiprocess_reader(readers, use_pipe=True, queue_size=1000):
    """
506 507
    This API use python ``multiprocessing`` to read data from ``readers`` parallelly,
    and then ``multiprocess.Queue`` or ``multiprocess.Pipe`` is used to merge 
T
tianshuo78520a 已提交
508
    these data. A separate process will be created for each reader in the 
509 510 511 512
    ``readers`` list, please guarantee every reader can work independently 
    to avoid conflicts in parallel environment.
    

T
tianshuo78520a 已提交
513
    ``Multiprocess.Queue`` require the rw access right to /dev/shm, and it's not supported 
514
    in some platforms.
Q
Qiao Longfei 已提交
515

516 517 518 519 520 521 522 523
    Parameters:
       readers (list( ``generator`` ) | tuple( ``generator`` )): a python ``generator`` list 
           used to read input data
       use_pipe (bool, optional): control the inner API used to implement the multi-processing,
           default True - use ``multiprocess.Pipe`` which is recommended
       queue_size (int, optional): only useful when ``use_pipe`` is False - ``multiprocess.Queue``
           is used, default 1000. Increase this value can speed up the data reading, and more memory
           will be consumed.
Q
Qiao Longfei 已提交
524

525 526
    Returns:
        ``generator``: a new reader which can be run parallelly
Q
Qiao Longfei 已提交
527

528 529

    Example:
Q
Qiao Longfei 已提交
530 531 532

    .. code-block:: python

533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583
        import paddle.fluid as fluid
        from paddle.fluid.io import multiprocess_reader
        import numpy as np
        
        sample_files = ['sample_file_1', 'sample_file_2']
        
        def fake_input_files():
            with open(sample_files[0], 'w') as f:
               np.savez(f, a=np.array([1, 2]), b=np.array([3, 4]), c=np.array([5, 6]), d=np.array([7, 8]))
            with open(sample_files[1], 'w') as f:
               np.savez(f, a=np.array([9, 10]), b=np.array([11, 12]), c=np.array([13, 14]))
        
        
        def generate_reader(file_name):
            # load data file
            def _impl():
                data = np.load(file_name)
                for item in sorted(data.files):
                    yield data[item],
            return _impl
        
        if __name__ == '__main__':
            # generate sample input files
            fake_input_files()
            
            with fluid.program_guard(fluid.Program(), fluid.Program()):
                place = fluid.CPUPlace()
                # the 1st 2 is batch size
                image = fluid.data(name='image', dtype='int64', shape=[2, 1, 2]) 
                fluid.layers.Print(image)
                # print detailed tensor info of image variable
            
                reader = fluid.io.PyReader(feed_list=[image], capacity=2)
            
                decorated_reader = multiprocess_reader(
                    [generate_reader(sample_files[0]), generate_reader(sample_files[1])], False)
            
                reader.decorate_sample_generator(decorated_reader, batch_size=2, places=[place])
            
                exe = fluid.Executor(place)
                exe.run(fluid.default_startup_program())
            
                for data in reader():
                    res = exe.run(feed=data, fetch_list=[image])
                    print(res[0])
                    # print below content in this case
                    # [[[1 2]], [[3 4]]]
                    # [[[5 6]], [[7 8]]]
                    # [[[9 10]], [[11 12]]]
                    # [13,14] will be dropped

Q
Qiao Longfei 已提交
584 585
    """

586 587 588 589
    if sys.platform == 'win32':
        raise NotImplementedError(
            "The multiprocess_reader method is not supported on windows.")

590
    # ujson is ultra fast json encoder and decoder written in pure C with bindings for Python 3.6+.
Q
Qiao Longfei 已提交
591 592 593
    try:
        import ujson as json
    except Exception as e:
594 595 596
        warnings.warn(
            "The `ujson` module is not found, use the `json` module, `ujson` encodes and decodes faster, "
            "you can install `ujson` through `pip install ujson`.")
Q
Qiao Longfei 已提交
597 598
        import json

599 600
    assert isinstance(readers, (list, tuple)) and len(readers) > 0, (
        "`readers` must be list or tuple.")
Q
Qiao Longfei 已提交
601 602

    def _read_into_queue(reader, queue):
603 604 605 606 607 608 609 610 611
        try:
            for sample in reader():
                if sample is None:
                    raise ValueError("sample has None")
                queue.put(sample)
            queue.put(None)
        except:
            queue.put("")
            six.reraise(*sys.exc_info())
Q
Qiao Longfei 已提交
612 613

    def queue_reader():
614
        queue = fork_context.Queue(queue_size)
Q
Qiao Longfei 已提交
615
        for reader in readers:
616
            p = fork_context.Process(
Q
Qiao Longfei 已提交
617 618 619 620 621 622
                target=_read_into_queue, args=(reader, queue))
            p.start()

        reader_num = len(readers)
        finish_num = 0
        while finish_num < reader_num:
623 624 625 626 627 628 629 630
            try:
                sample = queue.get(timeout=QUEUE_GET_TIMEOUT)
            except:
                logging.error(
                    "multiprocess_reader failed to get data from the multiprocessing.Queue."
                )
                six.reraise(*sys.exc_info())

Q
Qiao Longfei 已提交
631 632
            if sample is None:
                finish_num += 1
633
            elif sample == "":
634 635 636
                raise ValueError(
                    "multiprocess_reader failed to put data into the multiprocessing.Queue."
                )
Q
Qiao Longfei 已提交
637 638 639 640
            else:
                yield sample

    def _read_into_pipe(reader, conn):
641 642 643 644 645 646 647 648 649 650 651
        try:
            for sample in reader():
                if sample is None:
                    raise ValueError("sample has None!")
                conn.send(json.dumps(sample))
            conn.send(json.dumps(None))
            conn.close()
        except:
            conn.send(json.dumps(""))
            conn.close()
            six.reraise(*sys.exc_info())
Q
Qiao Longfei 已提交
652 653 654 655

    def pipe_reader():
        conns = []
        for reader in readers:
656
            parent_conn, child_conn = fork_context.Pipe()
Q
Qiao Longfei 已提交
657
            conns.append(parent_conn)
658
            p = fork_context.Process(
Q
Qiao Longfei 已提交
659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674
                target=_read_into_pipe, args=(reader, child_conn))
            p.start()

        reader_num = len(readers)
        finish_num = 0
        conn_to_remove = []
        while finish_num < reader_num:
            for conn in conn_to_remove:
                conns.remove(conn)
            conn_to_remove = []
            for conn in conns:
                sample = json.loads(conn.recv())
                if sample is None:
                    finish_num += 1
                    conn.close()
                    conn_to_remove.append(conn)
675 676 677
                elif sample == "":
                    conn.close()
                    conn_to_remove.append(conn)
678 679 680
                    raise ValueError(
                        "multiprocess_reader failed to send data into the multiprocessing.Pipe."
                    )
Q
Qiao Longfei 已提交
681 682 683 684 685 686 687
                else:
                    yield sample

    if use_pipe:
        return pipe_reader
    else:
        return queue_reader