From 478b0c8a904b8db0d59d070d4c82db6704d333a7 Mon Sep 17 00:00:00 2001
From: Helin Wang
Date: Tue, 21 Feb 2017 15:10:42 -0800
Subject: [PATCH] correct data reader and data reader creator usage

---
 doc/design/reader/README.md | 73 +++++++++++++++++++------------------
 1 file changed, 37 insertions(+), 36 deletions(-)

diff --git a/doc/design/reader/README.md b/doc/design/reader/README.md
index f8f79a75b1d..230b8c2326b 100644
--- a/doc/design/reader/README.md
+++ b/doc/design/reader/README.md
@@ -1,35 +1,35 @@
-# Python Data Reading Design Doc
+# Python Data Reader Design Doc
 
-Paddle reads data from *data reader* during training. *data reader creator* (or *reader creator*) creates a *data reader* when invoked. *reader creator* will be passed into `paddle.train` as a parameter.
+Paddle reads data from a *data reader* during training. The *data reader* will be passed into `paddle.train` as a parameter.
 
-## Data Reader Creator Interface
+## Data Reader Interface
 
-Data reader creator is a function with no parameter that creates a iterable (anything can be used in `for x in iterable`):
+A data reader is a function with no parameters that creates an iterable (anything that can be used in `for x in iterable`):
 
 ```
-iterable = data_reader_creator()
+iterable = data_reader()
 ```
 
 Each element produced by the iterable should be a **single** entry of data, **not** a mini batch. The entry could be a single item or a tuple of items. Each item should be of a [supported type](http://www.paddlepaddle.org/doc/ui/data_provider/pydataprovider2.html?highlight=dense_vector#input-types) (e.g., a numpy 1d array of float32, an int, or a list of ints).
 
-An example implementation for single item data reader creator:
+An example implementation for a single-item data reader:
 
 ```python
-def data_reader_creator_fake_image():
+def data_reader_fake_image():
     while True:
         yield numpy.random.uniform(-1, 1, size=20*20)
 ```
 
-An example implementation for multiple item data reader creator:
+An example implementation for a multiple-item data reader:
 
 ```python
-def data_reader_creator_fake_image_and_label():
+def data_reader_fake_image_and_label():
     while True:
         yield numpy.random.uniform(-1, 1, size=20*20), False
 ```
 
 ## Usage
 
-data reader creator, mapping from item(s) read to data layer, batch size and number of total pass will be passed into `paddle.train`:
+The data reader, the mapping from item(s) read to data layer(s), the batch size, and the total number of passes will be passed into `paddle.train`:
 
 ```python
 # two data layers are created:
 image_layer = paddle.layer.data("image", ...)
 label_layer = paddle.layer.data("label", ...)
@@ -41,11 +41,11 @@ label_layer = paddle.layer.data("label", ...)
 paddle.train(paddle.dataset.mnist, {"image":0, "label":1}, 128, 10, ...)
 ```
 
-## Data Reader Creator Decorator
+## Data Reader Decorator
 
-*Data reader creator decorator* (or *reader creator decorator*) takes a single or multiple data reader creator, returns a new data reader creator. It is similar to a [python decorator](https://wiki.python.org/moin/PythonDecorators), but it does not use `@` syntax.
+A *data reader decorator* takes one or more data readers and returns a new data reader. It is similar to a [python decorator](https://wiki.python.org/moin/PythonDecorators), but it does not use the `@` syntax.
 
-Since we have a strict interface for data readers creators (no parameter, return a single data item). Data reader creators can be used flexiable via data reader creator decorators. Following are a few examples:
+Since data readers have a strict interface (no parameters, yield single data entries), they can be combined flexibly via data reader decorators.
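+
+To make the pattern concrete, here is a minimal sketch of what such a decorator can look like. The `limited` name is hypothetical and used only for illustration (it is not part of the Paddle API); it wraps one reader and returns a new reader that yields at most `n` entries:
+
+```python
+def limited(reader, n):
+    # takes a data reader and returns a new data reader
+    # that yields at most n entries from the wrapped reader
+    def new_reader():
+        for i, item in enumerate(reader()):
+            if i >= n:
+                break
+            yield item
+    return new_reader
+```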
+The following are a few examples:
 
 ### Prefetch Data
 
@@ -54,63 +54,63 @@ Since reading data may take time and training can not proceed without data. It i
 Use `paddle.reader.buffered` to prefetch data:
 
 ```python
-buffered_reader_creator = paddle.reader.buffered(paddle.dataset.mnist, 100)
+buffered_reader = paddle.reader.buffered(paddle.dataset.mnist, 100)
 ```
 
-`buffered_reader_creator` will try to buffer (prefetch) `100` data entries.
+`buffered_reader` will try to buffer (prefetch) `100` data entries.
 
-### Compose Multiple Data Reader Creators
+### Compose Multiple Data Readers
 
 For example, we want to use a source of real images (reusing the mnist dataset) and a source of fake images as input for [Generative Adversarial Networks](https://arxiv.org/abs/1406.2661). We can do:
 
 ```python
-def data_reader_creator_fake_image():
+def data_reader_fake_image():
     while True:
         yield numpy.random.uniform(-1, 1, size=20*20)
 
 def data_reader_creator_bool(t):
-    def creator:
+    def reader():
         while True:
             yield t
-    return creator
+    return reader
 
-true_reader_creator = data_reader_creator_bool(True)
-false_reade_creatorr = data_reader_creator_bool(False)
+true_reader = data_reader_creator_bool(True)
+false_reader = data_reader_creator_bool(False)
 
-reader_creator = paddle.reader.compose(paddle.dataset.mnist, data_reader_creator_fake_image, true_reader_creator, false_reader_creator)
+reader = paddle.reader.compose(paddle.dataset.mnist, data_reader_fake_image, true_reader, false_reader)
 # Skipped 1 because paddle.dataset.mnist produces two items per data entry.
 # And we don't care about the second item at this time.
-paddle.train(reader_creator, {"true_image":0, "fake_image": 2, "true_label": 3, "false_label": 4}, ...)
+paddle.train(reader, {"true_image":0, "fake_image": 2, "true_label": 3, "false_label": 4}, ...)
 ```
 
 ### Shuffle
 
-Given shuffle buffer size `n`, `paddle.reader.shuffle` will return a data reader creator that buffers `n` data entries and shuffle them before a data entry is read.
+Given a shuffle buffer size `n`, `paddle.reader.shuffle` will return a data reader that buffers `n` data entries and shuffles them before each entry is read.
 
 Example:
 ```python
-reader_creator = paddle.reader.shuffle(paddle.dataset.mnist, 512)
+reader = paddle.reader.shuffle(paddle.dataset.mnist, 512)
 ```
 
 ## Q & A
 
 ### Why return only a single entry, but not a mini batch?
 
-If a mini batch is returned, data reader creator need to take care of batch size. But batch size is a concept for training, it makes more sense for user to specify batch size as a parameter for `train`.
+If a mini batch were returned, the data reader would need to take care of the batch size. But batch size is a training concept, so it makes more sense for the user to specify it as a parameter of `train`.
 
-Practically, always return a single entry make reusing existing data reader creators much easier (e.g., if existing reader creator return not a single entry but 3 entries, training code will be more complex because it need to handle cases like batch size 2).
+Practically, always returning a single entry makes reusing existing data readers much easier (e.g., if an existing reader returned 3 entries at a time instead of a single entry, the training code would be more complex because it would need to handle cases like a batch size of 2).
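+
+For instance, the same single-entry reader from the Usage section can be trained with different batch sizes simply by changing the `paddle.train` arguments; the reader itself never changes. The following is an illustrative sketch that reuses the call shape shown above:
+
+```python
+paddle.train(paddle.dataset.mnist, {"image":0, "label":1}, 128, 10, ...)  # batch size 128
+paddle.train(paddle.dataset.mnist, {"image":0, "label":1}, 256, 10, ...)  # batch size 256, same reader
+```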
 
 ### Why use a dictionary but not a list to provide mapping?
 
 We decided to use a dictionary (`{"image":0, "label":1}`) instead of a list (`["image", "label"]`) because the user can easily reuse an item (e.g., using `{"image_a":0, "image_b":0, "label":1}`) or skip an item (e.g., using `{"image_a":0, "label":2}`).
 
-### How to create custom data reader creator
+### How to create a custom data reader
 
 ```python
 def image_reader_creator(image_path, label_path, n):
-    def reader_creator():
+    def reader():
         f = open(image_path)
         l = open(label_path)
         images = numpy.fromfile(
@@ -121,10 +121,11 @@ def image_reader_creator(image_path, label_path, n):
             yield images[i, :], labels[i] # a single entry of data is created each time
         f.close()
         l.close()
-    return reader_creator
+    return reader
 
-reader_creator = image_reader_creator("/path/to/image_file", "/path/to/label_file", 1024)
-paddle.train(reader_creator, {"image":0, "label":1}, ...)
+# image_reader_creator creates a reader
+reader = image_reader_creator("/path/to/image_file", "/path/to/label_file", 1024)
+paddle.train(reader, {"image":0, "label":1}, ...)
 ```
 
 ### How is `paddle.train` implemented
 
@@ -132,17 +133,17 @@ paddle.train(reader_creator, {"image":0, "label":1}, ...)
 An example implementation of paddle.train could be:
 
 ```python
-def minibatch_decorater(reader_creator, minibatch_size):
+def minibatch_decorator(reader, minibatch_size):
     def ret():
-        r = reader_creator()
+        r = reader()
         buf = [r.next() for x in xrange(minibatch_size)]
         while len(buf) > 0:
             yield buf
             buf = [r.next() for x in xrange(minibatch_size)]
     return ret
 
-def train(reader_creator, mapping, batch_size, total_pass):
+def train(reader, mapping, batch_size, total_pass):
     for pass_idx in range(total_pass):
-        for mini_batch in minibatch_decorater(reader_creator): # this loop will never end in online learning.
+        for mini_batch in minibatch_decorator(reader, batch_size)(): # this loop will never end in online learning.
             do_forward_backward(mini_batch, mapping)
 ```
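+
+Putting the pieces together, a typical training call could look like the following. This is an illustrative sketch that simply nests the decorators and reuses the `paddle.train` arguments shown above:
+
+```python
+# shuffle with a 512-entry buffer, then prefetch 100 entries
+reader = paddle.reader.buffered(paddle.reader.shuffle(paddle.dataset.mnist, 512), 100)
+paddle.train(reader, {"image":0, "label":1}, 128, 10, ...)
+```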