Commit 9ba231d3 authored by Yu Yang

Complete inferencer.

Parent 4c24ac1a
......@@ -44,6 +44,19 @@ def main():
             batch_size=32),
         event_handler=event_handler)
 
+    # output is a softmax layer. It returns probabilities.
+    # Shape should be (100, 10)
+    probs = paddle.infer(
+        output=inference,
+        parameters=parameters,
+        reader=paddle.reader.batched(
+            paddle.reader.limited(
+                paddle.reader.map_readers(lambda item: (item[0], ),
+                                          paddle.dataset.mnist.test()),
+                limit=100),
+            batch_size=32))
+    print probs.shape
+
 
 if __name__ == '__main__':
     main()
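The call added above hands paddle.infer a reader built by composing map_readers, limited, and batched, which is why the comment expects probabilities of shape (100, 10): at most 100 test images, forwarded in batches of 32 and concatenated afterwards. Below is a rough, Paddle-free sketch of how such a composition behaves; the helper functions are simplified stand-ins written for illustration, not Paddle's actual implementations.

import numpy


def map_readers(func, reader):
    # Simplified single-reader form: apply func to every sample.
    def mapped():
        for item in reader():
            yield func(item)

    return mapped


def limited(reader, limit):
    # Stop after `limit` samples, mirroring the decorator added in this commit.
    def limited_reader():
        for i, item in enumerate(reader()):
            if i == limit:
                break
            yield item

    return limited_reader


def batched(reader, batch_size):
    # Group samples into lists of at most batch_size items.
    def batched_reader():
        batch = []
        for item in reader():
            batch.append(item)
            if len(batch) == batch_size:
                yield batch
                batch = []
        if batch:
            yield batch

    return batched_reader


def toy_test():
    # Stand-in for paddle.dataset.mnist.test(): yields (image, label) pairs.
    for i in range(10000):
        yield numpy.zeros(784, dtype='float32'), i % 10


reader = batched(
    limited(map_readers(lambda item: (item[0], ), toy_test), limit=100),
    batch_size=32)
print(sum(len(batch) for batch in reader()))  # 100 samples, in batches of 32, 32, 32, 4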
......@@ -35,6 +35,7 @@ def reader_creator(image_filename, label_filename, buffer_size):
         l = subprocess.Popen([zcat_cmd, label_filename], stdout=subprocess.PIPE)
         l.stdout.read(8)  # skip some magic bytes
 
+        try:  # reader could be break.
             while True:
                 labels = numpy.fromfile(
                     l.stdout, 'ubyte', count=buffer_size).astype("int")
......@@ -50,7 +51,7 @@ def reader_creator(image_filename, label_filename, buffer_size):
                 for i in xrange(buffer_size):
                     yield images[i, :], int(labels[i])
+        finally:
             m.terminate()
             l.terminate()
......
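The try/finally introduced around the MNIST reader loop matters because a consumer may stop iterating early (for instance when the reader is wrapped in limited above): closing a half-consumed generator raises GeneratorExit at the yield, and the finally block still runs, so the zcat subprocesses are terminated instead of leaking. A small standalone sketch of the same pattern, using a stream built on the Unix `yes` command instead of the real MNIST pipeline:

import subprocess


def stream_reader():
    # Stream lines from a child process and always clean it up, even if the
    # consumer stops iterating early.
    proc = subprocess.Popen(['yes'], stdout=subprocess.PIPE)
    try:  # the consumer may break out of, or close, this generator
        for line in proc.stdout:
            yield line
    finally:
        # Runs on normal exhaustion, on exceptions, and on GeneratorExit,
        # so the child process never leaks.
        proc.terminate()
        proc.wait()


r = stream_reader()
print(next(r))  # consume a single line ...
r.close()       # ... then close early: the finally block terminates `yes`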
......@@ -16,18 +16,18 @@ class InferenceEngine(object):
         for param in gm.getParameters():
             val = param.getBuf(api.PARAMETER_VALUE)
             name = param.getName()
-            assert isinstance(val, api.Matrix)
-            val.copyFromNumpyMat(parameters.get(name))
+            assert isinstance(val, api.Vector)
+            val.copyFromNumpyArray(parameters.get(name).flatten())
         self.__gradient_machine__ = gm
         self.__data_types__ = topo.data_type()
 
     def iter_infer(self, reader, reader_dict=None):
         if reader_dict is None:
             reader_dict = self.default_reader_dict()
         feeder = DataFeeder(self.__data_types__, reader_dict)
-        out_args = api.Arguments.createArguments(0)
         self.__gradient_machine__.start()
         for data_batch in reader():
-            yield self.__gradient_machine__.forwardTest(
-                feeder(data_batch), out_args, api.PASS_TEST)
+            yield self.__gradient_machine__.forwardTest(feeder(data_batch))
         self.__gradient_machine__.finish()
 
     def iter_infer_field(self, field, **kwargs):
......@@ -35,11 +35,16 @@ class InferenceEngine(object):
             yield [each_result[field] for each_result in result]
 
     def infer(self, field='value', **kwargs):
-        retv = []
-        for result in itertools.izip(
-                self.iter_infer_field(
-                    field=field, **kwargs)):
-            retv.append(numpy.concatenate(result))
+        retv = None
+        for result in self.iter_infer_field(field=field, **kwargs):
+            if retv is None:
+                retv = [[]] * len(result)
+            for i, item in enumerate(result):
+                retv[i].append(item)
+        retv = [numpy.concatenate(out) for out in retv]
+        if len(retv) == 1:
+            return retv[0]
+        else:
+            return retv
 
     def default_reader_dict(self):
......
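The rewritten infer() above accumulates one list of per-batch arrays for every network output, concatenates each list once all batches are done, and unwraps the result when there is only a single output. One caveat worth flagging: in Python, `[[]] * len(result)` produces len(result) references to the same list object, so with more than one output every position appends into a single shared list. The sketch below shows the same accumulation with independent per-output lists; the function name and the fabricated batch results are made up for illustration.

import numpy


def gather_outputs(batch_results):
    # batch_results: iterable of per-batch results, each a list holding one
    # numpy array per network output (the structure iter_infer_field yields).
    retv = None
    for result in batch_results:
        if retv is None:
            retv = [[] for _ in result]  # one independent list per output
        for i, item in enumerate(result):
            retv[i].append(item)
    retv = [numpy.concatenate(out) for out in retv]
    return retv[0] if len(retv) == 1 else retv


# Two fake batches, each with two outputs: probabilities and a scalar score.
batches = [
    [numpy.zeros((32, 10)), numpy.zeros((32, 1))],
    [numpy.zeros((4, 10)), numpy.zeros((4, 1))],
]
probs, scores = gather_outputs(batches)
print(probs.shape, scores.shape)  # (36, 10) (36, 1)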
......@@ -14,13 +14,13 @@
 __all__ = [
     'map_readers', 'buffered', 'compose', 'chain', 'shuffle',
-    'ComposeNotAligned', 'batched'
+    'ComposeNotAligned', 'batched', 'limited'
 ]
 
-from Queue import Queue
-from threading import Thread
 import itertools
 import random
+from Queue import Queue
+from threading import Thread
 
 
 def map_readers(func, *readers):
......@@ -213,3 +213,17 @@ def batched(reader, batch_size):
             yield batch
 
     return batched_reader
+
+
+def limited(reader, limit):
+    """
+    Limit the max number of samples that reader could return.
+    """
+
+    def limited_reader():
+        for i, item in enumerate(reader()):
+            if i == limit:
+                break
+            yield item
+
+    return limited_reader
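A possible toy usage of the limited decorator defined just above; the numbers reader is made up for illustration.

def numbers():
    # A toy reader: yields the integers 0..999.
    for i in range(1000):
        yield i


capped = limited(numbers, limit=5)
print(list(capped()))  # [0, 1, 2, 3, 4]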