Commit 6b1a91f9 authored by Q qiaolongfei

Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into rnn

@@ -39,17 +39,14 @@ def main():
     trainer = paddle.trainer.SGD(update_equation=adam_optimizer)

-    trainer.train(train_data_reader=train_reader,
-                  topology=cost,
-                  parameters=parameters,
-                  event_handler=event_handler,
-                  batch_size=32,  # batch size should be refactor in Data reader
-                  data_types=[  # data_types will be removed, It should be in
-                      # network topology
-                      ('pixel', images.type),
-                      ('label', label.type)],
-                  reader_dict={'pixel':0, 'label':1}
-                  )
+    trainer.train(
+        train_data_reader=train_reader,
+        cost=cost,
+        parameters=parameters,
+        event_handler=event_handler,
+        batch_size=32,  # batch size should be refactor in Data reader
+        reader_dict={images.name: 0,
+                     label.name: 1})


 if __name__ == '__main__':
@@ -18,6 +18,7 @@ import parameters
 import trainer
 import event
 import data_type
+import topology
 import data_feeder
 import attr
 import pooling
@@ -25,7 +26,7 @@ import py_paddle.swig_paddle as api
 __all__ = [
     'optimizer', 'layer', 'activation', 'parameters', 'init', 'trainer',
-    'event', 'data_type', 'attr', 'pooling', 'data_feeder'
+    'event', 'data_type', 'attr', 'pooling', 'data_feeder', 'topology'
 ]
@@ -23,7 +23,7 @@ class DataFeeder(DataProviderConverter):
     """
     DataFeeder converts the data returned by paddle.reader into a data structure
     of Arguments which is defined in the API. The paddle.reader usually returns
-    a list of mini-batch data entries. Each data entry in the list is one sampe.
+    a list of mini-batch data entries. Each data entry in the list is one sample.
     Each sample is a list or a tuple with one feature or multiple features.
     DataFeeder converts this mini-batch data entries into Arguments in order
     to feed it to C++ interface.
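For reference, a minimal sketch of the mini-batch layout this docstring describes (the names and values below are illustrative assumptions based on the demo in the first hunk, not part of the diff):

# One mini-batch: a list of samples, each a list/tuple of features.
minibatch = [
    ([0.0] * 784, 7),  # sample 0: (dense 'pixel' features, integer 'label')
    ([0.5] * 784, 1),  # sample 1
]
# reader_dict maps each data layer name to the sample column that feeds it:
reader_dict = {'pixel': 0, 'label': 1}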
@@ -13,10 +13,10 @@
 # limitations under the License.

 from paddle.trainer.PyDataProvider2 import \
-    InputType, dense_vector, sparse_binary_vector,\
-    sparse_vector, integer_value, integer_value_sequence
+    InputType, DataType, dense_vector, sparse_binary_vector,\
+    sparse_vector, integer_value, integer_value_sequence

 __all__ = [
-    'InputType', 'dense_vector', 'sparse_binary_vector', 'sparse_vector',
-    'integer_value', 'integer_value_sequence'
+    'InputType', 'DataType', 'dense_vector', 'sparse_binary_vector',
+    'sparse_vector', 'integer_value', 'integer_value_sequence'
 ]
"""
CIFAR Dataset.
URL: https://www.cs.toronto.edu/~kriz/cifar.html
the default train_creator, test_creator used for CIFAR-10 dataset.
"""
import cPickle
import itertools
import tarfile
import numpy
from config import download
__all__ = [
'cifar_100_train_creator', 'cifar_100_test_creator', 'train_creator',
'test_creator'
]
CIFAR10_URL = 'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
CIFAR10_MD5 = 'c58f30108f718f92721af3b95e74349a'
CIFAR100_URL = 'https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz'
CIFAR100_MD5 = 'eb9058c3a382ffc7106e4002c42a8d85'
def __read_batch__(filename, sub_name):
def reader():
def __read_one_batch_impl__(batch):
data = batch['data']
labels = batch.get('labels', batch.get('fine_labels', None))
assert labels is not None
for sample, label in itertools.izip(data, labels):
yield (sample / 255.0).astype(numpy.float32), int(label)
with tarfile.open(filename, mode='r') as f:
names = (each_item.name for each_item in f
if sub_name in each_item.name)
for name in names:
batch = cPickle.load(f.extractfile(name))
for item in __read_one_batch_impl__(batch):
yield item
return reader
def cifar_100_train_creator():
fn = download(url=CIFAR100_URL, md5=CIFAR100_MD5)
return __read_batch__(fn, 'train')
def cifar_100_test_creator():
fn = download(url=CIFAR100_URL, md5=CIFAR100_MD5)
return __read_batch__(fn, 'test')
def train_creator():
"""
Default train reader creator. Use CIFAR-10 dataset.
"""
fn = download(url=CIFAR10_URL, md5=CIFAR10_MD5)
return __read_batch__(fn, 'data_batch')
def test_creator():
"""
Default test reader creator. Use CIFAR-10 dataset.
"""
fn = download(url=CIFAR10_URL, md5=CIFAR10_MD5)
return __read_batch__(fn, 'test_batch')
def unittest():
for _ in train_creator()():
pass
for _ in test_creator()():
pass
if __name__ == '__main__':
unittest()
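A quick usage sketch for these creators. The two-step call (creator, then reader) is exactly what unittest() above exercises; the printed shape is an assumption based on CIFAR's 3072-byte image rows:

import itertools

reader = train_creator()  # returns a reader function
for image, label in itertools.islice(reader(), 3):
    # image: float32 numpy array scaled to [0, 1]; label: int class id
    print image.shape, label  # e.g. (3072,) 6 -- values illustrative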
+import hashlib
 import os
+import shutil
+import urllib2

-__all__ = ['DATA_HOME']
+__all__ = ['DATA_HOME', 'download']

 DATA_HOME = os.path.expanduser('~/.cache/paddle_data_set')

 if not os.path.exists(DATA_HOME):
     os.makedirs(DATA_HOME)
+
+
+def download(url, md5):
+    filename = os.path.split(url)[-1]
+    assert DATA_HOME is not None
+    filepath = os.path.join(DATA_HOME, md5)
+    if not os.path.exists(filepath):
+        os.makedirs(filepath)
+    __full_file__ = os.path.join(filepath, filename)
+
+    def __file_ok__():
+        if not os.path.exists(__full_file__):
+            return False
+        md5_hash = hashlib.md5()
+        with open(__full_file__, 'rb') as f:
+            for chunk in iter(lambda: f.read(4096), b""):
+                md5_hash.update(chunk)
+        return md5_hash.hexdigest() == md5
+
+    while not __file_ok__():
+        response = urllib2.urlopen(url)
+        with open(__full_file__, mode='wb') as of:
+            shutil.copyfileobj(fsrc=response, fdst=of)
+
+    return __full_file__
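Usage follows the CIFAR module above: pass a URL and its expected MD5 and get back a verified local path; the loop re-downloads until the checksum matches. A hedged sketch reusing the CIFAR-10 constants:

path = download(url=CIFAR10_URL, md5=CIFAR10_MD5)
# path points at a file under ~/.cache/paddle_data_set/<md5>/ whose MD5
# digest has been verified against the expected value.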
import zipfile
from config import download
import re
import random
import functools

__all__ = ['train_creator', 'test_creator']


class MovieInfo(object):
    def __init__(self, index, categories, title):
        self.index = int(index)
        self.categories = categories
        self.title = title

    def value(self):
        return [
            self.index, [CATEGORIES_DICT[c] for c in self.categories],
            [MOVIE_TITLE_DICT[w.lower()] for w in self.title.split()]
        ]


class UserInfo(object):
    def __init__(self, index, gender, age, job_id):
        self.index = int(index)
        self.is_male = gender == 'M'
        self.age = [1, 18, 25, 35, 45, 50, 56].index(int(age))
        self.job_id = int(job_id)

    def value(self):
        return [self.index, 0 if self.is_male else 1, self.age, self.job_id]


MOVIE_INFO = None
MOVIE_TITLE_DICT = None
CATEGORIES_DICT = None
USER_INFO = None


def __initialize_meta_info__():
    fn = download(
        url='http://files.grouplens.org/datasets/movielens/ml-1m.zip',
        md5='c4d9eecfca2ab87c1945afe126590906')
    global MOVIE_INFO
    if MOVIE_INFO is None:
        pattern = re.compile(r'^(.*)\((\d+)\)$')
        with zipfile.ZipFile(file=fn) as package:
            for info in package.infolist():
                assert isinstance(info, zipfile.ZipInfo)

            MOVIE_INFO = dict()
            title_word_set = set()
            categories_set = set()
            with package.open('ml-1m/movies.dat') as movie_file:
                for i, line in enumerate(movie_file):
                    movie_id, title, categories = line.strip().split('::')
                    categories = categories.split('|')
                    for c in categories:
                        categories_set.add(c)
                    title = pattern.match(title).group(1)
                    MOVIE_INFO[int(movie_id)] = MovieInfo(
                        index=movie_id, categories=categories, title=title)
                    for w in title.split():
                        title_word_set.add(w.lower())

            global MOVIE_TITLE_DICT
            MOVIE_TITLE_DICT = dict()
            for i, w in enumerate(title_word_set):
                MOVIE_TITLE_DICT[w] = i

            global CATEGORIES_DICT
            CATEGORIES_DICT = dict()
            for i, c in enumerate(categories_set):
                CATEGORIES_DICT[c] = i

            global USER_INFO
            USER_INFO = dict()
            with package.open('ml-1m/users.dat') as user_file:
                for line in user_file:
                    uid, gender, age, job, _ = line.strip().split("::")
                    USER_INFO[int(uid)] = UserInfo(
                        index=uid, gender=gender, age=age, job_id=job)
    return fn


def __reader__(rand_seed=0, test_ratio=0.1, is_test=False):
    fn = __initialize_meta_info__()
    rand = random.Random(x=rand_seed)
    with zipfile.ZipFile(file=fn) as package:
        with package.open('ml-1m/ratings.dat') as rating:
            for line in rating:
                if (rand.random() < test_ratio) == is_test:
                    uid, mov_id, rating, _ = line.strip().split("::")
                    uid = int(uid)
                    mov_id = int(mov_id)
                    rating = float(rating) * 2 - 5.0
                    mov = MOVIE_INFO[mov_id]
                    usr = USER_INFO[uid]
                    yield usr.value() + mov.value() + [[rating]]


def __reader_creator__(**kwargs):
    return lambda: __reader__(**kwargs)


train_creator = functools.partial(__reader_creator__, is_test=False)
test_creator = functools.partial(__reader_creator__, is_test=True)


def unittest():
    for train_count, _ in enumerate(train_creator()()):
        pass
    for test_count, _ in enumerate(test_creator()()):
        pass
    print train_count, test_count


if __name__ == '__main__':
    unittest()
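As with the CIFAR readers, each creator returns a reader; one yielded sample concatenates user features, movie features, and a rescaled rating. A hedged sketch; the field breakdown follows UserInfo.value() and MovieInfo.value() above:

# Sample layout:
#   [user_id, gender flag (0 = male), age_bucket, job_id,   # UserInfo.value()
#    movie_id, [category ids], [title word ids],            # MovieInfo.value()
#    [rating]]                                              # 1-5 stars mapped to [-3.0, 5.0]
for sample in train_creator()():
    print sample
    break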
@@ -362,6 +362,7 @@ def mixed(size=0,
     return MixedLayerV2(size, input, name, act, bias_attr, layer_attr)


+LayerV2 = Layer
 data = DataLayerV2
 AggregateLevel = conf_helps.layers.AggregateLevel
 ExpandLevel = conf_helps.layers.ExpandLevel
 import numpy as np
-from . import layer as v2_layer
 import py_paddle.swig_paddle as api
 from paddle.proto.ParameterConfig_pb2 import ParameterConfig
+from topology import Topology

 __all__ = ['Parameters', 'create']


-def create(*layers):
+def create(layers):
     """
-    Create parameter pool by layers. In paddle, layer can be represent a
-    model config.
+    Create parameter pool by topology.

     :param layers:
     :return:
     """
-    for layer in layers:
-        if not isinstance(layer, v2_layer.Layer):
-            raise ValueError(
-                'create must pass a topologies which type is paddle.layer.Layer')
-    model_config = v2_layer.parse_network(*layers)
+    topology = Topology(layers)
     pool = Parameters()
-    for param in model_config.parameters:
+    for param in topology.proto().parameters:
         pool.__append_config__(param)
     return pool

@@ -224,7 +219,8 @@ class Parameters(object):
             except ValueError:
                 # If no such parameter in gradient machine, then don't copy
                 pass
+
         self.__gradient_machines__.append(gradient_machine)


 def __get_parameter_in_gradient_machine__(gradient_machine, name):
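Under the new signature, create() takes the output layer(s) directly and delegates validation to Topology. A hedged sketch, borrowing the cost layer from the demo in the first hunk:

import paddle.v2 as paddle

# 'cost' is a v2 cost layer, e.g. the classification cost built in the demo.
parameters = paddle.parameters.create(cost)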
+add_test(NAME test_v2_api
+  COMMAND bash ${PROJ_ROOT}/python/paddle/v2/tests/run_tests.sh ${PYTHON_EXECUTABLE})
 add_test(NAME test_v2_layer
   COMMAND ${PROJ_ROOT}/paddle/.set_python_path.sh -d ${PROJ_ROOT}/python/
     ${PYTHON_EXECUTABLE} ${PROJ_ROOT}/python/paddle/v2/tests/test_layer.py
@@ -7,5 +10,8 @@ add_test(NAME test_v2_rnn_layer
   COMMAND ${PROJ_ROOT}/paddle/.set_python_path.sh -d ${PROJ_ROOT}/python/
     ${PYTHON_EXECUTABLE} ${PROJ_ROOT}/python/paddle/v2/tests/test_rnn_layer.py)
-add_test(NAME test_v2_api
-  COMMAND bash ${PROJ_ROOT}/python/paddle/v2/tests/run_tests.sh ${PYTHON_EXECUTABLE})
+add_test(NAME test_topology
+  COMMAND ${PROJ_ROOT}/paddle/.set_python_path.sh -d ${PROJ_ROOT}/python/
+    ${PYTHON_EXECUTABLE} ${PROJ_ROOT}/python/paddle/v2/tests/test_topology.py
+  WORKING_DIRECTORY ${PROJ_ROOT}/python/paddle)
# Copyright PaddlePaddle contributors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest

import paddle.v2.layer as layer
import paddle.v2.topology as topology
import paddle.v2.data_type as data_type
import paddle.trainer_config_helpers as conf_helps


class TestTopology(unittest.TestCase):
    def test_data_type(self):
        pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
        label = layer.data(name='label', type=data_type.integer_value(10))
        hidden = layer.fc(input=pixel,
                          size=100,
                          act=conf_helps.SigmoidActivation())
        inference = layer.fc(input=hidden,
                             size=10,
                             act=conf_helps.SoftmaxActivation())
        cost = layer.classification_cost(input=inference, label=label)
        topo = topology.Topology(cost)
        data_types = topo.data_type()
        self.assertEqual(len(data_types), 2)

        pixel_data_type = filter(lambda type: type[0] == "pixel", data_types)
        self.assertEqual(len(pixel_data_type), 1)
        pixel_data_type = pixel_data_type[0]
        self.assertEqual(pixel_data_type[1].type, data_type.DataType.Dense)
        self.assertEqual(pixel_data_type[1].dim, 784)

        label_data_type = filter(lambda type: type[0] == "label", data_types)
        self.assertEqual(len(label_data_type), 1)
        label_data_type = label_data_type[0]
        self.assertEqual(label_data_type[1].type, data_type.DataType.Index)
        self.assertEqual(label_data_type[1].dim, 10)

    def test_get_layer(self):
        pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
        label = layer.data(name='label', type=data_type.integer_value(10))
        hidden = layer.fc(input=pixel,
                          size=100,
                          act=conf_helps.SigmoidActivation())
        inference = layer.fc(input=hidden,
                             size=10,
                             act=conf_helps.SoftmaxActivation())
        cost = layer.classification_cost(input=inference, label=label)
        topo = topology.Topology(cost)
        pixel_layer = topo.get_layer("pixel")
        label_layer = topo.get_layer("label")
        self.assertEqual(pixel_layer, pixel)
        self.assertEqual(label_layer, label)

    def test_parse(self):
        pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
        label = layer.data(name='label', type=data_type.integer_value(10))
        hidden = layer.fc(input=pixel,
                          size=100,
                          act=conf_helps.SigmoidActivation())
        inference = layer.fc(input=hidden,
                             size=10,
                             act=conf_helps.SoftmaxActivation())
        maxid = layer.max_id(input=inference)
        cost1 = layer.classification_cost(input=inference, label=label)
        cost2 = layer.cross_entropy_cost(input=inference, label=label)

        topology.Topology(cost2).proto()
        topology.Topology([cost1]).proto()
        topology.Topology([cost1, cost2]).proto()
        topology.Topology([inference, maxid]).proto()


if __name__ == '__main__':
    unittest.main()
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections

from paddle.proto.ModelConfig_pb2 import ModelConfig

import layer as v2_layer

__all__ = ['Topology']


class Topology(object):
    """
    Topology is used to store the information about all layers
    and network configs.
    """

    def __init__(self, layers):
        if not isinstance(layers, collections.Sequence):
            __check_layer_type__(layers)
            layers = [layers]
        for layer in layers:
            __check_layer_type__(layer)
        self.layers = layers
        self.__model_config__ = v2_layer.parse_network(*layers)
        assert isinstance(self.__model_config__, ModelConfig)

    def proto(self):
        return self.__model_config__

    def get_layer(self, name):
        """
        Get the v2.Layer instance with the given layer name.

        :param name:
        :return:
        """
        result_layer = []

        def find_layer_by_name(layer, layer_name):
            if len(result_layer) == 1:
                return
            elif layer.name == layer_name:
                result_layer.append(layer)
            else:
                for parent_layer in layer.__parent_layers__.values():
                    find_layer_by_name(parent_layer, layer_name)

        for layer in self.layers:
            find_layer_by_name(layer, name)

        assert len(result_layer) == 1
        return result_layer[0]

    def data_layers(self):
        """
        Get all data layers.

        :return:
        """
        data_layers = set()

        def find_data_layer(layer):
            if isinstance(layer, v2_layer.DataLayerV2):
                data_layers.add(layer)
            for parent_layer in layer.__parent_layers__.values():
                find_data_layer(parent_layer)

        for layer in self.layers:
            find_data_layer(layer)

        return data_layers

    def data_type(self):
        """
        Get the (name, data_type) pairs of all data layers, for example:
        [('image', dense_vector(768)), ('label', integer_value(10))]
        """
        return [(data_layer.name, data_layer.type)
                for data_layer in self.data_layers()]


def __check_layer_type__(layer):
    if not isinstance(layer, v2_layer.LayerV2):
        raise ValueError('layer should have type paddle.layer.Layer')
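Putting the pieces together, a hedged usage sketch (assuming cost is a classification cost layer built as in test_topology.py above):

topo = Topology(cost)
print topo.data_type()      # e.g. [('pixel', dense_vector(784)), ('label', integer_value(10))]
pixel_layer = topo.get_layer('pixel')
model_proto = topo.proto()  # the parsed ModelConfig protobuf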
 import collections

 import py_paddle.swig_paddle as api
-from paddle.proto.ModelConfig_pb2 import ModelConfig

 from data_feeder import DataFeeder
+from topology import Topology
 from . import event as v2_event
-from . import layer as v2_layer
 from . import optimizer as v2_optimizer
 from . import parameters as v2_parameters
@@ -30,7 +29,7 @@ class ITrainer(object):
     def train(self,
               train_data_reader,
-              topology,
+              cost,
               parameters,
               test_data_reader=None,
               event_handler=None):
@@ -38,7 +37,7 @@ class ITrainer(object):
         train method.

         :param train_data_reader:
-        :param topology:
+        :param cost:
         :param parameters:
         :param test_data_reader:
         :param event_handler:
@@ -63,19 +62,18 @@ class SGD(ITrainer):
     def train(self,
               train_data_reader,
-              topology,
+              cost,
               parameters,
               num_passes=1,
               test_data_reader=None,
               event_handler=None,
               batch_size=32,
-              data_types=None,
               reader_dict=None):
         """
         Training method. Will train num_passes of input data.

         :param train_data_reader:
-        :param topology: Network Topology, use one or more Layers to represent it.
+        :param cost: cost layers, to be optimized.
         :param parameters: The parameter pools.
         :param num_passes: The total train passes.
         :param test_data_reader:
@@ -83,18 +81,18 @@ class SGD(ITrainer):
                               occurred.
         :type event_handler: (BaseEvent) => None
         :param batch_size: Not important, will be removed after data refactor.
-        :param data_types: Not important, will be removed after data refactor.
         :return:
         """
         if event_handler is None:
             event_handler = default_event_handler

-        topology = v2_layer.parse_network(topology)
+        topology = Topology(cost)

         __check_train_args__(**locals())

         gm = api.GradientMachine.createFromConfigProto(
-            topology, api.CREATE_MODE_NORMAL, self.__optimizer__.enable_types())
+            topology.proto(), api.CREATE_MODE_NORMAL,
+            self.__optimizer__.enable_types())
         assert isinstance(gm, api.GradientMachine)
         parameters.append_gradient_machine(gm)
         gm.randParameters()
@@ -108,7 +106,7 @@ class SGD(ITrainer):
         assert isinstance(pass_evaluator, api.Evaluator)
         out_args = api.Arguments.createArguments(0)
-        feeder = DataFeeder(data_types, reader_dict)
+        feeder = DataFeeder(topology.data_type(), reader_dict)
         for pass_id in xrange(num_passes):
             event_handler(v2_event.BeginPass(pass_id))
@@ -154,7 +152,7 @@ def __data_reader_to_batch__(reader, batch_size, topology):
     def input_reorder(func):
         for item in func():
             retv = []
-            for __layer_name__ in topology.input_layer_names:
+            for __layer_name__ in topology.proto().input_layer_names:
                 retv.append(item[__layer_name__])
             yield retv
@@ -191,7 +189,7 @@ def __check_train_args__(train_data_reader, topology, parameters,
         raise ValueError('test_data_reader should be a function, which can '
                          'return a iterator')

-    if not isinstance(topology, ModelConfig):
+    if not isinstance(topology, Topology):
         raise ValueError('topology should be a model config')

     if not isinstance(parameters, v2_parameters.Parameters):