Commit ee1a4aa6 authored by yangyaming

Merge branch 'develop' of github.com:PaddlePaddle/models into fix-630

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import argparse
import os
import cPickle
from utils import logger
"""
This script will output 2 files:
1. feature_dict.pkl
2. item_freq.pkl
"""
class FeatureGenerator(object):
"""
Encode feature values with low-frequency filtering.
"""
def __init__(self, feat_appear_limit=20):
"""
@feat_appear_limit: int
"""
self._dic = None # feature value --> id
self._count = None # numbers of appearances of feature values
self._feat_appear_limit = feat_appear_limit
def add_feat_val(self, feat_val):
"""
        Add a feature value and count its appearances.
"""
if self._count is None:
self._count = {'<unk>': 0}
if feat_val == "NULL":
feat_val = '<unk>'
if feat_val not in self._count:
self._count[feat_val] = 1
else:
self._count[feat_val] += 1
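        # note: '<unk>' is incremented on every call, so it always survives
        # the frequency filter and, having the largest count, is assigned id 0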
self._count['<unk>'] += 1
def _filter_feat(self):
"""
Filter low-frequency feature values.
"""
self._items = filter(lambda x: x[1] > self._feat_appear_limit,
self._count.items())
self._items.sort(key=lambda x: x[1], reverse=True)
def _build_dict(self):
"""
Build feature values --> ids dict.
"""
self._dic = {}
self._filter_feat()
for i in xrange(len(self._items)):
self._dic[self._items[i][0]] = i
self.dim = len(self._dic)
def get_feat_id(self, feat_val):
"""
Get id of feature value after encoding.
"""
# build dict
if self._dic is None:
self._build_dict()
# find id
if feat_val in self._dic:
return self._dic[feat_val]
else:
return self._dic['<unk>']
def get_dim(self):
"""
Get dim.
"""
# build dict
if self._dic is None:
self._build_dict()
return len(self._dic)
def get_dict(self):
"""
Get dict.
"""
# build dict
if self._dic is None:
self._build_dict()
return self._dic
def get_total_count(self):
"""
        Compute the total count over all kept feature values.
"""
total_count = 0
for i in xrange(len(self._items)):
feat_val = self._items[i][0]
c = self._items[i][1]
total_count += c
return total_count
def count_iterator(self):
"""
        Iterate over feature values and their appearance counts.
"""
for i in xrange(len(self._items)):
yield self._items[i][0], self._items[i][1]
    def __repr__(self):
        """
        Readable representation showing the encoded dimension.
        """
        if self._dic is None:
            self._build_dict()
        return '<FeatureGenerator %d>' % self.dim
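# Minimal usage sketch of FeatureGenerator (values are illustrative):
#   g = FeatureGenerator(feat_appear_limit=2)
#   for v in ['a', 'a', 'a', 'b', 'NULL']:
#       g.add_feat_val(v)
#   g.get_feat_id('a')    # -> 1 ('<unk>' always sorts first with id 0)
#   g.get_feat_id('xyz')  # -> 0, unseen values fall back to '<unk>'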
def scan_build_dict(data_path, features_dict):
"""
Scan the raw data and add all feature values.
"""
logger.info('scan data set')
with open(data_path, 'r') as f:
for (line_id, line) in enumerate(f):
fields = line.strip('\n').split('\t')
user_id = fields[0]
province = fields[1]
features_dict['province'].add_feat_val(province)
city = fields[2]
features_dict['city'].add_feat_val(city)
item_infos = fields[3]
phone = fields[4]
features_dict['phone'].add_feat_val(phone)
for item_info in item_infos.split(";"):
item_info_array = item_info.split(":")
item = item_info_array[0]
features_dict['history_clicked_items'].add_feat_val(item)
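                # the user id is counted once per clicked item, so only
                # users with enough clicks survive the frequency filter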
features_dict['user_id'].add_feat_val(user_id)
category = item_info_array[1]
features_dict['history_clicked_categories'].add_feat_val(
category)
tags = item_info_array[2]
for tag in tags.split("_"):
features_dict['history_clicked_tags'].add_feat_val(tag)
def parse_args():
"""
parse arguments
"""
parser = argparse.ArgumentParser(
description="PaddlePaddle Youtube Recall Model Example")
parser.add_argument(
'--train_set_path',
type=str,
required=True,
help="path of the train set")
parser.add_argument(
'--output_dir', type=str, required=True, help="directory to output")
parser.add_argument(
'--feat_appear_limit',
type=int,
default=20,
help="the minimum number of feature values appears (default: 20)")
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
# check argument
assert os.path.exists(
args.train_set_path), 'The train set path does not exist.'
# features used
features = [
'user_id', 'province', 'city', 'phone', 'history_clicked_items',
'history_clicked_tags', 'history_clicked_categories'
]
# init feature generators
features_dict = {}
for feature in features:
features_dict[feature] = FeatureGenerator(
feat_appear_limit=args.feat_appear_limit)
# scan data for building dict
scan_build_dict(args.train_set_path, features_dict)
# generate feature_dict.pkl
feature_encoding_dict = {}
for feature in features:
d = features_dict[feature].get_dict()
feature_encoding_dict[feature] = d
logger.info('Feature:%s, dimension is %d' % (feature, len(d)))
output_dict_path = os.path.join(args.output_dir, 'feature_dict.pkl')
    with open(output_dict_path, "wb") as f:
cPickle.dump(feature_encoding_dict, f, -1)
    # generate item_freq.pkl: the click-frequency distribution used as the
    # NCE negative-sampling distribution during training
item_freq_list = []
g = features_dict['history_clicked_items']
total_count = g.get_total_count()
for feat_val, feat_count in g.count_iterator():
item_freq_list.append(float(feat_count) / total_count)
logger.info('item_freq, dimension is %d' % (len(item_freq_list)))
output_item_freq_path = os.path.join(args.output_dir, 'item_freq.pkl')
    with open(output_item_freq_path, "wb") as f:
cPickle.dump(item_freq_list, f, -1)
logger.info('Complete!')
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import gzip
import paddle.v2 as paddle
import argparse
import cPickle
from reader import Reader
from network_conf import DNNmodel
from utils import logger
def parse_args():
"""
parse arguments
:return:
"""
parser = argparse.ArgumentParser(
description="PaddlePaddle Youtube Recall Model Example")
parser.add_argument(
'--infer_set_path',
type=str,
required=True,
help="path of the infer set")
parser.add_argument(
'--model_path', type=str, required=True, help="path of the model")
parser.add_argument(
'--feature_dict',
type=str,
required=True,
help="path of feature_dict.pkl")
parser.add_argument(
'--batch_size',
type=int,
default=50,
help="size of mini-batch (default:50)")
return parser.parse_args()
def infer():
"""
infer
"""
args = parse_args()
# check argument
assert os.path.exists(
args.infer_set_path), 'The infer_set_path path does not exist.'
assert os.path.exists(
args.model_path), 'The model_path path does not exist.'
assert os.path.exists(
args.feature_dict), 'The feature_dict path does not exist.'
paddle.init(use_gpu=False, trainer_count=1)
with open(args.feature_dict) as f:
feature_dict = cPickle.load(f)
nid_dict = feature_dict['history_clicked_items']
nid_to_word = dict((v, k) for k, v in nid_dict.items())
# load the trained model.
with gzip.open(args.model_path) as f:
parameters = paddle.parameters.Parameters.from_tar(f)
# build model
prediction_layer, fc = DNNmodel(
dnn_layer_dims=[256, 31], feature_dict=feature_dict,
is_infer=True).model_cost
inferer = paddle.inference.Inference(
output_layer=[prediction_layer, fc], parameters=parameters)
reader = Reader(feature_dict)
test_batch = []
for idx, item in enumerate(reader.infer(args.infer_set_path)):
test_batch.append(item)
if len(test_batch) == args.batch_size:
infer_a_batch(inferer, test_batch, nid_to_word)
test_batch = []
if len(test_batch):
infer_a_batch(inferer, test_batch, nid_to_word)
def infer_a_batch(inferer, test_batch, nid_to_word):
"""
    Run inference on one batch and print the top recommended items.
"""
feeding = {
'user_id': 0,
'province': 1,
'city': 2,
'history_clicked_items': 3,
'history_clicked_categories': 4,
'history_clicked_tags': 5,
'phone': 6
}
probs = inferer.infer(
input=test_batch,
feeding=feeding,
field=["value"],
flatten_result=False)
for i, res in enumerate(zip(test_batch, probs[0], probs[1])):
softmax_output = res[1]
sort_nid = res[1].argsort()
        # print the top 30 recommended items
        ret = ""
        for j in range(1, 31):
item_id = sort_nid[-1 * j]
item_id_to_word = nid_to_word[item_id]
ret += "%s:%.6f," \
% (item_id_to_word, softmax_output[item_id])
print ret.rstrip(",")
if __name__ == "__main__":
infer()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import gzip
import paddle.v2 as paddle
import argparse
import cPickle
from reader import Reader
from network_conf import DNNmodel
from utils import logger
import numpy as np
def parse_args():
"""
parse arguments
:return:
"""
parser = argparse.ArgumentParser(
description="PaddlePaddle Youtube Recall Model Example")
parser.add_argument(
'--model_path', type=str, required=True, help="path of the model")
parser.add_argument(
'--feature_dict',
type=str,
required=True,
help="path of feature_dict.pkl")
return parser.parse_args()
def infer_user():
"""
    Infer recommendations for a fixed list of user IDs.
"""
args = parse_args()
# check argument
assert os.path.exists(
args.model_path), 'The model_path path does not exist.'
assert os.path.exists(
args.feature_dict), 'The feature_dict path does not exist.'
paddle.init(use_gpu=False, trainer_count=1)
with open(args.feature_dict) as f:
feature_dict = cPickle.load(f)
nid_dict = feature_dict['history_clicked_items']
nid_to_word = dict((v, k) for k, v in nid_dict.items())
# load the trained model.
with gzip.open(args.model_path) as f:
parameters = paddle.parameters.Parameters.from_tar(f)
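    # zero out every context-feature projection so the network output depends
    # only on the user_id embedding; the users below are scored on id alone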
parameters.set('_proj_province', \
np.zeros(shape=parameters.get('_proj_province').shape))
parameters.set('_proj_city', \
np.zeros(shape=parameters.get('_proj_city').shape))
parameters.set('_proj_phone', \
np.zeros(shape=parameters.get('_proj_phone').shape))
parameters.set('_proj_history_clicked_items', \
np.zeros(shape= parameters.get('_proj_history_clicked_items').shape))
parameters.set('_proj_history_clicked_categories', \
np.zeros(shape= parameters.get('_proj_history_clicked_categories').shape))
parameters.set('_proj_history_clicked_tags', \
np.zeros(shape= parameters.get('_proj_history_clicked_tags').shape))
# build model
prediction_layer, fc = DNNmodel(
dnn_layer_dims=[256, 31], feature_dict=feature_dict,
is_infer=True).model_cost
inferer = paddle.inference.Inference(
output_layer=[prediction_layer, fc], parameters=parameters)
reader = Reader(feature_dict)
test_batch = []
for idx, item in enumerate(
reader.infer_user(['USER_ID_0', 'USER_ID_981', 'USER_ID_310806'])):
test_batch.append(item)
infer_a_batch(inferer, test_batch, nid_to_word)
def infer_a_batch(inferer, test_batch, nid_to_word):
"""
    Run inference on one batch and print the top recommended items.
"""
feeding = {
'user_id': 0,
'province': 1,
'city': 2,
'history_clicked_items': 3,
'history_clicked_categories': 4,
'history_clicked_tags': 5,
'phone': 6
}
probs = inferer.infer(
input=test_batch,
feeding=feeding,
field=["value"],
flatten_result=False)
for i, res in enumerate(zip(test_batch, probs[0], probs[1])):
softmax_output = res[1]
sort_nid = res[1].argsort()
        # print the top 30 recommended items
        ret = ""
        for j in range(1, 31):
item_id = sort_nid[-1 * j]
item_id_to_word = nid_to_word[item_id]
ret += "%s:%.6f," \
% (item_id_to_word, softmax_output[item_id])
print ret.rstrip(",")
if __name__ == "__main__":
infer_user()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import gzip
import paddle.v2 as paddle
import argparse
import cPickle
from reader import Reader
from network_conf import DNNmodel
from utils import logger
import numpy as np
import math
def parse_args():
"""
parse arguments
:return:
"""
parser = argparse.ArgumentParser(
description="PaddlePaddle Youtube Recall Model Example")
parser.add_argument(
'--model_path', type=str, required=True, help="path of the model")
parser.add_argument(
'--feature_dict',
type=str,
required=True,
help="path of feature_dict.pkl")
return parser.parse_args()
def get_item_vec_from_softmax(nce_w, nce_b):
"""
get item vectors from softmax parameter
"""
if nce_w is None or nce_b is None:
return None
vector = []
total_items_num = nce_w.shape[0]
if total_items_num != nce_b.shape[1]:
return None
dim_vector = nce_w.shape[1] + 1
for i in range(0, total_items_num):
vector.append([])
vector[i].append(nce_b[0][i])
for j in range(1, dim_vector):
vector[i].append(nce_w[i][j - 1])
return vector
def convt_simple_lsh(vector):
"""
do simple lsh conversion
"""
max_norm = 0
num_of_vec = len(vector)
for i in range(0, num_of_vec):
norm = np.linalg.norm(vector[i])
if norm > max_norm:
max_norm = norm
for i in range(0, num_of_vec):
vector[i].append(
math.sqrt(
math.pow(max_norm, 2) - math.pow(np.linalg.norm(vector[i]), 2)))
return vector
def item_vector():
"""
get item vectors
"""
args = parse_args()
# check argument
assert os.path.exists(
args.model_path), 'The model_path path does not exist.'
assert os.path.exists(
args.feature_dict), 'The feature_dict path does not exist.'
paddle.init(use_gpu=False, trainer_count=1)
with open(args.feature_dict) as f:
feature_dict = cPickle.load(f)
# load the trained model.
with gzip.open(args.model_path) as f:
parameters = paddle.parameters.Parameters.from_tar(f)
nid_dict = feature_dict['history_clicked_items']
nid_to_word = dict((v, k) for k, v in nid_dict.items())
nce_w = parameters.get("nce_w")
nce_b = parameters.get("nce_b")
    item_vectors = convt_simple_lsh(get_item_vec_from_softmax(nce_w, nce_b))
    for i in range(0, len(item_vectors)):
        itemid = nid_to_word[i]
        print itemid + "\t" + ",".join(map(str, item_vectors[i]))
if __name__ == "__main__":
item_vector()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import paddle.v2 as paddle
import cPickle
class DNNmodel(object):
"""
Deep Neural Networks for YouTube candidate generation
"""
def __init__(self,
dnn_layer_dims=None,
feature_dict=None,
item_freq=None,
is_infer=False):
"""
initialize model
@dnn_layer_dims: dimension of each hidden layer
@feature_dict: dictionary of encoded feature
@item_freq: dictionary of feature values and its frequency
@is_infer: if infer mode
"""
self._dnn_layer_dims = dnn_layer_dims
self._feature_dict = feature_dict
self._item_freq = item_freq
self._is_infer = is_infer
# build model
self._build_input_layer()
self._build_embedding_layer()
self.model_cost = self._build_dnn_model()
def _build_input_layer(self):
"""
build input layer
"""
self._history_clicked_items = paddle.layer.data(
name="history_clicked_items",
type=paddle.data_type.integer_value_sequence(
len(self._feature_dict['history_clicked_items'])))
self._history_clicked_categories = paddle.layer.data(
name="history_clicked_categories",
type=paddle.data_type.integer_value_sequence(
len(self._feature_dict['history_clicked_categories'])))
self._history_clicked_tags = paddle.layer.data(
name="history_clicked_tags",
type=paddle.data_type.integer_value_sequence(
len(self._feature_dict['history_clicked_tags'])))
self._user_id = paddle.layer.data(
name="user_id",
type=paddle.data_type.integer_value(
len(self._feature_dict['user_id'])))
self._province = paddle.layer.data(
name="province",
type=paddle.data_type.integer_value(
len(self._feature_dict['province'])))
self._city = paddle.layer.data(
name="city",
type=paddle.data_type.integer_value(
len(self._feature_dict['city'])))
self._phone = paddle.layer.data(
name="phone",
type=paddle.data_type.integer_value(
len(self._feature_dict['phone'])))
self._target_item = paddle.layer.data(
name="target_item",
type=paddle.data_type.integer_value(
len(self._feature_dict['history_clicked_items'])))
def _create_emb_attr(self, name):
"""
create embedding parameter
"""
return paddle.attr.Param(
name=name,
initial_std=0.001,
learning_rate=1,
l2_rate=0,
sparse_update=False)
def _build_embedding_layer(self):
"""
build embedding layer
"""
self._user_id_emb = paddle.layer.embedding(
input=self._user_id,
size=64,
param_attr=self._create_emb_attr('_proj_user_id'))
self._province_emb = paddle.layer.embedding(
input=self._province,
size=8,
param_attr=self._create_emb_attr('_proj_province'))
self._city_emb = paddle.layer.embedding(
input=self._city,
size=16,
param_attr=self._create_emb_attr('_proj_city'))
self._phone_emb = paddle.layer.embedding(
input=self._phone,
size=16,
param_attr=self._create_emb_attr('_proj_phone'))
self._history_clicked_items_emb = paddle.layer.embedding(
input=self._history_clicked_items,
size=64,
param_attr=self._create_emb_attr('_proj_history_clicked_items'))
self._history_clicked_categories_emb = paddle.layer.embedding(
input=self._history_clicked_categories,
size=8,
param_attr=self._create_emb_attr(
'_proj_history_clicked_categories'))
self._history_clicked_tags_emb = paddle.layer.embedding(
input=self._history_clicked_tags,
size=64,
param_attr=self._create_emb_attr('_proj_history_clicked_tags'))
def _build_dnn_model(self):
"""
build dnn model
"""
self._rnn_cell = paddle.networks.simple_lstm(
input=self._history_clicked_items_emb, size=64)
self._lstm_last = paddle.layer.pooling(
input=self._rnn_cell, pooling_type=paddle.pooling.Max())
self._avg_emb_cats = paddle.layer.pooling(
input=self._history_clicked_categories_emb,
pooling_type=paddle.pooling.Avg())
self._avg_emb_tags = paddle.layer.pooling(
input=self._history_clicked_tags_emb,
pooling_type=paddle.pooling.Avg())
self._fc_0 = paddle.layer.fc(
name="Relu1",
input=[
self._lstm_last, self._user_id_emb, self._province_emb,
self._city_emb, self._avg_emb_cats, self._avg_emb_tags,
self._phone_emb
],
size=self._dnn_layer_dims[0],
act=paddle.activation.Relu())
self._fc_1 = paddle.layer.fc(name="Relu2",
input=self._fc_0,
size=self._dnn_layer_dims[1],
act=paddle.activation.Relu())
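        # training optimizes an NCE cost whose negative samples follow the
        # item click-frequency distribution; inference rebuilds the full
        # softmax from the shared nce_w / nce_b parameters instead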
if not self._is_infer:
return paddle.layer.nce(
input=self._fc_1,
label=self._target_item,
num_classes=len(self._feature_dict['history_clicked_items']),
param_attr=paddle.attr.Param(name="nce_w"),
bias_attr=paddle.attr.Param(name="nce_b"),
num_neg_samples=5,
neg_distribution=self._item_freq)
else:
self.prediction_layer = paddle.layer.mixed(
size=len(self._feature_dict['history_clicked_items']),
input=paddle.layer.trans_full_matrix_projection(
self._fc_1, param_attr=paddle.attr.Param(name="nce_w")),
act=paddle.activation.Softmax(),
bias_attr=paddle.attr.Param(name="nce_b"))
return self.prediction_layer, self._fc_1
if __name__ == "__main__":
    # This is to test and debug the network topology definition.
    # Please set the hyper-parameters as needed.
item_freq_path = "./output/item_freq.pkl"
with open(item_freq_path) as f:
item_freq = cPickle.load(f)
feature_dict_path = "./output/feature_dict.pkl"
with open(feature_dict_path) as f:
feature_dict = cPickle.load(f)
a = DNNmodel(
dnn_layer_dims=[256, 31],
feature_dict=feature_dict,
item_freq=item_freq,
is_infer=False)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from utils import logger
from utils import TaskMode
class Reader(object):
"""
Reader
"""
def __init__(self, feature_dict=None, window_size=20):
"""
        init
        @feature_dict: feature value --> id dict for each feature
        @window_size: number of most recent clicks used as input context
"""
self._feature_dict = feature_dict
self._window_size = window_size
def train(self, path):
"""
load train set
@path: train set path
"""
logger.info("start train reader from %s" % path)
mode = TaskMode.create_train()
return self._reader(path, mode)
def test(self, path):
"""
load test set
@path: test set path
"""
logger.info("start test reader from %s" % path)
mode = TaskMode.create_test()
return self._reader(path, mode)
def infer(self, path):
"""
load infer set
@path: infer set path
"""
logger.info("start infer reader from %s" % path)
mode = TaskMode.create_infer()
return self._reader(path, mode)
def infer_user(self, user_list):
"""
load user set to infer
@user_list: user list
"""
return self._reader_user(user_list)
def _reader(self, path, mode):
"""
        Parse the data set. In train/test mode, yield one (history, target)
        pair per sliding-window position; in infer mode, yield the full
        history once per line.
"""
USER_ID_UNK = self._feature_dict['user_id'].get('<unk>')
PROVINCE_UNK = self._feature_dict['province'].get('<unk>')
CITY_UNK = self._feature_dict['city'].get('<unk>')
ITEM_UNK = self._feature_dict['history_clicked_items'].get('<unk>')
CATEGORY_UNK = self._feature_dict['history_clicked_categories'].get(
'<unk>')
TAG_UNK = self._feature_dict['history_clicked_tags'].get('<unk>')
PHONE_UNK = self._feature_dict['phone'].get('<unk>')
with open(path) as f:
for line in f:
fields = line.strip('\n').split('\t')
user_id = self._feature_dict['user_id'].get(fields[0],
USER_ID_UNK)
province = self._feature_dict['province'].get(fields[1],
PROVINCE_UNK)
city = self._feature_dict['city'].get(fields[2], CITY_UNK)
item_infos = fields[3]
phone = self._feature_dict['phone'].get(fields[4], PHONE_UNK)
history_clicked_items_all = []
history_clicked_tags_all = []
history_clicked_categories_all = []
for item_info in item_infos.split(';'):
item_info_array = item_info.split(':')
item = item_info_array[0]
item_encoded_id = self._feature_dict['history_clicked_items'].get(\
item, ITEM_UNK)
if item_encoded_id != ITEM_UNK:
history_clicked_items_all.append(item_encoded_id)
category = item_info_array[1]
history_clicked_categories_all.append(
self._feature_dict['history_clicked_categories'].get(\
category, CATEGORY_UNK))
tags = item_info_array[2]
tag_split = map(str, [self._feature_dict['history_clicked_tags'].get(\
tag, TAG_UNK) \
for tag in tags.strip().split("_")])
history_clicked_tags_all.append("_".join(tag_split))
if not mode.is_infer():
history_clicked_items_all.insert(0, 0)
history_clicked_tags_all.insert(0, "0")
history_clicked_categories_all.insert(0, 0)
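                    # slide a window over the click sequence: for each
                    # position i, the preceding (up to window_size) clicks
                    # form the input and the i-th click is the target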
for i in range(1, len(history_clicked_items_all)):
start = max(0, i - self._window_size)
                        history_clicked_items = history_clicked_items_all[start:i]
                        history_clicked_categories = history_clicked_categories_all[start:i]
                        history_clicked_tags_str = history_clicked_tags_all[start:i]
history_clicked_tags = []
for tags_a in history_clicked_tags_str:
for tag in tags_a.split("_"):
history_clicked_tags.append(int(tag))
target_item = history_clicked_items_all[i]
yield user_id, province, city, \
history_clicked_items, history_clicked_categories, \
history_clicked_tags, phone, target_item
else:
history_clicked_items = history_clicked_items_all
history_clicked_categories = history_clicked_categories_all
history_clicked_tags_str = history_clicked_tags_all
history_clicked_tags = []
for tags_a in history_clicked_tags_str:
for tag in tags_a.split("_"):
history_clicked_tags.append(int(tag))
yield user_id, province, city, \
history_clicked_items, history_clicked_categories, \
history_clicked_tags, phone
def _reader_user(self, user_list):
"""
parse user list
"""
USER_ID_UNK = self._feature_dict['user_id'].get('<unk>')
for user in user_list:
user_id = self._feature_dict['user_id'].get(user, USER_ID_UNK)
yield user_id, 0, 0, [0], [0], [0], 0
if __name__ == "__main__":
# this is to test and debug reader function
train_data = sys.argv[1]
feature_dict = sys.argv[2]
window_size = int(sys.argv[3])
import cPickle
with open(feature_dict) as f:
feature_dict = cPickle.load(f)
r = Reader(feature_dict, window_size)
for dat in r.train(train_data):
print dat
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import gzip
import paddle.v2 as paddle
import argparse
import cPickle
from reader import Reader
from network_conf import DNNmodel
from utils import logger
def parse_args():
"""
parse arguments
"""
parser = argparse.ArgumentParser(
description="PaddlePaddle Youtube Recall Model Example")
parser.add_argument(
'--train_set_path',
type=str,
required=True,
help="path of the train set")
parser.add_argument(
'--test_set_path', type=str, required=True, help="path of the test set")
parser.add_argument(
'--model_output_dir',
type=str,
required=True,
help="directory to output")
parser.add_argument(
'--feature_dict',
type=str,
required=True,
help="path of feature_dict.pkl")
parser.add_argument(
        '--item_freq', type=str, required=True, help="path of item_freq.pkl")
parser.add_argument(
        '--window_size', type=int, default=20, help="window size (default: 20)")
parser.add_argument(
'--num_passes', type=int, default=1, help="number of passes to train")
parser.add_argument(
'--batch_size',
type=int,
default=50,
help="size of mini-batch (default:50)")
return parser.parse_args()
def train():
"""
train
"""
args = parse_args()
# check argument
assert os.path.exists(
args.train_set_path), 'The train_set_path path does not exist.'
assert os.path.exists(
args.test_set_path), 'The test_set_path path does not exist.'
assert os.path.exists(
args.feature_dict), 'The feature_dict path does not exist.'
assert os.path.exists(args.item_freq), 'The item_freq path does not exist.'
assert os.path.exists(
args.model_output_dir), 'The model_output_dir path does not exist.'
paddle.init(use_gpu=False, trainer_count=1)
with open(args.feature_dict) as f:
feature_dict = cPickle.load(f)
with open(args.item_freq) as f:
item_freq = cPickle.load(f)
feeding = {
'user_id': 0,
'province': 1,
'city': 2,
'history_clicked_items': 3,
'history_clicked_categories': 4,
'history_clicked_tags': 5,
'phone': 6,
'target_item': 7
}
optimizer = paddle.optimizer.AdaGrad(
learning_rate=1e-1,
regularization=paddle.optimizer.L2Regularization(rate=1e-3))
cost = DNNmodel(
dnn_layer_dims=[256, 31],
feature_dict=feature_dict,
item_freq=item_freq,
is_infer=False).model_cost
parameters = paddle.parameters.create(cost)
trainer = paddle.trainer.SGD(cost, parameters, optimizer)
def event_handler(event):
"""
event handler
"""
if isinstance(event, paddle.event.EndIteration):
if event.batch_id and not event.batch_id % 10:
logger.info("Pass %d, Batch %d, Cost %f" %
(event.pass_id, event.batch_id, event.cost))
elif isinstance(event, paddle.event.EndPass):
save_path = os.path.join(args.model_output_dir,
"model_pass_%05d.tar.gz" % event.pass_id)
logger.info("Save model into %s ..." % save_path)
with gzip.open(save_path, "w") as f:
trainer.save_parameter_to_tar(f)
reader = Reader(feature_dict, args.window_size)
trainer.train(
paddle.batch(
paddle.reader.shuffle(
lambda: reader.train(args.train_set_path), buf_size=7000),
args.batch_size),
num_passes=args.num_passes,
feeding=feeding,
event_handler=event_handler)
if __name__ == "__main__":
train()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import gzip
import paddle.v2 as paddle
import argparse
import cPickle
from reader import Reader
from network_conf import DNNmodel
from utils import logger
import numpy as np
def parse_args():
"""
parse arguments
"""
parser = argparse.ArgumentParser(
description="PaddlePaddle Youtube Recall Model Example")
parser.add_argument(
'--infer_set_path',
type=str,
required=True,
help="path of the infer set")
parser.add_argument(
'--model_path', type=str, required=True, help="path of the model")
parser.add_argument(
'--feature_dict',
type=str,
required=True,
help="path of feature_dict.pkl")
parser.add_argument(
'--batch_size',
type=int,
default=50,
help="size of mini-batch (default:50)")
return parser.parse_args()
def user_vector():
"""
get user vectors
"""
args = parse_args()
# check argument
assert os.path.exists(
args.infer_set_path), 'The infer_set_path path does not exist.'
assert os.path.exists(
args.model_path), 'The model_path path does not exist.'
assert os.path.exists(
args.feature_dict), 'The feature_dict path does not exist.'
paddle.init(use_gpu=False, trainer_count=1)
with open(args.feature_dict) as f:
feature_dict = cPickle.load(f)
# load the trained model.
with gzip.open(args.model_path) as f:
parameters = paddle.parameters.Parameters.from_tar(f)
# build model
prediction_layer, fc = DNNmodel(
dnn_layer_dims=[256, 31], feature_dict=feature_dict,
is_infer=True).model_cost
inferer = paddle.inference.Inference(
output_layer=[prediction_layer, fc], parameters=parameters)
reader = Reader(feature_dict)
test_batch = []
for idx, item in enumerate(reader.infer(args.infer_set_path)):
test_batch.append(item)
if len(test_batch) == args.batch_size:
get_a_batch_user_vector(inferer, test_batch)
test_batch = []
if len(test_batch):
get_a_batch_user_vector(inferer, test_batch)
def get_a_batch_user_vector(inferer, test_batch):
"""
    Run inference on one batch and print normalized user vectors.
"""
feeding = {
'user_id': 0,
'province': 1,
'city': 2,
'history_clicked_items': 3,
'history_clicked_categories': 4,
'history_clicked_tags': 5,
'phone': 6
}
probs = inferer.infer(
input=test_batch,
feeding=feeding,
field=["value"],
flatten_result=False)
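    # probs[1] holds the last hidden-layer (fc) outputs; prepending 1.0
    # pairs with the item-side bias term and the trailing 0.0 pairs with the
    # item-side sqrt(max_norm^2 - ||v||^2) term, so after normalization
    # cosine(user, item) preserves the ranking by logit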
    for res in probs[1]:
        # do simple lsh conversion on the user side
        user_vector = [1.000]
        for val in res:
            user_vector.append(val)
        user_vector.append(0.000)
        norm = np.linalg.norm(user_vector)
        user_vector_norm = [str(v / norm) for v in user_vector]
print ",".join(user_vector_norm)
if __name__ == "__main__":
user_vector()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
logging.basicConfig()
logger = logging.getLogger("paddle")
logger.setLevel(logging.INFO)
class TaskMode(object):
"""
TaskMode
"""
TRAIN_MODE = 0
TEST_MODE = 1
INFER_MODE = 2
def __init__(self, mode):
"""
:param mode:
"""
self.mode = mode
def is_train(self):
"""
:return:
"""
return self.mode == self.TRAIN_MODE
def is_test(self):
"""
:return:
"""
return self.mode == self.TEST_MODE
def is_infer(self):
"""
:return:
"""
return self.mode == self.INFER_MODE
@staticmethod
def create_train():
"""
:return:
"""
return TaskMode(TaskMode.TRAIN_MODE)
@staticmethod
def create_test():
"""
:return:
"""
return TaskMode(TaskMode.TEST_MODE)
@staticmethod
def create_infer():
"""
:return:
"""
return TaskMode(TaskMode.INFER_MODE)