提交 38828686 编写于 作者: X xujiaqi01

Merge branch 'develop' into 'develop'

add slot reader

See merge request !53
...@@ -38,6 +38,34 @@ class Model(object): ...@@ -38,6 +38,34 @@ class Model(object):
self._namespace = "train.model" self._namespace = "train.model"
self._platform = envs.get_platform() self._platform = envs.get_platform()
def _init_slots(self):
    """Build model input variables from the reader's slot configuration.

    Reads "sparse_slots" / "dense_slots" from the train.reader namespace,
    creates one fluid data layer per slot, appends each to self._data_var,
    and records them in self._dense_data_var / self._sparse_data_var.
    When the configured dataset class is "DataLoader", also sets up the
    DataLoader via _init_dataloader().
    """
    sparse_slots = envs.get_global_env("sparse_slots", None, "train.reader")
    dense_slots = envs.get_global_env("dense_slots", None, "train.reader")
    if sparse_slots is not None or dense_slots is not None:
        # Guard each side individually: the previous code called .strip()
        # on both, which raised AttributeError when only one was set.
        sparse_slots = sparse_slots.strip().split(" ") if sparse_slots else []
        dense_slots = dense_slots.strip().split(" ") if dense_slots else []
        # "name:[d1,d2,...]" -> shape list [d1, d2, ...]
        dense_slots_shape = [
            [int(j) for j in i.split(":")[1].strip("[]").split(",")]
            for i in dense_slots
        ]
        dense_slots = [i.split(":")[0] for i in dense_slots]
        self._dense_data_var = []
        for name, shape in zip(dense_slots, dense_slots_shape):
            l = fluid.layers.data(name=name, shape=shape, dtype="float32")
            self._data_var.append(l)
            self._dense_data_var.append(l)
        self._sparse_data_var = []
        for name in sparse_slots:
            # lod_level=1: each sparse slot holds a variable-length feasign list
            l = fluid.layers.data(
                name=name, shape=[1], lod_level=1, dtype="int64")
            self._data_var.append(l)
            self._sparse_data_var.append(l)
        dataset_class = envs.get_global_env("dataset_class", None,
                                            "train.reader")
        if dataset_class == "DataLoader":
            self._init_dataloader()
def _init_dataloader(self):
    # Wrap every input variable collected so far in a feedable DataLoader.
    self._data_loader = fluid.io.DataLoader.from_generator(
        feed_list=self._data_var,
        capacity=64,
        use_double_buffer=False,
        iterable=False)
def get_inputs(self): def get_inputs(self):
return self._data_var return self._data_var
......
...@@ -13,6 +13,7 @@ ...@@ -13,6 +13,7 @@
# limitations under the License. # limitations under the License.
from __future__ import print_function from __future__ import print_function
import sys
import abc import abc
import os import os
...@@ -44,3 +45,58 @@ class Reader(dg.MultiSlotDataGenerator): ...@@ -44,3 +45,58 @@ class Reader(dg.MultiSlotDataGenerator):
@abc.abstractmethod @abc.abstractmethod
def generate_sample(self, line): def generate_sample(self, line):
pass pass
class SlotReader(dg.MultiSlotDataGenerator):
    """Parse "slot:feasign"-formatted text lines into multi-slot samples.

    Sparse slot values are parsed as int feasigns, dense slot values as
    floats.  Slots missing from a line are filled with a padding value.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, config):
        """Load the yaml config file and publish it to the global env."""
        dg.MultiSlotDataGenerator.__init__(self)
        if os.path.isfile(config):
            with open(config, 'r') as rb:
                _config = yaml.load(rb.read(), Loader=yaml.FullLoader)
        else:
            raise ValueError("reader config only support yaml")
        envs.set_global_envs(_config)
        envs.update_workspace()

    def init(self, sparse_slots, dense_slots, padding=0):
        """Register slot names and shapes.

        Args:
            sparse_slots: space-separated sparse slot names.
            dense_slots: space-separated "name:[d1,d2,...]" dense slot specs.
            padding: value used to fill slots absent from a sample line.
        """
        from operator import mul
        # `reduce` is a builtin only on Python 2; functools.reduce exists
        # on both 2 and 3, so import it explicitly.
        from functools import reduce
        self.sparse_slots = sparse_slots.strip().split(" ")
        self.dense_slots = dense_slots.strip().split(" ")
        # Flattened element count of each dense slot, e.g. "d:[2,3]" -> 6.
        self.dense_slots_shape = [
            reduce(mul,
                   [int(j) for j in i.split(":")[1].strip("[]").split(",")])
            for i in self.dense_slots
        ]
        self.dense_slots = [i.split(":")[0] for i in self.dense_slots]
        self.slots = self.dense_slots + self.sparse_slots
        self.slot2index = {}
        self.visit = {}
        for i in range(len(self.slots)):
            self.slot2index[self.slots[i]] = i
            self.visit[self.slots[i]] = False
        self.padding = padding

    def generate_sample(self, l):
        """Return a generator yielding one parsed sample for line *l*."""
        def reader():
            line = l.strip().split(" ")
            output = [(i, []) for i in self.slots]
            for i in line:
                slot_feasign = i.split(":")
                slot = slot_feasign[0]
                if slot not in self.slots:
                    continue
                if slot in self.sparse_slots:
                    feasign = int(slot_feasign[1])
                else:
                    feasign = float(slot_feasign[1])
                output[self.slot2index[slot]][1].append(feasign)
                self.visit[slot] = True
            # Pad every slot the line did not mention; reset visit flags
            # for the ones it did, so the state is clean for the next line.
            for i in self.visit:
                slot = i
                if not self.visit[slot]:
                    if i in self.dense_slots:
                        output[self.slot2index[i]][1].extend(
                            [self.padding] *
                            self.dense_slots_shape[self.slot2index[i]])
                    else:
                        output[self.slot2index[i]][1].extend([self.padding])
                else:
                    self.visit[slot] = False
            yield output
        return reader
...@@ -23,6 +23,7 @@ from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler import f ...@@ -23,6 +23,7 @@ from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler import f
from paddlerec.core.trainer import Trainer from paddlerec.core.trainer import Trainer
from paddlerec.core.utils import envs from paddlerec.core.utils import envs
from paddlerec.core.utils import dataloader_instance from paddlerec.core.utils import dataloader_instance
from paddlerec.core.reader import SlotReader
class TranspileTrainer(Trainer): class TranspileTrainer(Trainer):
...@@ -50,14 +51,22 @@ class TranspileTrainer(Trainer): ...@@ -50,14 +51,22 @@ class TranspileTrainer(Trainer):
namespace = "evaluate.reader" namespace = "evaluate.reader"
class_name = "EvaluateReader" class_name = "EvaluateReader"
sparse_slots = envs.get_global_env("sparse_slots", None, namespace)
dense_slots = envs.get_global_env("dense_slots", None, namespace)
batch_size = envs.get_global_env("batch_size", None, namespace) batch_size = envs.get_global_env("batch_size", None, namespace)
reader_class = envs.get_global_env("class", None, namespace)
print("batch_size: {}".format(batch_size)) print("batch_size: {}".format(batch_size))
if sparse_slots is None and dense_slots is None:
reader_class = envs.get_global_env("class", None, namespace)
reader = dataloader_instance.dataloader( reader = dataloader_instance.dataloader(
reader_class, state, self._config_yaml) reader_class, state, self._config_yaml)
reader_class = envs.lazy_instance_by_fliename(reader_class, class_name) reader_class = envs.lazy_instance_by_fliename(reader_class, class_name)
reader_ins = reader_class(self._config_yaml) reader_ins = reader_class(self._config_yaml)
else:
reader = dataloader_instance.slotdataloader("", state, self._config_yaml)
reader_ins = SlotReader(self._config_yaml)
if hasattr(reader_ins, 'generate_batch_from_trainfiles'): if hasattr(reader_ins, 'generate_batch_from_trainfiles'):
dataloader.set_sample_list_generator(reader) dataloader.set_sample_list_generator(reader)
else: else:
...@@ -93,13 +102,23 @@ class TranspileTrainer(Trainer): ...@@ -93,13 +102,23 @@ class TranspileTrainer(Trainer):
train_data_path = envs.get_global_env( train_data_path = envs.get_global_env(
"test_data_path", None, namespace) "test_data_path", None, namespace)
sparse_slots = envs.get_global_env("sparse_slots", None, namespace)
dense_slots = envs.get_global_env("dense_slots", None, namespace)
threads = int(envs.get_runtime_environ("train.trainer.threads")) threads = int(envs.get_runtime_environ("train.trainer.threads"))
batch_size = envs.get_global_env("batch_size", None, namespace) batch_size = envs.get_global_env("batch_size", None, namespace)
reader_class = envs.get_global_env("class", None, namespace) reader_class = envs.get_global_env("class", None, namespace)
abs_dir = os.path.dirname(os.path.abspath(__file__)) abs_dir = os.path.dirname(os.path.abspath(__file__))
reader = os.path.join(abs_dir, '../utils', 'dataset_instance.py') reader = os.path.join(abs_dir, '../utils', 'dataset_instance.py')
if sparse_slots is None and dense_slots is None:
pipe_cmd = "python {} {} {} {}".format( pipe_cmd = "python {} {} {} {}".format(
reader, reader_class, state, self._config_yaml) reader, reader_class, state, self._config_yaml)
else:
padding = envs.get_global_env("padding", 0, namespace)
pipe_cmd = "python {} {} {} {} {} {} {} {}".format(
reader, "slot", "slot", self._config_yaml, namespace, \
sparse_slots.replace(" ", "#"), dense_slots.replace(" ", "#"), str(padding))
if train_data_path.startswith("paddlerec::"): if train_data_path.startswith("paddlerec::"):
package_base = envs.get_runtime_environ("PACKAGE_BASE") package_base = envs.get_runtime_environ("PACKAGE_BASE")
...@@ -147,9 +166,6 @@ class TranspileTrainer(Trainer): ...@@ -147,9 +166,6 @@ class TranspileTrainer(Trainer):
if not need_save(epoch_id, save_interval, False): if not need_save(epoch_id, save_interval, False):
return return
# print("save inference model is not supported now.")
# return
feed_varnames = envs.get_global_env( feed_varnames = envs.get_global_env(
"save.inference.feed_varnames", None, namespace) "save.inference.feed_varnames", None, namespace)
fetch_varnames = envs.get_global_env( fetch_varnames = envs.get_global_env(
......
...@@ -18,6 +18,7 @@ import os ...@@ -18,6 +18,7 @@ import os
from paddlerec.core.utils.envs import lazy_instance_by_fliename from paddlerec.core.utils.envs import lazy_instance_by_fliename
from paddlerec.core.utils.envs import get_global_env from paddlerec.core.utils.envs import get_global_env
from paddlerec.core.utils.envs import get_runtime_environ from paddlerec.core.utils.envs import get_runtime_environ
from paddlerec.core.reader import SlotReader
def dataloader(readerclass, train, yaml_file): def dataloader(readerclass, train, yaml_file):
...@@ -62,3 +63,49 @@ def dataloader(readerclass, train, yaml_file): ...@@ -62,3 +63,49 @@ def dataloader(readerclass, train, yaml_file):
if hasattr(reader, 'generate_batch_from_trainfiles'): if hasattr(reader, 'generate_batch_from_trainfiles'):
return gen_batch_reader() return gen_batch_reader()
return gen_reader return gen_reader
def slotdataloader(readerclass, train, yaml_file):
    """Build a sample generator over slot-format data files.

    Args:
        readerclass: unused; kept for interface parity with dataloader().
        train: "TRAIN" selects the train.reader namespace, anything else
            selects evaluate.reader.
        yaml_file: path of the yaml config consumed by SlotReader.

    Returns:
        A zero-argument generator function yielding one parsed sample
        (list of per-slot value lists) per data line.
    """
    if train == "TRAIN":
        namespace = "train.reader"
        data_path = get_global_env("train_data_path", None, namespace)
    else:
        namespace = "evaluate.reader"
        data_path = get_global_env("test_data_path", None, namespace)

    # Resolve "paddlerec::relative/path" against the installed package base.
    if data_path.startswith("paddlerec::"):
        package_base = get_runtime_environ("PACKAGE_BASE")
        assert package_base is not None
        data_path = os.path.join(package_base, data_path.split("::")[1])

    files = [str(data_path) + "/%s" % x for x in os.listdir(data_path)]

    sparse = get_global_env("sparse_slots", None, namespace)
    dense = get_global_env("dense_slots", None, namespace)
    padding = get_global_env("padding", 0, namespace)
    reader = SlotReader(yaml_file)
    reader.init(sparse, dense, int(padding))

    def gen_reader():
        for file_name in files:  # renamed: `file` shadows a py2 builtin
            with open(file_name, 'r') as f:
                for line in f:
                    line = line.rstrip('\n')
                    # renamed: `iter` shadows the builtin
                    sample_gen = reader.generate_sample(line)
                    for parsed_line in sample_gen():
                        if parsed_line is None:
                            continue
                        # keep only the value list of each (slot, values) pair
                        yield [pased[1] for pased in parsed_line]

    def gen_batch_reader():
        return reader.generate_batch_from_trainfiles(files)

    if hasattr(reader, 'generate_batch_from_trainfiles'):
        return gen_batch_reader()
    return gen_reader
...@@ -16,19 +16,33 @@ from __future__ import print_function ...@@ -16,19 +16,33 @@ from __future__ import print_function
import sys import sys
from paddlerec.core.utils.envs import lazy_instance_by_fliename from paddlerec.core.utils.envs import lazy_instance_by_fliename
from paddlerec.core.reader import SlotReader
from paddlerec.core.utils import envs
if len(sys.argv) != 4: if len(sys.argv) < 4:
raise ValueError("reader only accept 3 argument: 1. reader_class 2.train/evaluate 3.yaml_abs_path") raise ValueError("reader only accept 3 argument: 1. reader_class 2.train/evaluate/slotreader 3.yaml_abs_path")
reader_package = sys.argv[1] reader_package = sys.argv[1]
if sys.argv[2] == "TRAIN": if sys.argv[2].upper() == "TRAIN":
reader_name = "TrainReader" reader_name = "TrainReader"
else: elif sys.argv[2].upper() == "EVALUATE":
reader_name = "EvaluateReader" reader_name = "EvaluateReader"
else:
reader_name = "SlotReader"
namespace = sys.argv[4]
sparse_slots = sys.argv[5].replace("#", " ")
dense_slots = sys.argv[6].replace("#", " ")
padding = int(sys.argv[7])
yaml_abs_path = sys.argv[3] yaml_abs_path = sys.argv[3]
reader_class = lazy_instance_by_fliename(reader_package, reader_name)
reader = reader_class(yaml_abs_path) if reader_name != "SlotReader":
reader.init() reader_class = lazy_instance_by_fliename(reader_package, reader_name)
reader.run_from_stdin() reader = reader_class(yaml_abs_path)
reader.init()
reader.run_from_stdin()
else:
reader = SlotReader(yaml_abs_path)
reader.init(sparse_slots, dense_slots, padding)
reader.run_from_stdin()
# PaddleRec 推荐数据集格式
当你的数据集格式为[slot:feasign]*这种模式,或者可以预处理为这种格式时,可以直接使用PaddleRec内置的Reader。
好处是不用自己写Reader了,各个model之间的数据格式也都可以统一成一样的格式。
## 数据格式说明
假如你的原始数据格式为
```bash
<label> <integer feature 1> ... <integer feature 13> <categorical feature 1> ... <categorical feature 26>
```
其中```<label>```表示广告是否被点击,点击用1表示,未点击用0表示。```<integer feature>```代表数值特征(连续特征),共有13个连续特征。
并且每个特征有一个特征值。
```<categorical feature>```代表分类特征(离散特征),共有26个离散特征。相邻两个特征用```\t```分隔。
假设这13个连续特征(dense slot)的name如下:
```
D1 D2 D3 D4 D5 D6 D7 D8 D9 D10 D11 D12 D13
```
这26个离散特征(sparse slot)的name如下:
```
S1 S2 S3 S4 S5 S6 S7 S8 S9 S10 S11 S12 S13 S14 S15 S16 S17 S18 S19 S20 S21 S22 S23 S24 S25 S26
```
那么下面这条样本(1个label + 13个dense值 + 26个feasign)
```
1 0.1 0.4 0.2 0.3 0.5 0.8 0.3 0.2 0.1 0.5 0.6 0.3 0.9 60 16 91 50 52 52 28 69 63 33 87 69 48 59 27 12 95 36 37 41 17 3 86 19 88 60
```
可以转换成:
```
label:1 D1:0.1 D2:0.4 D3:0.2 D4:0.3 D5:0.5 D6:0.8 D7:0.3 D8:0.2 D9:0.1 D10:0.5 D11:0.6 D12:0.3 D13:0.9 S1:60 S2:16 S3:91 S4:50 S5:52 S6:52 S7:28 S8:69 S9:63 S10:33 S11:87 S12:69 S13:48 S14:59 S15:27 S16:12 S17:95 S18:36 S19:37 S20:41 S21:17 S22:3 S23:86 S24:19 S25:88 S26:60
```
注意:上面各个slot:feasign字段之间的顺序没有要求,比如```D1:0.1 D2:0.4```改成```D2:0.4 D1:0.1```也可以。
## 配置
reader中需要配置```sparse_slots```与```dense_slots```,例如
```
workspace: xxxx
reader:
batch_size: 2
train_data_path: "{workspace}/data/train_data"
sparse_slots: "label S1 S2 S3 S4 S5 S6 S7 S8 S9 S10 S11 S12 S13 S14 S15 S16 S17 S18 S19 S20 S21 S22 S23 S24 S25 S26"
dense_slots: "D1:1 D2:1 D3:1 D4:1 D5:1 D6:1 D7:1 D8:1 D9:1 D10:1 D11:1 D12:1 D13:1"
model:
xxxxx
```
sparse_slots表示稀疏特征的列表,以空格分开。
dense_slots表示稠密特征的列表,以空格分开。每个字段的格式是```[dense_slot_name]:[dim1,dim2,dim3...]```,其中```dim1,dim2,dim3...```表示shape
配置好了之后,这些slot对应的variable就可以在model中的如下变量啦:
```
self._sparse_data_var
self._dense_data_var
```
# PaddleRec 自定义数据集及Reader # PaddleRec 自定义数据集及Reader
用户自定义数据集及配置异步Reader,需要关注以下几个步骤: 用户自定义数据集及配置异步Reader,需要关注以下几个步骤:
......
...@@ -71,13 +71,13 @@ python text2paddle.py raw_big_train_data/ raw_big_test_data/ train_big_data test ...@@ -71,13 +71,13 @@ python text2paddle.py raw_big_train_data/ raw_big_test_data/ train_big_data test
### 训练 ### 训练
``` ```
python -m paddlerec.run -m paddlerec.models.contentunderstanding.classification -d cpu -e single python -m paddlerec.run -m paddlerec.models.contentunderstanding.classification
``` ```
### 预测 ### 预测
``` ```
python -m paddlerec.run -m paddlerec.models.contentunderstanding.classification -d cpu -e single python -m paddlerec.run -m paddlerec.models.contentunderstanding.classification
``` ```
## 效果对比 ## 效果对比
...@@ -88,18 +88,3 @@ python -m paddlerec.run -m paddlerec.models.contentunderstanding.classification ...@@ -88,18 +88,3 @@ python -m paddlerec.run -m paddlerec.models.contentunderstanding.classification
| ag news dataset | TagSpace | -- | -- | -- | -- | | ag news dataset | TagSpace | -- | -- | -- | -- |
| -- | Classification | -- | -- | -- | -- | | -- | Classification | -- | -- | -- | -- |
## 分布式
### 模型训练性能 (样本/s)
| 数据集 | 模型 | 单机 | 同步 (4节点) | 同步 (8节点) | 同步 (16节点) | 同步 (32节点) |
| :------------------: | :--------------------: | :---------: |:---------: |:---------: |:---------: |:---------: |
| -- | TagSpace | -- | -- | -- | -- | -- |
| -- | Classification | -- | -- | -- | -- | -- |
----
| 数据集 | 模型 | 单机 | 异步 (4节点) | 异步 (8节点) | 异步 (16节点) | 异步 (32节点) |
| :------------------: | :--------------------: | :---------: |:---------: |:---------: |:---------: |:---------: |
| -- | TagSpace | -- | -- | -- | -- | -- |
| -- | Classification | -- | -- | -- | -- | -- |
...@@ -22,9 +22,10 @@ train: ...@@ -22,9 +22,10 @@ train:
reader: reader:
batch_size: 2 batch_size: 2
class: "{workspace}/criteo_reader.py" train_data_path: "{workspace}/data/slot_train"
train_data_path: "{workspace}/data/train"
feat_dict_name: "{workspace}/data/vocab" feat_dict_name: "{workspace}/data/vocab"
sparse_slots: "label C1 C2 C3 C4 C5 C6 C7 C8 C9 C10 C11 C12 C13 C14 C15 C16 C17 C18 C19 C20 C21 C22 C23 C24 C25 C26"
dense_slots: "I1:1 I2:1 I3:1 I4:1 I5:1 I6:1 I7:1 I8:1 I9:1 I10:1 I11:1 I12:1 I13:1"
model: model:
models: "{workspace}/model.py" models: "{workspace}/model.py"
......
...@@ -11,21 +11,32 @@ ...@@ -11,21 +11,32 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import math
from __future__ import print_function import sys
import yaml
from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
import math import math
import os import os
try: try:
import cPickle as pickle import cPickle as pickle
except ImportError: except ImportError:
import pickle import pickle
from collections import Counter
import os
import paddle.fluid.incubate.data_generator as dg
from paddlerec.core.reader import Reader class TrainReader(dg.MultiSlotDataGenerator):
from paddlerec.core.utils import envs
def __init__(self, config):
dg.MultiSlotDataGenerator.__init__(self)
if os.path.isfile(config):
with open(config, 'r') as rb:
_config = yaml.load(rb.read(), Loader=yaml.FullLoader)
else:
raise ValueError("reader config only support yaml")
class TrainReader(Reader):
def init(self): def init(self):
self.cont_min_ = [0, -3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] self.cont_min_ = [0, -3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
self.cont_max_ = [ self.cont_max_ = [
...@@ -48,7 +59,7 @@ class TrainReader(Reader): ...@@ -48,7 +59,7 @@ class TrainReader(Reader):
self.cat_feat_idx_dict_list = [{} for _ in range(26)] self.cat_feat_idx_dict_list = [{} for _ in range(26)]
# TODO: set vocabulary dictionary # TODO: set vocabulary dictionary
vocab_dir = envs.get_global_env("feat_dict_name", None, "train.reader") vocab_dir = "./vocab/"
for i in range(26): for i in range(26):
lookup_idx = 1 # remain 0 for default value lookup_idx = 1 # remain 0 for default value
for line in open( for line in open(
...@@ -87,6 +98,17 @@ class TrainReader(Reader): ...@@ -87,6 +98,17 @@ class TrainReader(Reader):
def data_iter(): def data_iter():
label_feat_list = self._process_line(line) label_feat_list = self._process_line(line)
yield list(zip(self.label_feat_names, label_feat_list)) s = ""
for i in list(zip(self.label_feat_names, label_feat_list)):
k = i[0]
v = i[1]
for j in v:
s += " " + k + ":" + str(j)
print s.strip()
yield None
return data_iter return data_iter
reader = TrainReader("../config.yaml")
reader.init()
reader.run_from_stdin()
# Fetch and preprocess the raw dataset.
python download.py
python preprocess.py
# Convert every raw train file into slot:feasign format via the
# streaming converter (reads stdin, writes stdout).
mkdir slot_train
for i in `ls ./train`
do
    cat train/$i | python get_slot_data.py > slot_train/$i
done
# Same conversion for the combined test/validation split.
mkdir slot_test_valid
for i in `ls ./test_valid`
do
    cat test_valid/$i | python get_slot_data.py > slot_test_valid/$i
done
...@@ -31,6 +31,11 @@ class Model(ModelBase): ...@@ -31,6 +31,11 @@ class Model(ModelBase):
self.dnn_use_bn = envs.get_global_env("hyper_parameters.dnn_use_bn", None, self._namespace) self.dnn_use_bn = envs.get_global_env("hyper_parameters.dnn_use_bn", None, self._namespace)
self.clip_by_norm = envs.get_global_env("hyper_parameters.clip_by_norm", None, self._namespace) self.clip_by_norm = envs.get_global_env("hyper_parameters.clip_by_norm", None, self._namespace)
cat_feat_num = envs.get_global_env("hyper_parameters.cat_feat_num", None, self._namespace) cat_feat_num = envs.get_global_env("hyper_parameters.cat_feat_num", None, self._namespace)
self.sparse_inputs = self._sparse_data_var[1:]
self.dense_inputs = self._dense_data_var
self.target_input = self._sparse_data_var[0]
cat_feat_dims_dict = OrderedDict() cat_feat_dims_dict = OrderedDict()
for line in open(cat_feat_num): for line in open(cat_feat_num):
spls = line.strip().split() spls = line.strip().split()
...@@ -40,8 +45,8 @@ class Model(ModelBase): ...@@ -40,8 +45,8 @@ class Model(ModelBase):
) )
self.is_sparse = envs.get_global_env("hyper_parameters.is_sparse", None, self._namespace) self.is_sparse = envs.get_global_env("hyper_parameters.is_sparse", None, self._namespace)
self.dense_feat_names = ['I' + str(i) for i in range(1, 14)] self.dense_feat_names = [i.name for i in self.dense_inputs]
self.sparse_feat_names = ['C' + str(i) for i in range(1, 27)] self.sparse_feat_names = [i.name for i in self.sparse_inputs]
# {feat_name: dims} # {feat_name: dims}
self.feat_dims_dict = OrderedDict( self.feat_dims_dict = OrderedDict(
...@@ -51,21 +56,17 @@ class Model(ModelBase): ...@@ -51,21 +56,17 @@ class Model(ModelBase):
self.net_input = None self.net_input = None
self.loss = None self.loss = None
def _create_embedding_input(self, data_dict): def _create_embedding_input(self):
# sparse embedding # sparse embedding
sparse_emb_dict = OrderedDict((name, fluid.embedding( sparse_emb_dict = OrderedDict()
input=fluid.layers.cast( for var in self.sparse_inputs:
data_dict[name], dtype='int64'), sparse_emb_dict[var.name] = fluid.embedding(input=var,
size=[ size=[self.feat_dims_dict[var.name] + 1,
self.feat_dims_dict[name] + 1, 6 * int(pow(self.feat_dims_dict[var.name], 0.25))
6 * int(pow(self.feat_dims_dict[name], 0.25)) ],is_sparse=self.is_sparse)
],
is_sparse=self.is_sparse)) for name in self.sparse_feat_names)
# combine dense and sparse_emb # combine dense and sparse_emb
dense_input_list = [ dense_input_list = self.dense_inputs
data_dict[name] for name in data_dict if name.startswith('I')
]
sparse_emb_list = list(sparse_emb_dict.values()) sparse_emb_list = list(sparse_emb_dict.values())
sparse_input = fluid.layers.concat(sparse_emb_list, axis=-1) sparse_input = fluid.layers.concat(sparse_emb_list, axis=-1)
...@@ -111,15 +112,10 @@ class Model(ModelBase): ...@@ -111,15 +112,10 @@ class Model(ModelBase):
return fluid.layers.reduce_sum(fluid.layers.square(w)) return fluid.layers.reduce_sum(fluid.layers.square(w))
def train_net(self): def train_net(self):
self.model._init_slots()
self.init_network() self.init_network()
self.target_input = fluid.data(
name='label', shape=[None, 1], dtype='float32')
data_dict = OrderedDict()
for feat_name in self.feat_dims_dict:
data_dict[feat_name] = fluid.data(
name=feat_name, shape=[None, 1], dtype='float32')
self.net_input = self._create_embedding_input(data_dict) self.net_input = self._create_embedding_input()
deep_out = self._deep_net(self.net_input, self.dnn_hidden_units, self.dnn_use_bn, False) deep_out = self._deep_net(self.net_input, self.dnn_hidden_units, self.dnn_use_bn, False)
...@@ -130,9 +126,6 @@ class Model(ModelBase): ...@@ -130,9 +126,6 @@ class Model(ModelBase):
logit = fluid.layers.fc(last_out, 1) logit = fluid.layers.fc(last_out, 1)
self.prob = fluid.layers.sigmoid(logit) self.prob = fluid.layers.sigmoid(logit)
self._data_var = [self.target_input] + [
data_dict[dense_name] for dense_name in self.dense_feat_names
] + [data_dict[sparse_name] for sparse_name in self.sparse_feat_names]
# auc # auc
prob_2d = fluid.layers.concat([1 - self.prob, self.prob], 1) prob_2d = fluid.layers.concat([1 - self.prob, self.prob], 1)
...@@ -143,7 +136,7 @@ class Model(ModelBase): ...@@ -143,7 +136,7 @@ class Model(ModelBase):
self._metrics["BATCH_AUC"] = batch_auc_var self._metrics["BATCH_AUC"] = batch_auc_var
# logloss # logloss
logloss = fluid.layers.log_loss(self.prob, self.target_input) logloss = fluid.layers.log_loss(self.prob, fluid.layers.cast(self.target_input, dtype='float32'))
self.avg_logloss = fluid.layers.reduce_mean(logloss) self.avg_logloss = fluid.layers.reduce_mean(logloss)
# reg_coeff * l2_reg_cross # reg_coeff * l2_reg_cross
...@@ -157,4 +150,5 @@ class Model(ModelBase): ...@@ -157,4 +150,5 @@ class Model(ModelBase):
return optimizer return optimizer
def infer_net(self, parameter_list): def infer_net(self, parameter_list):
self.model._init_slots()
self.deepfm_net() self.deepfm_net()
...@@ -22,9 +22,10 @@ train: ...@@ -22,9 +22,10 @@ train:
reader: reader:
batch_size: 2 batch_size: 2
class: "{workspace}/criteo_reader.py" train_data_path: "{workspace}/data/slot_train_data"
train_data_path: "{workspace}/data/train_data" feat_dict_name: "{workspace}/data/feat_dict_10.pkl2"
feat_dict_name: "{workspace}/data/aid_data/feat_dict_10.pkl2" sparse_slots: "label feat_idx"
dense_slots: "feat_value:39"
model: model:
models: "{workspace}/model.py" models: "{workspace}/model.py"
......
...@@ -12,18 +12,24 @@ ...@@ -12,18 +12,24 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from __future__ import print_function import yaml
from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
try: try:
import cPickle as pickle import cPickle as pickle
except ImportError: except ImportError:
import pickle import pickle
class TrainReader(dg.MultiSlotDataGenerator):
from paddlerec.core.reader import Reader def __init__(self, config):
from paddlerec.core.utils import envs dg.MultiSlotDataGenerator.__init__(self)
if os.path.isfile(config):
with open(config, 'r') as rb:
_config = yaml.load(rb.read(), Loader=yaml.FullLoader)
else:
raise ValueError("reader config only support yaml")
class TrainReader(Reader):
def init(self): def init(self):
self.cont_min_ = [0, -3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] self.cont_min_ = [0, -3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
self.cont_max_ = [ self.cont_max_ = [
...@@ -37,7 +43,7 @@ class TrainReader(Reader): ...@@ -37,7 +43,7 @@ class TrainReader(Reader):
self.continuous_range_ = range(1, 14) self.continuous_range_ = range(1, 14)
self.categorical_range_ = range(14, 40) self.categorical_range_ = range(14, 40)
# load preprocessed feature dict # load preprocessed feature dict
self.feat_dict_name = envs.get_global_env("feat_dict_name", None, "train.reader") self.feat_dict_name = "aid_data/feat_dict_10.pkl2"
self.feat_dict_ = pickle.load(open(self.feat_dict_name, 'rb')) self.feat_dict_ = pickle.load(open(self.feat_dict_name, 'rb'))
def _process_line(self, line): def _process_line(self, line):
...@@ -70,6 +76,16 @@ class TrainReader(Reader): ...@@ -70,6 +76,16 @@ class TrainReader(Reader):
def data_iter(): def data_iter():
feat_idx, feat_value, label = self._process_line(line) feat_idx, feat_value, label = self._process_line(line)
yield [('feat_idx', feat_idx), ('feat_value', feat_value), ('label', label)] s = ""
for i in [('feat_idx', feat_idx), ('feat_value', feat_value), ('label', label)]:
k = i[0]
v = i[1]
for j in v:
s += " " + k + ":" + str(j)
print s.strip()
yield None
return data_iter return data_iter
reader = TrainReader("../config.yaml")
reader.init()
reader.run_from_stdin()
# Download and preprocess the raw dataset in one step.
python download_preprocess.py
# Convert each raw train file to slot:feasign format (converter is a
# stdin->stdout text filter).
mkdir slot_train_data
for i in `ls ./train_data`
do
    cat train_data/$i | python get_slot_data.py > slot_train_data/$i
done
# Same conversion for the test split.
mkdir slot_test_data
for i in `ls ./test_data`
do
    cat test_data/$i | python get_slot_data.py > slot_test_data/$i
done
...@@ -33,23 +33,13 @@ class Model(ModelBase): ...@@ -33,23 +33,13 @@ class Model(ModelBase):
# ------------------------- network input -------------------------- # ------------------------- network input --------------------------
num_field = envs.get_global_env("hyper_parameters.num_field", None, self._namespace) num_field = envs.get_global_env("hyper_parameters.num_field", None, self._namespace)
raw_feat_idx = fluid.data(name='feat_idx', shape=[None, num_field],
dtype='int64') # None * num_field(defalut:39)
raw_feat_value = fluid.data(name='feat_value', shape=[None, num_field], dtype='float32') # None * num_field
self.label = fluid.data(name='label', shape=[None, 1], dtype='float32') # None * 1
feat_idx = fluid.layers.reshape(raw_feat_idx, [-1, 1]) # (None * num_field) * 1
feat_value = fluid.layers.reshape(raw_feat_value, [-1, num_field, 1]) # None * num_field * 1
# ------------------------- set _data_var --------------------------
self._data_var.append(raw_feat_idx) raw_feat_idx = self._sparse_data_var[1]
self._data_var.append(raw_feat_value) raw_feat_value = self._dense_data_var[0]
self._data_var.append(self.label) self.label = self._sparse_data_var[0]
if self._platform != "LINUX":
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var, capacity=64, use_double_buffer=False, iterable=False)
# ------------------------- first order term -------------------------- feat_idx = raw_feat_idx
feat_value = fluid.layers.reshape(raw_feat_value, [-1, num_field, 1]) # None * num_field * 1
reg = envs.get_global_env("hyper_parameters.reg", 1e-4, self._namespace) reg = envs.get_global_env("hyper_parameters.reg", 1e-4, self._namespace)
first_weights_re = fluid.embedding( first_weights_re = fluid.embedding(
...@@ -134,11 +124,12 @@ class Model(ModelBase): ...@@ -134,11 +124,12 @@ class Model(ModelBase):
self.predict = fluid.layers.sigmoid(y_first_order + y_second_order + y_dnn) self.predict = fluid.layers.sigmoid(y_first_order + y_second_order + y_dnn)
def train_net(self): def train_net(self):
self.model._init_slots()
self.deepfm_net() self.deepfm_net()
# ------------------------- Cost(logloss) -------------------------- # ------------------------- Cost(logloss) --------------------------
cost = fluid.layers.log_loss(input=self.predict, label=self.label) cost = fluid.layers.log_loss(input=self.predict, label=fluid.layers.cast(self.label, "float32"))
avg_cost = fluid.layers.reduce_sum(cost) avg_cost = fluid.layers.reduce_sum(cost)
self._cost = avg_cost self._cost = avg_cost
...@@ -159,4 +150,5 @@ class Model(ModelBase): ...@@ -159,4 +150,5 @@ class Model(ModelBase):
return optimizer return optimizer
def infer_net(self, parameter_list): def infer_net(self, parameter_list):
self.model._init_slots()
self.deepfm_net() self.deepfm_net()
...@@ -23,9 +23,10 @@ train: ...@@ -23,9 +23,10 @@ train:
reader: reader:
batch_size: 2 batch_size: 2
class: "{workspace}/../criteo_reader.py" train_data_path: "{workspace}/data/slot_train_data"
train_data_path: "{workspace}/data/train"
reader_debug_mode: False reader_debug_mode: False
sparse_slots: "click 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26"
dense_slots: "dense_var:13"
model: model:
models: "{workspace}/model.py" models: "{workspace}/model.py"
......
# Download and unpack the Criteo CTR dataset.
wget --no-check-certificate https://fleet.bj.bcebos.com/ctr_data.tar.gz
tar -zxvf ctr_data.tar.gz
# Keep the full splits, and carve out small "rapid verification" subsets
# (two train parts, one test part) for quick smoke runs.
mv ./raw_data ./train_data_full
mkdir train_data && cd train_data
cp ../train_data_full/part-0 ../train_data_full/part-1 ./ && cd ..
mv ./test_data ./test_data_full
mkdir test_data && cd test_data
cp ../test_data_full/part-220 ./ && cd ..
echo "Complete data download."
echo "Full Train data stored in ./train_data_full "
echo "Full Test data stored in ./test_data_full "
echo "Rapid Verification train data stored in ./train_data "
echo "Rapid Verification test data stored in ./test_data "
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle.fluid.incubate.data_generator as dg
# Min / max / range values used to min-max scale the 13 continuous features.
cont_min_ = [0, -3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
cont_max_ = [20, 600, 100, 50, 64000, 500, 100, 50, 500, 10, 10, 10, 50]
cont_diff_ = [20, 603, 100, 50, 64000, 500, 100, 50, 500, 10, 10, 10, 50]
# Number of hash buckets for categorical feasigns.
hash_dim_ = 1000001
# Column layout of a raw line: col 0 = label, 1-13 continuous, 14-39 categorical.
continuous_range_ = range(1, 14)
categorical_range_ = range(14, 40)
class CriteoDataset(dg.MultiSlotDataGenerator):
    """
    DacDataset: inheritance MultiSlotDataGeneratior, Implement data reading
    Help document: http://wiki.baidu.com/pages/viewpage.action?pageId=728820675
    """

    def generate_sample(self, line):
        """
        Read the data line by line and process it as a dictionary
        """

        def reader():
            """
            This function needs to be implemented by the user, based on data format
            """
            features = line.rstrip('\n').split('\t')
            dense_feature = []
            sparse_feature = []
            for idx in continuous_range_:
                # Empty continuous fields default to 0.0; others are
                # min-max scaled using the module-level constant tables.
                if features[idx] == "":
                    dense_feature.append(0.0)
                else:
                    dense_feature.append(
                        (float(features[idx]) - cont_min_[idx - 1]) /
                        cont_diff_[idx - 1])
            for idx in categorical_range_:
                sparse_feature.append(
                    [hash(str(idx) + features[idx]) % hash_dim_])
            label = [int(features[0])]
            # Emit the sample as "slot:feasign" text on stdout instead of
            # yielding it — this script is used as a stdin->stdout filter.
            # (Dead locals `process_line`/`feature_name` from the original
            # yielding implementation were removed.)
            s = "click:" + str(label[0])
            for i in dense_feature:
                s += " dense_feature:" + str(i)
            for i in range(1, 1 + len(categorical_range_)):
                s += " " + str(i) + ":" + str(sparse_feature[i - 1][0])
            # print() call form works on both Python 2 and 3; the old
            # `print s.strip()` statement was Python-2-only.
            print(s.strip())
            yield None

        return reader
if __name__ == "__main__":
    # Stream raw samples from stdin and emit slot-formatted text on stdout;
    # the guard keeps importing this module side-effect free.
    d = CriteoDataset()
    d.run_from_stdin()
#!/bin/bash
# Fetch the raw Criteo splits, then convert every file of every split into
# slot format with get_slot_data.py (output mirrors the input layout under
# slot_<split>/).
set -e

sh download.sh

for split in train_data_full test_data_full train_data test_data; do
    mkdir -p "slot_${split}"   # -p: idempotent on re-run
    # Glob instead of `ls` to survive unusual file names; quote throughout.
    for f in ./"${split}"/*; do
        name=$(basename "$f")
        python get_slot_data.py < "$f" > "slot_${split}/${name}"
    done
done
0 1 1 26 30 0 4 2 37 152 1 2 2 05db9164 38d50e09 ed5e4936 612ccfd4 25c83c98 38eb9cf4 1f89b562 a73ee510 2462946f 7f8ffe57 1d5d5b6e 46f42a63 b28479f6 7501d6be 6083e1d5 07c540c4 f855e3f0 21ddcdc9 5840adea 782e846e 32c7478e b2f178a3 001f3601 c4304c4b
0 20 3 4 40479 444 0 1 157 0 4 68fd1e64 09e68b86 aa8c1539 85dd697c 25c83c98 fe6b92e5 e56a4862 5b392875 a73ee510 3b08e48b 5e183c58 d8c29807 1eb0f8f0 8ceecbc8 d2f03b75 c64d548f 07c540c4 63cdbb21 cf99e5de 5840adea 5f957280 55dd3565 1793a828 e8b83407 b7d9c3bc
0 6 70 1 22 312 25 52 44 144 1 3 1 22 05db9164 04e09220 b1ecc6c4 5dff9b29 4cf72387 7e0ccccf d5f62b87 1f89b562 a73ee510 ce92c282 434d6c13 2436ff75 7301027a 07d13a8f f6b23a53 f4ead43c 3486227d 6fc84bfb 4f1aa25f c9d4222a 55dd3565 ded4aac9
0 0 0 110 7 3251 44 1 32 39 0 1 31 05db9164 80e26c9b ba1947d0 85dd697c 25c83c98 85f287b3 0b153874 a73ee510 89270478 7c53dc69 34a238e0 4fd35e8f 1adce6ef 0f942372 da441c7e d4bb7bd8 005c6740 21ddcdc9 5840adea 8717ea07 423fab69 1793a828 e8b83407 9904c656
0 0 29 19490 0 68fd1e64 287130e0 ba4559ea 33a72095 25c83c98 fbad5c96 ffdbd799 5b392875 a73ee510 60badee3 c72ca7a4 ebfb225c b9be5035 cfef1c29 655fad18 a9dcda12 d4bb7bd8 891589e7 419b4cef 5840adea 76ef8858 32c7478e 135c8b41 ea9a246c e3a60438
0 2 2 20 43197 0 26 23 0 25 05db9164 9b6b8959 9c1c85e7 fd4d6dc3 25c83c98 7e0ccccf d2d741ca 0b153874 a73ee510 4e2d1b78 ea4adb47 cc239583 05781932 64c94865 de781d57 efd92064 e5ba7672 cac48684 4b0ac19f c9d4222a 3a171ecb 22dd4e42
1 5 12 871 0 27 1 21 1 4 0 05db9164 e112a9de 29bb7bea d3e15e1a 25c83c98 7e0ccccf fd3483f3 0b153874 a73ee510 880e2781 9d7e66c3 bd5829ab df957573 07d13a8f 290e3042 390b7737 8efede7f 808e7bc3 af16dda0 ad3062eb 423fab69 a0ab2ce0
0 6 263 41 53 0 44 42 0 42 05db9164 71ca0a25 ad876a43 0481f0ba 4cf72387 7e0ccccf bb0f47fb 5b392875 a73ee510 3b08e48b da3f45ff fde18531 a9fda8f5 07d13a8f a8e0f0c6 06f4ae56 776ce399 9bf8ffef 21ddcdc9 5840adea f5f07930 be7c41b4 62aa24c6 001f3601 1d5d3a57
0 14 4301 48 2 3 51 2 68fd1e64 95e2d337 95c48c52 30b862e7 25c83c98 7e0ccccf b06857f8 0b153874 a73ee510 8228dde1 e4eb05d4 f0d5cc59 a4c5d6dd 1adce6ef 559cd202 e9194f3c 07c540c4 7b06fafe 21ddcdc9 a458ea53 cb105f80 423fab69 16bb3de8 2bf691b1 d3b2f8c3
0 58 42 39 100 0 40 40 0 40 05db9164 207b2d81 25c37040 e8b2aee5 25c83c98 fe6b92e5 6e6e841b 1f89b562 a73ee510 3b08e48b dcc0e16b a04dc78a b093e98d b28479f6 c6438ddb 31da84fc 776ce399 fa0643ee 21ddcdc9 b1252a9d 931d653d c9d4222a be7c41b4 46f5e7df 001f3601 0e25d9c4
0 0 2 3 46065 0 5 9 0 0 3 68fd1e64 b961056b 05eefcc3 65e58ae6 25c83c98 fbad5c96 68fbb662 0b153874 7cc72ec2 4aead435 922bbb91 10239ea6 ad61640d 1adce6ef 8187184a 551eb463 e5ba7672 5a6878f5 00018438 32c7478e 71292dbb
1 1 0 224 0 4 0 3 4 27 1 2 0 05db9164 09e68b86 aa8c1539 85dd697c 25c83c98 7e0ccccf a4a8fd5a 0b153874 a73ee510 43a9b300 d13e1160 d8c29807 45820f61 b28479f6 2d49999f c64d548f e5ba7672 63cdbb21 cf99e5de 5840adea 5f957280 bcdee96c 1793a828 e8b83407 b7d9c3bc
1 10 310 6 5 3 75 4 702 2 21 3 68fd1e64 3f0d3f28 4cf72387 7e0ccccf a097ff18 062b5529 a73ee510 ae07e31d 3407cf7b f0fe287d 1adce6ef 14108df6 27c07bd6 88416823 ad3062eb 3a171ecb
0 0 0 19 2898 145 4 20 370 0 2 43 05db9164 38a947a1 0797f900 1da94763 25c83c98 fbad5c96 ba0ca6c5 64523cfa a73ee510 56ae5fb0 7ca01a9d c8ea9acc 97d749c9 1adce6ef a3dc522e d1079e54 e5ba7672 492bb129 828187a0 32c7478e 171ccf3e
0 16 4 2 46248 0 2 49 0 2 05db9164 942f9a8d feafff7d d7b693da 25c83c98 7e0ccccf d9aa9d97 5b392875 7cc72ec2 3b08e48b c4adf918 4ebd8ffe 85dbe138 b28479f6 ac182643 48292aa0 776ce399 1f868fdd 21ddcdc9 b1252a9d be7cac53 32c7478e e3edc57b 9d93af03 7dfad416
1 66 136 11 12 15 12 963 26 258 3 73 0 12 05db9164 89ddfee8 c314b537 e88cbfb4 4cf72387 7e0ccccf 1c86e0eb 0b153874 a73ee510 e9c971a2 755e4a50 bc8b54c7 5978055e b28479f6 25753fb1 fadc3903 e5ba7672 5bb2ec8e 5b1d6ed9 b1252a9d 8a903c79 32c7478e 7cb5b4d7 e8b83407 ec01bf7b
0 1 34 29 2 10 2 1 2 2 1 1 2 05db9164 b80912da 7b467545 d0cbe447 0942e0a7 fbad5c96 fc8f52a9 0b153874 a73ee510 3b08e48b ad39ba86 dd94da95 751c7a99 b28479f6 79fcb5cb 169d489d e5ba7672 7119e567 3014a4b1 5840adea 23fcd679 3a171ecb de1e9c76 e8b83407 ce0bf6fc
1 1 1 22 1 138 7 16 22 114 1 8 0 7 7e5c2ff4 287130e0 67fa93b5 1fa34039 43b19349 13718bbd f828f7fb 0b153874 a73ee510 b883655e ab066900 2eb927aa 5d4198ed 07d13a8f 10040656 6f930046 e5ba7672 891589e7 21ddcdc9 5840adea fce0d6a4 3a171ecb 1793a828 e8b83407 63093459
0 2 8 4 4 2 4 8 6 55 2 5 0 4 05db9164 b80912da 02391f51 b9c629a9 b0530c50 7e0ccccf fd10f30e 0b153874 a73ee510 bfc44ba9 e3ee9d2e 2397259a 0d60a93e 07d13a8f ee76936d d37efe8c e5ba7672 30f25b5e 21ddcdc9 5840adea b6119319 423fab69 45ab94c8 ce62e669 b13f4ade
1 15 2 88 27 4 1 21 49 124 1 3 1 5a9ed9b0 4f25e98b aee80afd ae78390d 25c83c98 fbad5c96 f00bddf8 6c41e35e a73ee510 16a81a6c 55795b33 12d1b214 39795005 1adce6ef fb2772ea 121f992b e5ba7672 bc5a0ff7 dfc341f8 a458ea53 b4847d32 32c7478e e7bc1058 001f3601 6b208992
1 0 8 14551 26 2 0 22 2 0 87552397 80e26c9b 431913c5 85dd697c 25c83c98 fbad5c96 b46e01f1 0b153874 a73ee510 39cda501 7c53dc69 5798519c 4fd35e8f 07d13a8f e8f4b767 2d0bbe92 3486227d 005c6740 21ddcdc9 5840adea 91404954 3a171ecb 1793a828 e8b83407 b9809574
0 0 12 9 4430 21 2 11 11 1 9 05db9164 333137d9 22fbf56a b92573a3 25c83c98 fe6b92e5 ad9b2639 0b153874 a73ee510 9c4dd39e e4034ebf 878d3428 ea089f5d b28479f6 a46bf7c6 7401a802 07c540c4 c61e82d7 21ddcdc9 a458ea53 634363f7 c9d4222a 32c7478e a2752662 445bbe3b fc1f43e7
0 21 904 7 30 79 39 87 20 251 2 8 0 39 05db9164 f0cf0024 20009f96 73fec7fb 4cf72387 fbad5c96 a98972ab 0b153874 a73ee510 06363d2d a523f48a 57c08194 5cc21877 b28479f6 fdb1071f 054b386f 3486227d cc693e93 21ddcdc9 b1252a9d 0dd41d11 c9d4222a 32c7478e f9f7eb22 f0f449dd a3a8e8f4
1 1 1 9 35 5 6 17 10 912 1 9 6 05db9164 09e68b86 21f56260 7cc584ad 89ff5705 fbad5c96 69b885a7 5b392875 a73ee510 b6900243 208d9dd6 252752f5 59dd51b4 07d13a8f 36721ddc e20cfabe e5ba7672 5aed7436 db0b20dc b1252a9d 3572f92c 423fab69 869261fd f0f449dd fb52e815
0 0 3 47 10 1494 153 6 11 269 0 4 10 5a9ed9b0 39dfaa0d 86d9f7e6 77b5e5ed b2241560 7e0ccccf afa309bd 0b153874 a73ee510 c54560e0 77212bd7 04d776a9 7203f04e 07d13a8f 60fa10e5 465ae0d6 e5ba7672 df4fffb7 21ddcdc9 5840adea 8b9756be c9d4222a c7dc6720 c88bdcee 010f6491 4e7af834
0 1 0 44 24 4 24 6 43 232 1 4 24 05db9164 c44e8a72 93655629 1b9f91ce 25c83c98 fbad5c96 a25cceac 67b76963 a73ee510 0b16773a 5bee5497 f0f6a9c1 a57cffd3 1adce6ef d6c04afa 6dc8c52c e5ba7672 456d734d 05e4794e a458ea53 dc1b605a bcdee96c 79fc7b8a 724b04da 0cc1543a
1 18 0 37 20 28 20 18 19 20 1 1 0 20 05db9164 ad61f1c8 b64ac9a3 1df4d824 25c83c98 7e0ccccf ac2d4799 0b153874 a73ee510 da500e68 434d6c13 71d55d49 7301027a b28479f6 3403e98c ed6d847a e5ba7672 84eb7a34 1d0aeb7a ad3062eb c7dc6720 786a0db5
1 4 88 4 20 14 27 357 31 874 2 41 13 05db9164 0eb070fa e75647d9 50912373 43b19349 7e0ccccf 1c86e0eb 0b153874 a73ee510 e7ba2569 755e4a50 a2337f7c 5978055e 07d13a8f 733cd612 2873175e e5ba7672 7ba9340b b4c77ec9 32c7478e 55cf97a5
0 0 0 20 9 7441 13 4 9 12 0 1 9 05db9164 46320fff de0cea78 66d81227 25c83c98 604312d3 0b153874 a73ee510 3b08e48b 0f6f1a80 51f94b83 9077501d 07d13a8f 4b572351 3ec13e49 e5ba7672 d981a095 21ddcdc9 5840adea b1bb8218 32c7478e 4f272e57 c9f3bea7 25ae1dcc
0 1 1 7 2 331 62 2 5 72 1 2 0 2 05db9164 8947f767 59f8a22b 16e92bee 25c83c98 7e0ccccf b471ac4f 1f89b562 a73ee510 4e56c58e e1ba038b 92352c1e e65a5fc3 07d13a8f 2c14c412 57ac7fda e5ba7672 bd17c3da 4b367914 b1252a9d e68624bc 3a171ecb c77fdeda 010f6491 0a798839
1 25 16 11 11545 56 1 20 51 1 11 05db9164 8f5b4275 b009d929 c7043c4b 5a3e1872 fbad5c96 e76a087f 0b153874 a73ee510 3b08e48b 50349a3f 3563ab62 370eceb9 1adce6ef a6bf53df b688c8cc d4bb7bd8 65c9624a 21ddcdc9 5840adea 2754aaf1 93bad2c0 3b183c5c e8b83407 adb5d234
0 1 20 3 6 1 2 2 8 8 1 2 2 5a9ed9b0 e5fb1af3 77f9d96e bc87885b 25c83c98 3bf701e7 6772d022 0b153874 a73ee510 9f7517e0 e0c3cae0 4ce8091c e8df3343 1adce6ef 60403b20 8fb0be40 07c540c4 13145934 21ddcdc9 b1252a9d c3f827f4 423fab69 f0f123e9 c243e98b 63ef9236
0 -1 8 5 11535 32 0 7 13 0 5 5a9ed9b0 f8c8e8f8 74e1a23a 9a6888fb 25c83c98 fe6b92e5 93955fc0 1f89b562 a73ee510 7dab1649 5215184e fb8fab62 b8ae7766 07d13a8f d4696a42 c6b1e1b2 07c540c4 d2f0bce2 21ddcdc9 5840adea 99c09e97 3a171ecb 335a6a1e f55c04b6 68a2a837
0 0 1 1 1 3755 124 6 8 198 0 3 1 5a9ed9b0 9819deea 533b1a61 f922efad 25c83c98 fe6b92e5 a4bbd4f4 0b153874 a73ee510 3b76bfa9 8d5ad79c b99ddbc8 4809d853 b28479f6 1150f5ed 87acb535 e5ba7672 7e32f7a4 a4b7004c 93bad2c0 b34f3128
0 0 15 7 1 2652 57 5 40 55 0 1 1 8cf07265 8947f767 37722a24 8802788f 25c83c98 fda1a50f 0b153874 a73ee510 3b08e48b d2b7c44b e3caf087 68637c0d 64c94865 d120f347 42bc62e3 e5ba7672 bd17c3da 21ddcdc9 a458ea53 1891824e 32c7478e b7bf6986 010f6491 a6115607
0 5 176 1 1 627 61 109 17 118 2 11 1 05db9164 38a947a1 1646cf1d fcdc5174 25c83c98 fe6b92e5 6fa3c1a7 1f89b562 a73ee510 5f50c86b b8deab54 c30bbcd1 efbb2435 07d13a8f 927edf61 ffb61047 e5ba7672 e73433e0 122d6055 423fab69 d8e17d82
1 108 20 403 0 1 0 109 0 7 1 2 0 05db9164 942f9a8d 871b4299 25dd4760 4cf72387 7e0ccccf d70c05b1 7b6fecd5 a73ee510 7edea927 c4adf918 2f1be242 85dbe138 1adce6ef ae97ecc3 c637ec94 e5ba7672 1f868fdd 2e30f394 a458ea53 140ec002 ad3062eb bcdee96c b50e18f9 001f3601 f99af3bd
0 15 0 14 10 609 35 29 12 419 1 3 3 10 05db9164 09e68b86 c86b9e6a e4fd0a5b 25c83c98 7e0ccccf a90a99c5 0b153874 a73ee510 e6003298 e9561d8b 906b3727 1cc9ac51 b28479f6 6f73304a a10da4c7 8efede7f 479030a6 7a1c9aad 5840adea c06c3736 32c7478e 41be4766 e8b83407 d8a062c4
0 8 0 10 12 46 12 8 10 12 1 1 12 05db9164 b7ca2abd ee96fc95 68ad052c 25c83c98 7e0ccccf 968a6688 5b392875 a73ee510 e851ff7b f25fe7e9 ce875433 dd183b4c 64c94865 5f2d5a3a 5f92b84a e5ba7672 4771e483 95b757a6 3a171ecb 41be4766
0 0 5 6 2 3021 151 6 10 18 0 1 2 be589b51 207b2d81 d0484442 68637816 25c83c98 7e0ccccf 12c61956 45f7c2dd a73ee510 29e50671 94d2aad8 3b9ae062 f23a3825 07d13a8f 0c67c4ca 3a1a0a65 07c540c4 395856b0 21ddcdc9 a458ea53 1720a38e 32c7478e 4de83b96 001f3601 8f16a3b8
0 4 7954 19 2 6 17 1 68fd1e64 78ccd99e 0a1435c1 bdcfffba 25c83c98 7e0ccccf c4939891 0b153874 a73ee510 fbbf2c95 7d4bba07 5a276398 2fad1153 8ceecbc8 d5adea3d 4da40ea2 07c540c4 e7e991cb 21ddcdc9 5840adea 290c14f6 3a171ecb ded4aac9 2bf691b1 bdf46dce
1 7 89 14 3 2 2 47 31 341 2 10 0 2 05db9164 421b43cd ced9477f 29998ed1 25c83c98 7e0ccccf 6bf83cdb 0b153874 a73ee510 89ff09ee 60adb56e 6aaba33c 53b60829 b28479f6 2d0bb053 b041b04a e5ba7672 2804effd 723b4dfd dbb486d7 b34f3128
1 -1 27180 12 2 0 5 1 05db9164 46b01795 4cf72387 1dcabd2a 0b153874 a73ee510 1d56e466 9cf09d42 f66b043c 1adce6ef c830dc5e 07c540c4 e3a5430f 32c7478e
0 1 1 39 15 119 18 1 18 15 1 1 15 05db9164 4f25e98b 01fefe29 e86b1560 25c83c98 7e0ccccf 0038e65c 0b153874 a73ee510 3b08e48b 7e728ed1 4676ac97 1ddad6aa 1adce6ef 17d9b759 3581aa7f d4bb7bd8 7ef5affa 9437f62f b1252a9d 745c79e6 bcdee96c 3fdb382b 001f3601 49d68486
0 0 2 5 1284 0 23 24 0 5 05db9164 8084ee93 02cf9876 c18be181 0942e0a7 7e0ccccf 0b72a0e8 5b392875 a73ee510 3b08e48b 4950c85b 8fe001f4 1d27b635 b28479f6 16d2748c 36103458 776ce399 003d4f4f e587c466 bcdee96c 3b183c5c
1 0 74 36 4 36375 8 0 4 68fd1e64 0468d672 08266a1d a3fc4871 4cf72387 7e0ccccf 5fd3419b 37e4aa92 a73ee510 972359d0 f69fd509 692521c3 c7176043 b28479f6 234191d3 dc3c41ba d4bb7bd8 9880032b 21ddcdc9 5840adea 10738086 3a171ecb e43a3efc ea9a246c 4e7af834
1 4 5 8 35 1398 64 19 9 703 1 4 59 05db9164 2a69d406 30b6e3ea 13508380 4cf72387 7e0ccccf 579c293b 0b153874 a73ee510 b38bac58 f66047e5 4551eab3 13c89cc4 07d13a8f 3b2d8705 48f5ae81 e5ba7672 642f2610 55dd3565 b1252a9d de95351a c9d4222a 423fab69 45ab94c8 2bf691b1 c84c4aec
0 7 48 41035 3 05db9164 6e638bbc 49a1cd79 cca79e1e 25c83c98 fe6b92e5 8f4478fe 0b153874 a73ee510 8ba6af1c 1cd8b8ae 0acdf55c 86b6351d b28479f6 c11477f0 f541ee61 d4bb7bd8 f6a2fc70 21ddcdc9 b1252a9d 1afb7d8e bcdee96c 75cfed80 445bbe3b e2f05ce0
1 -1 14752 0 2 4 0 5bfa8ab5 38a947a1 e710f9eb ae6e2a08 25c83c98 fe6b92e5 56f361f1 0b153874 a73ee510 3b08e48b 6d91e005 d0649cfd 34098dd6 b28479f6 7160a164 6ffcab68 776ce399 82103027 9487db01 be7c41b4 f57138a8
0 210 6 2 9072 0 2 12 0 2 05db9164 a07503cc 5d260103 13508380 25c83c98 987da766 0b153874 a73ee510 a9271c40 f37be5c0 519590f0 a59ea816 07d13a8f 77660bba 884b33b5 e5ba7672 912c7e21 1d1eb838 b1252a9d 353846c9 c7dc6720 45ab94c8 445bbe3b c84c4aec
0 3 45 6 7 18 6 52 7 177 1 9 0 6 f5796c5b 80e26c9b 6e5bddab d3e92866 25c83c98 7e0ccccf 24e8ca9f 0b153874 a73ee510 5fd7dd92 94a1f0fa bf413137 153f0382 07d13a8f f3635baf af6fc4b8 3486227d f54016b9 21ddcdc9 5840adea a3405885 423fab69 b0fb6a50 e8b83407 61556511
0 0 38 2 3 11664 0 6 3 0 0 0 3 68fd1e64 2c16a946 849cf586 b180f466 25c83c98 7e0ccccf 5547e1f4 0b153874 a73ee510 5db9788f 087dfcfd 48fc0800 5317f239 07d13a8f 18231224 9fbd58f8 e5ba7672 74ef3502 51c0191c 3a171ecb 9117a34a
0 11 6 18 1632 0 19 21 0 19 5a9ed9b0 58e67aaf 381d8ea3 76bbce8c 25c83c98 7e0ccccf 9b7f373a 7b6fecd5 a73ee510 597e2a48 ec2b795a 732c8db2 a5975b1d 07d13a8f 10935a85 03f89a73 1e88c74f c21c3e4c 21ddcdc9 a458ea53 d83181ad c7dc6720 3fdb382b b9266ff0 25bf05c2
0 180 35 1 31780 0 1 1 0 1 8cf07265 421b43cd bc27bcef 29998ed1 f281d2a7 fbad5c96 1d94dd40 0b153874 a73ee510 efea433b ccfdca2f 6aaba33c d76cea6e b28479f6 e1ac77f7 b041b04a d4bb7bd8 2804effd 723b4dfd 32c7478e b34f3128
1 2 4 0 4 0 12 0 49 1 3 0 68fd1e64 38a947a1 cc9e717b 9ca2c15d 25c83c98 d5141a06 5b392875 a73ee510 af94b16c f2a5d7d2 37dfef2b a3b89afc b28479f6 a5118040 1cb7075e e5ba7672 b6b880ec 42dbeba8 32c7478e 88422d4d
1 -1 6223 2 22 0 20 3 68fd1e64 38a947a1 6847b3c1 6cd6e51f 25c83c98 fbad5c96 93ec533b f0298c3c a73ee510 3b08e48b 9ffb3655 eed4a04f a0874a81 1adce6ef 4a591230 d4ca38be e5ba7672 e3c6d69d ba703820 32c7478e c50d808e
1 3 153 3 3 1 0 4 4 13 1 2 0 05db9164 421b43cd 24146df6 29998ed1 25c83c98 7e0ccccf 4aa938fc 5b392875 a73ee510 451bd4e4 2b9c7071 6aaba33c 1aa94af3 b28479f6 e1ac77f7 b041b04a e5ba7672 2804effd 723b4dfd 3a171ecb b34f3128
0 4 45 41 31 5 11 156 32 185 1 25 0 11 68fd1e64 89ddfee8 9732b11b 4c0dcfee 25c83c98 fbad5c96 1c86e0eb 5b392875 a73ee510 e7ba2569 755e4a50 ccb8af7d 5978055e b28479f6 25753fb1 19637c17 e5ba7672 5bb2ec8e ae44ba4c b1252a9d 0db71b18 32c7478e 5c960292 f0f449dd 45b5a9e7
1 1 21 13 12 8 5 8 20 69 1 4 5 05db9164 e3db0bac 9cc6a4f1 9cd2a845 25c83c98 ab1ad103 0b153874 a73ee510 63c8d3d5 859b343f e68fa129 20819d96 07d13a8f 618b0ee5 3004a5f2 e5ba7672 a7ccaded 21ddcdc9 5840adea dc135e3f 8ec974f4 423fab69 08b0ce98 b9266ff0 b29c74dc
0 2 3 14 9 5 9 2 10 9 1 1 9 8c6ba407 09e68b86 b976df14 0b839026 25c83c98 fbad5c96 cc5ed2f1 5b392875 a73ee510 3b08e48b e216a695 ab02884f 9f16a973 b28479f6 52baadf5 5fa439a6 e5ba7672 5aed7436 2aa4575d b1252a9d 32dcf845 32c7478e f8d85724 e8b83407 f643b6c5
0 88 73 41 4420 0 46 47 0 46 05db9164 73a46ff0 c19a1e7a b7802d6b 25c83c98 fe6b92e5 28639f10 0b153874 a73ee510 3b08e48b 3a5bf2d6 0761d1a2 155ff7d9 b28479f6 4f648a87 079f48c0 776ce399 da507f45 21ddcdc9 b1252a9d a1fdd170 c9d4222a 3a171ecb a455dffb ea9a246c aa99435d
0 2644 4 1 26246 0 1 14 0 1 05db9164 80e26c9b 7df8ac19 42cc30a8 25c83c98 fbad5c96 d2d741ca 0b153874 a73ee510 3b08e48b ea4adb47 6cf704b2 05781932 1adce6ef 8ba8b39a dbdb2c16 e5ba7672 f54016b9 21ddcdc9 a458ea53 a92be8d2 c9d4222a 3a171ecb 3037ff6a e8b83407 b112057a
0 139 1 13556 79 1 13 59 1 0 1 68fd1e64 38a947a1 4fc317a6 6a14f9b9 25c83c98 fbad5c96 282b88fc 0b153874 a73ee510 0f1a2599 3e2feacf 9ff86c51 0e5bc979 07d13a8f 46df822a f8b34416 3486227d c9ac134a f3ddd519 32c7478e b34f3128
0 1 13 2 12026 535 8 26 308 3 3 05db9164 90081f33 36e97f3a e96617b3 25c83c98 fbad5c96 7f9907fe 5b392875 a73ee510 a3e2e7a5 a7b606c4 ba5aae2e eae197fd 64c94865 eec7af60 23b497d2 d4bb7bd8 ef981aa1 36a4f6c3 3a171ecb 3e022f4d
1 2 10 14 20 577 142 3 39 42 1 2 26 05db9164 08d6d899 9143c832 f56b7dd5 25c83c98 7e0ccccf dc7659bd 0b153874 a73ee510 efea433b e51ddf94 ae1bb660 3516f6e6 b28479f6 bfef54b3 bad5ee18 e5ba7672 87c6f83c 0429f84b 32c7478e c0d61a5c
1 0 45 6 1584 37 10 28 228 0 6 11 5a9ed9b0 bce95927 b46f1f1d 13508380 25c83c98 fbad5c96 737174dc 0b153874 a73ee510 3b08e48b 3b0a3499 35dfe2c5 c8e4b0c1 07d13a8f fec218c0 9720e154 e5ba7672 04d863d5 b7380686 b1252a9d 2b0e5756 c9d4222a 32c7478e 45ab94c8 e8b83407 c84c4aec
1 0 1214 4 20 2131 159 4 11 580 0 3 0 72 05db9164 4f25e98b 2d1ef417 68a5fcbb 4cf72387 7e0ccccf 5e64ce5f 0b153874 a73ee510 3ccfe0c0 4618e030 975c1c17 025225f2 b28479f6 8ab5b746 6720b72e 27c07bd6 7ef5affa 21ddcdc9 b1252a9d 722d167c 32c7478e 3fdb382b e8b83407 49d68486
0 0 3 4553 49 1 0 0 1 5a9ed9b0 38a947a1 a16966ab 65803e5f 43b19349 fbad5c96 3b16ebba 0b153874 a73ee510 8edcd037 6803595d fc0ad095 2a2faae1 b28479f6 b593a63b fd97a107 d4bb7bd8 1263c077 392cde4b 32c7478e af55e227
0 316 5 234 0 0 0 0 05db9164 38a947a1 3f5a37fe 1032bac8 25c83c98 7e0ccccf 1760a525 37e4aa92 a73ee510 3b08e48b 2d6f299a ce406f01 f0e0f335 b28479f6 77ef1e58 67f512fb 776ce399 b6b880ec c2b62b88 be7c41b4 c86755ff
1 2040 14 54675 0 2 6 0 da4eff0f 09e68b86 5b8662c6 5bad2804 25c83c98 8c28e5b5 6a698541 7cc72ec2 feccf887 ae4c531b 8ee18973 01c2bbc7 b28479f6 52baadf5 d93ba614 e5ba7672 5aed7436 75916440 a458ea53 2554eed2 32c7478e 47577e42 e8b83407 89fa8140
0 0 0 15 6 1512 18 15 10 215 0 6 6 05db9164 09e68b86 aa8c1539 85dd697c 43b19349 7e0ccccf af84702c c8ddd494 a73ee510 fa7d0797 ae19a197 d8c29807 7f0d7407 b28479f6 2d49999f c64d548f e5ba7672 63cdbb21 cf99e5de 5840adea 5f957280 3a171ecb 1793a828 e8b83407 b7d9c3bc
0 39 9 9 3814 82 1 9 82 1 9 68fd1e64 421b43cd 3983c24c 29998ed1 4cf72387 fe6b92e5 dcc1b63d 1f89b562 a73ee510 d04aae7d 731cd88c 6aaba33c 34d253f7 b28479f6 2d0bb053 b041b04a d4bb7bd8 2804effd 723b4dfd 3a171ecb b34f3128
0 0 32 13 35317 0 15 30 0 13 5a9ed9b0 09e68b86 39cbb726 afc54bd9 25c83c98 13718bbd d2d741ca 5b392875 a73ee510 3b08e48b ea4adb47 4f5c5791 05781932 07d13a8f 36721ddc 2f6bcbc0 d4bb7bd8 5aed7436 2442feac a458ea53 b215bc2d 3a171ecb 1793a828 e8b83407 02fa3dea
0 45 11 15 40 44 1 15 44 1 15 64e77ae7 38d50e09 92eb3174 88e439d9 25c83c98 6f6d9be8 fc6b47d9 5b392875 a73ee510 5080de78 b3410e99 604f499b 0d2cad4c 07d13a8f e2275836 8e662061 d4bb7bd8 fffe2a63 21ddcdc9 b1252a9d 872c22d6 32c7478e df487a73 001f3601 c27f155b
1 1122 41211 499 0 0 10 0 05db9164 207b2d81 d0484442 68637816 f281d2a7 12c61956 0b153874 a73ee510 48af2ba2 94d2aad8 3b9ae062 f23a3825 07d13a8f 0c67c4ca 3a1a0a65 d4bb7bd8 395856b0 21ddcdc9 a458ea53 1720a38e 32c7478e 4de83b96 001f3601 8f16a3b8
1 1 -1 696 1 22 1 81 1 7 0 68fd1e64 537e899b 5037b88e 9dde01fd 25c83c98 7e0ccccf 17024f49 f504a6f4 a73ee510 f2a8242b ba0f9e8a 680d7261 4e4dd817 07d13a8f 6d68e99c c0673b44 e5ba7672 b34aa802 e049c839 c7dc6720 6095f986
0 18 3 1480 340 9 3 26 2 0 3 05db9164 a796837e 08de7b18 97ce69e9 30903e74 7e0ccccf 12343fcc 0b153874 a73ee510 547c0ffe 9bcaeafe c5011072 46f42a63 cfef1c29 98eddd86 5a9431f3 27c07bd6 e90118d1 e754c5e1 3a171ecb 8fc66e78
0 2 59 3 3 11 3 2 3 3 1 1 3 05db9164 09e68b86 27685115 a35ea34f 25c83c98 7e0ccccf 9b4ad590 1f89b562 a73ee510 3b08e48b 75b8e15e 92e9af0d ed43e458 1adce6ef dbc5e126 dc52e604 07c540c4 5aed7436 21ddcdc9 5840adea e5835dfb bcdee96c f89ffef1 e8b83407 a9637a08
0 0 -1 5937 29 1 1 60 0 1 05db9164 09e68b86 d49019a8 8d5aa295 43b19349 13718bbd 89391314 0b153874 a73ee510 9372d502 608452cc 615e62e7 cbb8fa8b b28479f6 52baadf5 e606c6b3 e5ba7672 5aed7436 2b558521 b1252a9d 7440d805 32c7478e 18038694 e8b83407 7048bfb1
1 0 0 2 2875 245 2 2 243 0 2 0 05db9164 86d4fccc 697f4e85 f2159098 4cf72387 fbad5c96 dc7659bd 5b392875 a73ee510 efea433b e51ddf94 35641a0a 3516f6e6 07d13a8f e87e1df4 c1eba210 e5ba7672 e727949e 21ddcdc9 5840adea 47e2c032 32c7478e 3b183c5c 001f3601 afd260f5
0 1 0 70 6 135 27 14 2 45 1 2 0 6 68fd1e64 80e26c9b ba1947d0 85dd697c 4cf72387 16a2e9cb 1f89b562 a73ee510 1ce1e29d 44fa9a7f 34a238e0 f27ed3ab 1adce6ef 0f942372 da441c7e e5ba7672 005c6740 21ddcdc9 5840adea 8717ea07 423fab69 1793a828 e8b83407 9904c656
1 80 25 2 3 2 3 80 3 3 1 1 1 3 05db9164 0b8e9caf 9b9cd1bb 5974d6bc 25c83c98 fbad5c96 4b815add 0b153874 a73ee510 3b08e48b 7cb56051 7364e701 1ac91ec9 b28479f6 5340cb84 1ab2aab4 3486227d ca6a63cf 91311aa2 bcdee96c 08b0ce98
0 0 1 1 1801 14 0 0 05db9164 5dac953d d032c263 c18be181 384874ce 7e0ccccf 8363bee7 0b153874 a73ee510 efea433b bf09be0e dfbb09fb 3516f6e6 1adce6ef 32330105 84898b2a e5ba7672 24de59c1 0014c32a 32c7478e 3b183c5c
0 1 0 6 8 18 8 1 8 8 1 1 8 5a9ed9b0 0468d672 c48cd8f8 24d89f30 25c83c98 24a360aa 5b392875 a73ee510 c8a342b9 2c9174a6 f25a8037 7eda22c5 b28479f6 234191d3 9ca51d92 d4bb7bd8 9880032b 21ddcdc9 5840adea 17b90ef0 32c7478e da89b7d5 ea9a246c 984e0db0
1 7 1 5 1 1311 58 50 2 200 1 6 0 1 05db9164 6887a43c 9b792af9 9c6d05a0 25c83c98 7e0ccccf f367d44f 0b153874 a73ee510 3e3375c9 f68c5128 6532318c d86616b0 1adce6ef ef6b7bdf 2c9d222f 3486227d 8f0f692f 21ddcdc9 a458ea53 cc6a9262 ad3062eb 423fab69 a5862ce8 445bbe3b 0b89ae9f
1 1 0 1 378 41 4 16 100 1 2 68fd1e64 38a947a1 75df6d36 b1c1e580 25c83c98 7e0ccccf 14ad5567 1f89b562 a73ee510 9dc8b302 9ddd72e9 6fbed051 37e99bb7 07d13a8f 6d74487d f10a7996 07c540c4 b3e92443 c576dc74 3a171ecb 67d37917
0 183 3 3 27395 0 3 67 0 3 be589b51 f3139f76 1c8c8a04 bf0b19a8 30903e74 7e0ccccf 6d389dca 0b153874 a73ee510 98bd7a24 e4eb05d4 5b5ab0a8 a4c5d6dd b28479f6 28c50c84 5131d930 e5ba7672 df5475ca 3b226dea 3a171ecb 4fcc135f
0 1 17 3 0 7 3 0 3 05db9164 083aa75b 88bd9da3 c235950d 25c83c98 7e0ccccf 0697a6a6 0b153874 7cc72ec2 3b08e48b 7fb7db93 f3ba84a1 208257bb 1adce6ef 84203dfc 30129ae3 2005abd1 06747363 21ddcdc9 b1252a9d 9ad721d6 be7c41b4 993d6982 f0f449dd 7eaed4be
0 6 7 2 3003 0 42 8 0 0 9 241546e0 a796837e 42db3232 e3cc371a 25c83c98 7e0ccccf 11ffbf5b 37e4aa92 a73ee510 7ad4ea2c f2313205 c9669737 9c7a975e cfef1c29 f0bf9094 c4de5bba 8efede7f 1cdbd1c5 288eaded ad3062eb 3a171ecb 8fc66e78
0 1 36771 112 1 0 77 1 05db9164 f3139f76 9d3adacf 28d926b8 43b19349 fe6b92e5 0cd2f08f 0b153874 a73ee510 3b08e48b 7592da6b 7b93a4c7 18f84563 b28479f6 28c50c84 fc53f85c d4bb7bd8 df5475ca ed35ed93 32c7478e 4fcc135f
0 20 1 1 4841 20 3 5 16 2 1 68fd1e64 38d50e09 948ee031 b7ab56a2 4cf72387 fbad5c96 7d733ece 0b153874 a73ee510 3753b9eb 30b2a438 42bee2f2 aebdb575 b28479f6 06373944 67b3c631 07c540c4 fffe2a63 21ddcdc9 b1252a9d bd074856 32c7478e df487a73 001f3601 c27f155b
1 7 1095 3 37 1 7 3 3 2 2 1 05db9164 85af3139 d032c263 c18be181 384874ce fe6b92e5 7195046d 1f89b562 a73ee510 f1b45aab 4d8549da dfbb09fb 51b97b8f b28479f6 af8db00e 84898b2a e5ba7672 d4328054 0014c32a bcdee96c 3b183c5c
1 0 0 19 7 2193 41 9 18 199 0 4 0 9 05db9164 ef69887a 7007f08d f6131df0 4cf72387 7e0ccccf e8fc728b 0b153874 a73ee510 603ff749 e7ce7f20 2d936711 f522015f 07d13a8f b98be2c0 1c332795 e5ba7672 4bcc9449 abfaf938 a458ea53 caad4ae9 32c7478e 3fdb382b e8b83407 49d68486
1 0 0 1 1 7571 57 19 1 16 0 7 0 1 05db9164 38a947a1 72e5eac0 eee0e446 25c83c98 fbad5c96 66a728c4 0b153874 a73ee510 d0ff5b05 dab547a5 673768e2 7aab7990 07d13a8f 613de492 d617f1ff 3486227d 7abb2837 72a8c407 ad3062eb 423fab69 375c3609
0 156 2 25905 0 11 39 0 2 05db9164 08d6d899 9143c832 f56b7dd5 25c83c98 7e0ccccf 8ce3a35f 0b153874 a73ee510 3b08e48b c8e7f509 ae1bb660 6e8ef725 b28479f6 bffbd637 bad5ee18 776ce399 bbf70d82 0429f84b 32c7478e c0d61a5c
0 0 102404 0 9a89b36c 38a947a1 b89c82b4 c10a6e59 25c83c98 7e0ccccf 04679a14 0b153874 7cc72ec2 975342c2 19a2ded8 15820680 90c7f9d1 64c94865 fd056e92 911ebe1c 07c540c4 b2e570f5 00cfee60 ad3062eb 3a171ecb 4904c5a1
1 46 614 210 0 10 0 71 0 257 1 5 4 0 5a9ed9b0 942f9a8d d61e0f0a c2fcecf6 4cf72387 7e0ccccf 3f4ec687 45f7c2dd a73ee510 0e9ead52 c4adf918 f6f14c38 85dbe138 07d13a8f a8e962af 64c4c290 27c07bd6 1f868fdd 21ddcdc9 b1252a9d 06316f4c ad3062eb 32c7478e 38be899f e8b83407 9bef54fd
1 0 9 2 1576 29 3 4 14 0 1 05db9164 6887a43c bce3f26f 1d8a14d0 43b19349 fe6b92e5 675e81f6 0b153874 a73ee510 a5bb26cf 4a77ddca 381dd9fd dc1d72e4 64c94865 004dd4ed c26ce5c1 1e88c74f 36a1d942 21ddcdc9 b1252a9d e22e102f c9d4222a 32c7478e 47c5aea3 445bbe3b 12d4e9a4
0 -1 101295 0 05db9164 2ae0a573 b7810abb 65b2bfc7 25c83c98 fe6b92e5 ccbac4d9 0b153874 7cc72ec2 3b08e48b c012107d 82665b78 c8dca410 07d13a8f 413cc8c6 6399ea39 07c540c4 f2fc99b1 ea03ca8b ad3062eb be7c41b4 d91ea8bd
0 -1 0 0 32 0 87552397 a8b6b751 25c83c98 7e0ccccf d9aa9d97 5b392875 7cc72ec2 3b08e48b 6e647667 85dbe138 b28479f6 694e45e3 2005abd1 d787f192 21ddcdc9 5840adea 32c7478e 001f3601 99f4f64c
\ No newline at end of file
0 0 5 4 13275 14 35 4 41 4 0 4 05db9164 f0cf0024 6f67f7e5 41274cd7 25c83c98 fbad5c96 25c8362c 0b153874 a73ee510 0e97bb27 ba0f9e8a 623049e6 4e4dd817 b28479f6 e6c5b5cd c92f3b61 3486227d b04e4670 21ddcdc9 b1252a9d 60f6221e 32c7478e 43f13e8b ea9a246c 731c3655
1 0 559 2 7 2532 164 98 6 943 0 18 0 7 68fd1e64 bc478804 b96e826a 13508380 43b19349 7e0ccccf 8363bee7 0b153874 a73ee510 f322117a bf09be0e f53c5949 3516f6e6 07d13a8f 0af7c64c 170db6b2 e5ba7672 65a2ac26 21ddcdc9 b1252a9d f0ce5c73 c7dc6720 45ab94c8 001f3601 c84c4aec
0 2 8 3 9040 38 5 11 104 2 3 05db9164 e5fb1af3 2c003e73 6eaa3680 25c83c98 7e0ccccf 860f347d 1f89b562 a73ee510 4b8a7639 9f0003f4 0962e10a 5afd9e51 f862f261 2a079683 59c31b64 e5ba7672 13145934 21ddcdc9 a458ea53 24a384ae bcdee96c f11826cf 3a6f6b59 25cb8912
0 72 2 2 0 4 12 0 2 8cf07265 b0660259 31567fba 1a1efaf8 25c83c98 fbad5c96 88002ee1 0b153874 7cc72ec2 3b08e48b f1b78ab4 b6d5a886 6e5da64f 1adce6ef bd5431ee 7b977dd1 2005abd1 8ec3405f f0474b68 ad3062eb 32c7478e 53c37c32
0 2 6 34 16 1051 49 4 48 101 1 2 16 5a9ed9b0 80e26c9b 09275b26 f2ee08c0 25c83c98 7e0ccccf 372a0c4c 0b153874 a73ee510 a08eee5a ec88dd34 4e99cf84 94881fc3 1adce6ef 91f5e393 6bc40863 e5ba7672 ce25450e 21ddcdc9 b1252a9d 5dc70c60 423fab69 1793a828 e8b83407 91116abe
0 2 13 2 15757 54 5 2 15 1 0 2 05db9164 09e68b86 eb76bef2 804f7741 43b19349 13718bbd cc5ed2f1 0b153874 a73ee510 3b08e48b facf05cc f282fc98 9f16a973 b28479f6 52baadf5 eb62e551 07c540c4 5aed7436 c361c1be b1252a9d be7ab5d2 32c7478e 1793a828 e8b83407 e9938fed
0 0 14 1572 8 4 6 8 0 1 05db9164 e112a9de 9db30a48 b3dbc908 4cf72387 fbad5c96 f2530a89 0b153874 a73ee510 671ae88f 2181d913 2598d8eb 1e750733 ad1cc976 f1e1df0a 9ab4d6b1 e5ba7672 fdbdefe6 bbf96cac c3dc6cef 8f079aa5
0 0 195 14 5941 285 6 20 200 2 20 5bfa8ab5 80e26c9b 36984eba 85dd697c 4cf72387 fbad5c96 f6619575 1f89b562 a73ee510 73be1da8 d5cf9352 db02a7b5 09e3bbd5 07d13a8f e8f4b767 2d0bbe92 e5ba7672 005c6740 21ddcdc9 b1252a9d 6e55e022 ad3062eb 3a171ecb 1793a828 e8b83407 9904c656
0 0 52 1 1 4240 9 5 3 49 0 4 2 5a9ed9b0 b961056b 2fe61b6b 3642dc05 4cf72387 fe6b92e5 81bb0302 062b5529 a73ee510 8b7e21f6 b7094596 4ab3cda1 1f9d2c38 b28479f6 7eaf5074 a4b0914f e5ba7672 5742e45c 789fddf7 32c7478e 3b047130
1 1 1 1 1378 46 5 34 236 3 1 05db9164 38a947a1 a50fea16 0a8cd7bc 25c83c98 a601d936 0b153874 a73ee510 3fb38a44 348e21cb 2b2be35d 1d8cfec6 b28479f6 66106852 31cf393e e5ba7672 0458e647 58d08d44 32c7478e 355b6af8
0 0 0 5 2 5512 49 15 3 114 0 2 2 8cf07265 78ccd99e 01d1b993 20bb14e7 4cf72387 fbad5c96 a1eeac3d 0b153874 a73ee510 5f49e872 2e9d5aa6 500d0b9a 0a9ac04c 07d13a8f 162f3329 f24599ab e5ba7672 e7e991cb 4b1019ff a458ea53 b49094cd 423fab69 dc73316d fd2fe0bd 60a86ddf
1 2 -1 2 1 50 1 73 4 127 1 14 0 1 68fd1e64 2aee75a8 32c8cb11 c04614ba 25c83c98 3bf701e7 407438c8 0b153874 a73ee510 213889cd 755e4a50 fa20173a 5978055e 32813e21 6aa1d799 de53b24a 3486227d ad19d8d8 64c766b8 3a171ecb 747559ec
0 83 4 5 5666 14 1 5 14 1 5 05db9164 85af3139 d032c263 c18be181 25c83c98 fbad5c96 7195046d 0b153874 a73ee510 686e97b9 4d8549da dfbb09fb 51b97b8f b28479f6 af8db00e 84898b2a d4bb7bd8 d4328054 0014c32a ad3062eb bcdee96c 3b183c5c
0 3 11 1612 0 40 91 0 42 05db9164 537e899b 5037b88e 9dde01fd 25c83c98 3bf701e7 ac07b602 0b153874 a73ee510 3b08e48b 7ce882d2 680d7261 f5ff33d9 1adce6ef c535a0ec c0673b44 776ce399 b34aa802 e049c839 423fab69 6095f986
0 6 52 5 400098 0 15 15 0 5 5a9ed9b0 38d50e09 d032c263 c18be181 384874ce 7e0ccccf 6cd97108 0b153874 7cc72ec2 3b08e48b 62cdafdf dfbb09fb 2e551bbe 1adce6ef e2c18d5a 84898b2a 776ce399 582152eb 21ddcdc9 5840adea 0014c32a be7c41b4 3b183c5c 001f3601 99f4f64c
0 26 12 11 34669 531 1 12 27 1 0 11 05db9164 98159f6d 3cc4baf5 7b110c65 25c83c98 fe6b92e5 c03eb803 0b153874 a73ee510 3b08e48b d700703a 169e9533 bccbbffe b28479f6 b2db654e 16c48bd2 3486227d 4854928e 114ff696 3a171ecb 3599e91f
0 7 6 13416 0 0 45 0 05db9164 247a1a11 896e7bb3 c2fcecf6 25c83c98 fbad5c96 c31847f5 0b153874 a73ee510 3b08e48b a12fca95 7fa9c0a1 9b9e44d2 07d13a8f 2559d9b6 ef01918c 776ce399 51360aab 5cc5adb2 c9d4222a be7c41b4 38be899f
0 1 5 7 14509 60 5 7 56 1 7 75ac2fe6 3e25b403 7c7b6098 f00503da 25c83c98 fe6b92e5 ef0d76b7 51d76abe a73ee510 82bb4986 529e8447 c1b3491a 50a56f08 07d13a8f ae1edc05 ab50786f e5ba7672 1c381aea f6801a20 c7dc6720 1793a828
0 0 2 30 10 1363 415 20 28 561 0 5 0 10 68fd1e64 95e2d337 8d85271d 69040d07 25c83c98 7e0ccccf 3603d925 0b153874 a73ee510 0065486b 7934c105 8b7685bd 4840c1ab 64c94865 7de4908b b1f23afa e5ba7672 701d695d 712d530c a458ea53 da0adeef c9d4222a 423fab69 4921c033 2bf691b1 80b0aeb9
1 1 6 3 12 0 4 40 31 410 1 14 4 68fd1e64 38a947a1 8962afa9 28625509 25c83c98 7e0ccccf 5fbd9170 0b153874 a73ee510 dc9f749b 2bcfb78f 662d25fe e6fc496d 07d13a8f 022e018a 2dd4e74f e5ba7672 f5508183 c5cea7f6 32c7478e 38255568
0 0 0 11 4 8657 213 6 3 210 0 1 4 05db9164 80e26c9b 0bd844ef aae30d38 25c83c98 7e0ccccf d2d741ca 0b153874 a73ee510 18139a78 ea4adb47 38a37d81 05781932 07d13a8f 856b2bc1 00c7a1bf 07c540c4 fdf644e0 21ddcdc9 a458ea53 45d05ca3 dbb486d7 3e1eed85 e8b83407 6a4b2388
0 47 35575 159 3 0 10 1 68fd1e64 999aae35 79bc99b4 e5e453f3 4cf72387 7e0ccccf c88e8d4f 0b153874 a73ee510 3b08e48b a21d2994 424e28fe 2e94d3f7 243a4e68 39a6addf a6a69939 07c540c4 63aa00dd 424af181 3a171ecb 869caea3
1 1 512 1 2 11 2 1 2 2 1 1 2 05db9164 b26462db 9d1d0933 ebdba02b f281d2a7 fbad5c96 12343fcc 0b153874 a73ee510 f6f942d1 7f8ffe57 c6a076d2 46f42a63 64c94865 f93f84eb d9e8fb80 d4bb7bd8 195c811d 306c202e 3a171ecb 340d03c3
0 -1 28922 24 1 8 22 1 05db9164 c8687797 5c7d8ff6 902872c9 4cf72387 fbad5c96 3833f734 0b153874 a73ee510 3b08e48b c05bd0b8 79b87c55 e411c4db b28479f6 dc96c4b0 5627d7e0 d4bb7bd8 a7e06874 21ddcdc9 b1252a9d 4063500f ad3062eb be7c41b4 54baf4d1 010f6491 ba676e3c
0 1 51 17212 0 1 3 0 5a9ed9b0 4f25e98b b5044e29 a311963e 307e775a fe6b92e5 fe4dce68 a6d156f4 a73ee510 75542289 68357db6 1290fbf4 768f6658 07d13a8f dfab705f d5a1b8fe 1e88c74f 7ef5affa 2b558521 b1252a9d b5074db5 c9d4222a 32c7478e c832486f 001f3601 f0353f67
0 0 162 4253 26 4 3 5 0 1 05db9164 b961056b 502bedec 81b1d519 384874ce fe6b92e5 c52b5f8e 5b392875 a73ee510 8b349795 419d31d4 e23a52b4 08961fd0 1adce6ef addd37ac b4df7a81 e5ba7672 43de85d3 fdb27279 423fab69 71640730
0 67 43 18 61 0 18 18 0 18 05db9164 38d50e09 c4205697 bbc8d361 25c83c98 fe6b92e5 165cb289 5b392875 a73ee510 3b08e48b b94c0f2d d8c2300e b9fa764b b28479f6 7501d6be bf300501 776ce399 f855e3f0 21ddcdc9 5840adea b59344cd 3a171ecb 17f458f7 001f3601 984e0db0
0 51 18 1 19 93 23 111 22 1156 2 11 0 23 287e684f a796837e 42db3232 e3cc371a 25c83c98 fe6b92e5 ff493eb4 25239412 a73ee510 efea433b 0983d89c c9669737 1aa94af3 cfef1c29 0d054fb9 c4de5bba e5ba7672 70e5bba7 288eaded 32c7478e 8fc66e78
0 0 84 43 11 198 75 14 27 76 0 2 1 11 05db9164 4f25e98b 23edf366 e4889f1e 25c83c98 7e0ccccf ac28d9ec 0b153874 a73ee510 6cb0e696 bc0819f7 92107e36 c9059ff0 cfef1c29 dddd963f 4e447cf7 3486227d 7ef5affa 55dd3565 a458ea53 9a91ae21 c9d4222a 32c7478e 54a607b7 001f3601 d568f27d
0 5 40 15 23322 746 7 15 524 3 15 05db9164 dda1caf9 f83418e0 a44d75e2 25c83c98 7e0ccccf 7d09e065 0b153874 a73ee510 3b08e48b bf2008fa a9165671 c9ae71af 07d13a8f 24c5daaf 839572dd e5ba7672 65cebfa5 cd746367 3a171ecb a9a2ac1a
0 4 1 0 8cf07265 38c81d1a 27388f4d 539558b1 25c83c98 ce8217f8 0b153874 7cc72ec2 3b08e48b 9d12ce9b cc83e10f 9dfda2b9 b28479f6 558590b3 ed956dff 2005abd1 a5ac4b1e 21ddcdc9 b1252a9d 1061dd07 be7c41b4 a79557ea b9266ff0 6ddc02f9
1 18 3 1 4233 3 17 1 118 5 1 5a9ed9b0 78ccd99e ced2e736 13508380 25c83c98 fbad5c96 c8b3d034 0b153874 a73ee510 3275d09a 80da9312 c7fe806a d14c9212 07d13a8f 162f3329 d274b433 e5ba7672 e7e991cb 55dd3565 b1252a9d b46cb608 c7dc6720 45ab94c8 e8b83407 c84c4aec
0 0 2788 7 1451 0 1 0 0 0 1 1464facd 8947f767 64a350ad 5e369129 25c83c98 fe6b92e5 a13be9ad 0b153874 a73ee510 4e56c58e 62aedd5c 70aaa25e e65a5fc3 b28479f6 a473257f 9bb1dfa5 d4bb7bd8 bd17c3da 083e89d9 b1252a9d d3a891c1 ad3062eb 3a171ecb b6b5bc47 010f6491 c4510344
0 1 6 1 4402 22 1 11 22 1 1 05db9164 207b2d81 d52980aa b66d15e3 25c83c98 fbad5c96 6ce84868 1f89b562 a73ee510 3b08e48b 609032c1 b519c595 437a58d6 b28479f6 3c767806 7c8ae841 07c540c4 395856b0 21ddcdc9 b1252a9d 605305ee 32c7478e f090fae7 001f3601 6024c307
0 125 2 14 7259 30 2 14 97 2 14 8cf07265 04e09220 b1ecc6c4 5dff9b29 4cf72387 7e0ccccf 543f351f 1f89b562 a73ee510 3b08e48b be8a7bc2 2436ff75 7d1f1fa0 07d13a8f cae64906 f4ead43c d4bb7bd8 e161d23a 4f1aa25f ad3062eb 3a171ecb ded4aac9
0 610 1 1 7526 40 2 1 12 1 1 5a9ed9b0 207b2d81 8a48553d 1e10bd9f 25c83c98 fe6b92e5 12343fcc 0b153874 a73ee510 547c0ffe bc8c9f21 6803e296 46f42a63 64c94865 11b2ae92 ff48ade9 e5ba7672 395856b0 21ddcdc9 b1252a9d c3d093fb ad3062eb 3a171ecb 84a27184 001f3601 8d2deb5a
0 0 1 59 3 2766 96 2 4 7 0 1 3 5a9ed9b0 38a947a1 cf1b3029 36b520dc 4cf72387 7e0ccccf 5e64ce5f 0b153874 a73ee510 d4a82fb9 8b94178b 0d74ab27 025225f2 b28479f6 77ef1e58 5dcf110f 07c540c4 b6b880ec dd70b3ec 32c7478e 8f282db5
0 34 18859 106 26 0 17 1 0 05db9164 c1c79489 66fa4409 bdc253c8 5a3e1872 fbad5c96 8d51595c 0b153874 a73ee510 216f775a 7110c233 7e4627d4 bb7a2c12 32813e21 59b212e4 5f231427 e5ba7672 7549f127 50798fce c7dc6720 0f9697f0
1 1 321 1 1189 8 16 11 96 1 3 1 05db9164 a796837e 5c05f1ab 97ce69e9 25c83c98 fe6b92e5 81b62616 0b153874 a73ee510 06ee81ba fa1b06e6 50ec33a6 0eb69562 07d13a8f 47a431f5 5a9431f3 e5ba7672 f1a8f10f e9672021 ad3062eb 423fab69 8fc66e78
0 2 1 16 13 326 61 3 47 263 1 1 0 55 8cf07265 8947f767 999b4cd3 f862f65d 25c83c98 7e0ccccf 9e8dab66 0b153874 a73ee510 fbbf2c95 46febd4d ea486dc7 949ea585 07d13a8f 2c14c412 e51f35a7 e5ba7672 bd17c3da 83236299 b1252a9d 19bea55f 32c7478e 75aae369 010f6491 08e0e995
0 0 9 8 7182 255 9 24 44 4 8 05db9164 38a947a1 7a2ffaba 8dcfa982 25c83c98 7e0ccccf c519c54d 0b153874 a73ee510 19fd5a0e 59cd5ae7 842e9873 8b216f7b b28479f6 67596d53 1e8e1075 e5ba7672 3fb55a52 8b5b9b68 32c7478e 10edf4e4
0 107 1 1 7 489 1 1 31 1 1 24eda356 2c16a946 adf23330 17a25a2e 25c83c98 7e0ccccf 12343fcc 0b153874 a73ee510 f6f942d1 7f8ffe57 8a390857 46f42a63 b28479f6 3628a186 64f2ada9 07c540c4 e4ca448c 467f6a77 3a171ecb 9117a34a
0 0 280 20 10 4512 51 5 11 97 0 1 0 10 68fd1e64 6c713117 f6f030bc 19d6ddb8 25c83c98 fe6b92e5 7c59aadb 5b392875 a73ee510 c5a978c5 ff78732c 7bea4a04 9b656adc b28479f6 73b98472 7afa5706 3486227d bf6b118a 21ddcdc9 b1252a9d aef05b30 c7dc6720 1caea946 445bbe3b 69a06689
0 41 6 2 359699 3 2 87552397 4f25e98b 16958dc8 8dfe2376 25c83c98 7e0ccccf 8025502e 0b153874 7cc72ec2 b118f931 29e4ad33 ea50fad8 80467802 b28479f6 8ab5b746 0e4c86f8 d4bb7bd8 7ef5affa 5b885066 b1252a9d e103da3e 3a171ecb fcb2509d 001f3601 24488670
1 1 13 7 11 1 4 4 18 44 1 3 4 05db9164 09e68b86 aa8c1539 85dd697c 25c83c98 41e1828d 0b153874 a73ee510 3b08e48b b6358cf2 d8c29807 61c65daf 8ceecbc8 d2f03b75 c64d548f 07c540c4 63cdbb21 cf99e5de 5840adea 5f957280 32c7478e 1793a828 e8b83407 b7d9c3bc
1 2 25 2 38 2 2 2 2 1 1 2 5a9ed9b0 207b2d81 fb47f7d0 6c02aa53 4cf72387 7e0ccccf 6fb62f1a 0b153874 a73ee510 4f6357b0 e51ddf94 d9fc673a 3516f6e6 b28479f6 0739b998 7e14b290 07c540c4 934abd6e 21ddcdc9 b1252a9d 0a47a519 ad3062eb 32c7478e 47620345 001f3601 c36f2d3c
0 11 2 3 92 2 36 0 21 1 4 f473b8dc 80e26c9b 1c791144 51d55e9c 384874ce fbad5c96 57b4bd89 0b153874 a73ee510 3b08e48b 71fd20d9 b49c9404 ddd66ce1 1adce6ef 8ba8b39a 2cbed9f7 e5ba7672 f54016b9 21ddcdc9 5840adea 80f3703a 3a171ecb 1793a828 e8b83407 dbd4e512
1 0 1 1 1 5715 181 23 2 169 0 4 0 1 5a9ed9b0 7182b361 b2aa5dce 462749d8 43b19349 7de93965 37e4aa92 a73ee510 28c6ef79 9ba53fcc 05ce35fd 42156eb4 07d13a8f 47367e94 1a5c540a 3486227d ec9b0866 437ad2af c9d4222a c7dc6720 73338ee2
0 54 62 12 7578 184 7 12 72 1 24 05db9164 38a947a1 bc2aea05 ac975db6 25c83c98 13718bbd 80162d04 5b392875 a73ee510 3b08e48b 5b97686e b67ac327 47727147 07d13a8f 22223d6c d388d33c e5ba7672 97b81540 0ac4575d 32c7478e d28d80ac
0 6 0 12 6 185 37 6 7 37 1 1 37 05db9164 09e68b86 9596aa6c b26d2eda 4cf72387 7e0ccccf 1a95b4d0 0b153874 a73ee510 995f172b 507605d4 bb2b1b19 5f3a0c1b 07d13a8f 36721ddc 872b1c96 e5ba7672 5aed7436 338f20de b1252a9d 8f7b9fe2 32c7478e cad46f36 e8b83407 58a43195
0 104 1 1 2679 0 8 18 0 1 9684fd4d 8dbd550a 4cf72387 7e0ccccf 8cf87048 c8ddd494 a73ee510 3b08e48b a12fca95 9b9e44d2 f862f261 b13d160c 776ce399 53d8aa6f be7c41b4
0 3384 5 2803 151 13 11 150 1 11 65aada8c 537e899b 5037b88e 9dde01fd 25c83c98 fbad5c96 7bcc368f 062b5529 a73ee510 f26b2389 60d2afd7 680d7261 155ff7d9 07d13a8f 73c54e3e c0673b44 e5ba7672 86b4fc22 e049c839 3a171ecb 6095f986
1 -1 5967 11 4 1 10 2 0 68fd1e64 510b40a5 d03e7c24 eb1fd928 25c83c98 ac902434 062b5529 a73ee510 e5da7278 1294fec1 951fe4a9 7bbf93ce 07d13a8f 67daf98c 8ec71479 e5ba7672 03364bd3 0e63fca0 32c7478e 0e8fe315
0 78 7 10 0 7 7 0 7 05db9164 9f7e1d07 e3818eb2 a4456f7e 25c83c98 02d72eea 5b392875 a73ee510 c9e11adf e09c447b 3bec5d45 8dab0422 b28479f6 08812651 72d1790f 1e88c74f 6a58e423 21ddcdc9 5840adea 950d91c1 32c7478e 2f7e98de ea9a246c e7ecb821
1 -1 1509 0 4 0 23 3 05db9164 cc8e236e 1c239854 cf3dc9c2 4cf72387 fe6b92e5 81bb0302 0b153874 a73ee510 983552b8 b7094596 98d78b2b 1f9d2c38 07d13a8f 3a8c68b7 da1333b6 e5ba7672 775e80fe 21ddcdc9 5840adea 3ee29a07 ad3062eb c7dc6720 c83e0347 ea9a246c 2fede552
1 2 -1 550 3 155 8 30 2 16 0 ae82ea21 3f0d3f28 c2b2b3f5 77a160bd f281d2a7 fbad5c96 3625ff87 6c41e35e a73ee510 67eea4ef 755e4a50 db21b797 5978055e 32813e21 e8d4033b fae7560f e5ba7672 744ad4a0 a17a10b3 3a171ecb e5fca70a
1 5 113 6 18 0 10 9 21 21 2 3 3 0 05db9164 6887a43c 1e361e58 825b2615 43b19349 fbad5c96 6d0ca8d7 0017bc7c a73ee510 666a1d31 6939835e 9b62c79b dc1d72e4 b28479f6 9cc57c4d fd420402 27c07bd6 2ae4f30d 21ddcdc9 b1252a9d d12542f8 32c7478e 488d4283 445bbe3b d20e4b7a
1 3 21 2 2 1 0 6 2 13 1 3 0 68fd1e64 38a947a1 756e3a77 bd47cb50 25c83c98 fe6b92e5 09e42cac 5b392875 a73ee510 79877583 30b2a438 74a6216c aebdb575 b28479f6 b3547943 9e33c845 e5ba7672 04fdc63f 77cd58fc 3a171ecb 69e7316d
0 0 1 5 4 9947 275 8 12 865 0 3 4 05db9164 6887a43c 9b792af9 9c6d05a0 25c83c98 84c427f0 0b153874 a73ee510 9bc1a7c1 41b3f655 6532318c ce5114a2 8ceecbc8 4e06592a 2c9d222f e5ba7672 8f0f692f 21ddcdc9 b1252a9d cc6a9262 32c7478e a5862ce8 445bbe3b c4c8f547
0 0 21 6 6 7270 275 3 6 93 0 2 6 05db9164 2c16a946 f7ef15ea 6ad68ce1 25c83c98 7e0ccccf 5ff926ae 25239412 a73ee510 4497acf2 864d33c2 bfe72c91 34786fb9 b28479f6 3628a186 2d08259c 07c540c4 e4ca448c 96739728 ad3062eb 32c7478e 9117a34a
1 5 1 3 2 5 0 48 3 2 2 13 0 05db9164 2efdbb44 88f1ca30 aece8ab6 25c83c98 3bf701e7 1c86e0eb 1f89b562 a73ee510 f7ab55a0 755e4a50 4cf7f85a 5978055e 32813e21 ff824c52 5a58ab6d e5ba7672 42076ccd 3991fb63 55dd3565 4721fd29
0 0 1 4 1 7025 101 13 1 39 0 1 1 1 05db9164 207b2d81 d52980aa b66d15e3 25c83c98 7e0ccccf f6d03c1b 5b392875 a73ee510 fe687d88 30b2a438 b519c595 aebdb575 07d13a8f 0c67c4ca 7c8ae841 e5ba7672 395856b0 21ddcdc9 b1252a9d 605305ee 32c7478e f090fae7 001f3601 77e5b96c
0 1 95 28 8 67 14 103 42 267 1 23 14 05db9164 89ddfee8 d8f59a85 f1d06e8a 25c83c98 7e0ccccf 1c86e0eb 5b392875 a73ee510 213889cd 755e4a50 64f3690c 5978055e 1adce6ef 34cce7d2 eeb76d70 e5ba7672 5bb2ec8e 0053530c b1252a9d 7f6bcbee ad3062eb 32c7478e 43fe299c f0f449dd f3b1f00d
0 4 54 2 9 1349 23 52 22 90 1 8 0 11 be589b51 38a947a1 e28faa26 f44af879 25c83c98 fbad5c96 407438c8 0b153874 a73ee510 5df036eb 755e4a50 defeb71b 5978055e 07d13a8f 92b9a831 d4aed6bf 27c07bd6 2a870f7f fecb5e8c c9d4222a 32c7478e a9313cb6
1 3 145 6 12 2 8 6 36 1 4 2 be589b51 0c0567c2 9424724f fa30ea43 25c83c98 fe6b92e5 52b4e012 0b153874 a73ee510 5ba3608f a739bbee f400be52 79128231 b28479f6 1e82594c efbacdc0 e5ba7672 98c4d3e0 cd86ac29 78e2e389 32c7478e 7fb4ff91
0 38 2 1 12810 8 3 1 7 1 1 05db9164 af447d7a fc39fe56 3197d543 25c83c98 7e0ccccf 9aba5215 5b392875 a73ee510 46c32c26 8cfaeec1 ebf6ae0a ef800ef3 b28479f6 f0d27586 38fc4d35 07c540c4 98ff11f4 11e4edec 32c7478e 0ff91809
1 1 0 4 11 1019 11 4 20 91 1 2 0 11 05db9164 09e68b86 b1ffdff4 aff068bc 25c83c98 b87f4a4a 5b392875 a73ee510 e70742b0 319687c9 ee94532f 62036f49 cfef1c29 18847041 17442b68 e5ba7672 5aed7436 21ddcdc9 5840adea acf8bce8 32c7478e 1793a828 e8b83407 63093459
1 3 15 24 3288 0 3 278 0 25 05db9164 38a947a1 4470baf4 8c8a4c47 25c83c98 7e0ccccf 0dbf2675 0b153874 a73ee510 48a94b2e 88196a93 bb669e25 1211c647 b28479f6 59621a99 2b2ce127 e5ba7672 b133fcd4 2b796e4a 32c7478e 8d365d3b
1 0 175 59061 539 0 0 56 0 f473b8dc 1cfdf714 43b964ee f089159e 25c83c98 3bf701e7 c86e8c6b 37e4aa92 7cc72ec2 eab78bab 4d8549da 031b2b3a 51b97b8f 051219e6 af56328b e162466d e5ba7672 e88ffc9d 5b885066 a458ea53 9f7d1d43 ad3062eb 3a171ecb b15e807d e8b83407 fcefd6a4
1 39 2 8 5 1202 22 42 12 179 1 2 1 13 05db9164 942f9a8d 8658d326 2b884b66 4cf72387 7e0ccccf 3f4ec687 0b153874 a73ee510 0e9ead52 c4adf918 cf9c76af 85dbe138 07d13a8f a8e962af 12ff41b8 3486227d 1f868fdd 1d04f4a4 a458ea53 9e55b62d ad3062eb 32c7478e 3fdb382b 9d93af03 49d68486
0 -1 24422 11 18 0 8 1 05db9164 9e5ce894 02391f51 b9c629a9 25c83c98 3bf701e7 22fd2464 0b153874 a73ee510 5aca218f d9085127 2397259a ef7e2c01 07d13a8f 8cf98699 d37efe8c e5ba7672 a5bb7b8a 21ddcdc9 5840adea b6119319 32c7478e 45ab94c8 ea9a246c b13f4ade
0 1 8 4936 0 0 15 0 05db9164 ea3a5818 e33cc329 7a5aa046 25c83c98 fbad5c96 4a45f6c5 0b153874 a73ee510 fe01516c 2a0b79f8 aaa493f6 25512dff b28479f6 0a069322 ba35244c e5ba7672 a1d0cc4f 21ddcdc9 a458ea53 c1a3607e c7dc6720 a7084d70 1575c75f ef4df1dd
0 124 1 2648 0 4 13 0 05db9164 38a947a1 7b9e7a93 49afffac 25c83c98 7e0ccccf bddc9773 0b153874 a73ee510 3b08e48b ff2333c8 5f12b145 140595a0 b28479f6 7c5bcff3 e699400f d4bb7bd8 876521e0 6d6ae2d8 32c7478e b258af68
1 4 -1 282 18 4 15 15 1 1 68fd1e64 38a947a1 840eeb3a f7263320 25c83c98 7e0ccccf 44fb02c7 6c41e35e a73ee510 3b08e48b 2386466b 317bfd7d 45db6793 07d13a8f 6f1ab4eb 1689e4de e5ba7672 5d961bca dc55d6df 3a171ecb aa0115d2
1 -1 14834 111 6 0 204 2 68fd1e64 08d6d899 6a8a1217 14bfebf4 25c83c98 7e0ccccf 9e0ed189 0b153874 a73ee510 f68bc089 c3e44774 5b355b50 c278016c 64c94865 a8e4fe6e 0ded9094 e5ba7672 9dde83ca 831d5286 32c7478e 9e9a60e4
0 3 276 8 11 10 11 3 26 11 1 1 0 11 5e53cc38 b26462db b6025941 06b1cf6e 4cf72387 13718bbd 65ae2219 0b153874 a73ee510 fbbf2c95 447a6784 72e65cea 9be66b48 cfef1c29 fc8350a5 25b075e4 07c540c4 35ee3e9e ad6ee353 3a171ecb 0ff91809
0 -1 11615 30 1 0 28 1 5a9ed9b0 95e2d337 086df0da 6262590b 4cf72387 7e0ccccf 72cf945c 0b153874 a73ee510 ef2fbb20 7b61aa9b 547c3f98 7f5bf282 07d13a8f 4e505ea3 7ac9f411 d4bb7bd8 7b06fafe 21ddcdc9 a458ea53 29ac833e 32c7478e 7c28ef9f 2bf691b1 b288bc0b
0 0 8 306565 0 14 8 0 10 05db9164 38a947a1 4470baf4 8c8a4c47 25c83c98 7e0ccccf 2e85de94 0b153874 7cc72ec2 3b08e48b 8d6d03a0 bb669e25 86c652c6 b28479f6 091737ad 2b2ce127 776ce399 ade68c22 2b796e4a ad3062eb be7c41b4 8d365d3b
0 0 1992 2 1451 31 1 20 31 0 1 2 be589b51 d833535f b00d1501 d16679b9 25c83c98 fbad5c96 9a75d128 0b153874 a73ee510 3b08e48b 90bf7fef e0d76380 a70d1580 b28479f6 a733d362 1203a270 e5ba7672 281769c2 73d06dde c9d4222a 32c7478e aee52b6f
0 2 1 8 13233 164 13 8 88 7 0 8 05db9164 13f25995 0b0f3952 35d9e6fe 25c83c98 7e0ccccf 87e29668 0b153874 a73ee510 3b08e48b 0bc0e6ed ff8c6fd9 abd69a9d 07d13a8f 7cad642c 5015d391 8efede7f c7cf2414 3db17de9 32c7478e 4fe18e82
0 50 15 6661 0 40 49 0 21 5bfa8ab5 08d6d899 77f2f2e5 d16679b9 25c83c98 7e0ccccf 7f2c5a6e a61cc0ef a73ee510 3b08e48b d21494f8 9f32b866 f47f13e4 b28479f6 bffbd637 31ca40b6 1e88c74f bbf70d82 dfcfc3fa c9d4222a 32c7478e aee52b6f
1 0 13 2 10320 72 0 2 45 0 2 05db9164 09e68b86 e95580ff 653ee14f 25c83c98 fe6b92e5 26a81064 5b392875 a73ee510 dcbc7c2b 9e511730 49a381fa 04e4a7e0 1adce6ef dbc5e126 cf6ed269 d4bb7bd8 5aed7436 21ddcdc9 a458ea53 c9fcf5fd 3a171ecb 5e22c595 e8b83407 8e27cf04
0 -1 11066 0 0 1 0 05db9164 38a947a1 a64c7bd9 67a8407c 25c83c98 fe6b92e5 71fd6dcd 0b153874 a73ee510 3b08e48b e5cd3d61 0a8d756f 08ba5c35 b28479f6 b7815e37 ef7d43b0 776ce399 a6bfeb0a 455f53fb 93bad2c0 928e948f
0 107 8939 0 1 2 0 05db9164 a244fe99 25c83c98 7e0ccccf c8e48a82 0b153874 a73ee510 c6c8dd7c ae4c531b 01c2bbc7 07d13a8f 2f5df569 d4bb7bd8 35901cfb ad3062eb 423fab69
0 0 2 15 12 2504 365 1 10 77 0 1 12 68fd1e64 08d6d899 03942b3f afe92929 25c83c98 7e0ccccf f4b9d7ad 0b153874 a73ee510 663eefea c1ee56d0 e977ae2f ebd756bd 07d13a8f 1a277242 82f06a35 d4bb7bd8 87c6f83c 08119c8b 55dd3565 f96a556f
1 13 2153 1 25 37 3 13 9 29 2 2 3 68fd1e64 4f25e98b de211a17 a8925441 25c83c98 5e64ce5f 1f89b562 a73ee510 be630248 8b94178b fcaae253 025225f2 b28479f6 8ab5b746 3b58b07a e5ba7672 7ef5affa 9437f62f b1252a9d ce247dc1 32c7478e 3fdb382b 001f3601 0fd820a6
1 37 72 2 3 4 2 49 42 222 1 5 2 05db9164 3f0d3f28 d73310fa b40012b1 4cf72387 fbad5c96 ad3508b1 0b153874 a73ee510 08658f3b ad757a5a 0e466d8f 93b18cb5 32813e21 3440b690 f4219d4b e5ba7672 7da064fc 0471db05 ad3062eb c7dc6720 e5fca70a
0 0 5 11541 0 0 7 0 05db9164 89ddfee8 15d7420a ff441594 25c83c98 7e0ccccf bdaf7920 0b153874 a73ee510 fbbf2c95 4c074d2a 5f27bc59 f948ca5d 051219e6 d5223973 e2b64862 1e88c74f 5bb2ec8e 0053530c a458ea53 2f4978df 32c7478e 75c8ca05 f0f449dd d21d0b82
0 15 2 2 87297 0 3 23 0 3 05db9164 a8b6b751 3e67fbbb 10056215 25c83c98 7e0ccccf d9aa9d97 5b392875 7cc72ec2 3b08e48b c4adf918 d9f32d8d 85dbe138 b28479f6 694e45e3 345db5a2 776ce399 d787f192 21ddcdc9 5840adea 7463465b ad3062eb 32c7478e 3d236c54 001f3601 984e0db0
0 30 1 12 5 11 5 608 19 286 1 47 1 5 05db9164 89ddfee8 ab2fe4c8 428cff52 43b19349 3bf701e7 407438c8 1f89b562 a73ee510 0a164266 755e4a50 3989acff 5978055e b28479f6 25753fb1 cf445916 8efede7f 5bb2ec8e 21ddcdc9 b1252a9d d64ee25a 78e2e389 32c7478e 0b351a52 e8b83407 b1c17344
1 5 7 2 2 414 21 83 33 925 1 36 2 68fd1e64 421b43cd 06ded108 29998ed1 43b19349 7e0ccccf 4aa938fc 5b392875 a73ee510 03ed27e7 2b9c7071 6aaba33c 1aa94af3 b28479f6 2d0bb053 b041b04a e5ba7672 2804effd 723b4dfd c9d4222a 3a171ecb b34f3128
0 1 6 21905 0 15 49 0 6 05db9164 62e9e9bf 91c52fd6 89085a81 43b19349 fe6b92e5 e88f1cec 45f7c2dd a73ee510 3b08e48b 8f410860 5ad710aa b8eec0b1 cfef1c29 9a7936cb 9decb3fe 776ce399 d2651d6e c7d10c5e be7c41b4 6f90ebe1
0 0 174 5 14718 10 0 5 5a9ed9b0 2fe85f57 b61789da 230aba50 25c83c98 fe6b92e5 3a6d4c08 0b153874 a73ee510 d108fc83 41656eae 24604d0c 66815d59 07d13a8f d8524628 78d9f0d0 e5ba7672 f4373605 ab303097 c9d4222a 32c7478e fab2a151
0 0 7 1 15780 12 6 1 1 1 1 05db9164 8ab240be cedcacac 7967fcf5 25c83c98 7e0ccccf 5f29da0e 0b153874 a73ee510 f476fbe3 0ad37b4b 553e02c3 f9d99d81 1adce6ef 28883800 91a6eec5 1e88c74f ca533012 21ddcdc9 5840adea a97b62ca 423fab69 727a7cc7 445bbe3b 6935065e
0 0 2 1 1540 44 4 4 268 0 4 5 05db9164 68b3edbf 77f2f2e5 d16679b9 25c83c98 7e0ccccf fcf0132a 1f89b562 a73ee510 aed3d80e d650f1bd 9f32b866 863f8f8a b28479f6 f511c49f 31ca40b6 e5ba7672 752d8b8a dfcfc3fa c7dc6720 aee52b6f
0 7 31 1 239 1 8 9 49 1 2 0 1 68fd1e64 8084ee93 d032c263 c18be181 43b19349 fe6b92e5 cee47266 0b153874 a73ee510 14781fa9 87fe3e10 dfbb09fb 3bd6c21d b28479f6 16d2748c 84898b2a 27c07bd6 003d4f4f 0014c32a 32c7478e 3b183c5c
0 -1 12674 4 26 0 73 2 05db9164 09e68b86 eecaacb9 d268ac84 25c83c98 13718bbd 33cca6fa 0b153874 a73ee510 401ced54 683e14e9 ce76d69d 2b9fb512 b28479f6 52baadf5 7bf10350 e5ba7672 5aed7436 55dd3565 b1252a9d 3d7cfd1b 3a171ecb 3fdb382b 3d2bedd7 49d68486
0 259 4 103468 0 0 14 0 05db9164 8947f767 d8ec4c68 ac1667dd 4cf72387 7e0ccccf 3527bb7c 0b153874 7cc72ec2 3b08e48b 2b9f131d 2a63b3ee aca10c14 07d13a8f 2c14c412 11b43c2e 8efede7f bd17c3da 21ddcdc9 a458ea53 79a05ba5 32c7478e 4fb9fee0 010f6491 004f1180
1 3 145 4 108 6 4 4 31 1 2 4 8cf07265 6c2cbbdc a42bd759 8b3b6b2e 25c83c98 f00bddf8 062b5529 a73ee510 0d538fca 55795b33 6bb7b021 39795005 64c94865 af094307 c3815fe3 e5ba7672 fb299884 987d0b7a 32c7478e 145ae095
1 147 1 159966 0 1 1 0 1 68fd1e64 38d50e09 c86b2d8d 657dc3b9 25c83c98 7e0ccccf bc324536 1f89b562 7cc72ec2 474773a7 2bcfb78f 1ca7a526 e6fc496d b28479f6 06373944 ba46c3a1 e5ba7672 fffe2a63 21ddcdc9 b1252a9d eb0fc6f8 ad3062eb 32c7478e df487a73 001f3601 c27f155b
\ No newline at end of file
...@@ -25,43 +25,9 @@ class Model(ModelBase): ...@@ -25,43 +25,9 @@ class Model(ModelBase):
ModelBase.__init__(self, config) ModelBase.__init__(self, config)
def input(self): def input(self):
def sparse_inputs(): self.sparse_inputs = self._sparse_data_var[1:]
ids = envs.get_global_env("hyper_parameters.sparse_inputs_slots", None, self._namespace) self.dense_input = self._dense_data_var[0]
self.label_input = self._sparse_data_var[0]
sparse_input_ids = [
fluid.layers.data(name="S" + str(i),
shape=[1],
lod_level=1,
dtype="int64") for i in range(1, ids)
]
return sparse_input_ids
def dense_input():
dim = envs.get_global_env("hyper_parameters.dense_input_dim", None, self._namespace)
dense_input_var = fluid.layers.data(name="D",
shape=[dim],
dtype="float32")
return dense_input_var
def label_input():
label = fluid.layers.data(name="click", shape=[1], dtype="int64")
return label
self.sparse_inputs = sparse_inputs()
self.dense_input = dense_input()
self.label_input = label_input()
self._data_var.append(self.dense_input)
for input in self.sparse_inputs:
self._data_var.append(input)
self._data_var.append(self.label_input)
if self._platform != "LINUX":
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var, capacity=64, use_double_buffer=False, iterable=False)
def net(self): def net(self):
is_distributed = True if envs.get_trainer() == "CtrTrainer" else False is_distributed = True if envs.get_trainer() == "CtrTrainer" else False
...@@ -122,6 +88,7 @@ class Model(ModelBase): ...@@ -122,6 +88,7 @@ class Model(ModelBase):
self._metrics["BATCH_AUC"] = batch_auc self._metrics["BATCH_AUC"] = batch_auc
def train_net(self): def train_net(self):
self.model._init_slots()
self.input() self.input()
self.net() self.net()
self.avg_loss() self.avg_loss()
...@@ -133,5 +100,6 @@ class Model(ModelBase): ...@@ -133,5 +100,6 @@ class Model(ModelBase):
return optimizer return optimizer
def infer_net(self): def infer_net(self):
self.model._init_slots()
self.input() self.input()
self.net() self.net()
...@@ -59,6 +59,11 @@ ...@@ -59,6 +59,11 @@
## 使用教程 ## 使用教程
### 数据处理 ### 数据处理
参考每个模型目录数据下载&预处理脚本 参考每个模型目录数据下载&预处理脚本
```
sh run.sh
```
### 训练 ### 训练
``` ```
python -m paddlerec.run -m paddlerec.models.rank.dnn # 以DNN为例 python -m paddlerec.run -m paddlerec.models.rank.dnn # 以DNN为例
......
...@@ -22,8 +22,9 @@ train: ...@@ -22,8 +22,9 @@ train:
reader: reader:
batch_size: 2 batch_size: 2
class: "{workspace}/reader.py" train_data_path: "{workspace}/data/slot_train_data"
train_data_path: "{workspace}/data/train_data" sparse_slots: "label"
dense_slots: "wide_input:8 deep_input:58"
model: model:
models: "{workspace}/model.py" models: "{workspace}/model.py"
......
mkdir train_data mkdir train_data
mkdir test_data mkdir test_data
mkdir data train_path="adult.data"
train_path="/home/yaoxuefeng/repos/models/models/PaddleRec/ctr/wide_deep/data/adult.data" test_path="adult.test"
test_path="/home/yaoxuefeng/repos/models/models/PaddleRec/ctr/wide_deep/data/adult.test" train_data_path="./train_data/train_data.csv"
train_data_path="/home/yaoxuefeng/repos/models/models/PaddleRec/ctr/wide_deep/train_data/train_data.csv" test_data_path="./test_data/test_data.csv"
test_data_path="/home/yaoxuefeng/repos/models/models/PaddleRec/ctr/wide_deep/test_data/test_data.csv"
#pip install -r requirements.txt pip install -r requirements.txt
#wget -P data/ https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.data wget -P data/ https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.data
#wget -P data/ https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.test wget -P data/ https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.test
python data_preparation.py --train_path ${train_path} \ python data_preparation.py --train_path ${train_path} \
--test_path ${test_path} \ --test_path ${test_path} \
......
import os
import io
import args
import pandas as pd
from sklearn import preprocessing
def _clean_file(source_path, target_path):
    """Normalize one raw census file into CSV form at *target_path*.

    Per line: strip surrounding whitespace, collapse ", " separators to
    ",", skip blank or comma-less lines, and drop the trailing period
    that the adult.test split appends to each record.
    """
    with io.open(source_path, 'r') as raw_file, io.open(target_path, 'w') as out_file:
        for raw_line in raw_file:
            record = raw_line.strip().replace(', ', ',')
            # Blank lines and non-record lines (no comma) are dropped.
            if not record or ',' not in record:
                continue
            if record.endswith('.'):
                record = record[:-1]
            out_file.write(record + '\n')
def build_model_columns(train_data_path, test_data_path):
    """Engineer wide & deep feature columns for the census-income data.

    Reads the cleaned train/test CSVs, integer-encodes the categorical and
    crossed columns, one-hot expands the categoricals for the deep part,
    derives the binary ``label`` from ``income_bracket``, records the
    wide/deep column counts in ``{train,test}_data/columns.txt``, and
    rewrites the input CSVs in place with only
    ``wide_columns + deep_columns + ['label']``.
    """
    # The column names are from
    # https://www2.1010data.com/documentationcenter/prod/Tutorials/MachineLearningExamples/CensusIncomeDataSet.html
    column_names = [
        'age', 'workclass', 'fnlwgt', 'education', 'education_num',
        'marital_status', 'occupation', 'relationship', 'race', 'gender',
        'capital_gain', 'capital_loss', 'hours_per_week', 'native_country',
        'income_bracket'
    ]
    # Load the dataset in Pandas (the cleaned files carry no header row).
    train_df = pd.read_csv(
        train_data_path,
        delimiter=',',
        header=None,
        index_col=None,
        names=column_names)
    test_df = pd.read_csv(
        test_data_path,
        delimiter=',',
        header=None,
        index_col=None,
        names=column_names)
    # First group of tasks according to the paper
    #label_columns = ['income_50k', 'marital_stat']
    # Integer-encode each categorical column for the wide part.
    # NOTE(review): train and test use independent LabelEncoders, so the same
    # category may map to different integers across splits — confirm intended.
    categorical_columns = ['education','marital_status','relationship','workclass','occupation']
    for col in categorical_columns:
        label_train = preprocessing.LabelEncoder()
        train_df[col]= label_train.fit_transform(train_df[col])
        label_test = preprocessing.LabelEncoder()
        test_df[col]= label_test.fit_transform(test_df[col])
    # Bucketize age into fixed bins; values outside [18, 65] become NaN
    # (pd.cut), which the final fillna(0) maps to bucket 0.
    bins = [18, 25, 30, 35, 40, 45, 50, 55, 60, 65]
    train_df['age_buckets'] = pd.cut(train_df['age'].values.tolist(), bins,labels=False)
    test_df['age_buckets'] = pd.cut(test_df['age'].values.tolist(), bins,labels=False)
    base_columns = ['education', 'marital_status', 'relationship', 'workclass', 'occupation', 'age_buckets']
    # Crossed features: string concatenations of encoded columns, then
    # integer-encoded the same way as the plain categoricals above.
    train_df['education_occupation'] = train_df['education'].astype(str) + '_' + train_df['occupation'].astype(str)
    test_df['education_occupation'] = test_df['education'].astype(str) + '_' + test_df['occupation'].astype(str)
    train_df['age_buckets_education_occupation'] = train_df['age_buckets'].astype(str) + '_' + train_df['education'].astype(str) + '_' + train_df['occupation'].astype(str)
    test_df['age_buckets_education_occupation'] = test_df['age_buckets'].astype(str) + '_' + test_df['education'].astype(str) + '_' + test_df['occupation'].astype(str)
    crossed_columns = ['education_occupation','age_buckets_education_occupation']
    for col in crossed_columns:
        label_train = preprocessing.LabelEncoder()
        train_df[col]= label_train.fit_transform(train_df[col])
        label_test = preprocessing.LabelEncoder()
        test_df[col]= label_test.fit_transform(test_df[col])
    wide_columns = base_columns + crossed_columns
    # One-hot expand the categoricals; the dummy columns feed the deep part.
    train_df_temp = pd.get_dummies(train_df[categorical_columns],columns=categorical_columns)
    test_df_temp = pd.get_dummies(test_df[categorical_columns], columns=categorical_columns)
    train_df = train_df.join(train_df_temp)
    test_df = test_df.join(test_df_temp)
    deep_columns = list(train_df_temp.columns)+ ['age','education_num','capital_gain','capital_loss','hours_per_week']
    # Binary target: 1 for income above 50K, else 0. _clean_file already
    # stripped the trailing '.' that adult.test appends, so '>50K' matches
    # both splits — presumably; verify against the raw files.
    train_df['label'] = train_df['income_bracket'].apply(lambda x : 1 if x == '>50K' else 0)
    test_df['label'] = test_df['income_bracket'].apply(lambda x : 1 if x == '>50K' else 0)
    # Persist the wide/deep column counts so downstream code can size inputs.
    with io.open('train_data/columns.txt','w') as f:
        write_str = str(len(wide_columns)) + '\n' + str(len(deep_columns)) + '\n'
        f.write(write_str)
        f.close()  # redundant inside `with`; kept byte-for-byte
    with io.open('test_data/columns.txt','w') as f:
        write_str = str(len(wide_columns)) + '\n' + str(len(deep_columns)) + '\n'
        f.write(write_str)
        f.close()  # redundant inside `with`; kept byte-for-byte
    # Overwrite the input CSVs in place with the final feature layout.
    train_df[wide_columns + deep_columns + ['label']].fillna(0).to_csv(train_data_path,index=False)
    test_df[wide_columns + deep_columns + ['label']].fillna(0).to_csv(test_data_path,index=False)
def clean_file(train_path, test_path, train_data_path, test_data_path):
    """Clean both raw census splits into CSV files at the given output paths."""
    _clean_file(train_path, train_data_path)
    _clean_file(test_path, test_data_path)
if __name__ == '__main__':
    # Bind the parsed namespace to a fresh name: the original
    # `args = args.parse_args()` shadowed the imported `args` module,
    # making any later use of the module impossible.
    params = args.parse_args()
    # Clean the raw files first, then engineer columns over the cleaned CSVs.
    clean_file(params.train_path, params.test_path, params.train_data_path, params.test_data_path)
    build_model_columns(params.train_data_path, params.test_data_path)
...@@ -11,18 +11,25 @@ ...@@ -11,18 +11,25 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import yaml
from __future__ import print_function from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
try: try:
import cPickle as pickle import cPickle as pickle
except ImportError: except ImportError:
import pickle import pickle
import paddle.fluid.incubate.data_generator as dg
from paddlerec.core.reader import Reader class TrainReader(dg.MultiSlotDataGenerator):
def __init__(self, config):
dg.MultiSlotDataGenerator.__init__(self)
if os.path.isfile(config):
with open(config, 'r') as rb:
_config = yaml.load(rb.read(), Loader=yaml.FullLoader)
else:
raise ValueError("reader config only support yaml")
class TrainReader(Reader):
def init(self): def init(self):
pass pass
...@@ -41,6 +48,18 @@ class TrainReader(Reader): ...@@ -41,6 +48,18 @@ class TrainReader(Reader):
def data_iter(): def data_iter():
wide_feat, deep_deat, label = self._process_line(line) wide_feat, deep_deat, label = self._process_line(line)
yield [('wide_input', wide_feat), ('deep_input', deep_deat), ('label', label)]
s = ""
for i in [('wide_input', wide_feat), ('deep_input', deep_deat), ('label', label)]:
k = i[0]
v = i[1]
for j in v:
s += " " + k + ":" + str(j)
print s.strip()
yield None
return data_iter return data_iter
reader = TrainReader("../config.yaml")
reader.init()
reader.run_from_stdin()
# Download/prepare the raw data, then convert every file to slot format.
sh create_data.sh
# -p makes reruns idempotent (plain mkdir errors if the dir already exists);
# globbing replaces `ls` parsing, and quoting survives unusual filenames.
mkdir -p slot_train_data
for i in ./train_data/*
do
    cat "$i" | python get_slot_data.py > "slot_train_data/$(basename "$i")"
done
mkdir -p slot_test_data
for i in ./test_data/*
do
    cat "$i" | python get_slot_data.py > "slot_test_data/$(basename "$i")"
done
...@@ -57,12 +57,10 @@ class Model(ModelBase): ...@@ -57,12 +57,10 @@ class Model(ModelBase):
return l3 return l3
def train_net(self): def train_net(self):
wide_input = fluid.data(name='wide_input', shape=[None, 8], dtype='float32') self.model._init_slots()
deep_input = fluid.data(name='deep_input', shape=[None, 58], dtype='float32') wide_input = self._dense_data_var[0]
label = fluid.data(name='label', shape=[None, 1], dtype='float32') deep_input = self._dense_data_var[1]
self._data_var.append(wide_input) label = self._sparse_data_var[0]
self._data_var.append(deep_input)
self._data_var.append(label)
hidden1_units = envs.get_global_env("hyper_parameters.hidden1_units", 75, self._namespace) hidden1_units = envs.get_global_env("hyper_parameters.hidden1_units", 75, self._namespace)
hidden2_units = envs.get_global_env("hyper_parameters.hidden2_units", 50, self._namespace) hidden2_units = envs.get_global_env("hyper_parameters.hidden2_units", 50, self._namespace)
...@@ -95,7 +93,7 @@ class Model(ModelBase): ...@@ -95,7 +93,7 @@ class Model(ModelBase):
self._metrics["BATCH_AUC"] = batch_auc self._metrics["BATCH_AUC"] = batch_auc
self._metrics["ACC"] = acc self._metrics["ACC"] = acc
cost = fluid.layers.sigmoid_cross_entropy_with_logits(x=prediction, label=label) cost = fluid.layers.sigmoid_cross_entropy_with_logits(x=prediction, label=fluid.layers.cast(label, dtype='float32'))
avg_cost = fluid.layers.mean(cost) avg_cost = fluid.layers.mean(cost)
self._cost = avg_cost self._cost = avg_cost
...@@ -105,4 +103,5 @@ class Model(ModelBase): ...@@ -105,4 +103,5 @@ class Model(ModelBase):
return optimizer return optimizer
def infer_net(self, parameter_list): def infer_net(self, parameter_list):
self.model._init_slots()
self.deepfm_net() self.deepfm_net()
...@@ -22,8 +22,9 @@ train: ...@@ -22,8 +22,9 @@ train:
reader: reader:
batch_size: 2 batch_size: 2
class: "{workspace}/criteo_reader.py" train_data_path: "{workspace}/data/slot_train_data"
train_data_path: "{workspace}/data/train_data" sparse_slots: "label feat_idx"
dense_slots: "feat_value:39"
model: model:
models: "{workspace}/model.py" models: "{workspace}/model.py"
......
...@@ -12,17 +12,24 @@ ...@@ -12,17 +12,24 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from __future__ import print_function import yaml
from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
try: try:
import cPickle as pickle import cPickle as pickle
except ImportError: except ImportError:
import pickle import pickle
import paddle.fluid.incubate.data_generator as dg
from paddlerec.core.reader import Reader class TrainReader(dg.MultiSlotDataGenerator):
def __init__(self, config):
dg.MultiSlotDataGenerator.__init__(self)
if os.path.isfile(config):
with open(config, 'r') as rb:
_config = yaml.load(rb.read(), Loader=yaml.FullLoader)
else:
raise ValueError("reader config only support yaml")
class TrainReader(Reader):
def init(self): def init(self):
pass pass
...@@ -39,7 +46,18 @@ class TrainReader(Reader): ...@@ -39,7 +46,18 @@ class TrainReader(Reader):
def generate_sample(self, line): def generate_sample(self, line):
def data_iter(): def data_iter():
feat_idx, feat_value, label = self._process_line(line) feat_idx, feat_value, label = self._process_line(line)
yield [('feat_idx', feat_idx), ('feat_value', feat_value), ('label',
label)] s = ""
for i in [('feat_idx', feat_idx), ('feat_value', feat_value), ('label', label)]:
k = i[0]
v = i[1]
for j in v:
s += " " + k + ":" + str(j)
print s.strip()
yield None
return data_iter return data_iter
reader = TrainReader("../config.yaml")
reader.init()
reader.run_from_stdin()
# Download the dataset, then convert train/test files to slot format.
python download.py
mkdir -p slot_train_data/tr
for i in ./train_data/tr/*
do
    cat "$i" | python get_slot_data.py > "slot_train_data/tr/$(basename "$i")"
done
# BUG FIX: this was `mkdir slot_test_data/ev` (no -p), which fails because
# the parent directory slot_test_data does not exist yet.
mkdir -p slot_test_data/ev
for i in ./test_data/ev/*
do
    cat "$i" | python get_slot_data.py > "slot_test_data/ev/$(basename "$i")"
done
...@@ -34,10 +34,11 @@ class Model(ModelBase): ...@@ -34,10 +34,11 @@ class Model(ModelBase):
# ------------------------- network input -------------------------- # ------------------------- network input --------------------------
num_field = envs.get_global_env("hyper_parameters.num_field", None, self._namespace) num_field = envs.get_global_env("hyper_parameters.num_field", None, self._namespace)
raw_feat_idx = fluid.data(name='feat_idx', shape=[None, num_field], dtype='int64') raw_feat_idx = self._sparse_data_var[1]
raw_feat_value = fluid.data(name='feat_value', shape=[None, num_field], dtype='float32') raw_feat_value = self._dense_data_var[0]
self.label = fluid.data(name='label', shape=[None, 1], dtype='float32') # None * 1 self.label = self._sparse_data_var[0]
feat_idx = fluid.layers.reshape(raw_feat_idx, [-1, 1]) # (None * num_field) * 1
feat_idx = raw_feat_idx
feat_value = fluid.layers.reshape(raw_feat_value, [-1, num_field, 1]) # None * num_field * 1 feat_value = fluid.layers.reshape(raw_feat_value, [-1, num_field, 1]) # None * num_field * 1
feat_embeddings = fluid.embedding( feat_embeddings = fluid.embedding(
...@@ -52,15 +53,6 @@ class Model(ModelBase): ...@@ -52,15 +53,6 @@ class Model(ModelBase):
[-1, num_field, sparse_feature_dim]) # None * num_field * embedding_size [-1, num_field, sparse_feature_dim]) # None * num_field * embedding_size
feat_embeddings = feat_embeddings * feat_value # None * num_field * embedding_size feat_embeddings = feat_embeddings * feat_value # None * num_field * embedding_size
# ------------------------- set _data_var --------------------------
self._data_var.append(raw_feat_idx)
self._data_var.append(raw_feat_value)
self._data_var.append(self.label)
if self._platform != "LINUX":
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var, capacity=64, use_double_buffer=False, iterable=False)
# -------------------- linear -------------------- # -------------------- linear --------------------
weights_linear = fluid.embedding( weights_linear = fluid.embedding(
...@@ -153,9 +145,10 @@ class Model(ModelBase): ...@@ -153,9 +145,10 @@ class Model(ModelBase):
self.predict = fluid.layers.sigmoid(y_linear + y_cin + y_dnn) self.predict = fluid.layers.sigmoid(y_linear + y_cin + y_dnn)
def train_net(self): def train_net(self):
self.model._init_slots()
self.xdeepfm_net() self.xdeepfm_net()
cost = fluid.layers.log_loss(input=self.predict, label=self.label, epsilon=0.0000001) cost = fluid.layers.log_loss(input=self.predict, label=fluid.layers.cast(self.label, "float32"), epsilon=0.0000001)
batch_cost = fluid.layers.reduce_mean(cost) batch_cost = fluid.layers.reduce_mean(cost)
self._cost = batch_cost self._cost = batch_cost
...@@ -174,4 +167,5 @@ class Model(ModelBase): ...@@ -174,4 +167,5 @@ class Model(ModelBase):
return optimizer return optimizer
def infer_net(self, parameter_list): def infer_net(self, parameter_list):
self.model._init_slots()
self.xdeepfm_net() self.xdeepfm_net()
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册