Unverified commit 8a9d286c authored by W wuzhihua, committed by GitHub

Merge branch 'master' into doc_v7

......@@ -149,11 +149,13 @@ class Model(object):
return optimizer_i
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.learning_rate",
None, self._namespace)
optimizer = envs.get_global_env("hyper_parameters.optimizer", None,
self._namespace)
return self._build_optimizer(optimizer, learning_rate)
opt_name = envs.get_global_env("hyper_parameters.optimizer.class")
opt_lr = envs.get_global_env(
"hyper_parameters.optimizer.learning_rate")
opt_strategy = envs.get_global_env(
"hyper_parameters.optimizer.strategy")
return self._build_optimizer(opt_name, opt_lr, opt_strategy)
def input_data(self, is_infer=False, **kwargs):
name = "dataset." + kwargs.get("dataset_name") + "."
......
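For reference, the change above moves optimizer settings from flat namespaced keys to a nested `hyper_parameters.optimizer` block. A minimal sketch, assuming the Adagrad block used in the classification config later in this commit, of what the new lookups resolve to:

```
from paddlerec.core.utils import envs

# Assuming a config block of the form:
#   hyper_parameters:
#     optimizer:
#       class: Adagrad
#       learning_rate: 0.001
opt_name = envs.get_global_env("hyper_parameters.optimizer.class")          # "Adagrad"
opt_lr = envs.get_global_env("hyper_parameters.optimizer.learning_rate")    # 0.001
opt_strategy = envs.get_global_env("hyper_parameters.optimizer.strategy")   # None unless set
```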
......@@ -167,6 +167,7 @@ class SingleInfer(TranspileTrainer):
model = envs.lazy_instance_by_fliename(
model_path, "Model")(self._env)
model._infer_data_var = model.input_data(
is_infer=True,
dataset_name=model_dict["dataset_name"])
if envs.get_global_env("dataset." + dataset_name +
".type") == "DataLoader":
......
......@@ -147,11 +147,6 @@ class SingleTrainer(TranspileTrainer):
startup_program = fluid.Program()
scope = fluid.Scope()
dataset_name = model_dict["dataset_name"]
opt_name = envs.get_global_env("hyper_parameters.optimizer.class")
opt_lr = envs.get_global_env(
"hyper_parameters.optimizer.learning_rate")
opt_strategy = envs.get_global_env(
"hyper_parameters.optimizer.strategy")
with fluid.program_guard(train_program, startup_program):
with fluid.unique_name.guard():
with fluid.scope_guard(scope):
......@@ -168,8 +163,7 @@ class SingleTrainer(TranspileTrainer):
self._get_dataloader(dataset_name,
model._data_loader)
model.net(model._data_var, False)
optimizer = model._build_optimizer(opt_name, opt_lr,
opt_strategy)
optimizer = model.optimizer()
optimizer.minimize(model._cost)
self._model[model_dict["name"]][0] = train_program
self._model[model_dict["name"]][1] = startup_program
......@@ -234,10 +228,12 @@ class SingleTrainer(TranspileTrainer):
scope = self._model[model_name][2]
program = self._model[model_name][0]
reader = self._dataset[reader_name]
threads = model_dict.get("thread_num", 1)
with fluid.scope_guard(scope):
self._exe.train_from_dataset(
program=program,
dataset=reader,
thread=threads,
fetch_list=fetch_vars,
fetch_info=fetch_alias,
print_period=fetch_period)
......@@ -247,8 +243,23 @@ class SingleTrainer(TranspileTrainer):
model_name = model_dict["name"]
model_class = self._model[model_name][3]
program = self._model[model_name][0].clone()
_build_strategy = fluid.BuildStrategy()
_exe_strategy = fluid.ExecutionStrategy()
# 0: kCoeffNumDevice; 1: One; 2: Customized
_build_strategy.gradient_scale_strategy = model_dict.get(
"gradient_scale_strategy", 0)
if "thread_num" in model_dict and model_dict["thread_num"] > 1:
_build_strategy.reduce_strategy = fluid.BuildStrategy.ReduceStrategy.Reduce
_exe_strategy.num_threads = model_dict["thread_num"]
os.environ['CPU_NUM'] = str(_exe_strategy.num_threads)
program = fluid.compiler.CompiledProgram(program).with_data_parallel(
loss_name=model_class.get_avg_cost().name)
loss_name=model_class.get_avg_cost().name,
build_strategy=_build_strategy,
exec_strategy=_exe_strategy)
fetch_vars = []
fetch_alias = []
fetch_period = int(
......
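For reference, a self-contained sketch (assuming Paddle 1.x `fluid` APIs; the tiny network is hypothetical, only the strategy wiring mirrors the trainer code above) of the multi-threaded CPU compilation path added in this hunk:

```
import os
import paddle.fluid as fluid

train_program = fluid.Program()
startup_program = fluid.Program()
with fluid.program_guard(train_program, startup_program):
    x = fluid.data(name="x", shape=[None, 4], dtype="float32")
    y = fluid.data(name="y", shape=[None, 1], dtype="float32")
    pred = fluid.layers.fc(input=x, size=1)
    avg_cost = fluid.layers.mean(fluid.layers.square_error_cost(input=pred, label=y))

build_strategy = fluid.BuildStrategy()
# gradient_scale_strategy enum: CoeffNumDevice (0, default), One (1), Customized (2)
build_strategy.gradient_scale_strategy = \
    fluid.BuildStrategy.GradientScaleStrategy.CoeffNumDevice
build_strategy.reduce_strategy = fluid.BuildStrategy.ReduceStrategy.Reduce

exec_strategy = fluid.ExecutionStrategy()
exec_strategy.num_threads = 2                # mirrors model_dict["thread_num"]
os.environ["CPU_NUM"] = str(exec_strategy.num_threads)

compiled = fluid.compiler.CompiledProgram(train_program).with_data_parallel(
    loss_name=avg_cost.name,
    build_strategy=build_strategy,
    exec_strategy=exec_strategy)
```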
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
......@@ -12,28 +12,37 @@
# See the License for the specific language governing permissions and
# limitations under the License.
train:
trainer:
# for cluster training
strategy: "async"
workspace: "paddlerec.models.contentunderstanding.classification"
epochs: 10
workspace: "paddlerec.models.contentunderstanding.classification"
dataset:
- name: data1
batch_size: 5
type: DataLoader
data_path: "{workspace}/data/train_data"
data_converter: "{workspace}/reader.py"
hyper_parameters:
optimizer:
class: Adagrad
learning_rate: 0.001
is_sparse: False
reader:
batch_size: 5
class: "{workspace}/reader.py"
train_data_path: "{workspace}/train_data"
mode: runner1
model:
models: "{workspace}/model.py"
runner:
- name: runner1
class: single_train
epochs: 10
device: cpu
save_checkpoint_interval: 2
save_inference_interval: 4
save_checkpoint_path: "increment"
save_inference_path: "inference"
save_inference_feed_varnames: []
save_inference_fetch_varnames: []
save:
increment:
dirname: "increment"
epoch_interval: 1
save_last: True
inference:
dirname: "inference"
epoch_interval: 100
save_last: True
phase:
- name: phase1
model: "{workspace}/model.py"
dataset_name: data1
thread_num: 1
......@@ -27,19 +27,27 @@ class Model(ModelBase):
self.emb_dim = 8
self.hid_dim = 128
self.class_dim = 2
self.is_sparse = envs.get_global_env("hyper_parameters.is_sparse",
False)
def train_net(self):
""" network definition """
def input_data(self, is_infer=False, **kwargs):
data = fluid.data(
name="input", shape=[None, self.max_len], dtype='int64')
label = fluid.data(name="label", shape=[None, 1], dtype='int64')
seq_len = fluid.data(name="seq_len", shape=[None], dtype='int64')
return [data, label, seq_len]
self._data_var = [data, label, seq_len]
def net(self, input, is_infer=False):
""" network definition """
data = input[0]
label = input[1]
seq_len = input[2]
# embedding layer
emb = fluid.embedding(input=data, size=[self.dict_dim, self.emb_dim])
emb = fluid.embedding(
input=data,
size=[self.dict_dim, self.emb_dim],
is_sparse=self.is_sparse)
emb = fluid.layers.sequence_unpad(emb, length=seq_len)
# convolution layer
conv = fluid.nets.sequence_conv_pool(
......@@ -59,19 +67,8 @@ class Model(ModelBase):
avg_cost = fluid.layers.mean(x=cost)
acc = fluid.layers.accuracy(input=prediction, label=label)
self.cost = avg_cost
self._metrics["acc"] = acc
def get_avg_cost(self):
return self.cost
def get_metrics(self):
return self._metrics
def optimizer(self):
learning_rate = 0.01
sgd_optimizer = fluid.optimizer.Adagrad(learning_rate=learning_rate)
return sgd_optimizer
def infer_net(self):
self.train_net()
self._cost = avg_cost
if is_infer:
self._infer_results["acc"] = acc
else:
self._metrics["acc"] = acc
......@@ -22,7 +22,7 @@ class TrainReader(Reader):
pass
def _process_line(self, l):
l = l.strip().split(" ")
l = l.strip().split()
data = l[0:10]
seq_len = l[10:11]
label = l[11:]
......@@ -37,8 +37,6 @@ class TrainReader(Reader):
data = [int(i) for i in data]
label = [int(i) for i in label]
seq_len = [int(i) for i in seq_len]
print >> sys.stderr, str(
[('data', data), ('label', label), ('seq_len', seq_len)])
yield [('data', data), ('label', label), ('seq_len', seq_len)]
return data_iter
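A minimal sketch of the line layout `_process_line` expects, with hypothetical token values: ten feature ids, one sequence length, and one label, whitespace-separated:

```
# hypothetical sample line for the classification reader
line = "27 12 1005 3 890 4 44 6 71 9 10 0"
l = line.strip().split()
data, seq_len, label = l[0:10], l[10:11], l[11:]
# data -> ten token ids, seq_len -> ["10"], label -> ["0"]
```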
......@@ -37,7 +37,18 @@
<img align="center" src="../../doc/imgs/cnn-ckim2014.png">
<p>
## Tutorial
## Tutorial (Quick Start)
```
python -m paddlerec.run -m paddlerec.models.contentunderstanding.tagspace
python -m paddlerec.run -m paddlerec.models.contentunderstanding.classification
```
## Tutorial (Reproducing Paper Results)
### Note
To help users quickly run every model, sample data is provided under each model directory. To reproduce the results in this README, use the scripts below to download the corresponding dataset and run the data preprocessing.
### Data Preprocessing
**(1)TagSpace**
......@@ -64,20 +75,42 @@ mv test.csv raw_big_test_data
python text2paddle.py raw_big_train_data/ raw_big_test_data/ train_big_data test_big_data big_vocab_text.txt big_vocab_tag.txt
```
**(2)Classification**
### Training
```
cd models/contentunderstanding/tagspace
python -m paddlerec.run -m ./config.yaml # after customizing hyper-parameters, run with the edited config file
```
### Inference
```
# In the model's config.yaml, set workspace to the absolute path of the current directory
# In the model's config.yaml, set mode to infer_runner
# Example: mode: train_runner -> mode: infer_runner
# In infer_runner, set class: single_infer
# Switch the phase section to the infer configuration (see the comments in config.yaml)
# After editing config.yaml, run:
python -m paddlerec.run -m ./config.yaml
```
### Training
**(2)Classification**
### Training
```
python -m paddlerec.run -m paddlerec.models.contentunderstanding.classification
cd models/contentunderstanding/classification
python -m paddlerec.run -m ./config.yaml # after customizing hyper-parameters, run with the edited config file
```
### Inference
```
python -m paddlerec.run -m paddlerec.models.contentunderstanding.classification
# In the model's config.yaml, set workspace to the absolute path of the current directory
# In the model's config.yaml, set mode to infer_runner
# Example: mode: train_runner -> mode: infer_runner
# In infer_runner, set class: single_infer
# Switch the phase section to the infer configuration (see the comments in config.yaml)
# After editing config.yaml, run:
python -m paddlerec.run -m ./config.yaml
```
## Results Comparison
......
......@@ -12,38 +12,44 @@
# See the License for the specific language governing permissions and
# limitations under the License.
train:
trainer:
# for cluster training
strategy: "async"
workspace: "paddlerec.models.contentunderstanding.tagspace"
epochs: 10
workspace: "paddlerec.models.contentunderstanding.tagspace"
dataset:
- name: sample_1
type: QueueDataset
batch_size: 5
data_path: "{workspace}/data/train_data"
data_converter: "{workspace}/reader.py"
reader:
batch_size: 5
class: "{workspace}/reader.py"
train_data_path: "{workspace}/train_data"
hyper_parameters:
optimizer:
class: Adagrad
learning_rate: 0.001
vocab_text_size: 11447
vocab_tag_size: 4
emb_dim: 10
hid_dim: 1000
win_size: 5
margin: 0.1
neg_size: 3
num_devices: 1
model:
models: "{workspace}/model.py"
hyper_parameters:
vocab_text_size: 11447
vocab_tag_size: 4
emb_dim: 10
hid_dim: 1000
win_size: 5
margin: 0.1
neg_size: 3
num_devices: 1
mode: runner1
runner:
- name: runner1
class: single_train
epochs: 10
device: cpu
save_checkpoint_interval: 2
save_inference_interval: 4
save_checkpoint_path: "increment"
save_inference_path: "inference"
save_inference_feed_varnames: []
save_inference_fetch_varnames: []
save:
increment:
dirname: "increment"
epoch_interval: 1
save_last: True
inference:
dirname: "inference"
epoch_interval: 100
save_last: True
phase:
- name: phase1
model: "{workspace}/model.py"
dataset_name: sample_1
thread_num: 1
......@@ -26,26 +26,30 @@ class Model(ModelBase):
ModelBase.__init__(self, config)
self.cost = None
self.metrics = {}
self.vocab_text_size = envs.get_global_env("vocab_text_size", None,
self._namespace)
self.vocab_tag_size = envs.get_global_env("vocab_tag_size", None,
self._namespace)
self.emb_dim = envs.get_global_env("emb_dim", None, self._namespace)
self.hid_dim = envs.get_global_env("hid_dim", None, self._namespace)
self.win_size = envs.get_global_env("win_size", None, self._namespace)
self.margin = envs.get_global_env("margin", None, self._namespace)
self.neg_size = envs.get_global_env("neg_size", None, self._namespace)
self.vocab_text_size = envs.get_global_env(
"hyper_parameters.vocab_text_size")
self.vocab_tag_size = envs.get_global_env(
"hyper_parameters.vocab_tag_size")
self.emb_dim = envs.get_global_env("hyper_parameters.emb_dim")
self.hid_dim = envs.get_global_env("hyper_parameters.hid_dim")
self.win_size = envs.get_global_env("hyper_parameters.win_size")
self.margin = envs.get_global_env("hyper_parameters.margin")
self.neg_size = envs.get_global_env("hyper_parameters.neg_size")
def train_net(self):
""" network"""
def input_data(self, is_infer=False, **kwargs):
text = fluid.data(
name="text", shape=[None, 1], lod_level=1, dtype='int64')
pos_tag = fluid.data(
name="pos_tag", shape=[None, 1], lod_level=1, dtype='int64')
neg_tag = fluid.data(
name="neg_tag", shape=[None, 1], lod_level=1, dtype='int64')
return [text, pos_tag, neg_tag]
self._data_var = [text, pos_tag, neg_tag]
def net(self, input, is_infer=False):
""" network"""
text = input[0]
pos_tag = input[1]
neg_tag = input[2]
text_emb = fluid.embedding(
input=text,
......@@ -97,22 +101,11 @@ class Model(ModelBase):
avg_cost = nn.mean(loss_part3)
less = tensor.cast(cf.less_than(cos_neg, cos_pos), dtype='float32')
correct = nn.reduce_sum(less)
self.cost = avg_cost
self.metrics["correct"] = correct
self.metrics["cos_pos"] = cos_pos
def get_avg_cost(self):
return self.cost
def get_metrics(self):
return self.metrics
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.base_lr", None,
self._namespace)
sgd_optimizer = fluid.optimizer.Adagrad(learning_rate=learning_rate)
return sgd_optimizer
self._cost = avg_cost
def infer_net(self, parameter_list):
self.train_net()
if is_infer:
self._infer_results["correct"] = correct
self._infer_results["cos_pos"] = cos_pos
else:
self._metrics["correct"] = correct
self._metrics["cos_pos"] = cos_pos
......@@ -11,44 +11,66 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
evaluate:
reader:
batch_size: 1
class: "{workspace}/synthetic_evaluate_reader.py"
test_data_path: "{workspace}/data/train"
train:
trainer:
# for cluster training
strategy: "async"
epochs: 4
workspace: "paddlerec.models.match.dssm"
reader:
batch_size: 4
class: "{workspace}/synthetic_reader.py"
train_data_path: "{workspace}/data/train"
workspace: "paddlerec.models.match.dssm"
dataset:
- name: dataset_train
batch_size: 4
type: QueueDataset
data_path: "{workspace}/data/train"
data_converter: "{workspace}/synthetic_reader.py"
- name: dataset_infer
batch_size: 1
type: QueueDataset
data_path: "{workspace}/data/train"
data_converter: "{workspace}/synthetic_evaluate_reader.py"
model:
models: "{workspace}/model.py"
hyper_parameters:
TRIGRAM_D: 1000
NEG: 4
fc_sizes: [300, 300, 128]
fc_acts: ['tanh', 'tanh', 'tanh']
learning_rate: 0.01
optimizer: sgd
hyper_parameters:
optimizer:
class: sgd
learning_rate: 0.01
strategy: async
trigram_d: 1000
neg_num: 4
fc_sizes: [300, 300, 128]
fc_acts: ['tanh', 'tanh', 'tanh']
save:
increment:
dirname: "increment"
epoch_interval: 2
save_last: True
mode: train_runner
# config of each runner.
# runner is a kind of paddle training class, which wraps the train/infer process.
runner:
- name: train_runner
class: single_train
# num of epochs
epochs: 4
# device to run training or infer
device: cpu
save_checkpoint_interval: 2 # save model interval of epochs
save_inference_interval: 4 # save inference
save_checkpoint_path: "increment" # save checkpoint path
save_inference_path: "inference" # save inference path
save_inference_feed_varnames: ["query", "doc_pos"] # feed vars of save inference
save_inference_fetch_varnames: ["cos_sim_0.tmp_0"] # fetch vars of save inference
init_model_path: "" # load model path
fetch_period: 2
- name: infer_runner
class: single_infer
# num of epochs
epochs: 1
# device to run training or infer
device: cpu
fetch_period: 1
init_model_path: "increment/2" # load model path
inference:
dirname: "inference"
epoch_interval: 4
feed_varnames: ["query", "doc_pos"]
fetch_varnames: ["cos_sim_0.tmp_0"]
save_last: True
# runner will run all the phase in each epoch
phase:
- name: phase1
model: "{workspace}/model.py" # user-defined model
dataset_name: dataset_train # select dataset by name
thread_num: 1
#- name: phase2
# model: "{workspace}/model.py" # user-defined model
# dataset_name: dataset_infer # select dataset by name
# thread_num: 1
......@@ -22,45 +22,39 @@ class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
def input(self):
TRIGRAM_D = envs.get_global_env("hyper_parameters.TRIGRAM_D", None,
self._namespace)
Neg = envs.get_global_env("hyper_parameters.NEG", None,
self._namespace)
self.query = fluid.data(
name="query", shape=[-1, TRIGRAM_D], dtype='float32', lod_level=0)
self.doc_pos = fluid.data(
def _init_hyper_parameters(self):
self.trigram_d = envs.get_global_env("hyper_parameters.trigram_d")
self.neg_num = envs.get_global_env("hyper_parameters.neg_num")
self.hidden_layers = envs.get_global_env("hyper_parameters.fc_sizes")
self.hidden_acts = envs.get_global_env("hyper_parameters.fc_acts")
self.learning_rate = envs.get_global_env(
"hyper_parameters.learning_rate")
def input_data(self, is_infer=False, **kwargs):
query = fluid.data(
name="query",
shape=[-1, self.trigram_d],
dtype='float32',
lod_level=0)
doc_pos = fluid.data(
name="doc_pos",
shape=[-1, TRIGRAM_D],
shape=[-1, self.trigram_d],
dtype='float32',
lod_level=0)
self.doc_negs = [
if is_infer:
return [query, doc_pos]
doc_negs = [
fluid.data(
name="doc_neg_" + str(i),
shape=[-1, TRIGRAM_D],
shape=[-1, self.trigram_d],
dtype="float32",
lod_level=0) for i in range(Neg)
lod_level=0) for i in range(self.neg_num)
]
self._data_var.append(self.query)
self._data_var.append(self.doc_pos)
for input in self.doc_negs:
self._data_var.append(input)
if self._platform != "LINUX":
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var,
capacity=64,
use_double_buffer=False,
iterable=False)
def net(self, is_infer=False):
hidden_layers = envs.get_global_env("hyper_parameters.fc_sizes", None,
self._namespace)
hidden_acts = envs.get_global_env("hyper_parameters.fc_acts", None,
self._namespace)
return [query, doc_pos] + doc_negs
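# Shape note (training mode, hypothetical batch dim): with trigram_d = 1000
# and neg_num = 4 as configured in this commit, this returns six float32
# tensors of shape [-1, 1000]: query, doc_pos, doc_neg_0 ... doc_neg_3.
# In infer mode only [query, doc_pos] are returned (early return above).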
def net(self, inputs, is_infer=False):
def fc(data, hidden_layers, hidden_acts, names):
fc_inputs = [data]
for i in range(len(hidden_layers)):
......@@ -77,71 +71,30 @@ class Model(ModelBase):
fc_inputs.append(out)
return fc_inputs[-1]
query_fc = fc(self.query, hidden_layers, hidden_acts,
query_fc = fc(inputs[0], self.hidden_layers, self.hidden_acts,
['query_l1', 'query_l2', 'query_l3'])
doc_pos_fc = fc(self.doc_pos, hidden_layers, hidden_acts,
doc_pos_fc = fc(inputs[1], self.hidden_layers, self.hidden_acts,
['doc_pos_l1', 'doc_pos_l2', 'doc_pos_l3'])
self.R_Q_D_p = fluid.layers.cos_sim(query_fc, doc_pos_fc)
R_Q_D_p = fluid.layers.cos_sim(query_fc, doc_pos_fc)
if is_infer:
self._infer_results["query_doc_sim"] = R_Q_D_p
return
R_Q_D_ns = []
for i, doc_neg in enumerate(self.doc_negs):
doc_neg_fc_i = fc(doc_neg, hidden_layers, hidden_acts, [
'doc_neg_l1_' + str(i), 'doc_neg_l2_' + str(i),
'doc_neg_l3_' + str(i)
])
for i in range(len(inputs) - 2):
doc_neg_fc_i = fc(
inputs[i + 2], self.hidden_layers, self.hidden_acts, [
'doc_neg_l1_' + str(i), 'doc_neg_l2_' + str(i),
'doc_neg_l3_' + str(i)
])
R_Q_D_ns.append(fluid.layers.cos_sim(query_fc, doc_neg_fc_i))
concat_Rs = fluid.layers.concat(
input=[self.R_Q_D_p] + R_Q_D_ns, axis=-1)
concat_Rs = fluid.layers.concat(input=[R_Q_D_p] + R_Q_D_ns, axis=-1)
prob = fluid.layers.softmax(concat_Rs, axis=1)
hit_prob = fluid.layers.slice(
prob, axes=[0, 1], starts=[0, 0], ends=[4, 1])
loss = -fluid.layers.reduce_sum(fluid.layers.log(hit_prob))
self.avg_cost = fluid.layers.mean(x=loss)
def infer_results(self):
self._infer_results['query_doc_sim'] = self.R_Q_D_p
def avg_loss(self):
self._cost = self.avg_cost
def metrics(self):
self._metrics["LOSS"] = self.avg_cost
def train_net(self):
self.input()
self.net(is_infer=False)
self.avg_loss()
self.metrics()
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.learning_rate",
None, self._namespace)
optimizer = fluid.optimizer.SGD(learning_rate)
return optimizer
def infer_input(self):
TRIGRAM_D = envs.get_global_env("hyper_parameters.TRIGRAM_D", None,
self._namespace)
self.query = fluid.data(
name="query", shape=[-1, TRIGRAM_D], dtype='float32', lod_level=0)
self.doc_pos = fluid.data(
name="doc_pos",
shape=[-1, TRIGRAM_D],
dtype='float32',
lod_level=0)
self._infer_data_var = [self.query, self.doc_pos]
self._infer_data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._infer_data_var,
capacity=64,
use_double_buffer=False,
iterable=False)
def infer_net(self):
self.infer_input()
self.net(is_infer=True)
self.infer_results()
avg_cost = fluid.layers.mean(x=loss)
self._cost = avg_cost
self._metrics["LOSS"] = avg_cost
......@@ -16,7 +16,7 @@ from __future__ import print_function
from paddlerec.core.reader import Reader
class EvaluateReader(Reader):
class TrainReader(Reader):
def init(self):
pass
......
......@@ -11,49 +11,73 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
evaluate:
workspace: "paddlerec.models.match.multiview-simnet"
reader:
batch_size: 2
class: "{workspace}/evaluate_reader.py"
test_data_path: "{workspace}/data/test"
train:
trainer:
# for cluster training
strategy: "async"
# workspace
workspace: "paddlerec.models.match.multiview-simnet"
epochs: 2
workspace: "paddlerec.models.match.multiview-simnet"
# list of dataset
dataset:
- name: dataset_train # name of dataset to distinguish different datasets
batch_size: 2
type: DataLoader # or QueueDataset
data_path: "{workspace}/data/train"
sparse_slots: "1 2 3"
- name: dataset_infer # name
batch_size: 2
type: DataLoader # or QueueDataset
data_path: "{workspace}/data/test"
sparse_slots: "1 2"
reader:
batch_size: 2
class: "{workspace}/reader.py"
train_data_path: "{workspace}/data/train"
dataset_class: "DataLoader"
# hyper parameters of user-defined network
hyper_parameters:
optimizer:
class: Adam
learning_rate: 0.0001
strategy: async
query_encoder: "bow"
title_encoder: "bow"
query_encode_dim: 128
title_encode_dim: 128
sparse_feature_dim: 1000001
embedding_dim: 128
hidden_size: 128
margin: 0.1
model:
models: "{workspace}/model.py"
hyper_parameters:
use_DataLoader: True
query_encoder: "bow"
title_encoder: "bow"
query_encode_dim: 128
title_encode_dim: 128
query_slots: 1
title_slots: 1
sparse_feature_dim: 1000001
embedding_dim: 128
hidden_size: 128
learning_rate: 0.0001
optimizer: adam
# select runner by name
mode: train_runner
# config of each runner.
# runner is a kind of paddle training class, which wraps the train/infer process.
runner:
- name: train_runner
class: single_train
# num of epochs
epochs: 2
# device to run training or infer
device: cpu
save_checkpoint_interval: 1 # save model interval of epochs
save_inference_interval: 1 # save inference
save_checkpoint_path: "increment" # save checkpoint path
save_inference_path: "inference" # save inference path
save_inference_feed_varnames: [] # feed vars of save inference
save_inference_fetch_varnames: [] # fetch vars of save inference
init_model_path: "" # load model path
fetch_period: 1
- name: infer_runner
class: single_infer
# num of epochs
epochs: 1
# device to run training or infer
device: cpu
fetch_period: 1
init_model_path: "increment/0" # load model path
save:
increment:
dirname: "increment"
epoch_interval: 1
save_last: True
inference:
dirname: "inference"
epoch_interval: 1
save_last: True
# runner will run all the phase in each epoch
phase:
- name: phase1
model: "{workspace}/model.py" # user-defined model
dataset_name: dataset_train # select dataset by name
thread_num: 1
#- name: phase2
# model: "{workspace}/model.py" # user-defined model
# dataset_name: dataset_infer # select dataset by name
# thread_num: 1
......@@ -99,143 +99,89 @@ class SimpleEncoderFactory(object):
class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
self.init_config()
def init_config(self):
self._fetch_interval = 1
query_encoder = envs.get_global_env("hyper_parameters.query_encoder",
None, self._namespace)
title_encoder = envs.get_global_env("hyper_parameters.title_encoder",
None, self._namespace)
query_encode_dim = envs.get_global_env(
"hyper_parameters.query_encode_dim", None, self._namespace)
title_encode_dim = envs.get_global_env(
"hyper_parameters.title_encode_dim", None, self._namespace)
query_slots = envs.get_global_env("hyper_parameters.query_slots", None,
self._namespace)
title_slots = envs.get_global_env("hyper_parameters.title_slots", None,
self._namespace)
factory = SimpleEncoderFactory()
self.query_encoders = [
factory.create(query_encoder, query_encode_dim)
for i in range(query_slots)
]
self.title_encoders = [
factory.create(title_encoder, title_encode_dim)
for i in range(title_slots)
]
def _init_hyper_parameters(self):
self.query_encoder = envs.get_global_env(
"hyper_parameters.query_encoder")
self.title_encoder = envs.get_global_env(
"hyper_parameters.title_encoder")
self.query_encode_dim = envs.get_global_env(
"hyper_parameters.query_encode_dim")
self.title_encode_dim = envs.get_global_env(
"hyper_parameters.title_encode_dim")
self.emb_size = envs.get_global_env(
"hyper_parameters.sparse_feature_dim", None, self._namespace)
self.emb_dim = envs.get_global_env("hyper_parameters.embedding_dim",
None, self._namespace)
"hyper_parameters.sparse_feature_dim")
self.emb_dim = envs.get_global_env("hyper_parameters.embedding_dim")
self.emb_shape = [self.emb_size, self.emb_dim]
self.hidden_size = envs.get_global_env("hyper_parameters.hidden_size",
None, self._namespace)
self.margin = 0.1
def input(self, is_train=True):
self.q_slots = [
fluid.data(
name="%d" % i, shape=[None, 1], lod_level=1, dtype='int64')
for i in range(len(self.query_encoders))
]
self.pt_slots = [
fluid.data(
name="%d" % (i + len(self.query_encoders)),
shape=[None, 1],
lod_level=1,
dtype='int64') for i in range(len(self.title_encoders))
]
if is_train == False:
return self.q_slots + self.pt_slots
self.hidden_size = envs.get_global_env("hyper_parameters.hidden_size")
self.margin = envs.get_global_env("hyper_parameters.margin")
self.nt_slots = [
fluid.data(
name="%d" %
(i + len(self.query_encoders) + len(self.title_encoders)),
shape=[None, 1],
lod_level=1,
dtype='int64') for i in range(len(self.title_encoders))
def net(self, input, is_infer=False):
factory = SimpleEncoderFactory()
self.q_slots = self._sparse_data_var[0:1]
self.query_encoders = [
factory.create(self.query_encoder, self.query_encode_dim)
for _ in self.q_slots
]
return self.q_slots + self.pt_slots + self.nt_slots
def train_input(self):
res = self.input()
self._data_var = res
use_dataloader = envs.get_global_env("hyper_parameters.use_DataLoader",
False, self._namespace)
if self._platform != "LINUX" or use_dataloader:
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var,
capacity=256,
use_double_buffer=False,
iterable=False)
def get_acc(self, x, y):
less = tensor.cast(cf.less_than(x, y), dtype='float32')
label_ones = fluid.layers.fill_constant_batch_size_like(
input=x, dtype='float32', shape=[-1, 1], value=1.0)
correct = fluid.layers.reduce_sum(less)
total = fluid.layers.reduce_sum(label_ones)
acc = fluid.layers.elementwise_div(correct, total)
return acc
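# Note: get_acc counts a pair as correct when x < y (here: cos_neg < cos_pos),
# so the returned acc is (#correct pairs) / batch_size for the current batch.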
def net(self):
q_embs = [
fluid.embedding(
input=query, size=self.emb_shape, param_attr="emb")
for query in self.q_slots
]
pt_embs = [
fluid.embedding(
input=title, size=self.emb_shape, param_attr="emb")
for title in self.pt_slots
]
nt_embs = [
fluid.embedding(
input=title, size=self.emb_shape, param_attr="emb")
for title in self.nt_slots
]
# encode each embedding field with encoder
q_encodes = [
self.query_encoders[i].forward(emb) for i, emb in enumerate(q_embs)
]
pt_encodes = [
self.title_encoders[i].forward(emb)
for i, emb in enumerate(pt_embs)
]
nt_encodes = [
self.title_encoders[i].forward(emb)
for i, emb in enumerate(nt_embs)
]
# concat multi view for query, pos_title, neg_title
q_concat = fluid.layers.concat(q_encodes)
pt_concat = fluid.layers.concat(pt_encodes)
nt_concat = fluid.layers.concat(nt_encodes)
# projection of hidden layer
q_hid = fluid.layers.fc(q_concat,
size=self.hidden_size,
param_attr='q_fc.w',
bias_attr='q_fc.b')
self.pt_slots = self._sparse_data_var[1:2]
self.title_encoders = [
factory.create(self.title_encoder, self.title_encode_dim)
]
pt_embs = [
fluid.embedding(
input=title, size=self.emb_shape, param_attr="emb")
for title in self.pt_slots
]
pt_encodes = [
self.title_encoders[i].forward(emb)
for i, emb in enumerate(pt_embs)
]
pt_concat = fluid.layers.concat(pt_encodes)
pt_hid = fluid.layers.fc(pt_concat,
size=self.hidden_size,
param_attr='t_fc.w',
bias_attr='t_fc.b')
# cosine of hidden layers
cos_pos = fluid.layers.cos_sim(q_hid, pt_hid)
if is_infer:
self._infer_results['query_pt_sim'] = cos_pos
return
self.nt_slots = self._sparse_data_var[2:3]
nt_embs = [
fluid.embedding(
input=title, size=self.emb_shape, param_attr="emb")
for title in self.nt_slots
]
nt_encodes = [
self.title_encoders[i].forward(emb)
for i, emb in enumerate(nt_embs)
]
nt_concat = fluid.layers.concat(nt_encodes)
nt_hid = fluid.layers.fc(nt_concat,
size=self.hidden_size,
param_attr='t_fc.w',
bias_attr='t_fc.b')
# cosine of hidden layers
cos_pos = fluid.layers.cos_sim(q_hid, pt_hid)
cos_neg = fluid.layers.cos_sim(q_hid, nt_hid)
# pairwise hinge_loss
......@@ -254,72 +200,16 @@ class Model(ModelBase):
input=loss_part2, shape=[-1, 1], value=0.0, dtype='float32'),
loss_part2)
self.avg_cost = fluid.layers.mean(loss_part3)
self._cost = fluid.layers.mean(loss_part3)
self.acc = self.get_acc(cos_neg, cos_pos)
def avg_loss(self):
self._cost = self.avg_cost
def metrics(self):
self._metrics["loss"] = self.avg_cost
self._metrics["loss"] = self._cost
self._metrics["acc"] = self.acc
def train_net(self):
self.train_input()
self.net()
self.avg_loss()
self.metrics()
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.learning_rate",
None, self._namespace)
optimizer = fluid.optimizer.Adam(learning_rate=learning_rate)
return optimizer
def infer_input(self):
res = self.input(is_train=False)
self._infer_data_var = res
self._infer_data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._infer_data_var,
capacity=64,
use_double_buffer=False,
iterable=False)
def infer_net(self):
self.infer_input()
# lookup embedding for each slot
q_embs = [
fluid.embedding(
input=query, size=self.emb_shape, param_attr="emb")
for query in self.q_slots
]
pt_embs = [
fluid.embedding(
input=title, size=self.emb_shape, param_attr="emb")
for title in self.pt_slots
]
# encode each embedding field with encoder
q_encodes = [
self.query_encoders[i].forward(emb) for i, emb in enumerate(q_embs)
]
pt_encodes = [
self.title_encoders[i].forward(emb)
for i, emb in enumerate(pt_embs)
]
# concat multi view for query, pos_title, neg_title
q_concat = fluid.layers.concat(q_encodes)
pt_concat = fluid.layers.concat(pt_encodes)
# projection of hidden layer
q_hid = fluid.layers.fc(q_concat,
size=self.hidden_size,
param_attr='q_fc.w',
bias_attr='q_fc.b')
pt_hid = fluid.layers.fc(pt_concat,
size=self.hidden_size,
param_attr='t_fc.w',
bias_attr='t_fc.b')
# cosine of hidden layers
cos = fluid.layers.cos_sim(q_hid, pt_hid)
self._infer_results['query_pt_sim'] = cos
def get_acc(self, x, y):
less = tensor.cast(cf.less_than(x, y), dtype='float32')
label_ones = fluid.layers.fill_constant_batch_size_like(
input=x, dtype='float32', shape=[-1, 1], value=1.0)
correct = fluid.layers.reduce_sum(less)
total = fluid.layers.reduce_sum(label_ones)
acc = fluid.layers.elementwise_div(correct, total)
return acc
......@@ -31,9 +31,21 @@
<img align="center" src="../../doc/imgs/multiview-simnet.png">
<p>
## Tutorial
### Training & Inference
## Tutorial (Quick Start)
### Training
```shell
python -m paddlerec.run -m paddlerec.models.match.dssm # dssm
python -m paddlerec.run -m paddlerec.models.match.multiview-simnet # multiview-simnet
```
### Inference
```shell
# In the model's config.yaml, set workspace to the absolute path of the current directory
# In the model's config.yaml, set mode to infer_runner
# Example: mode: train_runner -> mode: infer_runner
# In infer_runner, set class: single_infer
# Switch the phase section to the infer configuration (see the comments in config.yaml)
# After editing config.yaml, run:
python -m paddlerec.run -m ./config.yaml # using dssm as an example
```
......@@ -59,7 +59,7 @@ class TrainReader(dg.MultiSlotDataGenerator):
self.cat_feat_idx_dict_list = [{} for _ in range(26)]
# TODO: set vocabulary dictionary
vocab_dir = "./vocab/"
vocab_dir = "./sample_data/vocab/"
for i in range(26):
lookup_idx = 1 # remain 0 for default value
for line in open(
......
......@@ -12,9 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import yaml, os
from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
import paddle.fluid.incubate.data_generator as dg
try:
import cPickle as pickle
except ImportError:
......@@ -44,7 +46,7 @@ class TrainReader(dg.MultiSlotDataGenerator):
self.continuous_range_ = range(1, 14)
self.categorical_range_ = range(14, 40)
# load preprocessed feature dict
self.feat_dict_name = "aid_data/feat_dict_10.pkl2"
self.feat_dict_name = "sample_data/feat_dict_10.pkl2"
self.feat_dict_ = pickle.load(open(self.feat_dict_name, 'rb'))
def _process_line(self, line):
......
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# global settings
debug: false
workspace: "paddlerec.models.rank.deepfm"
dataset:
- name: train_sample
type: QueueDataset
batch_size: 5
data_path: "{workspace}/data/sample_data/train"
sparse_slots: "label feat_idx"
dense_slots: "feat_value:39"
- name: infer_sample
type: QueueDataset
batch_size: 5
data_path: "{workspace}/data/sample_data/train"
sparse_slots: "label feat_idx"
dense_slots: "feat_value:39"
hyper_parameters:
optimizer:
class: SGD
learning_rate: 0.0001
sparse_feature_number: 1086460
sparse_feature_dim: 9
num_field: 39
reg: 0.001
mode: train_runner
# if infer, change mode to "infer_runner" and change phase to "infer_phase"
runner:
- name: train_runner
trainer_class: single_train
epochs: 2
device: cpu
init_model_path: ""
save_checkpoint_interval: 1
save_inference_interval: 1
save_checkpoint_path: "increment"
save_inference_path: "inference"
print_interval: 1
- name: infer_runner
trainer_class: single_infer
epochs: 1
device: cpu
init_model_path: "increment/0"
print_interval: 1
phase:
- name: phase1
model: "{workspace}/model.py"
dataset_name: train_sample
thread_num: 1
#- name: infer_phase
# model: "{workspace}/model.py"
# dataset_name: infer_sample
# thread_num: 1
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import sys
LOCAL_PATH = os.path.dirname(os.path.abspath(__file__))
TOOLS_PATH = os.path.join(LOCAL_PATH, "..", "..", "tools")
sys.path.append(TOOLS_PATH)
from paddlerec.tools.tools import download_file_and_uncompress, download_file
if __name__ == '__main__':
url = "https://s3-eu-west-1.amazonaws.com/kaggle-display-advertising-challenge-dataset/dac.tar.gz"
url2 = "https://paddlerec.bj.bcebos.com/deepfm%2Ffeat_dict_10.pkl2"
print("download and extract starting...")
download_file_and_uncompress(url)
download_file(url2, "./aid_data/feat_dict_10.pkl2", True)
print("download and extract finished")
print("preprocessing...")
os.system("python preprocess.py")
print("preprocess done")
shutil.rmtree("raw_data")
print("done")
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import os
from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
import paddle.fluid.incubate.data_generator as dg
try:
import cPickle as pickle
except ImportError:
import pickle
class TrainReader(dg.MultiSlotDataGenerator):
def __init__(self, config):
dg.MultiSlotDataGenerator.__init__(self)
if os.path.isfile(config):
with open(config, 'r') as rb:
_config = yaml.load(rb.read(), Loader=yaml.FullLoader)
else:
raise ValueError("reader config only support yaml")
def init(self):
self.cont_min_ = [0, -3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
self.cont_max_ = [
5775, 257675, 65535, 969, 23159456, 431037, 56311, 6047, 29019, 46,
231, 4008, 7393
]
self.cont_diff_ = [
self.cont_max_[i] - self.cont_min_[i]
for i in range(len(self.cont_min_))
]
self.continuous_range_ = range(1, 14)
self.categorical_range_ = range(14, 40)
# load preprocessed feature dict
self.feat_dict_name = "sample_data/feat_dict_10.pkl2"
self.feat_dict_ = pickle.load(open(self.feat_dict_name, 'rb'))
def _process_line(self, line):
features = line.rstrip('\n').split('\t')
feat_idx = []
feat_value = []
for idx in self.continuous_range_:
if features[idx] == '':
feat_idx.append(0)
feat_value.append(0.0)
else:
feat_idx.append(self.feat_dict_[idx])
feat_value.append(
(float(features[idx]) - self.cont_min_[idx - 1]) /
self.cont_diff_[idx - 1])
for idx in self.categorical_range_:
if features[idx] == '' or features[idx] not in self.feat_dict_:
feat_idx.append(0)
feat_value.append(0.0)
else:
feat_idx.append(self.feat_dict_[features[idx]])
feat_value.append(1.0)
label = [int(features[0])]
return feat_idx, feat_value, label
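# Worked example (hypothetical input): for continuous field idx = 1 with
# features[1] == "5", cont_min_[0] == 0 and cont_diff_[0] == 5775, so this
# emits feat_idx = self.feat_dict_[1] (== 1 after preprocess.py) and
# feat_value = (5.0 - 0) / 5775 ≈ 0.000866.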
def generate_sample(self, line):
"""
Read the data line by line and process it as a dictionary
"""
def data_iter():
feat_idx, feat_value, label = self._process_line(line)
s = ""
for i in [('feat_idx', feat_idx), ('feat_value', feat_value),
('label', label)]:
k = i[0]
v = i[1]
for j in v:
s += " " + k + ":" + str(j)
print(s.strip())
yield None
return data_iter
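# Emitted line format (one sample per line, consumed as slot data):
# 39 "feat_idx:<id>" tokens, then 39 "feat_value:<v>" tokens, then
# "label:<y>" -- matching the sample_data lines at the end of this commit.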
reader = TrainReader(
"../config.yaml") # run this file in original folder to find config.yaml
reader.init()
reader.run_from_stdin()
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import numpy
from collections import Counter
import shutil
import pickle
def get_raw_data():
if not os.path.isdir('raw_data'):
os.mkdir('raw_data')
fin = open('train.txt', 'r')
fout = open('raw_data/part-0', 'w')
for line_idx, line in enumerate(fin):
if line_idx % 200000 == 0 and line_idx != 0:
fout.close()
cur_part_idx = int(line_idx / 200000)
fout = open('raw_data/part-' + str(cur_part_idx), 'w')
fout.write(line)
fout.close()
fin.close()
def split_data():
split_rate_ = 0.9
dir_train_file_idx_ = 'aid_data/train_file_idx.txt'
filelist_ = [
'raw_data/part-%d' % x for x in range(len(os.listdir('raw_data')))
]
if not os.path.exists(dir_train_file_idx_):
train_file_idx = list(
numpy.random.choice(
len(filelist_), int(len(filelist_) * split_rate_), False))
with open(dir_train_file_idx_, 'w') as fout:
fout.write(str(train_file_idx))
else:
with open(dir_train_file_idx_, 'r') as fin:
train_file_idx = eval(fin.read())
for idx in range(len(filelist_)):
if idx in train_file_idx:
shutil.move(filelist_[idx], 'train_data')
else:
shutil.move(filelist_[idx], 'test_data')
def get_feat_dict():
freq_ = 10
dir_feat_dict_ = 'aid_data/feat_dict_' + str(freq_) + '.pkl2'
continuous_range_ = range(1, 14)
categorical_range_ = range(14, 40)
if not os.path.exists(dir_feat_dict_):
# Count the number of occurrences of discrete features
feat_cnt = Counter()
with open('train.txt', 'r') as fin:
for line_idx, line in enumerate(fin):
if line_idx % 100000 == 0:
print('generating feature dict', line_idx / 45000000)
features = line.rstrip('\n').split('\t')
for idx in categorical_range_:
if features[idx] == '': continue
feat_cnt.update([features[idx]])
# Only retain discrete features with high frequency
dis_feat_set = set()
for feat, ot in feat_cnt.items():
if ot >= freq_:
dis_feat_set.add(feat)
# Create a dictionary for continuous and discrete features
feat_dict = {}
tc = 1
# Continuous features
for idx in continuous_range_:
feat_dict[idx] = tc
tc += 1
for feat in dis_feat_set:
feat_dict[feat] = tc
tc += 1
# Save dictionary
with open(dir_feat_dict_, 'wb') as fout:
pickle.dump(feat_dict, fout, protocol=2)
print('args.num_feat ', len(feat_dict) + 1)
if __name__ == '__main__':
if not os.path.isdir('train_data'):
os.mkdir('train_data')
if not os.path.isdir('test_data'):
os.mkdir('test_data')
if not os.path.isdir('aid_data'):
os.mkdir('aid_data')
get_raw_data()
split_data()
get_feat_dict()
print('Done!')
python download_preprocess.py
mkdir slot_train_data
for i in `ls ./train_data`
do
cat train_data/$i | python get_slot_data.py > slot_train_data/$i
done
mkdir slot_test_data
for i in `ls ./test_data`
do
cat test_data/$i | python get_slot_data.py > slot_test_data/$i
done
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:695357 feat_idx:655161 feat_idx:0 feat_idx:1075467 feat_idx:314332 feat_idx:615411 feat_idx:733564 feat_idx:795081 feat_idx:148475 feat_idx:123424 feat_idx:582322 feat_idx:0 feat_idx:1082305 feat_idx:288355 feat_idx:328646 feat_idx:756244 feat_idx:13161 feat_idx:134834 feat_idx:734534 feat_idx:1047606 feat_idx:626828 feat_idx:0 feat_idx:476211 feat_idx:819217 feat_idx:502861 feat_idx:767167 feat_value:0.00017316017316 feat_value:1.55232499476e-05 feat_value:7.62951094835e-05 feat_value:0.0 feat_value:5.96732496653e-05 feat_value:9.27994580512e-06 feat_value:0.000266377794747 feat_value:0.000330742516951 feat_value:0.00623729280816 feat_value:0.0217391304348 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.000270526173407 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:695357 feat_idx:328856 feat_idx:583609 feat_idx:356189 feat_idx:314332 feat_idx:404876 feat_idx:233441 feat_idx:144963 feat_idx:148475 feat_idx:954707 feat_idx:778340 feat_idx:598842 feat_idx:701804 feat_idx:223357 feat_idx:310528 feat_idx:805012 feat_idx:599055 feat_idx:683739 feat_idx:734534 feat_idx:94311 feat_idx:135625 feat_idx:0 feat_idx:476211 feat_idx:737768 feat_idx:502861 feat_idx:618666 feat_value:0.00034632034632 feat_value:1.16424374607e-05 feat_value:0.000671396963455 feat_value:0.00103199174407 feat_value:4.40424852812e-06 feat_value:1.85598916102e-05 feat_value:3.55170392996e-05 feat_value:0.000330742516951 feat_value:0.000137840725042 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000541052346815 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:125230 feat_idx:244091 feat_idx:428972 feat_idx:323226 feat_idx:314332 feat_idx:615411 feat_idx:655488 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:989454 feat_idx:789125 feat_idx:274685 feat_idx:59528 feat_idx:142028 feat_idx:791919 feat_idx:339114 feat_idx:12934 feat_idx:0 feat_idx:0 feat_idx:128761 feat_idx:925828 feat_idx:476211 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.00034632034632 feat_value:1.16424374607e-05 feat_value:1.52590218967e-05 feat_value:0.0144478844169 feat_value:3.31182217752e-05 feat_value:0.000206478794164 feat_value:7.10340785992e-05 feat_value:0.000330742516951 feat_value:0.00844274440884 feat_value:0.0217391304348 feat_value:0.012987012987 feat_value:0.000748502994012 feat_value:0.00608683890166 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:695357 feat_idx:541890 feat_idx:0 feat_idx:1012660 feat_idx:314332 feat_idx:404876 feat_idx:1742 feat_idx:144963 feat_idx:148475 feat_idx:456917 feat_idx:220560 feat_idx:0 feat_idx:480237 feat_idx:59528 feat_idx:402233 feat_idx:0 feat_idx:763481 feat_idx:885529 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:476211 feat_idx:68781 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.00347720798826 feat_value:0.0 feat_value:0.0 feat_value:0.000189641760152 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:518052 feat_idx:52223 feat_idx:0 feat_idx:610088 feat_idx:314332 feat_idx:85900 feat_idx:253972 feat_idx:144963 feat_idx:148475 feat_idx:581401 feat_idx:921618 feat_idx:374454 feat_idx:576858 feat_idx:288355 feat_idx:526081 feat_idx:597631 feat_idx:763481 feat_idx:468634 feat_idx:0 feat_idx:0 feat_idx:360559 feat_idx:0 feat_idx:122096 feat_idx:604513 feat_idx:0 feat_idx:0 feat_value:0.000519480519481 feat_value:7.7616249738e-06 feat_value:0.0 feat_value:0.0 feat_value:8.63578142768e-08 feat_value:0.0 feat_value:5.32755589494e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:268086 feat_idx:844726 feat_idx:589259 feat_idx:34922 feat_idx:943087 feat_idx:831162 feat_idx:687817 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:160002 feat_idx:879363 feat_idx:979424 feat_idx:59528 feat_idx:844314 feat_idx:974289 feat_idx:197974 feat_idx:82573 feat_idx:0 feat_idx:0 feat_idx:4620 feat_idx:811639 feat_idx:441547 feat_idx:578537 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:7.7616249738e-06 feat_value:0.0 feat_value:0.0 feat_value:0.000553726305143 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000206761087563 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:0 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:74940 feat_idx:503640 feat_idx:888356 feat_idx:507702 feat_idx:943087 feat_idx:404876 feat_idx:1081499 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:202629 feat_idx:486504 feat_idx:981942 feat_idx:59528 feat_idx:404100 feat_idx:210897 feat_idx:197974 feat_idx:821035 feat_idx:0 feat_idx:0 feat_idx:627303 feat_idx:0 feat_idx:637620 feat_idx:409520 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:3.05180437934e-05 feat_value:0.0 feat_value:0.000136790777814 feat_value:0.0 feat_value:0.0 feat_value:0.000165371258475 feat_value:6.89203625211e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:695357 feat_idx:541890 feat_idx:0 feat_idx:175574 feat_idx:1022525 feat_idx:85900 feat_idx:114990 feat_idx:795081 feat_idx:148475 feat_idx:391150 feat_idx:172637 feat_idx:0 feat_idx:831202 feat_idx:59528 feat_idx:402233 feat_idx:0 feat_idx:13161 feat_idx:885529 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:68781 feat_idx:0 feat_idx:0 feat_value:0.00017316017316 feat_value:2.71656874083e-05 feat_value:3.05180437934e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.77585196498e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:585875 feat_idx:460446 feat_idx:323226 feat_idx:314332 feat_idx:615411 feat_idx:453185 feat_idx:144963 feat_idx:148475 feat_idx:995582 feat_idx:409958 feat_idx:824386 feat_idx:745363 feat_idx:223357 feat_idx:782190 feat_idx:499188 feat_idx:13161 feat_idx:826986 feat_idx:0 feat_idx:0 feat_idx:335421 feat_idx:0 feat_idx:122096 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000182398186884 feat_value:6.10360875868e-05 feat_value:0.00825593395253 feat_value:0.000820831024701 feat_value:0.000577676626369 feat_value:0.000497238550194 feat_value:0.00512650901273 feat_value:0.00485888555774 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.00108210469363 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:952850 feat_idx:444926 feat_idx:327161 feat_idx:314332 feat_idx:0 feat_idx:48165 feat_idx:144963 feat_idx:148475 feat_idx:408072 feat_idx:220560 feat_idx:313350 feat_idx:480237 feat_idx:59528 feat_idx:767941 feat_idx:274209 feat_idx:587215 feat_idx:49542 feat_idx:0 feat_idx:0 feat_idx:918027 feat_idx:0 feat_idx:122096 feat_idx:210681 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000147470874502 feat_value:0.0 feat_value:0.00103199174407 feat_value:0.00145672679013 feat_value:4.87197154769e-05 feat_value:1.77585196498e-05 feat_value:0.000330742516951 feat_value:0.000103380543782 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000135263086704 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:323969 feat_idx:1007141 feat_idx:1053419 feat_idx:314332 feat_idx:615411 feat_idx:926319 feat_idx:144963 feat_idx:31348 feat_idx:754940 feat_idx:35969 feat_idx:469428 feat_idx:394416 feat_idx:223357 feat_idx:878804 feat_idx:9647 feat_idx:197974 feat_idx:316785 feat_idx:734534 feat_idx:94311 feat_idx:409871 feat_idx:0 feat_idx:476211 feat_idx:755653 feat_idx:522503 feat_idx:379855 feat_value:0.0 feat_value:1.94040624345e-05 feat_value:0.00964370183871 feat_value:0.0 feat_value:0.00245126655825 feat_value:0.0 feat_value:0.0 feat_value:0.000826856292376 feat_value:0.00223991178194 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000270526173407 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:985125 feat_idx:0 feat_idx:0 feat_idx:360051 feat_idx:0 feat_idx:304911 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:887175 feat_idx:0 feat_idx:701330 feat_idx:59528 feat_idx:670083 feat_idx:0 feat_idx:587215 feat_idx:334296 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:0 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:3.49273123821e-05 feat_value:9.15541313802e-05 feat_value:0.0061919504644 feat_value:1.81783199053e-05 feat_value:0.000252878523189 feat_value:1.77585196498e-05 feat_value:0.00115759880933 feat_value:0.00368723939488 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000811578520222 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:685954 feat_idx:439682 feat_idx:0 feat_idx:983567 feat_idx:314332 feat_idx:404876 feat_idx:909239 feat_idx:795081 feat_idx:148475 feat_idx:36347 feat_idx:663689 feat_idx:0 feat_idx:398775 feat_idx:59528 feat_idx:996203 feat_idx:150509 feat_idx:13161 feat_idx:183924 feat_idx:0 feat_idx:0 feat_idx:379144 feat_idx:0 feat_idx:122096 feat_idx:604513 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:7.7616249738e-06 feat_value:0.0 feat_value:0.0 feat_value:6.32570989578e-05 feat_value:0.0 feat_value:0.000301894834047 feat_value:0.0 feat_value:0.000137840725042 feat_value:0.0 feat_value:0.017316017316 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:181401 feat_idx:702327 feat_idx:0 feat_idx:334017 feat_idx:314332 feat_idx:0 feat_idx:191120 feat_idx:299805 feat_idx:148475 feat_idx:442554 feat_idx:480141 feat_idx:0 feat_idx:16042 feat_idx:288355 feat_idx:928072 feat_idx:0 feat_idx:599055 feat_idx:91753 feat_idx:297696 feat_idx:330429 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:590863 feat_idx:525837 feat_idx:413413 feat_value:0.0 feat_value:1.94040624345e-05 feat_value:0.000167849240864 feat_value:0.00515995872033 feat_value:0.000443101945054 feat_value:7.88795393435e-05 feat_value:3.55170392996e-05 feat_value:0.000661485033901 feat_value:0.000172300906303 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000676315433518 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:1
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:506931 feat_idx:655161 feat_idx:0 feat_idx:49997 feat_idx:1076285 feat_idx:85900 feat_idx:79619 feat_idx:144963 feat_idx:148475 feat_idx:817613 feat_idx:933612 feat_idx:0 feat_idx:733763 feat_idx:288355 feat_idx:565066 feat_idx:310463 feat_idx:854924 feat_idx:378884 feat_idx:734534 feat_idx:1047606 feat_idx:884047 feat_idx:0 feat_idx:241528 feat_idx:40100 feat_idx:502861 feat_idx:752176 feat_value:0.0 feat_value:0.000209563874293 feat_value:0.00128175783932 feat_value:0.00412796697626 feat_value:0.000156868969634 feat_value:6.03196477333e-05 feat_value:1.77585196498e-05 feat_value:0.000661485033901 feat_value:0.000275681450084 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000541052346815 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:328239 feat_idx:910743 feat_idx:915614 feat_idx:360051 feat_idx:615411 feat_idx:49489 feat_idx:1007823 feat_idx:148475 feat_idx:754940 feat_idx:224964 feat_idx:235573 feat_idx:226878 feat_idx:693306 feat_idx:277510 feat_idx:277345 feat_idx:197974 feat_idx:969807 feat_idx:0 feat_idx:0 feat_idx:539201 feat_idx:0 feat_idx:476211 feat_idx:650546 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.94040624345e-05 feat_value:1.52590218967e-05 feat_value:0.0185758513932 feat_value:0.000874588764088 feat_value:0.0 feat_value:0.0 feat_value:0.000165371258475 feat_value:0.0450049967263 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.00270526173407 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:695357 feat_idx:211148 feat_idx:0 feat_idx:0 feat_idx:943087 feat_idx:615411 feat_idx:98894 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:683585 feat_idx:0 feat_idx:460786 feat_idx:59528 feat_idx:883086 feat_idx:0 feat_idx:587215 feat_idx:197941 feat_idx:734534 feat_idx:1047606 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:537421 feat_idx:24736 feat_idx:962390 feat_value:0.00017316017316 feat_value:0.00384200436203 feat_value:0.0 feat_value:0.00206398348813 feat_value:4.53378524953e-06 feat_value:4.63997290256e-06 feat_value:1.77585196498e-05 feat_value:0.000330742516951 feat_value:6.89203625211e-05 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000270526173407 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:1
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:518052 feat_idx:894672 feat_idx:521506 feat_idx:105841 feat_idx:360051 feat_idx:108674 feat_idx:642013 feat_idx:144963 feat_idx:148475 feat_idx:165260 feat_idx:212992 feat_idx:1009370 feat_idx:775147 feat_idx:223357 feat_idx:274230 feat_idx:833849 feat_idx:13161 feat_idx:57230 feat_idx:0 feat_idx:0 feat_idx:844134 feat_idx:925828 feat_idx:122096 feat_idx:141692 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:0.0 feat_value:0.0 feat_value:0.000716640321776 feat_value:0.00129223245336 feat_value:5.32755589494e-05 feat_value:0.000826856292376 feat_value:0.00423860229505 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000135263086704 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:328856 feat_idx:506639 feat_idx:78755 feat_idx:463568 feat_idx:108674 feat_idx:152478 feat_idx:888742 feat_idx:148475 feat_idx:14838 feat_idx:682657 feat_idx:993166 feat_idx:502067 feat_idx:288355 feat_idx:190674 feat_idx:472919 feat_idx:13161 feat_idx:683739 feat_idx:734534 feat_idx:1047606 feat_idx:768815 feat_idx:0 feat_idx:122096 feat_idx:1010006 feat_idx:522503 feat_idx:963757 feat_value:0.0 feat_value:0.000104781937146 feat_value:6.10360875868e-05 feat_value:0.00206398348813 feat_value:8.87758330766e-05 feat_value:2.78398374153e-05 feat_value:0.000106551117899 feat_value:0.00165371258475 feat_value:0.00286019504463 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000270526173407 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:738089 feat_idx:606995 feat_idx:964206 feat_idx:269737 feat_idx:360051 feat_idx:85900 feat_idx:608469 feat_idx:144963 feat_idx:148475 feat_idx:307543 feat_idx:405000 feat_idx:65140 feat_idx:749745 feat_idx:218723 feat_idx:686050 feat_idx:594443 feat_idx:13161 feat_idx:96125 feat_idx:0 feat_idx:0 feat_idx:946269 feat_idx:0 feat_idx:943262 feat_idx:395579 feat_idx:0 feat_idx:0 feat_value:0.00121212121212 feat_value:0.000407485311125 feat_value:0.0 feat_value:0.0030959752322 feat_value:3.3679547568e-05 feat_value:3.47997967692e-05 feat_value:0.000124309637549 feat_value:0.00248056887713 feat_value:0.000516902718908 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:906706 feat_idx:439682 feat_idx:4257 feat_idx:430841 feat_idx:314332 feat_idx:615411 feat_idx:998076 feat_idx:66687 feat_idx:148475 feat_idx:754940 feat_idx:648531 feat_idx:779745 feat_idx:718037 feat_idx:288355 feat_idx:360204 feat_idx:944849 feat_idx:13161 feat_idx:631544 feat_idx:0 feat_idx:0 feat_idx:177363 feat_idx:0 feat_idx:122096 feat_idx:1072137 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000194040624345 feat_value:0.0 feat_value:0.0 feat_value:0.000276301826779 feat_value:8.81594851486e-05 feat_value:0.000337411873346 feat_value:0.00165371258475 feat_value:0.00492780592026 feat_value:0.0 feat_value:0.04329004329 feat_value:0.0 feat_value:0.000811578520222 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:704711 feat_idx:0 feat_idx:388090 feat_idx:314332 feat_idx:615411 feat_idx:595457 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:298800 feat_idx:0 feat_idx:349549 feat_idx:59528 feat_idx:28300 feat_idx:0 feat_idx:587215 feat_idx:750233 feat_idx:832803 feat_idx:330429 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:612991 feat_idx:502861 feat_idx:691775 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:0.00122072175174 feat_value:0.0 feat_value:7.97946203918e-05 feat_value:0.000665836111517 feat_value:1.77585196498e-05 feat_value:0.000661485033901 feat_value:0.00158516833799 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000541052346815 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:439682 feat_idx:998375 feat_idx:373577 feat_idx:314332 feat_idx:108674 feat_idx:76428 feat_idx:66687 feat_idx:148475 feat_idx:636407 feat_idx:840978 feat_idx:221841 feat_idx:110276 feat_idx:223357 feat_idx:104371 feat_idx:535541 feat_idx:599055 feat_idx:892333 feat_idx:0 feat_idx:0 feat_idx:519737 feat_idx:0 feat_idx:476211 feat_idx:26849 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:0.000213626306554 feat_value:0.0061919504644 feat_value:0.000307951965711 feat_value:0.000396717683169 feat_value:3.55170392996e-05 feat_value:0.000330742516951 feat_value:0.000206761087563 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000811578520222 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:507093 feat_idx:28898 feat_idx:1067105 feat_idx:314332 feat_idx:615411 feat_idx:875540 feat_idx:144963 feat_idx:148475 feat_idx:801559 feat_idx:965246 feat_idx:93410 feat_idx:648840 feat_idx:59528 feat_idx:63243 feat_idx:1041736 feat_idx:763481 feat_idx:206486 feat_idx:0 feat_idx:0 feat_idx:623203 feat_idx:0 feat_idx:377126 feat_idx:1017627 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:4.65697498428e-05 feat_value:0.00013733119707 feat_value:0.0175438596491 feat_value:0.000508388452648 feat_value:0.0 feat_value:0.0 feat_value:0.00380353894493 feat_value:0.00441090320135 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.00229947247396 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:506931 feat_idx:195832 feat_idx:460446 feat_idx:323226 feat_idx:314332 feat_idx:615411 feat_idx:414506 feat_idx:144963 feat_idx:148475 feat_idx:127380 feat_idx:385804 feat_idx:824386 feat_idx:203621 feat_idx:59528 feat_idx:631370 feat_idx:499188 feat_idx:587215 feat_idx:855342 feat_idx:0 feat_idx:0 feat_idx:335421 feat_idx:969590 feat_idx:476211 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:3.05180437934e-05 feat_value:0.0 feat_value:0.000267277435187 feat_value:0.000194878861907 feat_value:1.77585196498e-05 feat_value:0.00446502397883 feat_value:0.0024466728695 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:506931 feat_idx:704711 feat_idx:701980 feat_idx:42486 feat_idx:314332 feat_idx:0 feat_idx:786460 feat_idx:144963 feat_idx:148475 feat_idx:466556 feat_idx:775018 feat_idx:404666 feat_idx:1065844 feat_idx:39086 feat_idx:992008 feat_idx:506428 feat_idx:599055 feat_idx:750233 feat_idx:256242 feat_idx:330429 feat_idx:218251 feat_idx:0 feat_idx:122096 feat_idx:221229 feat_idx:502861 feat_idx:24246 feat_value:0.0 feat_value:2.71656874083e-05 feat_value:0.000244144350347 feat_value:0.0 feat_value:0.000255835024795 feat_value:4.63997290256e-06 feat_value:3.55170392996e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:518052 feat_idx:1049859 feat_idx:0 feat_idx:1096 feat_idx:314332 feat_idx:615411 feat_idx:714816 feat_idx:795081 feat_idx:148475 feat_idx:900313 feat_idx:855314 feat_idx:0 feat_idx:603555 feat_idx:59528 feat_idx:211559 feat_idx:0 feat_idx:379814 feat_idx:311468 feat_idx:734534 feat_idx:330429 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:383498 feat_idx:917031 feat_idx:879752 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:0.000305180437934 feat_value:0.0165118679051 feat_value:6.68409482503e-05 feat_value:0.000215758739969 feat_value:0.000745857825292 feat_value:0.00529188027121 feat_value:0.0314276853096 feat_value:0.0 feat_value:0.0649350649351 feat_value:0.000249500998004 feat_value:0.00216420938726 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:1
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:695357 feat_idx:439682 feat_idx:433159 feat_idx:217415 feat_idx:360051 feat_idx:615411 feat_idx:235834 feat_idx:144963 feat_idx:148475 feat_idx:343946 feat_idx:489781 feat_idx:168412 feat_idx:950158 feat_idx:59528 feat_idx:419036 feat_idx:782554 feat_idx:854924 feat_idx:502656 feat_idx:0 feat_idx:0 feat_idx:1082526 feat_idx:0 feat_idx:476211 feat_idx:972567 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:8.92586871988e-05 feat_value:3.05180437934e-05 feat_value:0.00206398348813 feat_value:0.000310369984511 feat_value:0.000394397696717 feat_value:3.55170392996e-05 feat_value:0.000496113775426 feat_value:0.000827044350253 feat_value:0.0 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.000270526173407 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:695357 feat_idx:983083 feat_idx:555506 feat_idx:311508 feat_idx:360051 feat_idx:831162 feat_idx:662893 feat_idx:144963 feat_idx:148475 feat_idx:453404 feat_idx:437228 feat_idx:866349 feat_idx:987534 feat_idx:223357 feat_idx:872276 feat_idx:719825 feat_idx:13161 feat_idx:146364 feat_idx:0 feat_idx:0 feat_idx:1083188 feat_idx:0 feat_idx:122096 feat_idx:33938 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000314345811439 feat_value:3.05180437934e-05 feat_value:0.015479876161 feat_value:0.000186144268674 feat_value:0.000197198848359 feat_value:7.10340785992e-05 feat_value:0.00297668265255 feat_value:0.00792584168993 feat_value:0.0 feat_value:0.012987012987 feat_value:0.0 feat_value:0.00202894630055 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:638696 feat_idx:232393 feat_idx:537609 feat_idx:314332 feat_idx:85900 feat_idx:158968 feat_idx:144963 feat_idx:148475 feat_idx:411650 feat_idx:220560 feat_idx:633471 feat_idx:480237 feat_idx:39086 feat_idx:611928 feat_idx:584121 feat_idx:13161 feat_idx:747604 feat_idx:0 feat_idx:0 feat_idx:204145 feat_idx:0 feat_idx:476211 feat_idx:485685 feat_idx:0 feat_idx:0 feat_value:0.000519480519481 feat_value:1.16424374607e-05 feat_value:6.10360875868e-05 feat_value:0.0134158926729 feat_value:9.672075199e-06 feat_value:6.49596206358e-05 feat_value:5.32755589494e-05 feat_value:0.00578799404663 feat_value:0.000930424894035 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.00175842012715 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:518052 feat_idx:245713 feat_idx:964221 feat_idx:976933 feat_idx:360051 feat_idx:404876 feat_idx:469669 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:496768 feat_idx:978607 feat_idx:788967 feat_idx:59528 feat_idx:717827 feat_idx:227446 feat_idx:13161 feat_idx:251726 feat_idx:0 feat_idx:0 feat_idx:2400 feat_idx:0 feat_idx:476211 feat_idx:942610 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.00108662749633 feat_value:0.0 feat_value:0.0030959752322 feat_value:0.000315983242439 feat_value:5.56796748307e-05 feat_value:0.000106551117899 feat_value:0.000496113775426 feat_value:0.00337709776353 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:181401 feat_idx:569676 feat_idx:460446 feat_idx:323226 feat_idx:314332 feat_idx:404876 feat_idx:286011 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:966589 feat_idx:824386 feat_idx:429895 feat_idx:863222 feat_idx:406685 feat_idx:499188 feat_idx:197974 feat_idx:251433 feat_idx:0 feat_idx:0 feat_idx:335421 feat_idx:0 feat_idx:321110 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:7.7616249738e-06 feat_value:0.0 feat_value:0.0 feat_value:0.000213994663778 feat_value:0.0 feat_value:0.0 feat_value:0.00611873656359 feat_value:0.00334263758227 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:0 feat_idx:268086 feat_idx:83142 feat_idx:288162 feat_idx:1060646 feat_idx:360051 feat_idx:615411 feat_idx:714816 feat_idx:144963 feat_idx:148475 feat_idx:138291 feat_idx:855314 feat_idx:165496 feat_idx:603555 feat_idx:59528 feat_idx:224690 feat_idx:316295 feat_idx:854924 feat_idx:257823 feat_idx:0 feat_idx:0 feat_idx:704548 feat_idx:0 feat_idx:122096 feat_idx:782694 feat_idx:0 feat_idx:0 feat_value:0.00017316017316 feat_value:1.16424374607e-05 feat_value:1.52590218967e-05 feat_value:0.0 feat_value:6.16163004865e-05 feat_value:6.95995935384e-06 feat_value:0.000284136314397 feat_value:0.00181908384323 feat_value:0.00172300906303 feat_value:0.0 feat_value:0.00865800865801 feat_value:0.000249500998004 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:563443 feat_idx:51995 feat_idx:49997 feat_idx:314332 feat_idx:0 feat_idx:595457 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:188162 feat_idx:721984 feat_idx:349549 feat_idx:199920 feat_idx:180762 feat_idx:310463 feat_idx:197974 feat_idx:319863 feat_idx:734534 feat_idx:330429 feat_idx:467968 feat_idx:0 feat_idx:122096 feat_idx:40100 feat_idx:502861 feat_idx:777305 feat_value:0.000692640692641 feat_value:1.16424374607e-05 feat_value:0.000839246204318 feat_value:0.00825593395253 feat_value:3.70906812319e-05 feat_value:3.01598238666e-05 feat_value:7.10340785992e-05 feat_value:0.0019844551017 feat_value:0.000447982356387 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.00108210469363 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:281207 feat_idx:430926 feat_idx:909211 feat_idx:314332 feat_idx:0 feat_idx:928918 feat_idx:144963 feat_idx:148475 feat_idx:904134 feat_idx:535335 feat_idx:327558 feat_idx:639245 feat_idx:223357 feat_idx:18380 feat_idx:471487 feat_idx:13161 feat_idx:188469 feat_idx:0 feat_idx:0 feat_idx:500616 feat_idx:0 feat_idx:122096 feat_idx:657898 feat_idx:0 feat_idx:0 feat_value:0.00017316017316 feat_value:0.00101677287157 feat_value:1.52590218967e-05 feat_value:0.00103199174407 feat_value:2.15894535692e-07 feat_value:2.31998645128e-06 feat_value:0.000106551117899 feat_value:0.000165371258475 feat_value:3.44601812606e-05 feat_value:0.0217391304348 feat_value:0.012987012987 feat_value:0.0 feat_value:0.000135263086704 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:87449 feat_idx:691591 feat_idx:466372 feat_idx:360051 feat_idx:108674 feat_idx:537959 feat_idx:144963 feat_idx:148475 feat_idx:882632 feat_idx:1037965 feat_idx:783604 feat_idx:521533 feat_idx:59528 feat_idx:185313 feat_idx:972394 feat_idx:339114 feat_idx:644343 feat_idx:603603 feat_idx:330429 feat_idx:722203 feat_idx:925828 feat_idx:377126 feat_idx:221229 feat_idx:343446 feat_idx:24246 feat_value:0.0 feat_value:0.000504505623297 feat_value:1.52590218967e-05 feat_value:0.0030959752322 feat_value:7.26701007139e-05 feat_value:4.40797425743e-05 feat_value:0.000461721510895 feat_value:0.00281131139408 feat_value:0.0163685860988 feat_value:0.0 feat_value:0.038961038961 feat_value:0.0 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:1
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:87868 feat_idx:585875 feat_idx:143202 feat_idx:105841 feat_idx:314332 feat_idx:615411 feat_idx:685294 feat_idx:795081 feat_idx:148475 feat_idx:754940 feat_idx:853239 feat_idx:1062322 feat_idx:529712 feat_idx:223357 feat_idx:715789 feat_idx:334774 feat_idx:197974 feat_idx:339749 feat_idx:0 feat_idx:0 feat_idx:540979 feat_idx:0 feat_idx:122096 feat_idx:141692 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0010041254855 feat_value:0.0 feat_value:0.0 feat_value:0.000165371258475 feat_value:0.00251559323202 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:154881 feat_idx:664380 feat_idx:0 feat_idx:470673 feat_idx:314332 feat_idx:108674 feat_idx:610634 feat_idx:144963 feat_idx:148475 feat_idx:125722 feat_idx:153800 feat_idx:0 feat_idx:297062 feat_idx:223357 feat_idx:712970 feat_idx:124318 feat_idx:13161 feat_idx:521259 feat_idx:734534 feat_idx:330429 feat_idx:0 feat_idx:969590 feat_idx:217677 feat_idx:643925 feat_idx:24736 feat_idx:941404 feat_value:0.00103896103896 feat_value:7.7616249738e-06 feat_value:0.0 feat_value:0.0 feat_value:3.95087000316e-05 feat_value:9.27994580512e-05 feat_value:0.000461721510895 feat_value:0.00545725152968 feat_value:0.00248113305076 feat_value:0.0217391304348 feat_value:0.012987012987 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:695357 feat_idx:245713 feat_idx:987054 feat_idx:399764 feat_idx:360051 feat_idx:615411 feat_idx:684605 feat_idx:144963 feat_idx:148475 feat_idx:874792 feat_idx:107682 feat_idx:879950 feat_idx:321212 feat_idx:288355 feat_idx:369087 feat_idx:762311 feat_idx:13161 feat_idx:879575 feat_idx:0 feat_idx:0 feat_idx:1086254 feat_idx:0 feat_idx:122096 feat_idx:942610 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:4.57770656901e-05 feat_value:0.0123839009288 feat_value:0.000315551453367 feat_value:0.000225038685774 feat_value:3.55170392996e-05 feat_value:0.00347279642798 feat_value:0.00310141631345 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.00162315704044 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:506931 feat_idx:714652 feat_idx:0 feat_idx:213479 feat_idx:314332 feat_idx:0 feat_idx:432079 feat_idx:144963 feat_idx:148475 feat_idx:666980 feat_idx:405740 feat_idx:0 feat_idx:705197 feat_idx:288355 feat_idx:104862 feat_idx:0 feat_idx:339114 feat_idx:679030 feat_idx:734534 feat_idx:1047606 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:1057480 feat_idx:343446 feat_idx:502409 feat_value:0.00138528138528 feat_value:1.16424374607e-05 feat_value:0.00022888532845 feat_value:0.0206398348813 feat_value:4.96557432092e-06 feat_value:5.56796748307e-05 feat_value:0.000142068157198 feat_value:0.00380353894493 feat_value:0.000827044350253 feat_value:0.0434782608696 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.00270526173407 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:83142 feat_idx:460446 feat_idx:323226 feat_idx:360051 feat_idx:108674 feat_idx:714816 feat_idx:795081 feat_idx:148475 feat_idx:900313 feat_idx:855314 feat_idx:824386 feat_idx:603555 feat_idx:59528 feat_idx:95559 feat_idx:499188 feat_idx:339114 feat_idx:882666 feat_idx:0 feat_idx:0 feat_idx:335421 feat_idx:0 feat_idx:122096 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000159113311963 feat_value:3.05180437934e-05 feat_value:0.00412796697626 feat_value:0.000134675011365 feat_value:0.000345677981241 feat_value:0.00113654525759 feat_value:0.00793782040681 feat_value:0.00478996519522 feat_value:0.0 feat_value:0.025974025974 feat_value:0.00149700598802 feat_value:0.000541052346815 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:507093 feat_idx:968965 feat_idx:115714 feat_idx:314332 feat_idx:108674 feat_idx:585814 feat_idx:144963 feat_idx:148475 feat_idx:1067472 feat_idx:905164 feat_idx:292795 feat_idx:1053010 feat_idx:223357 feat_idx:460894 feat_idx:592287 feat_idx:339114 feat_idx:1024304 feat_idx:0 feat_idx:0 feat_idx:1006115 feat_idx:0 feat_idx:122096 feat_idx:831861 feat_idx:0 feat_idx:0 feat_value:0.0152380952381 feat_value:0.00124962162078 feat_value:0.0 feat_value:0.00412796697626 feat_value:2.15894535692e-07 feat_value:9.27994580512e-06 feat_value:0.00158050824883 feat_value:0.00661485033901 feat_value:0.00303249595093 feat_value:0.0652173913043 feat_value:0.017316017316 feat_value:0.00299401197605 feat_value:0.000541052346815 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:704711 feat_idx:160536 feat_idx:572549 feat_idx:314332 feat_idx:0 feat_idx:984584 feat_idx:144963 feat_idx:148475 feat_idx:120200 feat_idx:190379 feat_idx:768743 feat_idx:628725 feat_idx:288355 feat_idx:967940 feat_idx:824472 feat_idx:854924 feat_idx:575938 feat_idx:568485 feat_idx:330429 feat_idx:469863 feat_idx:0 feat_idx:122096 feat_idx:26849 feat_idx:502861 feat_idx:9838 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:0.000274662394141 feat_value:0.00515995872033 feat_value:7.26701007139e-05 feat_value:0.000185598916102 feat_value:0.000674823746692 feat_value:0.000826856292376 feat_value:0.00327371721975 feat_value:0.0 feat_value:0.021645021645 feat_value:0.0 feat_value:0.000676315433518 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:506931 feat_idx:439682 feat_idx:462322 feat_idx:892535 feat_idx:314332 feat_idx:615411 feat_idx:183327 feat_idx:66687 feat_idx:31348 feat_idx:754940 feat_idx:780959 feat_idx:1076845 feat_idx:127420 feat_idx:59528 feat_idx:1034303 feat_idx:3336 feat_idx:587215 feat_idx:786401 feat_idx:0 feat_idx:0 feat_idx:273839 feat_idx:0 feat_idx:476211 feat_idx:841950 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000116424374607 feat_value:0.0 feat_value:0.0 feat_value:0.00487394867997 feat_value:0.00488589146639 feat_value:0.0 feat_value:0.000330742516951 feat_value:0.00327371721975 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:268086 feat_idx:569676 feat_idx:460446 feat_idx:323226 feat_idx:943087 feat_idx:615411 feat_idx:646596 feat_idx:144963 feat_idx:148475 feat_idx:320091 feat_idx:786096 feat_idx:824386 feat_idx:708545 feat_idx:863222 feat_idx:406685 feat_idx:499188 feat_idx:599055 feat_idx:251433 feat_idx:0 feat_idx:0 feat_idx:335421 feat_idx:969590 feat_idx:476211 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:7.7616249738e-06 feat_value:0.0 feat_value:0.0 feat_value:0.000211317571535 feat_value:4.63997290256e-05 feat_value:1.77585196498e-05 feat_value:0.00115759880933 feat_value:0.000689203625211 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:585875 feat_idx:1083253 feat_idx:105841 feat_idx:314332 feat_idx:615411 feat_idx:183043 feat_idx:66687 feat_idx:148475 feat_idx:754940 feat_idx:785290 feat_idx:78319 feat_idx:769776 feat_idx:223357 feat_idx:715789 feat_idx:30992 feat_idx:854924 feat_idx:339749 feat_idx:0 feat_idx:0 feat_idx:87470 feat_idx:0 feat_idx:122096 feat_idx:141692 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000135828437042 feat_value:0.0 feat_value:0.00103199174407 feat_value:0.000404802254423 feat_value:0.0 feat_value:0.0 feat_value:0.00611873656359 feat_value:0.00062028326269 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000135263086704 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:34199 feat_idx:460446 feat_idx:323226 feat_idx:360051 feat_idx:615411 feat_idx:617010 feat_idx:1041627 feat_idx:148475 feat_idx:754940 feat_idx:224964 feat_idx:824386 feat_idx:226878 feat_idx:288355 feat_idx:303932 feat_idx:499188 feat_idx:13161 feat_idx:628988 feat_idx:0 feat_idx:0 feat_idx:335421 feat_idx:0 feat_idx:122096 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:3.49273123821e-05 feat_value:9.15541313802e-05 feat_value:0.015479876161 feat_value:0.000872775249989 feat_value:0.0011762331308 feat_value:0.000124309637549 feat_value:0.00694559285596 feat_value:0.0124056652538 feat_value:0.0 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.00541052346815 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:664380 feat_idx:0 feat_idx:0 feat_idx:314332 feat_idx:108674 feat_idx:248083 feat_idx:144963 feat_idx:148475 feat_idx:804470 feat_idx:868888 feat_idx:0 feat_idx:797434 feat_idx:59528 feat_idx:747120 feat_idx:0 feat_idx:13161 feat_idx:521259 feat_idx:495815 feat_idx:330429 feat_idx:0 feat_idx:11923 feat_idx:407810 feat_idx:566713 feat_idx:24736 feat_idx:915104 feat_value:0.00536796536797 feat_value:7.7616249738e-05 feat_value:3.05180437934e-05 feat_value:0.0113519091847 feat_value:1.25218830701e-05 feat_value:5.33596883794e-05 feat_value:0.000550514109144 feat_value:0.00380353894493 feat_value:0.00223991178194 feat_value:0.0434782608696 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.00148789395374 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:439682 feat_idx:661250 feat_idx:819482 feat_idx:314332 feat_idx:404876 feat_idx:173004 feat_idx:795081 feat_idx:148475 feat_idx:133411 feat_idx:790823 feat_idx:853868 feat_idx:963286 feat_idx:223357 feat_idx:961787 feat_idx:355708 feat_idx:13161 feat_idx:618619 feat_idx:0 feat_idx:0 feat_idx:542491 feat_idx:0 feat_idx:377126 feat_idx:320543 feat_idx:0 feat_idx:0 feat_value:0.00017316017316 feat_value:0.00925573778126 feat_value:0.000198367284657 feat_value:0.00412796697626 feat_value:1.72715628554e-06 feat_value:9.27994580512e-06 feat_value:0.00122533785584 feat_value:0.000496113775426 feat_value:0.0209862503877 feat_value:0.0217391304348 feat_value:0.047619047619 feat_value:0.0 feat_value:0.000541052346815 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:695357 feat_idx:881707 feat_idx:387392 feat_idx:38631 feat_idx:314332 feat_idx:0 feat_idx:608594 feat_idx:144963 feat_idx:148475 feat_idx:756085 feat_idx:879727 feat_idx:1083007 feat_idx:253536 feat_idx:223357 feat_idx:462961 feat_idx:367591 feat_idx:13161 feat_idx:144331 feat_idx:0 feat_idx:0 feat_idx:853418 feat_idx:0 feat_idx:122096 feat_idx:783958 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000748996809972 feat_value:0.0 feat_value:0.0 feat_value:7.01225451928e-05 feat_value:1.39199187077e-05 feat_value:0.000514997069844 feat_value:0.000992227550852 feat_value:0.00255005341328 feat_value:0.0 feat_value:0.038961038961 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:87449 feat_idx:536408 feat_idx:619856 feat_idx:729041 feat_idx:615411 feat_idx:689549 feat_idx:1041627 feat_idx:148475 feat_idx:754940 feat_idx:42362 feat_idx:181047 feat_idx:385295 feat_idx:223357 feat_idx:751650 feat_idx:367088 feat_idx:339114 feat_idx:644343 feat_idx:809973 feat_idx:330429 feat_idx:28648 feat_idx:0 feat_idx:217677 feat_idx:305383 feat_idx:343446 feat_idx:1083427 feat_value:0.0 feat_value:8.53778747118e-05 feat_value:0.000122072175174 feat_value:0.00928792569659 feat_value:6.50274341504e-05 feat_value:7.19195799897e-05 feat_value:5.32755589494e-05 feat_value:0.00115759880933 feat_value:0.00117164616286 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.00121736778033 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:506931 feat_idx:439682 feat_idx:0 feat_idx:0 feat_idx:314332 feat_idx:108674 feat_idx:713567 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:963705 feat_idx:0 feat_idx:599643 feat_idx:59528 feat_idx:967283 feat_idx:0 feat_idx:587215 feat_idx:434748 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:925828 feat_idx:476211 feat_idx:753350 feat_idx:0 feat_idx:0 feat_value:0.00017316017316 feat_value:0.000128066812068 feat_value:0.0 feat_value:0.0030959752322 feat_value:5.00875322806e-06 feat_value:7.19195799897e-05 feat_value:1.77585196498e-05 feat_value:0.000496113775426 feat_value:0.000103380543782 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:432429 feat_idx:319665 feat_idx:183269 feat_idx:85674 feat_idx:463568 feat_idx:0 feat_idx:130525 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:392441 feat_idx:1050223 feat_idx:862081 feat_idx:288355 feat_idx:484086 feat_idx:1077738 feat_idx:339114 feat_idx:934587 feat_idx:734534 feat_idx:94311 feat_idx:548757 feat_idx:0 feat_idx:321110 feat_idx:686449 feat_idx:474802 feat_idx:789529 feat_value:0.0 feat_value:3.49273123821e-05 feat_value:3.05180437934e-05 feat_value:0.0030959752322 feat_value:0.000119994182938 feat_value:0.0 feat_value:0.0 feat_value:0.000496113775426 feat_value:0.000447982356387 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:702327 feat_idx:0 feat_idx:217102 feat_idx:314332 feat_idx:85900 feat_idx:331250 feat_idx:888742 feat_idx:148475 feat_idx:197667 feat_idx:872960 feat_idx:0 feat_idx:925332 feat_idx:223357 feat_idx:57227 feat_idx:0 feat_idx:339114 feat_idx:91753 feat_idx:305875 feat_idx:1047606 feat_idx:0 feat_idx:0 feat_idx:476211 feat_idx:117207 feat_idx:502861 feat_idx:866455 feat_value:0.0 feat_value:1.94040624345e-05 feat_value:0.000335698481727 feat_value:0.0030959752322 feat_value:0.000202379537758 feat_value:0.00056143672121 feat_value:0.000106551117899 feat_value:0.000992227550852 feat_value:0.00630621317068 feat_value:0.0 feat_value:0.004329004329 feat_value:0.000998003992016 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:849120 feat_idx:982375 feat_idx:949507 feat_idx:82312 feat_idx:314332 feat_idx:615411 feat_idx:641839 feat_idx:66687 feat_idx:148475 feat_idx:351286 feat_idx:1067936 feat_idx:1021395 feat_idx:423678 feat_idx:288355 feat_idx:491071 feat_idx:210032 feat_idx:13161 feat_idx:384630 feat_idx:661313 feat_idx:330429 feat_idx:466643 feat_idx:0 feat_idx:407810 feat_idx:818126 feat_idx:35064 feat_idx:312157 feat_value:0.0 feat_value:0.00022508712424 feat_value:0.000244144350347 feat_value:0.00722394220846 feat_value:7.32314265067e-05 feat_value:0.000167039024492 feat_value:3.55170392996e-05 feat_value:0.00115759880933 feat_value:0.00327371721975 feat_value:0.0 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.000946841606925 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:518052 feat_idx:702327 feat_idx:0 feat_idx:450730 feat_idx:314332 feat_idx:615411 feat_idx:491223 feat_idx:27549 feat_idx:148475 feat_idx:24666 feat_idx:283209 feat_idx:0 feat_idx:91978 feat_idx:59528 feat_idx:89255 feat_idx:282181 feat_idx:13161 feat_idx:91753 feat_idx:633602 feat_idx:94311 feat_idx:0 feat_idx:0 feat_idx:377126 feat_idx:26849 feat_idx:502861 feat_idx:989849 feat_value:0.00103896103896 feat_value:1.16424374607e-05 feat_value:0.000427252613107 feat_value:0.0 feat_value:1.33854612129e-06 feat_value:0.0 feat_value:0.000106551117899 feat_value:0.0 feat_value:0.0 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:181401 feat_idx:704711 feat_idx:1084300 feat_idx:958176 feat_idx:314332 feat_idx:615411 feat_idx:809683 feat_idx:536544 feat_idx:148475 feat_idx:197667 feat_idx:23597 feat_idx:771551 feat_idx:444756 feat_idx:59528 feat_idx:28300 feat_idx:351738 feat_idx:339114 feat_idx:750233 feat_idx:734534 feat_idx:330429 feat_idx:5418 feat_idx:0 feat_idx:476211 feat_idx:221229 feat_idx:1007264 feat_idx:24246 feat_value:0.0 feat_value:8.53778747118e-05 feat_value:0.00013733119707 feat_value:0.0030959752322 feat_value:0.000622380767493 feat_value:0.00313894166858 feat_value:5.32755589494e-05 feat_value:0.000165371258475 feat_value:0.0124745856163 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:746729 feat_idx:742925 feat_idx:205831 feat_idx:912022 feat_idx:0 feat_idx:653684 feat_idx:144963 feat_idx:148475 feat_idx:891197 feat_idx:122292 feat_idx:282954 feat_idx:561978 feat_idx:223357 feat_idx:222724 feat_idx:538143 feat_idx:599055 feat_idx:706003 feat_idx:729650 feat_idx:1047606 feat_idx:475068 feat_idx:0 feat_idx:122096 feat_idx:744639 feat_idx:530010 feat_idx:785927 feat_value:0.0 feat_value:8.14970622249e-05 feat_value:0.00018310826276 feat_value:0.00825593395253 feat_value:0.000387098902496 feat_value:0.000102079403856 feat_value:3.55170392996e-05 feat_value:0.0019844551017 feat_value:0.00196423033185 feat_value:0.0 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.00108210469363 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:201945 feat_idx:631742 feat_idx:306726 feat_idx:186386 feat_idx:314332 feat_idx:615411 feat_idx:337962 feat_idx:989504 feat_idx:31348 feat_idx:1068694 feat_idx:746192 feat_idx:359807 feat_idx:597620 feat_idx:59528 feat_idx:834098 feat_idx:463498 feat_idx:13161 feat_idx:144824 feat_idx:734534 feat_idx:1047606 feat_idx:447900 feat_idx:0 feat_idx:476211 feat_idx:421203 feat_idx:24736 feat_idx:272262 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:3.05180437934e-05 feat_value:0.0 feat_value:0.00767176914691 feat_value:0.0 feat_value:0.0 feat_value:0.000496113775426 feat_value:6.89203625211e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000135263086704 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:894961 feat_idx:0 feat_idx:0 feat_idx:314332 feat_idx:615411 feat_idx:927764 feat_idx:144963 feat_idx:148475 feat_idx:967242 feat_idx:1062285 feat_idx:0 feat_idx:736367 feat_idx:59528 feat_idx:562438 feat_idx:0 feat_idx:587215 feat_idx:896897 feat_idx:960559 feat_idx:1047606 feat_idx:0 feat_idx:0 feat_idx:377126 feat_idx:428982 feat_idx:525837 feat_idx:697480 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:0.000305180437934 feat_value:0.0 feat_value:0.000190505338295 feat_value:0.00198358841584 feat_value:0.0 feat_value:0.000661485033901 feat_value:0.017988214618 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000676315433518 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:506931 feat_idx:889703 feat_idx:428972 feat_idx:323226 feat_idx:314332 feat_idx:108674 feat_idx:731191 feat_idx:66687 feat_idx:31348 feat_idx:754940 feat_idx:639052 feat_idx:789125 feat_idx:318898 feat_idx:223357 feat_idx:275810 feat_idx:791919 feat_idx:189960 feat_idx:990004 feat_idx:0 feat_idx:0 feat_idx:128761 feat_idx:0 feat_idx:441547 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000228967936727 feat_value:3.05180437934e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:3.44601812606e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:0 feat_idx:5 feat_idx:0 feat_idx:0 feat_idx:8 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:12 feat_idx:0 feat_idx:695357 feat_idx:702327 feat_idx:112382 feat_idx:364273 feat_idx:314332 feat_idx:615411 feat_idx:680585 feat_idx:144963 feat_idx:31348 feat_idx:776916 feat_idx:972993 feat_idx:307964 feat_idx:509894 feat_idx:59528 feat_idx:89255 feat_idx:498076 feat_idx:854924 feat_idx:91753 feat_idx:734534 feat_idx:94311 feat_idx:797195 feat_idx:0 feat_idx:377126 feat_idx:520021 feat_idx:522503 feat_idx:516793 feat_value:0.0 feat_value:0.000306584186465 feat_value:7.62951094835e-05 feat_value:0.0 feat_value:0.00199486550979 feat_value:0.0 feat_value:0.0 feat_value:0.00115759880933 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:711611 feat_idx:461913 feat_idx:1019942 feat_idx:360051 feat_idx:615411 feat_idx:1055981 feat_idx:948645 feat_idx:148475 feat_idx:754940 feat_idx:380775 feat_idx:858292 feat_idx:571110 feat_idx:288355 feat_idx:122497 feat_idx:986082 feat_idx:13161 feat_idx:87215 feat_idx:734534 feat_idx:94311 feat_idx:675199 feat_idx:0 feat_idx:122096 feat_idx:294199 feat_idx:522503 feat_idx:87571 feat_value:0.00675324675325 feat_value:4.26889373559e-05 feat_value:0.000640878919661 feat_value:0.0330237358101 feat_value:1.16583049274e-06 feat_value:7.65595528922e-05 feat_value:0.000692582266342 feat_value:0.00396891020341 feat_value:0.00110272580034 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.00432841877452 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:704711 feat_idx:72868 feat_idx:17848 feat_idx:314332 feat_idx:615411 feat_idx:363835 feat_idx:144963 feat_idx:31348 feat_idx:1069123 feat_idx:258719 feat_idx:753245 feat_idx:820316 feat_idx:39086 feat_idx:992008 feat_idx:325584 feat_idx:13161 feat_idx:750233 feat_idx:321110 feat_idx:94311 feat_idx:644181 feat_idx:0 feat_idx:476211 feat_idx:221229 feat_idx:502861 feat_idx:952230 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:0.000839246204318 feat_value:0.00515995872033 feat_value:0.000625101038643 feat_value:0.0 feat_value:0.0 feat_value:0.000826856292376 feat_value:3.44601812606e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000676315433518 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:1
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:31161 feat_idx:0 feat_idx:0 feat_idx:314332 feat_idx:85900 feat_idx:834217 feat_idx:760883 feat_idx:148475 feat_idx:697060 feat_idx:390104 feat_idx:0 feat_idx:916053 feat_idx:59528 feat_idx:608516 feat_idx:0 feat_idx:587215 feat_idx:473726 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:476211 feat_idx:0 feat_idx:0 feat_idx:0 feat_value:0.00017316017316 feat_value:1.55232499476e-05 feat_value:7.62951094835e-05 feat_value:0.00825593395253 feat_value:3.02252349969e-07 feat_value:1.85598916102e-05 feat_value:1.77585196498e-05 feat_value:0.0013229700678 feat_value:0.000275681450084 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.00108210469363 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:746729 feat_idx:0 feat_idx:415419 feat_idx:314332 feat_idx:85900 feat_idx:341613 feat_idx:341430 feat_idx:148475 feat_idx:219803 feat_idx:273068 feat_idx:0 feat_idx:427647 feat_idx:59528 feat_idx:86971 feat_idx:85678 feat_idx:13161 feat_idx:706003 feat_idx:970598 feat_idx:94311 feat_idx:378304 feat_idx:0 feat_idx:476211 feat_idx:26849 feat_idx:502861 feat_idx:1082916 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:0.000106813153277 feat_value:0.0030959752322 feat_value:0.000435545636305 feat_value:0.000155439092236 feat_value:0.000106551117899 feat_value:0.000496113775426 feat_value:0.00196423033185 feat_value:0.0 feat_value:0.012987012987 feat_value:0.0 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:1
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:0 feat_idx:695357 feat_idx:655161 feat_idx:410781 feat_idx:572549 feat_idx:314332 feat_idx:615411 feat_idx:438251 feat_idx:1017442 feat_idx:148475 feat_idx:754940 feat_idx:939988 feat_idx:175321 feat_idx:940584 feat_idx:223357 feat_idx:400890 feat_idx:229140 feat_idx:13161 feat_idx:512136 feat_idx:734534 feat_idx:94311 feat_idx:59009 feat_idx:0 feat_idx:122096 feat_idx:26849 feat_idx:502861 feat_idx:602609 feat_value:0.00121212121212 feat_value:1.55232499476e-05 feat_value:0.000610360875868 feat_value:0.0 feat_value:6.12276903223e-05 feat_value:5.33596883794e-05 feat_value:0.00261050238852 feat_value:0.0 feat_value:0.000241221268824 feat_value:0.0 feat_value:0.017316017316 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:181401 feat_idx:563443 feat_idx:0 feat_idx:0 feat_idx:314332 feat_idx:85900 feat_idx:1086355 feat_idx:66687 feat_idx:148475 feat_idx:754940 feat_idx:294725 feat_idx:0 feat_idx:937034 feat_idx:59528 feat_idx:827972 feat_idx:0 feat_idx:197974 feat_idx:319863 feat_idx:734534 feat_idx:1047606 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:808702 feat_idx:502861 feat_idx:792764 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:0.000152590218967 feat_value:0.00206398348813 feat_value:0.000153069225806 feat_value:0.0 feat_value:0.0 feat_value:0.000330742516951 feat_value:0.000103380543782 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000270526173407 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:962300 feat_idx:623087 feat_idx:0 feat_idx:53376 feat_idx:314332 feat_idx:615411 feat_idx:264532 feat_idx:144963 feat_idx:148475 feat_idx:14838 feat_idx:682657 feat_idx:0 feat_idx:502067 feat_idx:59528 feat_idx:519185 feat_idx:0 feat_idx:854924 feat_idx:372673 feat_idx:764350 feat_idx:330429 feat_idx:0 feat_idx:925828 feat_idx:377126 feat_idx:383664 feat_idx:522503 feat_idx:14052 feat_value:0.000865800865801 feat_value:0.000209563874293 feat_value:0.0 feat_value:0.00515995872033 feat_value:1.97327605623e-05 feat_value:1.15999322564e-05 feat_value:8.8792598249e-05 feat_value:0.00115759880933 feat_value:0.000379061993866 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.000249500998004 feat_value:0.000676315433518 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:244091 feat_idx:428972 feat_idx:323226 feat_idx:314332 feat_idx:615411 feat_idx:253814 feat_idx:144963 feat_idx:148475 feat_idx:367991 feat_idx:359193 feat_idx:789125 feat_idx:173541 feat_idx:59528 feat_idx:433504 feat_idx:791919 feat_idx:587215 feat_idx:884062 feat_idx:0 feat_idx:0 feat_idx:128761 feat_idx:0 feat_idx:637620 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:0.00022888532845 feat_value:0.00206398348813 feat_value:0.000868414180368 feat_value:0.00070759586764 feat_value:1.77585196498e-05 feat_value:0.00711096411444 feat_value:0.00785692132741 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.00405789260111 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:518052 feat_idx:631742 feat_idx:209780 feat_idx:691946 feat_idx:463568 feat_idx:404876 feat_idx:781648 feat_idx:66687 feat_idx:148475 feat_idx:294231 feat_idx:673759 feat_idx:780141 feat_idx:636360 feat_idx:223357 feat_idx:656844 feat_idx:720701 feat_idx:13161 feat_idx:284891 feat_idx:734534 feat_idx:330429 feat_idx:564494 feat_idx:0 feat_idx:122096 feat_idx:529367 feat_idx:24736 feat_idx:225414 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:6.10360875868e-05 feat_value:0.0030959752322 feat_value:7.29291741568e-05 feat_value:0.000426877507035 feat_value:0.000213102235798 feat_value:0.00760707788986 feat_value:0.00182638960681 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:87449 feat_idx:0 feat_idx:0 feat_idx:943087 feat_idx:615411 feat_idx:14123 feat_idx:128514 feat_idx:148475 feat_idx:338941 feat_idx:655530 feat_idx:0 feat_idx:945302 feat_idx:288355 feat_idx:1078572 feat_idx:0 feat_idx:587215 feat_idx:644343 feat_idx:215210 feat_idx:330429 feat_idx:0 feat_idx:0 feat_idx:217677 feat_idx:830506 feat_idx:502861 feat_idx:560344 feat_value:0.000692640692641 feat_value:1.16424374607e-05 feat_value:0.00135805294881 feat_value:0.00412796697626 feat_value:2.09849488693e-05 feat_value:1.15999322564e-05 feat_value:7.10340785992e-05 feat_value:0.00115759880933 feat_value:0.000137840725042 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000541052346815 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:76757 feat_idx:0 feat_idx:748549 feat_idx:729041 feat_idx:404876 feat_idx:897525 feat_idx:66687 feat_idx:148475 feat_idx:809357 feat_idx:739161 feat_idx:0 feat_idx:571774 feat_idx:223357 feat_idx:726585 feat_idx:450365 feat_idx:13161 feat_idx:1064696 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:925828 feat_idx:476211 feat_idx:381001 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.00016299412445 feat_value:3.05180437934e-05 feat_value:0.00103199174407 feat_value:0.000144347086564 feat_value:2.31998645128e-06 feat_value:0.000301894834047 feat_value:0.000330742516951 feat_value:3.44601812606e-05 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000135263086704 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:695357 feat_idx:702327 feat_idx:593344 feat_idx:1065368 feat_idx:463568 feat_idx:85900 feat_idx:669411 feat_idx:27549 feat_idx:148475 feat_idx:227359 feat_idx:1043530 feat_idx:320625 feat_idx:575561 feat_idx:223357 feat_idx:57227 feat_idx:1021160 feat_idx:854924 feat_idx:91753 feat_idx:943801 feat_idx:94311 feat_idx:758526 feat_idx:0 feat_idx:122096 feat_idx:154807 feat_idx:522503 feat_idx:406770 feat_value:0.0 feat_value:1.94040624345e-05 feat_value:1.52590218967e-05 feat_value:0.00206398348813 feat_value:0.000346985697764 feat_value:0.00038047777801 feat_value:0.000319653353696 feat_value:0.00214982636018 feat_value:0.0126468865226 feat_value:0.0 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.000270526173407 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:518052 feat_idx:569676 feat_idx:460446 feat_idx:323226 feat_idx:314332 feat_idx:108674 feat_idx:2775 feat_idx:144963 feat_idx:31348 feat_idx:892705 feat_idx:1040029 feat_idx:824386 feat_idx:524213 feat_idx:863222 feat_idx:406685 feat_idx:499188 feat_idx:599055 feat_idx:251433 feat_idx:0 feat_idx:0 feat_idx:335421 feat_idx:0 feat_idx:476211 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:7.7616249738e-06 feat_value:0.0 feat_value:0.0 feat_value:0.00307174745383 feat_value:0.000329438076082 feat_value:0.0 feat_value:0.00115759880933 feat_value:0.00217099141941 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:328856 feat_idx:583609 feat_idx:356189 feat_idx:314332 feat_idx:0 feat_idx:407260 feat_idx:144963 feat_idx:148475 feat_idx:699806 feat_idx:967004 feat_idx:598842 feat_idx:676678 feat_idx:223357 feat_idx:310528 feat_idx:805012 feat_idx:599055 feat_idx:683739 feat_idx:734534 feat_idx:94311 feat_idx:135625 feat_idx:0 feat_idx:122096 feat_idx:737768 feat_idx:522503 feat_idx:618666 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:0.000167849240864 feat_value:0.0030959752322 feat_value:0.000698807433128 feat_value:0.00028999830641 feat_value:3.55170392996e-05 feat_value:0.000496113775426 feat_value:0.00354939866984 feat_value:0.0 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:1
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:0 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:849120 feat_idx:439682 feat_idx:0 feat_idx:0 feat_idx:314332 feat_idx:615411 feat_idx:443349 feat_idx:1007823 feat_idx:31348 feat_idx:754940 feat_idx:1072328 feat_idx:0 feat_idx:321212 feat_idx:59528 feat_idx:163883 feat_idx:0 feat_idx:189960 feat_idx:1040747 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:925828 feat_idx:122096 feat_idx:0 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000554956185627 feat_value:3.05180437934e-05 feat_value:0.00206398348813 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000330742516951 feat_value:6.89203625211e-05 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000270526173407 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:738089 feat_idx:439682 feat_idx:374405 feat_idx:984218 feat_idx:943087 feat_idx:108674 feat_idx:884166 feat_idx:144963 feat_idx:148475 feat_idx:683571 feat_idx:374802 feat_idx:530646 feat_idx:826201 feat_idx:223357 feat_idx:43619 feat_idx:1001991 feat_idx:339114 feat_idx:603612 feat_idx:0 feat_idx:0 feat_idx:60686 feat_idx:0 feat_idx:122096 feat_idx:138318 feat_idx:0 feat_idx:0 feat_value:0.00034632034632 feat_value:1.16424374607e-05 feat_value:0.0 feat_value:0.00722394220846 feat_value:1.91282558623e-05 feat_value:8.58394986973e-05 feat_value:0.000124309637549 feat_value:0.00562262278816 feat_value:0.00971777111548 feat_value:0.0217391304348 feat_value:0.017316017316 feat_value:0.00174650698603 feat_value:0.000946841606925 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:1049859 feat_idx:420263 feat_idx:271401 feat_idx:360051 feat_idx:615411 feat_idx:714816 feat_idx:144963 feat_idx:148475 feat_idx:900313 feat_idx:855314 feat_idx:74337 feat_idx:603555 feat_idx:288355 feat_idx:650698 feat_idx:322858 feat_idx:339114 feat_idx:311468 feat_idx:489978 feat_idx:330429 feat_idx:101492 feat_idx:0 feat_idx:217677 feat_idx:221229 feat_idx:917031 feat_idx:24246 feat_value:0.00034632034632 feat_value:1.55232499476e-05 feat_value:0.000915541313802 feat_value:0.077399380805 feat_value:2.63391333544e-06 feat_value:0.000280718360605 feat_value:0.00092344302179 feat_value:0.00644947908054 feat_value:0.00854612495262 feat_value:0.0217391304348 feat_value:0.034632034632 feat_value:0.000249500998004 feat_value:0.0104152576762 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:1
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:541890 feat_idx:93486 feat_idx:892417 feat_idx:314332 feat_idx:0 feat_idx:870784 feat_idx:66687 feat_idx:148475 feat_idx:1064406 feat_idx:605532 feat_idx:908441 feat_idx:411003 feat_idx:223357 feat_idx:415710 feat_idx:177994 feat_idx:13161 feat_idx:721813 feat_idx:0 feat_idx:0 feat_idx:702388 feat_idx:0 feat_idx:122096 feat_idx:68781 feat_idx:0 feat_idx:0 feat_value:0.00017316017316 feat_value:0.000143590062015 feat_value:3.05180437934e-05 feat_value:0.0433436532508 feat_value:1.41626815414e-05 feat_value:0.000102079403856 feat_value:0.000266377794747 feat_value:0.00810319166529 feat_value:0.00199869051311 feat_value:0.0217391304348 feat_value:0.038961038961 feat_value:0.0 feat_value:0.00568104964155 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:268086 feat_idx:569676 feat_idx:460446 feat_idx:323226 feat_idx:463568 feat_idx:404876 feat_idx:679269 feat_idx:1007823 feat_idx:148475 feat_idx:754940 feat_idx:392943 feat_idx:824386 feat_idx:502022 feat_idx:863222 feat_idx:406685 feat_idx:499188 feat_idx:763481 feat_idx:251433 feat_idx:0 feat_idx:0 feat_idx:335421 feat_idx:0 feat_idx:476211 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:0.0 feat_value:0.0 feat_value:0.000644186115598 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:695357 feat_idx:52223 feat_idx:0 feat_idx:610088 feat_idx:360051 feat_idx:108674 feat_idx:207287 feat_idx:144963 feat_idx:148475 feat_idx:198726 feat_idx:1050332 feat_idx:0 feat_idx:575881 feat_idx:863222 feat_idx:428650 feat_idx:56538 feat_idx:587215 feat_idx:520546 feat_idx:0 feat_idx:0 feat_idx:3328 feat_idx:0 feat_idx:321110 feat_idx:604513 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:0.0 feat_value:0.00103199174407 feat_value:0.00087290478671 feat_value:0.000153119105784 feat_value:1.77585196498e-05 feat_value:0.000165371258475 feat_value:3.44601812606e-05 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000135263086704 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:506931 feat_idx:664380 feat_idx:464058 feat_idx:794391 feat_idx:314332 feat_idx:615411 feat_idx:1008575 feat_idx:144963 feat_idx:148475 feat_idx:811905 feat_idx:262025 feat_idx:792836 feat_idx:853632 feat_idx:863222 feat_idx:190922 feat_idx:989611 feat_idx:13161 feat_idx:402822 feat_idx:622170 feat_idx:94311 feat_idx:626744 feat_idx:925828 feat_idx:122096 feat_idx:423382 feat_idx:24736 feat_idx:1081226 feat_value:0.00225108225108 feat_value:6.20929997904e-05 feat_value:0.00122072175174 feat_value:0.0330237358101 feat_value:1.63216268983e-05 feat_value:0.000266798441897 feat_value:0.000266377794747 feat_value:0.00611873656359 feat_value:0.00196423033185 feat_value:0.0217391304348 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.00649262816177 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:268086 feat_idx:894961 feat_idx:594422 feat_idx:823711 feat_idx:360051 feat_idx:615411 feat_idx:919751 feat_idx:888742 feat_idx:148475 feat_idx:725649 feat_idx:522685 feat_idx:14144 feat_idx:242991 feat_idx:288355 feat_idx:645605 feat_idx:99736 feat_idx:379814 feat_idx:896897 feat_idx:734534 feat_idx:330429 feat_idx:710067 feat_idx:0 feat_idx:407810 feat_idx:474780 feat_idx:525837 feat_idx:815828 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:0.0013885709926 feat_value:0.00412796697626 feat_value:1.26514197916e-05 feat_value:0.000510397019281 feat_value:0.000621548187743 feat_value:0.000661485033901 feat_value:0.0022743719632 feat_value:0.0 feat_value:0.021645021645 feat_value:0.000249500998004 feat_value:0.000541052346815 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:12 feat_idx:0 feat_idx:268086 feat_idx:704711 feat_idx:539260 feat_idx:133619 feat_idx:943087 feat_idx:108674 feat_idx:277955 feat_idx:795081 feat_idx:148475 feat_idx:46173 feat_idx:414978 feat_idx:796305 feat_idx:317564 feat_idx:59528 feat_idx:28300 feat_idx:252652 feat_idx:854924 feat_idx:750233 feat_idx:637425 feat_idx:330429 feat_idx:538163 feat_idx:0 feat_idx:122096 feat_idx:623412 feat_idx:917031 feat_idx:421993 feat_value:0.0 feat_value:7.7616249738e-06 feat_value:0.0 feat_value:0.0 feat_value:0.000244133540961 feat_value:0.0 feat_value:0.0 feat_value:0.000661485033901 feat_value:0.000516902718908 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:631742 feat_idx:0 feat_idx:618078 feat_idx:314332 feat_idx:831162 feat_idx:302234 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:683585 feat_idx:0 feat_idx:460786 feat_idx:59528 feat_idx:834098 feat_idx:0 feat_idx:13161 feat_idx:144824 feat_idx:734534 feat_idx:1047606 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:225853 feat_idx:24736 feat_idx:83301 feat_value:0.0 feat_value:0.000217325499267 feat_value:0.0 feat_value:0.0103199174407 feat_value:0.000282821841757 feat_value:0.000227358672225 feat_value:0.000603789668093 feat_value:0.00181908384323 feat_value:0.0120266032599 feat_value:0.0 feat_value:0.038961038961 feat_value:0.0 feat_value:0.00135263086704 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:704711 feat_idx:552317 feat_idx:56734 feat_idx:314332 feat_idx:615411 feat_idx:205494 feat_idx:66687 feat_idx:148475 feat_idx:721787 feat_idx:258719 feat_idx:1026950 feat_idx:820316 feat_idx:59528 feat_idx:28300 feat_idx:783420 feat_idx:13161 feat_idx:750233 feat_idx:505787 feat_idx:330429 feat_idx:515764 feat_idx:0 feat_idx:476211 feat_idx:221229 feat_idx:502861 feat_idx:24246 feat_value:0.00103896103896 feat_value:7.7616249738e-06 feat_value:0.000152590218967 feat_value:0.0061919504644 feat_value:0.0 feat_value:0.0 feat_value:0.000106551117899 feat_value:0.00148834132628 feat_value:0.000310141631345 feat_value:0.0217391304348 feat_value:0.004329004329 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:1
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:294042 feat_idx:507045 feat_idx:549419 feat_idx:314332 feat_idx:0 feat_idx:1012202 feat_idx:795081 feat_idx:148475 feat_idx:68578 feat_idx:717684 feat_idx:462100 feat_idx:729242 feat_idx:59528 feat_idx:182004 feat_idx:253871 feat_idx:763481 feat_idx:256400 feat_idx:0 feat_idx:0 feat_idx:915751 feat_idx:0 feat_idx:122096 feat_idx:1030847 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:1.52590218967e-05 feat_value:0.0030959752322 feat_value:0.000125262009609 feat_value:0.0 feat_value:0.0 feat_value:0.000496113775426 feat_value:0.000310141631345 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000405789260111 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:1
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:181401 feat_idx:439682 feat_idx:0 feat_idx:0 feat_idx:314332 feat_idx:0 feat_idx:1027059 feat_idx:144963 feat_idx:148475 feat_idx:307216 feat_idx:1086145 feat_idx:0 feat_idx:784143 feat_idx:59528 feat_idx:127555 feat_idx:0 feat_idx:13161 feat_idx:757164 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:0 feat_idx:0 feat_idx:0 feat_value:0.00017316017316 feat_value:0.000100901124659 feat_value:1.52590218967e-05 feat_value:0.0144478844169 feat_value:2.41801879975e-06 feat_value:3.47997967692e-05 feat_value:0.000177585196498 feat_value:0.00578799404663 feat_value:0.00554808918295 feat_value:0.0217391304348 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.00202894630055 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:849120 feat_idx:704711 feat_idx:160536 feat_idx:572549 feat_idx:360051 feat_idx:0 feat_idx:731718 feat_idx:66687 feat_idx:148475 feat_idx:31385 feat_idx:1047396 feat_idx:768743 feat_idx:258527 feat_idx:863222 feat_idx:866128 feat_idx:824472 feat_idx:599055 feat_idx:575938 feat_idx:568485 feat_idx:94311 feat_idx:469863 feat_idx:0 feat_idx:122096 feat_idx:26849 feat_idx:502861 feat_idx:9838 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:0.00114442664225 feat_value:0.0227038183695 feat_value:0.000255273699002 feat_value:0.000419917547682 feat_value:3.55170392996e-05 feat_value:0.00363816768646 feat_value:0.00234329232572 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.00297578790748 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:695357 feat_idx:447935 feat_idx:937213 feat_idx:905937 feat_idx:314332 feat_idx:404876 feat_idx:142618 feat_idx:144963 feat_idx:148475 feat_idx:750865 feat_idx:596218 feat_idx:919681 feat_idx:840670 feat_idx:59528 feat_idx:380839 feat_idx:380828 feat_idx:13161 feat_idx:197572 feat_idx:1030936 feat_idx:94311 feat_idx:827510 feat_idx:0 feat_idx:377126 feat_idx:288434 feat_idx:24736 feat_idx:933741 feat_value:0.0 feat_value:0.000504505623297 feat_value:3.05180437934e-05 feat_value:0.0237358101135 feat_value:0.000683824352351 feat_value:5.33596883794e-05 feat_value:7.10340785992e-05 feat_value:0.00396891020341 feat_value:0.000792584168993 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.00311105099418 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:1
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:695357 feat_idx:569676 feat_idx:460446 feat_idx:323226 feat_idx:314332 feat_idx:404876 feat_idx:195437 feat_idx:144963 feat_idx:148475 feat_idx:303093 feat_idx:895160 feat_idx:824386 feat_idx:332768 feat_idx:288355 feat_idx:452911 feat_idx:499188 feat_idx:339114 feat_idx:1026477 feat_idx:0 feat_idx:0 feat_idx:335421 feat_idx:0 feat_idx:407810 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:7.7616249738e-06 feat_value:0.0 feat_value:0.0 feat_value:0.00100192336124 feat_value:0.0 feat_value:0.0 feat_value:0.00529188027121 feat_value:0.0013094868879 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:0 feat_idx:5 feat_idx:0 feat_idx:0 feat_idx:8 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:0 feat_idx:268086 feat_idx:569676 feat_idx:460446 feat_idx:323226 feat_idx:943087 feat_idx:615411 feat_idx:831536 feat_idx:144963 feat_idx:31348 feat_idx:1084149 feat_idx:472585 feat_idx:824386 feat_idx:1085274 feat_idx:863222 feat_idx:406685 feat_idx:499188 feat_idx:13161 feat_idx:251433 feat_idx:0 feat_idx:0 feat_idx:335421 feat_idx:969590 feat_idx:476211 feat_idx:686449 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:7.7616249738e-06 feat_value:0.0 feat_value:0.0 feat_value:0.0294215028194 feat_value:0.0 feat_value:0.0 feat_value:0.00181908384323 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:29151 feat_idx:0 feat_idx:0 feat_idx:314332 feat_idx:615411 feat_idx:351823 feat_idx:144963 feat_idx:148475 feat_idx:633435 feat_idx:734591 feat_idx:0 feat_idx:346678 feat_idx:59528 feat_idx:246568 feat_idx:0 feat_idx:13161 feat_idx:669279 feat_idx:734534 feat_idx:94311 feat_idx:0 feat_idx:0 feat_idx:122096 feat_idx:311968 feat_idx:1007264 feat_idx:210855 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:0.000976577401389 feat_value:0.0113519091847 feat_value:6.45092872648e-05 feat_value:0.00019951883481 feat_value:0.000266377794747 feat_value:0.00214982636018 feat_value:0.00796030187119 feat_value:0.0 feat_value:0.017316017316 feat_value:0.0 feat_value:0.00148789395374 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:0 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:0 feat_idx:268086 feat_idx:655161 feat_idx:160536 feat_idx:572549 feat_idx:943087 feat_idx:108674 feat_idx:179440 feat_idx:144963 feat_idx:148475 feat_idx:754940 feat_idx:216593 feat_idx:768743 feat_idx:272886 feat_idx:288355 feat_idx:1059113 feat_idx:824472 feat_idx:599055 feat_idx:512136 feat_idx:734534 feat_idx:94311 feat_idx:469863 feat_idx:0 feat_idx:476211 feat_idx:26849 feat_idx:502861 feat_idx:507836 feat_value:0.0 feat_value:1.55232499476e-05 feat_value:1.52590218967e-05 feat_value:0.0 feat_value:0.000125348367423 feat_value:4.63997290256e-06 feat_value:5.32755589494e-05 feat_value:0.0 feat_value:6.89203625211e-05 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:0 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:439682 feat_idx:434330 feat_idx:626900 feat_idx:360051 feat_idx:615411 feat_idx:448250 feat_idx:66687 feat_idx:31348 feat_idx:621494 feat_idx:345898 feat_idx:171523 feat_idx:728643 feat_idx:288355 feat_idx:993766 feat_idx:479691 feat_idx:599055 feat_idx:786401 feat_idx:0 feat_idx:0 feat_idx:914361 feat_idx:0 feat_idx:407810 feat_idx:253237 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.000419127748585 feat_value:1.52590218967e-05 feat_value:0.00103199174407 feat_value:0.00740600297347 feat_value:0.0 feat_value:0.0 feat_value:0.000165371258475 feat_value:0.000447982356387 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000135263086704 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:599320 feat_idx:36543 feat_idx:348417 feat_idx:314332 feat_idx:615411 feat_idx:507688 feat_idx:795081 feat_idx:148475 feat_idx:1085001 feat_idx:538920 feat_idx:698736 feat_idx:914324 feat_idx:223357 feat_idx:726559 feat_idx:327135 feat_idx:13161 feat_idx:214732 feat_idx:324501 feat_idx:1047606 feat_idx:434899 feat_idx:0 feat_idx:377126 feat_idx:221229 feat_idx:522503 feat_idx:24246 feat_value:0.0 feat_value:0.000147470874502 feat_value:0.0013733119707 feat_value:0.00206398348813 feat_value:0.00178026634132 feat_value:0.00081663523085 feat_value:0.0 feat_value:0.000826856292376 feat_value:0.00151624797546 feat_value:0.0 feat_value:0.0 feat_value:0.0 feat_value:0.000676315433518 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:704711 feat_idx:0 feat_idx:417270 feat_idx:314332 feat_idx:404876 feat_idx:180197 feat_idx:144963 feat_idx:148475 feat_idx:891898 feat_idx:832883 feat_idx:0 feat_idx:406751 feat_idx:59528 feat_idx:28300 feat_idx:80459 feat_idx:587215 feat_idx:750233 feat_idx:52536 feat_idx:1047606 feat_idx:584293 feat_idx:0 feat_idx:476211 feat_idx:26849 feat_idx:502861 feat_idx:983005 feat_value:0.0 feat_value:1.16424374607e-05 feat_value:0.00119020370794 feat_value:0.00103199174407 feat_value:0.000683737994537 feat_value:0.000510397019281 feat_value:1.77585196498e-05 feat_value:0.000165371258475 feat_value:3.44601812606e-05 feat_value:0.0 feat_value:0.004329004329 feat_value:0.0 feat_value:0.000135263086704 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
feat_idx:0 feat_idx:2 feat_idx:0 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:0 feat_idx:11 feat_idx:12 feat_idx:13 feat_idx:506931 feat_idx:123566 feat_idx:961529 feat_idx:810019 feat_idx:314332 feat_idx:615411 feat_idx:475867 feat_idx:795081 feat_idx:148475 feat_idx:697060 feat_idx:1069621 feat_idx:370551 feat_idx:696973 feat_idx:69630 feat_idx:396064 feat_idx:95177 feat_idx:854924 feat_idx:488825 feat_idx:0 feat_idx:0 feat_idx:581782 feat_idx:0 feat_idx:476211 feat_idx:289148 feat_idx:0 feat_idx:0 feat_value:0.0 feat_value:0.0066672358525 feat_value:0.0 feat_value:0.00103199174407 feat_value:0.000325784854359 feat_value:4.40797425743e-05 feat_value:0.000266377794747 feat_value:0.000165371258475 feat_value:0.00299803576967 feat_value:0.0 feat_value:0.030303030303 feat_value:0.0 feat_value:0.000135263086704 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:0.0 label:0
feat_idx:1 feat_idx:2 feat_idx:3 feat_idx:4 feat_idx:5 feat_idx:6 feat_idx:7 feat_idx:8 feat_idx:9 feat_idx:10 feat_idx:11 feat_idx:0 feat_idx:13 feat_idx:268086 feat_idx:704711 feat_idx:995515 feat_idx:139394 feat_idx:943087 feat_idx:0 feat_idx:546815 feat_idx:144963 feat_idx:148475 feat_idx:364765 feat_idx:552750 feat_idx:920037 feat_idx:816538 feat_idx:223357 feat_idx:790588 feat_idx:560935 feat_idx:13161 feat_idx:750233 feat_idx:734534 feat_idx:1047606 feat_idx:361734 feat_idx:0 feat_idx:122096 feat_idx:434883 feat_idx:502861 feat_idx:203213 feat_value:0.0 feat_value:0.000197921436832 feat_value:4.57770656901e-05 feat_value:0.00206398348813 feat_value:0.000625316933178 feat_value:0.000874634892132 feat_value:0.000142068157198 feat_value:0.000330742516951 feat_value:0.00975223129674 feat_value:0.0 feat_value:0.00865800865801 feat_value:0.0 feat_value:0.000270526173407 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:0.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 feat_value:1.0 label:0
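Each line above is one slot-formatted CTR sample: 39 feat_idx tokens (13 integer-feature slots followed by 26 hashed categorical ids, where 0 appears to mark a missing slot, matching padding_idx=0 in the model below), 39 aligned feat_value tokens, and a trailing label. A minimal parsing sketch — the parse_sample helper is illustrative, not part of PaddleRec:

# Illustrative parser for one sample line in the slot format above.
# parse_sample is a hypothetical helper, not a PaddleRec API.
def parse_sample(line):
    feat_idx, feat_value, label = [], [], None
    for token in line.strip().split():
        key, _, val = token.partition(":")
        if key == "feat_idx":
            feat_idx.append(int(val))      # sparse feature id (0 = missing)
        elif key == "feat_value":
            feat_value.append(float(val))  # aligned value for the same slot
        elif key == "label":
            label = int(val)               # click label, 0 or 1
    return feat_idx, feat_value, label

Each sample thus yields two length-39 lists plus a binary label, which is exactly the (sparse ids, dense values, label) triple the model below consumes.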
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import paddle.fluid as fluid
from paddlerec.core.utils import envs
from paddlerec.core.model import Model as ModelBase


class Model(ModelBase):
    def __init__(self, config):
        ModelBase.__init__(self, config)

    def _init_hyper_parameters(self):
self.sparse_feature_number = envs.get_global_env(
"hyper_parameters.sparse_feature_number", None)
self.num_field = envs.get_global_env("hyper_parameters.num_field",
None)
        self.reg = envs.get_global_env("hyper_parameters.reg", 1e-4)

    def net(self, inputs, is_infer=False):
init_value_ = 0.1
        is_distributed = envs.get_trainer() == "CtrTrainer"
# ------------------------- network input --------------------------
raw_feat_idx = self._sparse_data_var[1]
raw_feat_value = self._dense_data_var[0]
self.label = self._sparse_data_var[0]
feat_idx = raw_feat_idx
        feat_value = fluid.layers.reshape(
            raw_feat_value, [-1, self.num_field])  # None * num_field
first_weights_re = fluid.embedding(
input=feat_idx,
is_sparse=True,
is_distributed=is_distributed,
dtype='float32',
size=[self.sparse_feature_number + 1, 1],
padding_idx=0,
param_attr=fluid.ParamAttr(
initializer=fluid.initializer.TruncatedNormalInitializer(
loc=0.0, scale=init_value_),
regularizer=fluid.regularizer.L1DecayRegularizer(self.reg)))
        first_weights = fluid.layers.reshape(
            first_weights_re, shape=[-1, self.num_field])  # None * num_field
y_first_order = fluid.layers.reduce_sum(
first_weights * feat_value, 1, keep_dim=True)
b_linear = fluid.layers.create_parameter(
shape=[1],
dtype='float32',
default_initializer=fluid.initializer.ConstantInitializer(value=0))
self.predict = fluid.layers.sigmoid(y_first_order + b_linear)
cost = fluid.layers.log_loss(
input=self.predict, label=fluid.layers.cast(self.label, "float32"))
        avg_cost = fluid.layers.reduce_sum(cost)  # summed log loss over the batch
self._cost = avg_cost
predict_2d = fluid.layers.concat([1 - self.predict, self.predict], 1)
label_int = fluid.layers.cast(self.label, 'int64')
auc_var, batch_auc_var, _ = fluid.layers.auc(input=predict_2d,
label=label_int,
slide_steps=0)
self._metrics["AUC"] = auc_var
self._metrics["BATCH_AUC"] = batch_auc_var
if is_infer:
self._infer_results["AUC"] = auc_var
......@@ -11,7 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import yaml, os
from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
try:
......
......@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import yaml, os
from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
try:
......
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
workspace: "paddlerec.models.recall.fasttext"
# list of dataset
dataset:
- name: dataset_train # name of dataset to distinguish different datasets
batch_size: 10
type: DataLoader # or QueueDataset
data_path: "{workspace}/data/train"
word_count_dict_path: "{workspace}/data/dict/word_count_dict.txt"
word_ngrams_path: "{workspace}/data/dict/word_ngrams_id.txt"
data_converter: "{workspace}/reader.py"
- name: dataset_infer # name
batch_size: 10
type: DataLoader # or QueueDataset
data_path: "{workspace}/data/test"
word_id_dict_path: "{workspace}/data/dict/word_id_dict.txt"
data_converter: "{workspace}/evaluate_reader.py"
hyper_parameters:
optimizer:
learning_rate: 1.0
decay_steps: 100000
decay_rate: 0.999
class: sgd
strategy: async
sparse_feature_number: 227915
sparse_feature_dim: 300
with_shuffle_batch: False
neg_num: 5
window_size: 5
min_n: 3
max_n: 5
# select runner by name
mode: train_runner
# config of each runner.
# runner is a kind of paddle training class, which wraps the train/infer process.
runner:
- name: train_runner
class: single_train
# num of epochs
epochs: 2
# device to run training or infer
device: cpu
save_checkpoint_interval: 1 # save model interval of epochs
save_inference_interval: 1 # save inference
save_checkpoint_path: "increment" # save checkpoint path
save_inference_path: "inference" # save inference path
save_inference_feed_varnames: [] # feed vars of save inference
save_inference_fetch_varnames: [] # fetch vars of save inference
init_model_path: "" # load model path
fetch_period: 10
- name: infer_runner
class: single_infer
# num of epochs
epochs: 1
# device to run training or infer
device: cpu
init_model_path: "increment/0" # load model path
# runner will run all the phase in each epoch
phase:
- name: phase1
model: "{workspace}/model.py" # user-defined model
dataset_name: dataset_train # select dataset by name
thread_num: 1
gradient_scale_strategy: 1
#- name: phase2
# model: "{workspace}/model.py" # user-defined model
# dataset_name: dataset_infer # select dataset by name
# thread_num: 1
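For reference, every dotted key in the hyper_parameters block above is resolved with the same envs.get_global_env lookups used throughout this diff; a minimal sketch, with return values assumed to echo the fasttext config above:
```python
from paddlerec.core.utils import envs

# dotted paths mirror the YAML nesting of config.yaml
opt_class = envs.get_global_env("hyper_parameters.optimizer.class")       # "sgd"
opt_lr = envs.get_global_env("hyper_parameters.optimizer.learning_rate")  # 1.0
neg_num = envs.get_global_env("hyper_parameters.neg_num")                 # 5
min_n = envs.get_global_env("hyper_parameters.min_n")                     # 3
```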
Athens Greece Baghdad Iraq
Athens Greece Bangkok Thailand
Athens Greece Beijing China
Athens Greece Berlin Germany
Athens Greece Bern Switzerland
Athens Greece Cairo Egypt
Athens Greece Canberra Australia
Athens Greece Hanoi Vietnam
Athens Greece Havana Cuba
Athens Greece Helsinki Finland
Athens Greece Islamabad Pakistan
Athens Greece Kabul Afghanistan
Athens Greece London England
Athens Greece Madrid Spain
Athens Greece Moscow Russia
Athens Greece Oslo Norway
Athens Greece Ottawa Canada
Athens Greece Paris France
Athens Greece Rome Italy
Athens Greece Stockholm Sweden
Athens Greece Tehran Iran
Athens Greece Tokyo Japan
Baghdad Iraq Bangkok Thailand
Baghdad Iraq Beijing China
Baghdad Iraq Berlin Germany
Baghdad Iraq Bern Switzerland
Baghdad Iraq Cairo Egypt
Baghdad Iraq Canberra Australia
Baghdad Iraq Hanoi Vietnam
Baghdad Iraq Havana Cuba
Baghdad Iraq Helsinki Finland
Baghdad Iraq Islamabad Pakistan
Baghdad Iraq Kabul Afghanistan
Baghdad Iraq London England
Baghdad Iraq Madrid Spain
Baghdad Iraq Moscow Russia
Baghdad Iraq Oslo Norway
Baghdad Iraq Ottawa Canada
Baghdad Iraq Paris France
Baghdad Iraq Rome Italy
Baghdad Iraq Stockholm Sweden
Baghdad Iraq Tehran Iran
Baghdad Iraq Tokyo Japan
Baghdad Iraq Athens Greece
Bangkok Thailand Beijing China
Bangkok Thailand Berlin Germany
Bangkok Thailand Bern Switzerland
Bangkok Thailand Cairo Egypt
Bangkok Thailand Canberra Australia
Bangkok Thailand Hanoi Vietnam
Bangkok Thailand Havana Cuba
Bangkok Thailand Helsinki Finland
Bangkok Thailand Islamabad Pakistan
Bangkok Thailand Kabul Afghanistan
Bangkok Thailand London England
Bangkok Thailand Madrid Spain
Bangkok Thailand Moscow Russia
Bangkok Thailand Oslo Norway
Bangkok Thailand Ottawa Canada
Bangkok Thailand Paris France
Bangkok Thailand Rome Italy
Bangkok Thailand Stockholm Sweden
Bangkok Thailand Tehran Iran
Bangkok Thailand Tokyo Japan
Bangkok Thailand Athens Greece
Bangkok Thailand Baghdad Iraq
Beijing China Berlin Germany
Beijing China Bern Switzerland
Beijing China Cairo Egypt
Beijing China Canberra Australia
Beijing China Hanoi Vietnam
Beijing China Havana Cuba
Beijing China Helsinki Finland
Beijing China Islamabad Pakistan
Beijing China Kabul Afghanistan
Beijing China London England
Beijing China Madrid Spain
Beijing China Moscow Russia
Beijing China Oslo Norway
Beijing China Ottawa Canada
Beijing China Paris France
Beijing China Rome Italy
Beijing China Stockholm Sweden
Beijing China Tehran Iran
Beijing China Tokyo Japan
Beijing China Athens Greece
Beijing China Baghdad Iraq
Beijing China Bangkok Thailand
Berlin Germany Bern Switzerland
Berlin Germany Cairo Egypt
Berlin Germany Canberra Australia
Berlin Germany Hanoi Vietnam
Berlin Germany Havana Cuba
Berlin Germany Helsinki Finland
Berlin Germany Islamabad Pakistan
Berlin Germany Kabul Afghanistan
Berlin Germany London England
Berlin Germany Madrid Spain
Berlin Germany Moscow Russia
Berlin Germany Oslo Norway
Berlin Germany Ottawa Canada
Berlin Germany Paris France
Berlin Germany Rome Italy
Berlin Germany Stockholm Sweden
Berlin Germany Tehran Iran
Berlin Germany Tokyo Japan
Berlin Germany Athens Greece
Berlin Germany Baghdad Iraq
Berlin Germany Bangkok Thailand
Berlin Germany Beijing China
Bern Switzerland Cairo Egypt
Bern Switzerland Canberra Australia
Bern Switzerland Hanoi Vietnam
Bern Switzerland Havana Cuba
Bern Switzerland Helsinki Finland
Bern Switzerland Islamabad Pakistan
Bern Switzerland Kabul Afghanistan
Bern Switzerland London England
Bern Switzerland Madrid Spain
Bern Switzerland Moscow Russia
Bern Switzerland Oslo Norway
Bern Switzerland Ottawa Canada
Bern Switzerland Paris France
Bern Switzerland Rome Italy
Bern Switzerland Stockholm Sweden
Bern Switzerland Tehran Iran
Bern Switzerland Tokyo Japan
Bern Switzerland Athens Greece
Bern Switzerland Baghdad Iraq
Bern Switzerland Bangkok Thailand
Bern Switzerland Beijing China
Bern Switzerland Berlin Germany
Cairo Egypt Canberra Australia
Cairo Egypt Hanoi Vietnam
Cairo Egypt Havana Cuba
Cairo Egypt Helsinki Finland
Cairo Egypt Islamabad Pakistan
Cairo Egypt Kabul Afghanistan
Cairo Egypt London England
Cairo Egypt Madrid Spain
Cairo Egypt Moscow Russia
Cairo Egypt Oslo Norway
Cairo Egypt Ottawa Canada
Cairo Egypt Paris France
Cairo Egypt Rome Italy
Cairo Egypt Stockholm Sweden
Cairo Egypt Tehran Iran
Cairo Egypt Tokyo Japan
Cairo Egypt Athens Greece
Cairo Egypt Baghdad Iraq
Cairo Egypt Bangkok Thailand
Cairo Egypt Beijing China
Cairo Egypt Berlin Germany
Cairo Egypt Bern Switzerland
Canberra Australia Hanoi Vietnam
Canberra Australia Havana Cuba
Canberra Australia Helsinki Finland
Canberra Australia Islamabad Pakistan
Canberra Australia Kabul Afghanistan
Canberra Australia London England
Canberra Australia Madrid Spain
Canberra Australia Moscow Russia
Canberra Australia Oslo Norway
Canberra Australia Ottawa Canada
Canberra Australia Paris France
Canberra Australia Rome Italy
Canberra Australia Stockholm Sweden
Canberra Australia Tehran Iran
Canberra Australia Tokyo Japan
Canberra Australia Athens Greece
Canberra Australia Baghdad Iraq
Canberra Australia Bangkok Thailand
Canberra Australia Beijing China
Canberra Australia Berlin Germany
Canberra Australia Bern Switzerland
Canberra Australia Cairo Egypt
Hanoi Vietnam Havana Cuba
Hanoi Vietnam Helsinki Finland
Hanoi Vietnam Islamabad Pakistan
Hanoi Vietnam Kabul Afghanistan
Hanoi Vietnam London England
Hanoi Vietnam Madrid Spain
Hanoi Vietnam Moscow Russia
Hanoi Vietnam Oslo Norway
Hanoi Vietnam Ottawa Canada
Hanoi Vietnam Paris France
Hanoi Vietnam Rome Italy
Hanoi Vietnam Stockholm Sweden
Hanoi Vietnam Tehran Iran
Hanoi Vietnam Tokyo Japan
Hanoi Vietnam Athens Greece
Hanoi Vietnam Baghdad Iraq
Hanoi Vietnam Bangkok Thailand
Hanoi Vietnam Beijing China
Hanoi Vietnam Berlin Germany
Hanoi Vietnam Bern Switzerland
Hanoi Vietnam Cairo Egypt
Hanoi Vietnam Canberra Australia
Havana Cuba Helsinki Finland
Havana Cuba Islamabad Pakistan
183648 183648 183648 183648 65918 183648 94834 93002 71149 183648 89518 183648 68731 183648 183648 63766 183648 183648 63766 183648 183648 63690 63766 83291 183648 183648 63766 183648 183648 183648 63766 65918 183648 73068 71149 183648 183648 183648 65370 183648 183648 67665 63945 93281 71149 183648 139630 183648 183648 63766 183648 183648 183648 183648 65062 110843 175871 94491 183648 183648 183648 183648 89277 183648 78014 183648 183648 63766 69529 183648 183648 183648 102013 63766 139449 183648 183648 113280 87363 64479 183648 183648 183648 183648 183648 183648 63766 183648 93281 183648 64068 183648 63690 183648 183648 183648 183648 71353 64068 183648 183648 183648 102334 97824 139630 183648 110843 183648 183648 183648 183648 183648 93281 183648 63766 183648 183648 183648 183648 183648 63766 67946 63766 69529 183648 183648 74700 183648 64292 183648 97584 64890 112995 125717 183648 183648 183648 65927 183648 183648 183648 64366 183648 183648 183648 68450 183648 102334 64068 183648 183648 183648 183648 183648 63690 183648 183648 183648 183648 183648 183648 183648 129534 92829 183648 183648 183648 183648 183648 183648 66719 63690 78014 183648 97719 183648 183648 74700 183648 183648 183648 102334 183648 183648 183648 183648 110843 80622 183648 183648 183648 102334 118096 183648 183648 183648 183648 208852 67073 183648 183648 125996 95715 66971 183648 183648 183648 97660 65663 98873 183648 183648 70581 183648 183648 183648 71353 183648 110843 183648 95715 66971 183648 67073 183648 97660 183648 183648 95715 183648 63690 183648 183648 63690 183648 99530 93281 183648 183648 183648 70654 183648 68437 183648 183648 183648 183648 74700 183648 183648 183648 63690 183648 183648 183648 93281 183648 183648 106973 75457 183648 63690 183648 183648 183648 183648 183648 183648 183648 183648 67438 63690 66087 76115 183648 183648 183648 183648 183648 183648 183648 63766 63766 183648 183648 63690 183648 80719 183648 183648 183648 183648 102013 183648 89431 183648 71932 183648 125996 183648 99901 213612 183648 183648 183648 183648 183648 213612 183648 63766 183648 183648 63766 67946 65113 183648 63690 183648 63690 183648 63766 183648 183648 63766 75094 67438 183648 63766 183648 63766 183648 183648 63856 79145 183648 183648 183648 64068 183648 183648 183648 183648 183648 183648 183648 183648 183648 183648 183648 183648 64649 183648 183648 183648 183648 183648 93281 63766 183648 183648 63689 67438 105276 118096 82908 93281 63766 72124 63702 68692 183648 183648 183648 88510
#! /bin/bash
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# download train_data
mkdir raw_data
wget --no-check-certificate https://paddlerec.bj.bcebos.com/word2vec/1-billion-word-language-modeling-benchmark-r13output.tar
tar xvf 1-billion-word-language-modeling-benchmark-r13output.tar
mv 1-billion-word-language-modeling-benchmark-r13output/training-monolingual.tokenized.shuffled/ raw_data/
# preprocess data
python preprocess.py --build_dict --build_dict_corpus_dir raw_data/training-monolingual.tokenized.shuffled --dict_path raw_data/word_count_dict.txt --ngrams_path raw_data/word_ngrams.txt
python preprocess.py --filter_corpus --dict_path raw_data/word_count_dict.txt --word_id_path raw_data/word_id_dict.txt --input_corpus_dir raw_data/training-monolingual.tokenized.shuffled --output_corpus_dir raw_data/convert_text8 --ngrams_id_path raw_data/word_ngrams_id.txt --ngrams_path raw_data/word_ngrams.txt --min_count 5 --downsample 0.001
mv raw_data/word_count_dict.txt data/dict/
mv raw_data/word_id_dict.txt data/dict/
mv raw_data/word_ngrams.txt data/dict/
mv raw_data/word_ngrams_id.txt data/dict/
rm -rf data/train/*
rm -rf data/test/*
python preprocess.py --data_resplit --file_nums 24 --input_corpus_dir=raw_data/convert_text8 --output_corpus_dir=data/train
# download test data
wget --no-check-certificate https://paddlerec.bj.bcebos.com/word2vec/test_dir.tar
tar xzvf test_dir.tar -C raw_data
mv raw_data/data/test_dir/* data/test/
rm -rf raw_data
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import six
from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
class TrainReader(Reader):
def init(self):
dict_path = envs.get_global_env(
"dataset.dataset_infer.word_id_dict_path")
self.min_n = envs.get_global_env("hyper_parameters.min_n")
self.max_n = envs.get_global_env("hyper_parameters.max_n")
self.word_to_id = dict()
self.id_to_word = dict()
with io.open(dict_path, 'r', encoding='utf-8') as f:
for line in f:
self.word_to_id[line.split(' ')[0]] = int(line.split(' ')[1])
self.id_to_word[int(line.split(' ')[1])] = line.split(' ')[0]
self.dict_size = len(self.word_to_id)
def computeSubwords(self, word):
ngrams = set()
for i in range(len(word) - self.min_n + 1):
for j in range(self.min_n, self.max_n + 1):
end = min(len(word), i + j)
ngrams.add("".join(word[i:end]))
return list(ngrams)
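# Example (min_n=3, max_n=5): computeSubwords("<cat>") yields the fastText
# character n-grams "<ca", "<cat", "<cat>", "cat", "cat>", "at>" (set order
# may vary); generate_sample() below appends these subword ids after the
# word id itself.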
def native_to_unicode(self, s):
if self._is_unicode(s):
return s
try:
return self._to_unicode(s)
except UnicodeDecodeError:
res = self._to_unicode(s, ignore_errors=True)
return res
def _is_unicode(self, s):
if six.PY2:
if isinstance(s, unicode):
return True
else:
if isinstance(s, str):
return True
return False
def _to_unicode(self, s, ignore_errors=False):
if self._is_unicode(s):
return s
error_mode = "ignore" if ignore_errors else "strict"
return s.decode("utf-8", errors=error_mode)
def strip_lines(self, line, vocab):
return self._replace_oov(vocab, self.native_to_unicode(line))
def _replace_oov(self, original_vocab, line):
"""Replace out-of-vocab words with "<UNK>".
This maintains compatibility with published results.
Args:
original_vocab: a set of strings (The standard vocabulary for the dataset)
line: a unicode string - a space-delimited sequence of words.
Returns:
a unicode string - a space-delimited sequence of words.
"""
return u" ".join([
"<" + word + ">"
if "<" + word + ">" in original_vocab else u"<UNK>"
for word in line.split()
])
def generate_sample(self, line):
def reader():
if ':' in line:
pass
features = self.strip_lines(line.lower(), self.word_to_id)
features = features.split()
inputs = []
for item in features:
if item == "<UNK>":
inputs.append([self.word_to_id[item]])
else:
ngrams = self.computeSubwords(item)
res = []
res.append(self.word_to_id[item])
for _ in ngrams:
res.append(self.word_to_id[_])
inputs.append(res)
yield [('analogy_a', inputs[0]), ('analogy_b', inputs[1]),
('analogy_c', inputs[2]), ('analogy_d', inputs[3][0:1])]
return reader
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import paddle.fluid as fluid
from paddlerec.core.utils import envs
from paddlerec.core.model import Model as ModelBase
class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
def _init_hyper_parameters(self):
self.is_distributed = True if envs.get_trainer(
) == "CtrTrainer" else False
self.sparse_feature_number = envs.get_global_env(
"hyper_parameters.sparse_feature_number")
self.sparse_feature_dim = envs.get_global_env(
"hyper_parameters.sparse_feature_dim")
self.neg_num = envs.get_global_env("hyper_parameters.neg_num")
self.with_shuffle_batch = envs.get_global_env(
"hyper_parameters.with_shuffle_batch")
self.learning_rate = envs.get_global_env(
"hyper_parameters.optimizer.learning_rate")
self.decay_steps = envs.get_global_env(
"hyper_parameters.optimizer.decay_steps")
self.decay_rate = envs.get_global_env(
"hyper_parameters.optimizer.decay_rate")
def input_data(self, is_infer=False, **kwargs):
if is_infer:
analogy_a = fluid.data(
name="analogy_a", shape=[None, 1], lod_level=1, dtype='int64')
analogy_b = fluid.data(
name="analogy_b", shape=[None, 1], lod_level=1, dtype='int64')
analogy_c = fluid.data(
name="analogy_c", shape=[None, 1], lod_level=1, dtype='int64')
analogy_d = fluid.data(
name="analogy_d", shape=[None, 1], dtype='int64')
return [analogy_a, analogy_b, analogy_c, analogy_d]
input_word = fluid.data(
name="input_word", shape=[None, 1], lod_level=1, dtype='int64')
true_word = fluid.data(
name='true_label', shape=[None, 1], lod_level=1, dtype='int64')
if self.with_shuffle_batch:
return [input_word, true_word]
neg_word = fluid.data(
name="neg_label", shape=[None, self.neg_num], dtype='int64')
return [input_word, true_word, neg_word]
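# Note: lod_level=1 marks variable-length (LoD) inputs: each center word
# carries its word id plus a variable number of n-gram ids, which
# sequence_pool(pool_type='average') in net() collapses into one vector.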
def net(self, inputs, is_infer=False):
if is_infer:
self.infer_net(inputs)
return
def embedding_layer(input,
table_name,
initializer_instance=None,
sequence_pool=False):
emb = fluid.embedding(
input=input,
is_sparse=True,
is_distributed=self.is_distributed,
size=[self.sparse_feature_number, self.sparse_feature_dim],
param_attr=fluid.ParamAttr(
name=table_name, initializer=initializer_instance), )
if sequence_pool:
emb = fluid.layers.sequence_pool(
input=emb, pool_type='average')
return emb
init_width = 1.0 / self.sparse_feature_dim
emb_initializer = fluid.initializer.Uniform(-init_width, init_width)
emb_w_initializer = fluid.initializer.Constant(value=0.0)
input_emb = embedding_layer(inputs[0], "emb", emb_initializer, True)
input_emb = fluid.layers.squeeze(input=input_emb, axes=[1])
true_emb_w = embedding_layer(inputs[1], "emb_w", emb_w_initializer,
True)
true_emb_w = fluid.layers.squeeze(input=true_emb_w, axes=[1])
if self.with_shuffle_batch:
neg_emb_w_list = []
for i in range(self.neg_num):
neg_emb_w_list.append(
fluid.contrib.layers.shuffle_batch(
true_emb_w)) # shuffle true_word
neg_emb_w_concat = fluid.layers.concat(neg_emb_w_list, axis=0)
neg_emb_w = fluid.layers.reshape(
neg_emb_w_concat,
shape=[-1, self.neg_num, self.sparse_feature_dim])
else:
neg_emb_w = embedding_layer(inputs[2], "emb_w", emb_w_initializer)
true_logits = fluid.layers.reduce_sum(
fluid.layers.elementwise_mul(input_emb, true_emb_w),
dim=1,
keep_dim=True)
input_emb_re = fluid.layers.reshape(
input_emb, shape=[-1, 1, self.sparse_feature_dim])
neg_matmul = fluid.layers.matmul(
input_emb_re, neg_emb_w, transpose_y=True)
neg_logits = fluid.layers.reshape(neg_matmul, shape=[-1, 1])
logits = fluid.layers.concat([true_logits, neg_logits], axis=0)
label_ones = fluid.layers.fill_constant(
shape=[fluid.layers.shape(true_logits)[0], 1],
value=1.0,
dtype='float32')
label_zeros = fluid.layers.fill_constant(
shape=[fluid.layers.shape(neg_logits)[0], 1],
value=0.0,
dtype='float32')
label = fluid.layers.concat([label_ones, label_zeros], axis=0)
loss = fluid.layers.log_loss(fluid.layers.sigmoid(logits), label)
avg_cost = fluid.layers.reduce_sum(loss)
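# This is the skip-gram negative-sampling objective: the true
# (center, context) pair is labeled 1 and each of the neg_num sampled
# pairs 0, i.e. loss = -log sigmoid(u_true.v) - sum_k log(1 - sigmoid(u_k.v))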
global_right_cnt = fluid.layers.create_global_var(
name="global_right_cnt",
persistable=True,
dtype='float32',
shape=[1],
value=0)
global_total_cnt = fluid.layers.create_global_var(
name="global_total_cnt",
persistable=True,
dtype='float32',
shape=[1],
value=0)
global_right_cnt.stop_gradient = True
global_total_cnt.stop_gradient = True
self._cost = avg_cost
self._metrics["LOSS"] = avg_cost
def optimizer(self):
optimizer = fluid.optimizer.SGD(
learning_rate=fluid.layers.exponential_decay(
learning_rate=self.learning_rate,
decay_steps=self.decay_steps,
decay_rate=self.decay_rate,
staircase=True))
return optimizer
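# With staircase=True the schedule is piecewise-constant:
#   lr(step) = learning_rate * decay_rate ** (step // decay_steps)
# e.g. the config above (1.0, 0.999, 100000) lowers the rate by ~0.1%
# every 100k mini-batches.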
def infer_net(self, inputs):
def embedding_layer(input,
table_name,
initializer_instance=None,
sequence_pool=False):
emb = fluid.embedding(
input=input,
size=[self.sparse_feature_number, self.sparse_feature_dim],
param_attr=table_name)
if sequence_pool:
emb = fluid.layers.sequence_pool(
input=emb, pool_type='average')
return emb
all_label = np.arange(self.sparse_feature_number).reshape(
self.sparse_feature_number).astype('int32')
self.all_label = fluid.layers.cast(
x=fluid.layers.assign(all_label), dtype='int64')
emb_all_label = embedding_layer(self.all_label, "emb")
emb_a = embedding_layer(inputs[0], "emb", sequence_pool=True)
emb_b = embedding_layer(inputs[1], "emb", sequence_pool=True)
emb_c = embedding_layer(inputs[2], "emb", sequence_pool=True)
target = fluid.layers.elementwise_add(
fluid.layers.elementwise_sub(emb_b, emb_a), emb_c)
emb_all_label_l2 = fluid.layers.l2_normalize(x=emb_all_label, axis=1)
dist = fluid.layers.matmul(
x=target, y=emb_all_label_l2, transpose_y=True)
values, pred_idx = fluid.layers.topk(input=dist, k=4)
label = fluid.layers.expand(inputs[3], expand_times=[1, 4])
label_ones = fluid.layers.fill_constant_batch_size_like(
label, shape=[-1, 1], value=1.0, dtype='float32')
right_cnt = fluid.layers.reduce_sum(input=fluid.layers.cast(
fluid.layers.equal(pred_idx, label), dtype='float32'))
total_cnt = fluid.layers.reduce_sum(label_ones)
global_right_cnt = fluid.layers.create_global_var(
name="global_right_cnt",
persistable=True,
dtype='float32',
shape=[1],
value=0)
global_total_cnt = fluid.layers.create_global_var(
name="global_total_cnt",
persistable=True,
dtype='float32',
shape=[1],
value=0)
global_right_cnt.stop_gradient = True
global_total_cnt.stop_gradient = True
tmp1 = fluid.layers.elementwise_add(right_cnt, global_right_cnt)
fluid.layers.assign(tmp1, global_right_cnt)
tmp2 = fluid.layers.elementwise_add(total_cnt, global_total_cnt)
fluid.layers.assign(tmp2, global_total_cnt)
acc = fluid.layers.elementwise_div(
global_right_cnt, global_total_cnt, name="total_acc")
self._infer_results['acc'] = acc
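infer_net above scores analogies "a : b :: c : ?" by offsetting embeddings and taking the top-4 cosine neighbors; a minimal numpy sketch of the same arithmetic, using a made-up toy embedding table:
```python
import numpy as np

emb = np.random.rand(10, 4).astype("float32")  # hypothetical [vocab, dim] table
a, b, c, d = 1, 2, 3, 4                        # analogy word ids

target = (emb[b] - emb[a]) + emb[c]
emb_l2 = emb / np.linalg.norm(emb, axis=1, keepdims=True)  # l2_normalize(axis=1)
scores = emb_l2 @ target                       # matmul(..., transpose_y=True)
pred_idx = np.argsort(-scores)[:4]             # fluid.layers.topk(input=dist, k=4)
print(d in pred_idx)  # the analogy counts as correct when d lands in the top-4
```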
# -*- coding: utf-8 -*
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import math
import os
import random
import re
import six
import argparse
prog = re.compile("[^a-z ]", flags=0)
def parse_args():
parser = argparse.ArgumentParser(
description="Paddle Fluid word2 vector preprocess")
parser.add_argument(
'--build_dict_corpus_dir', type=str, help="The dir of corpus")
parser.add_argument(
'--input_corpus_dir', type=str, help="The dir of input corpus")
parser.add_argument(
'--output_corpus_dir', type=str, help="The dir of output corpus")
parser.add_argument(
'--dict_path',
type=str,
default='./dict',
help="The path of dictionary ")
parser.add_argument(
'--word_id_path',
type=str,
default='./word_id',
help="The path of word_id ")
parser.add_argument(
'--ngrams_path',
type=str,
default='./word_ngrams',
help="The path of word_ngrams ")
parser.add_argument(
'--ngrams_id_path',
type=str,
default='./word_ngrams_id',
help="The path of word_ngrams_id ")
parser.add_argument(
'--min_count',
type=int,
default=5,
help="If the word count is less then min_count, it will be removed from dict"
)
parser.add_argument('--min_n', type=int, default=3, help="min_n of ngrams")
parser.add_argument('--max_n', type=int, default=5, help="max_n of ngrams")
parser.add_argument(
'--file_nums',
type=int,
default=1024,
help="re-split input corpus file nums")
parser.add_argument(
'--downsample',
type=float,
default=0.001,
help="filter word by downsample")
parser.add_argument(
'--filter_corpus',
action='store_true',
default=False,
help='Filter corpus')
parser.add_argument(
'--build_dict',
action='store_true',
default=False,
help='Build dict from corpus')
parser.add_argument(
'--data_resplit',
action='store_true',
default=False,
help='re-split input corpus files')
return parser.parse_args()
def text_strip(text):
# English Preprocess Rule
return prog.sub("", text.lower())
# Shameless copy from Tensorflow https://github.com/tensorflow/tensor2tensor/blob/master/tensor2tensor/data_generators/text_encoder.py
# Unicode utility functions that work with Python 2 and 3
def native_to_unicode(s):
if _is_unicode(s):
return s
try:
return _to_unicode(s)
except UnicodeDecodeError:
res = _to_unicode(s, ignore_errors=True)
return res
def _is_unicode(s):
if six.PY2:
if isinstance(s, unicode):
return True
else:
if isinstance(s, str):
return True
return False
def _to_unicode(s, ignore_errors=False):
if _is_unicode(s):
return s
error_mode = "ignore" if ignore_errors else "strict"
return s.decode("utf-8", errors=error_mode)
def filter_corpus(args):
"""
filter corpus and convert id.
"""
word_count = dict()
word_to_id_ = dict()
word_all_count = 0
id_counts = []
word_id = 0
# read dict
with io.open(args.dict_path, 'r', encoding='utf-8') as f:
for line in f:
word, count = line.split()[0], int(line.split()[1])
word_count[word] = count
word_to_id_[word] = word_id
word_id += 1
id_counts.append(count)
word_all_count += count
word_ngrams = dict()
with io.open(args.ngrams_path, 'r', encoding='utf-8') as f:
for line in f:
word, ngrams = line.rstrip().split(':')
ngrams = ngrams.split()
ngrams = [str(word_to_id_[_]) for _ in ngrams]
word_ngrams[word_to_id_[word]] = ' '.join(ngrams)
with io.open(args.ngrams_id_path, 'w+', encoding='utf-8') as fid:
for k, v in word_ngrams.items():
fid.write(u'{} {}\n'.format(k, v))
# write word2id file
print("write word2id file to : " + args.dict_path + "_word_to_id_")
with io.open(args.word_id_path, 'w+', encoding='utf-8') as fid:
for k, v in word_to_id_.items():
fid.write(k + " " + str(v) + '\n')
# filter corpus and convert id
if not os.path.exists(args.output_corpus_dir):
os.makedirs(args.output_corpus_dir)
for file in os.listdir(args.input_corpus_dir):
with io.open(args.output_corpus_dir + '/convert_' + file + '.csv',
"w") as wf:
with io.open(
args.input_corpus_dir + '/' + file,
encoding='utf-8') as rf:
print(args.input_corpus_dir + '/' + file)
for line in rf:
signal = False
line = text_strip(line)
words = line.split()
write_line = ""
for item in words:
if item in word_count:
idx = word_to_id_[item]
else:
idx = word_to_id_[native_to_unicode('<UNK>')]
count_w = id_counts[idx]
corpus_size = word_all_count
keep_prob = (
math.sqrt(count_w /
(args.downsample * corpus_size)) + 1
) * (args.downsample * corpus_size) / count_w
r_value = random.random()
if r_value > keep_prob:
continue
write_line += str(idx)
write_line += " "
signal = True
if signal:
write_line = write_line[:-1] + "\n"
wf.write(_to_unicode(write_line))
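# keep_prob above is word2vec's frequency subsampling rule:
#   keep_prob(w) = (sqrt(f(w) / t) + 1) * t / f(w), f(w) = count(w) / corpus_size
# with t = --downsample (default 0.001); very frequent words are randomly
# dropped, which speeds training and improves the vectors of rarer words.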
def computeSubwords(word, min_n, max_n):
ngrams = set()
for i in range(len(word) - min_n + 1):
for j in range(min_n, max_n + 1):
end = min(len(word), i + j)
ngrams.add("".join(word[i:end]))
return list(ngrams)
def build_dict(args):
"""
preprocess the data, generate dictionary and save into dict_path.
:param corpus_dir: the input data dir.
:param dict_path: the generated dict path. the data in dict is "word count"
:param min_count:
:return:
"""
# word to count
word_count = dict()
for file in os.listdir(args.build_dict_corpus_dir):
with io.open(
args.build_dict_corpus_dir + "/" + file,
encoding='utf-8') as f:
print("build dict : ", args.build_dict_corpus_dir + "/" + file)
for line in f:
line = text_strip(line)
words = line.split()
for item in words:
item = '<' + item + '>'
if item in word_count:
word_count[item] = word_count[item] + 1
else:
word_count[item] = 1
item_to_remove = []
for item in word_count:
if word_count[item] <= args.min_count:
item_to_remove.append(item)
unk_sum = 0
for item in item_to_remove:
unk_sum += word_count[item]
del word_count[item]
# sort by count
word_count[native_to_unicode('<UNK>')] = unk_sum
word_ngrams = dict()
ngrams_count = dict()
for item in word_count:
ngrams = computeSubwords(item, args.min_n, args.max_n)
word_ngrams[item] = ngrams
for sub_word in ngrams:
if sub_word not in ngrams_count:
ngrams_count[sub_word] = 1
else:
ngrams_count[sub_word] = ngrams_count[sub_word] + 1
ngrams_count = sorted(
ngrams_count.items(), key=lambda ngrams_count: -ngrams_count[1])
word_count = sorted(
word_count.items(), key=lambda word_count: -word_count[1])
with io.open(args.dict_path, 'w+', encoding='utf-8') as f:
for k, v in word_count:
f.write(k + " " + str(v) + '\n')
for k, v in ngrams_count:
f.write(k + " " + str(v) + '\n')
with io.open(args.ngrams_path, 'w+', encoding='utf-8') as f:
for key in word_ngrams:
f.write(key + ":")
f.write(" ".join(word_ngrams[key]))
f.write(u'\n')
def data_split(args):
raw_data_dir = args.input_corpus_dir
new_data_dir = args.output_corpus_dir
if not os.path.exists(new_data_dir):
os.mkdir(new_data_dir)
files = os.listdir(raw_data_dir)
print(files)
index = 0
contents = []
for file_ in files:
with open(os.path.join(raw_data_dir, file_), 'r') as f:
contents.extend(f.readlines())
num = int(args.file_nums)
# ceiling division keeps the slice indices below integral (py3) and
# distributes the remainder lines instead of silently dropping them
lines_per_file = (len(contents) + num - 1) // num
print("contents: ", str(len(contents)))
print("lines_per_file: ", str(lines_per_file))
for i in range(1, num + 1):
with open(os.path.join(new_data_dir, "part_" + str(i)), 'w') as fout:
data = contents[(i - 1) * lines_per_file:min(i * lines_per_file,
len(contents))]
for line in data:
fout.write(line)
if __name__ == "__main__":
args = parse_args()
if args.build_dict:
build_dict(args)
elif args.filter_corpus:
filter_corpus(args)
elif args.data_resplit:
data_split(args)
else:
print(
"error command line, please choose --build_dict or --filter_corpus")
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import numpy as np
from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
class NumpyRandomInt(object):
def __init__(self, a, b, buf_size=1000):
self.idx = 0
# randint's upper bound is exclusive, hence b + 1
# (np.random.random_integers is deprecated)
self.buffer = np.random.randint(a, b + 1, buf_size)
self.a = a
self.b = b
def __call__(self):
if self.idx == len(self.buffer):
self.buffer = np.random.randint(self.a, self.b + 1,
len(self.buffer))
self.idx = 0
result = self.buffer[self.idx]
self.idx += 1
return result
class TrainReader(Reader):
def init(self):
dict_path = envs.get_global_env(
"dataset.dataset_train.word_count_dict_path")
word_ngrams_path = envs.get_global_env(
"dataset.dataset_train.word_ngrams_path")
self.window_size = envs.get_global_env("hyper_parameters.window_size")
self.neg_num = envs.get_global_env("hyper_parameters.neg_num")
self.with_shuffle_batch = envs.get_global_env(
"hyper_parameters.with_shuffle_batch")
self.random_generator = NumpyRandomInt(1, self.window_size + 1)
self.word_ngrams = dict()
with io.open(word_ngrams_path, 'r', encoding='utf-8') as f:
for line in f:
line = line.rstrip().split()
# materialize the map() result; a py3 iterator would be exhausted after
# its first use in generate_sample()
self.word_ngrams[str(line[0])] = list(map(int, line[1:]))
self.cs = None
if not self.with_shuffle_batch:
id_counts = []
word_all_count = 0
with io.open(dict_path, 'r', encoding='utf-8') as f:
for line in f:
word, count = line.split()[0], int(line.split()[1])
id_counts.append(count)
word_all_count += count
id_frequencys = [
float(count) / word_all_count for count in id_counts
]
np_power = np.power(np.array(id_frequencys), 0.75)
id_frequencys_pow = np_power / np_power.sum()
self.cs = np.array(id_frequencys_pow).cumsum()
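# self.cs is the CDF of the unigram distribution raised to the 3/4 power
# (word2vec's smoothing); drawing negatives is an inverse-CDF lookup,
# self.cs.searchsorted(np.random.sample(k)), exactly as generate_sample()
# does below.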
def get_context_words(self, words, idx):
"""
Get the context word list of target word.
words: the words of the current line
idx: input word index
window_size: window size
"""
target_window = self.random_generator()
start_point = idx - target_window # if (idx - target_window) > 0 else 0
if start_point < 0:
start_point = 0
end_point = idx + target_window
targets = words[start_point:idx] + words[idx + 1:end_point + 1]
return targets
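# Example: words=["w0","w1","w2","w3","w4"], idx=2 and a sampled
# target_window of 1 give ["w1", "w3"]; the window is re-drawn uniformly
# from [1, window_size] for every target word, as in word2vec.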
def generate_sample(self, line):
def reader():
word_ids = [w for w in line.split()]
for idx, target_id in enumerate(word_ids):
input_word = [int(target_id)]
if target_id in self.word_ngrams:
input_word += self.word_ngrams[target_id]
context_word_ids = self.get_context_words(word_ids, idx)
for context_id in context_word_ids:
output = [('input_word', input_word),
('true_label', [int(context_id)])]
if not self.with_shuffle_batch:
neg_array = self.cs.searchsorted(
np.random.sample(self.neg_num))
output += [('neg_label',
[int(str(i)) for i in neg_array])]
yield output
return reader
......@@ -11,46 +11,71 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
evaluate:
workspace: "paddlerec.models.recall.gnn"
reader:
batch_size: 50
class: "{workspace}/evaluate_reader.py"
test_data_path: "{workspace}/data/test"
train:
trainer:
# for cluster training
strategy: "async"
# workspace
workspace: "paddlerec.models.recall.gnn"
epochs: 2
workspace: "paddlerec.models.recall.gnn"
# list of dataset
dataset:
- name: dataset_train # name of dataset to distinguish different datasets
batch_size: 100
type: DataLoader # or QueueDataset
data_path: "{workspace}/data/train"
data_converter: "{workspace}/reader.py"
- name: dataset_infer # name
batch_size: 50
type: DataLoader # or QueueDataset
data_path: "{workspace}/data/test"
data_converter: "{workspace}/evaluate_reader.py"
reader:
batch_size: 100
class: "{workspace}/reader.py"
train_data_path: "{workspace}/data/train"
dataset_class: "DataLoader"
# hyper parameters of user-defined network
hyper_parameters:
optimizer:
class: Adam
learning_rate: 0.001
decay_steps: 3
decay_rate: 0.1
l2: 0.00001
sparse_feature_number: 43098
sparse_feature_dim: 100
corpus_size: 719470
gnn_propogation_steps: 1
model:
models: "{workspace}/model.py"
hyper_parameters:
use_DataLoader: True
config_path: "{workspace}/data/config.txt"
sparse_feature_dim: 100
gnn_propogation_steps: 1
learning_rate: 0.001
l2: 0.00001
decay_steps: 3
decay_rate: 0.1
optimizer: adam
# select runner by name
mode: train_runner
# config of each runner.
# runner is a kind of paddle training class, which wraps the train/infer process.
runner:
- name: train_runner
class: single_train
# num of epochs
epochs: 2
# device to run training or infer
device: cpu
save_checkpoint_interval: 1 # save model interval of epochs
save_inference_interval: 1 # save inference
save_checkpoint_path: "increment" # save checkpoint path
save_inference_path: "inference" # save inference path
save_inference_feed_varnames: [] # feed vars of save inference
save_inference_fetch_varnames: [] # fetch vars of save inference
init_model_path: "" # load model path
fetch_period: 10
- name: infer_runner
class: single_infer
# num of epochs
epochs: 1
# device to run training or infer
device: cpu
fetch_period: 1
init_model_path: "increment/0" # load model path
save:
increment:
dirname: "increment"
epoch_interval: 1
save_last: True
inference:
dirname: "inference"
epoch_interval: 1
save_last: True
# runner will run all the phase in each epoch
phase:
- name: phase1
model: "{workspace}/model.py" # user-defined model
dataset_name: dataset_train # select dataset by name
thread_num: 1
#- name: phase2
# model: "{workspace}/model.py" # user-defined model
# dataset_name: dataset_infer # select dataset by name
# thread_num: 1
......@@ -17,7 +17,7 @@
set -e
echo "begin to download data"
cd raw_data && python download.py
cd data && python download.py
mkdir diginetica
python preprocess.py --dataset diginetica
......@@ -26,8 +26,10 @@ python convert_data.py --data_dir diginetica
cat diginetica/train.txt | wc -l >> diginetica/config.txt
mkdir train_data
mv diginetica/train.txt train_data
rm -rf train && mkdir train
mv diginetica/train.txt train
mkdir test_data
mv diginetica/test.txt test_data
rm -rf test && mkdir test
mv diginetica/test.txt test
mv diginetica/config.txt ./config.txt
......@@ -21,10 +21,10 @@ from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
class EvaluateReader(Reader):
class TrainReader(Reader):
def init(self):
self.batch_size = envs.get_global_env("batch_size", None,
"evaluate.reader")
self.batch_size = envs.get_global_env(
"dataset.dataset_infer.batch_size")
self.input = []
self.length = None
......
......@@ -25,74 +25,65 @@ from paddlerec.core.model import Model as ModelBase
class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
self.init_config()
def init_config(self):
self._fetch_interval = 1
self.items_num, self.ins_num = self.config_read(
envs.get_global_env("hyper_parameters.config_path", None,
self._namespace))
self.train_batch_size = envs.get_global_env("batch_size", None,
"train.reader")
self.evaluate_batch_size = envs.get_global_env("batch_size", None,
"evaluate.reader")
self.hidden_size = envs.get_global_env(
"hyper_parameters.sparse_feature_dim", None, self._namespace)
self.step = envs.get_global_env(
"hyper_parameters.gnn_propogation_steps", None, self._namespace)
def _init_hyper_parameters(self):
self.learning_rate = envs.get_global_env(
"hyper_parameters.optimizer.learning_rate")
self.decay_steps = envs.get_global_env(
"hyper_parameters.optimizer.decay_steps")
self.decay_rate = envs.get_global_env(
"hyper_parameters.optimizer.decay_rate")
self.l2 = envs.get_global_env("hyper_parameters.optimizer.l2")
self.dict_size = envs.get_global_env(
"hyper_parameters.sparse_feature_number")
self.corpus_size = envs.get_global_env("hyper_parameters.corpus_size")
def config_read(self, config_path=None):
if config_path is None:
raise ValueError(
"please set train.model.hyper_parameters.config_path at first")
with open(config_path, "r") as fin:
item_nums = int(fin.readline().strip())
ins_nums = int(fin.readline().strip())
return item_nums, ins_nums
self.train_batch_size = envs.get_global_env(
"dataset.dataset_train.batch_size")
self.evaluate_batch_size = envs.get_global_env(
"dataset.dataset_infer.batch_size")
def input(self, bs):
self.items = fluid.data(
self.hidden_size = envs.get_global_env(
"hyper_parameters.sparse_feature_dim")
self.step = envs.get_global_env(
"hyper_parameters.gnn_propogation_steps")
def input_data(self, is_infer=False, **kwargs):
if is_infer:
bs = self.evaluate_batch_size
else:
bs = self.train_batch_size
items = fluid.data(
name="items", shape=[bs, -1],
dtype="int64") # [batch_size, uniq_max]
self.seq_index = fluid.data(
seq_index = fluid.data(
name="seq_index", shape=[bs, -1, 2],
dtype="int32") # [batch_size, seq_max, 2]
self.last_index = fluid.data(
last_index = fluid.data(
name="last_index", shape=[bs, 2], dtype="int32") # [batch_size, 2]
self.adj_in = fluid.data(
adj_in = fluid.data(
name="adj_in", shape=[bs, -1, -1],
dtype="float32") # [batch_size, seq_max, seq_max]
self.adj_out = fluid.data(
adj_out = fluid.data(
name="adj_out", shape=[bs, -1, -1],
dtype="float32") # [batch_size, seq_max, seq_max]
self.mask = fluid.data(
mask = fluid.data(
name="mask", shape=[bs, -1, 1],
dtype="float32") # [batch_size, seq_max, 1]
self.label = fluid.data(
label = fluid.data(
name="label", shape=[bs, 1], dtype="int64") # [batch_size, 1]
res = [
self.items, self.seq_index, self.last_index, self.adj_in,
self.adj_out, self.mask, self.label
]
res = [items, seq_index, last_index, adj_in, adj_out, mask, label]
return res
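# Positional map used throughout net(): inputs[0]=items, inputs[1]=seq_index,
# inputs[2]=last_index, inputs[3]=adj_in, inputs[4]=adj_out, inputs[5]=mask,
# inputs[6]=label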
def train_input(self):
res = self.input(self.train_batch_size)
self._data_var = res
use_dataloader = envs.get_global_env("hyper_parameters.use_DataLoader",
False, self._namespace)
def net(self, inputs, is_infer=False):
if is_infer:
bs = self.evaluate_batch_size
else:
bs = self.train_batch_size
if self._platform != "LINUX" or use_dataloader:
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var,
capacity=256,
use_double_buffer=False,
iterable=False)
def net(self, items_num, hidden_size, step, bs):
stdv = 1.0 / math.sqrt(hidden_size)
stdv = 1.0 / math.sqrt(self.hidden_size)
def embedding_layer(input,
table_name,
......@@ -100,22 +91,22 @@ class Model(ModelBase):
initializer_instance=None):
emb = fluid.embedding(
input=input,
size=[items_num, emb_dim],
size=[self.dict_size, emb_dim],
param_attr=fluid.ParamAttr(
name=table_name, initializer=initializer_instance), )
name=table_name, initializer=initializer_instance))
return emb
sparse_initializer = fluid.initializer.Uniform(low=-stdv, high=stdv)
items_emb = embedding_layer(self.items, "emb", hidden_size,
items_emb = embedding_layer(inputs[0], "emb", self.hidden_size,
sparse_initializer)
pre_state = items_emb
for i in range(step):
for i in range(self.step):
pre_state = layers.reshape(
x=pre_state, shape=[bs, -1, hidden_size])
x=pre_state, shape=[bs, -1, self.hidden_size])
state_in = layers.fc(
input=pre_state,
name="state_in",
size=hidden_size,
size=self.hidden_size,
act=None,
num_flatten_dims=2,
param_attr=fluid.ParamAttr(
......@@ -127,7 +118,7 @@ class Model(ModelBase):
state_out = layers.fc(
input=pre_state,
name="state_out",
size=hidden_size,
size=self.hidden_size,
act=None,
num_flatten_dims=2,
param_attr=fluid.ParamAttr(
......@@ -137,33 +128,34 @@ class Model(ModelBase):
initializer=fluid.initializer.Uniform(
low=-stdv, high=stdv))) # [batch_size, uniq_max, h]
state_adj_in = layers.matmul(self.adj_in,
state_adj_in = layers.matmul(inputs[3],
state_in) # [batch_size, uniq_max, h]
state_adj_out = layers.matmul(
self.adj_out, state_out) # [batch_size, uniq_max, h]
inputs[4], state_out) # [batch_size, uniq_max, h]
gru_input = layers.concat([state_adj_in, state_adj_out], axis=2)
gru_input = layers.reshape(
x=gru_input, shape=[-1, hidden_size * 2])
x=gru_input, shape=[-1, self.hidden_size * 2])
gru_fc = layers.fc(input=gru_input,
name="gru_fc",
size=3 * hidden_size,
size=3 * self.hidden_size,
bias_attr=False)
pre_state, _, _ = fluid.layers.gru_unit(
input=gru_fc,
hidden=layers.reshape(
x=pre_state, shape=[-1, hidden_size]),
size=3 * hidden_size)
x=pre_state, shape=[-1, self.hidden_size]),
size=3 * self.hidden_size)
final_state = layers.reshape(pre_state, shape=[bs, -1, hidden_size])
seq = layers.gather_nd(final_state, self.seq_index)
last = layers.gather_nd(final_state, self.last_index)
final_state = layers.reshape(
pre_state, shape=[bs, -1, self.hidden_size])
seq = layers.gather_nd(final_state, inputs[1])
last = layers.gather_nd(final_state, inputs[2])
seq_fc = layers.fc(
input=seq,
name="seq_fc",
size=hidden_size,
size=self.hidden_size,
bias_attr=False,
act=None,
num_flatten_dims=2,
......@@ -171,7 +163,7 @@ class Model(ModelBase):
low=-stdv, high=stdv))) # [batch_size, seq_max, h]
last_fc = layers.fc(input=last,
name="last_fc",
size=hidden_size,
size=self.hidden_size,
bias_attr=False,
act=None,
num_flatten_dims=1,
......@@ -184,7 +176,7 @@ class Model(ModelBase):
add = layers.elementwise_add(seq_fc_t,
last_fc) # [seq_max, batch_size, h]
b = layers.create_parameter(
shape=[hidden_size],
shape=[self.hidden_size],
dtype='float32',
default_initializer=fluid.initializer.Constant(value=0.0)) # [h]
add = layers.elementwise_add(add, b) # [seq_max, batch_size, h]
......@@ -202,7 +194,7 @@ class Model(ModelBase):
bias_attr=False,
param_attr=fluid.ParamAttr(initializer=fluid.initializer.Uniform(
low=-stdv, high=stdv))) # [batch_size, seq_max, 1]
weight *= self.mask
weight *= inputs[5]
weight_mask = layers.elementwise_mul(
seq, weight, axis=0) # [batch_size, seq_max, h]
global_attention = layers.reduce_sum(
......@@ -213,7 +205,7 @@ class Model(ModelBase):
final_attention_fc = layers.fc(
input=final_attention,
name="final_attention_fc",
size=hidden_size,
size=self.hidden_size,
bias_attr=False,
act=None,
param_attr=fluid.ParamAttr(initializer=fluid.initializer.Uniform(
......@@ -225,7 +217,7 @@ class Model(ModelBase):
# dtype="int64",
# persistable=True,
# name="all_vocab")
all_vocab = np.arange(1, items_num).reshape((-1)).astype('int32')
all_vocab = np.arange(1, self.dict_size).reshape((-1)).astype('int32')
all_vocab = fluid.layers.cast(
x=fluid.layers.assign(all_vocab), dtype='int64')
......@@ -235,63 +227,32 @@ class Model(ModelBase):
name="emb",
initializer=fluid.initializer.Uniform(
low=-stdv, high=stdv)),
size=[items_num, hidden_size]) # [all_vocab, h]
size=[self.dict_size, self.hidden_size]) # [all_vocab, h]
logits = layers.matmul(
x=final_attention_fc, y=all_emb,
transpose_y=True) # [batch_size, all_vocab]
softmax = layers.softmax_with_cross_entropy(
logits=logits, label=self.label) # [batch_size, 1]
logits=logits, label=inputs[6]) # [batch_size, 1]
self.loss = layers.reduce_mean(softmax) # [1]
self.acc = layers.accuracy(input=logits, label=self.label, k=20)
self.acc = layers.accuracy(input=logits, label=inputs[6], k=20)
def avg_loss(self):
self._cost = self.loss
if is_infer:
self._infer_results['acc'] = self.acc
self._infer_results['loss'] = self.loss
return
def metrics(self):
self._metrics["LOSS"] = self.loss
self._metrics["train_acc"] = self.acc
def train_net(self):
self.train_input()
self.net(self.items_num, self.hidden_size, self.step,
self.train_batch_size)
self.avg_loss()
self.metrics()
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.learning_rate",
None, self._namespace)
step_per_epoch = self.ins_num // self.train_batch_size
decay_steps = envs.get_global_env("hyper_parameters.decay_steps", None,
self._namespace)
decay_rate = envs.get_global_env("hyper_parameters.decay_rate", None,
self._namespace)
l2 = envs.get_global_env("hyper_parameters.l2", None, self._namespace)
step_per_epoch = self.corpus_size // self.train_batch_size
optimizer = fluid.optimizer.Adam(
learning_rate=fluid.layers.exponential_decay(
learning_rate=learning_rate,
decay_steps=decay_steps * step_per_epoch,
decay_rate=decay_rate),
learning_rate=self.learning_rate,
decay_steps=self.decay_steps * step_per_epoch,
decay_rate=self.decay_rate),
regularization=fluid.regularizer.L2DecayRegularizer(
regularization_coeff=l2))
regularization_coeff=self.l2))
return optimizer
def infer_input(self):
self._reader_namespace = "evaluate.reader"
res = self.input(self.evaluate_batch_size)
self._infer_data_var = res
self._infer_data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._infer_data_var,
capacity=64,
use_double_buffer=False,
iterable=False)
def infer_net(self):
self.infer_input()
self.net(self.items_num, self.hidden_size, self.step,
self.evaluate_batch_size)
self._infer_results['acc'] = self.acc
self._infer_results['loss'] = self.loss
......@@ -23,9 +23,8 @@ from paddlerec.core.utils import envs
class TrainReader(Reader):
def init(self):
self.batch_size = envs.get_global_env("batch_size", None,
"train.reader")
self.batch_size = envs.get_global_env(
"dataset.dataset_train.batch_size")
self.input = []
self.length = None
......
......@@ -57,8 +57,8 @@
<img align="center" src="../../doc/imgs/gnn.png">
<p>
## Tutorial
### Train & Predict
## Tutorial (Quick Start)
###
```shell
python -m paddlerec.run -m paddlerec.models.recall.word2vec # word2vec
python -m paddlerec.run -m paddlerec.models.recall.ssr # ssr
......@@ -67,6 +67,40 @@ python -m paddlerec.run -m paddlerec.models.recall.gnn # gnn
python -m paddlerec.run -m paddlerec.models.recall.ncf # ncf
python -m paddlerec.run -m paddlerec.models.recall.youtube_dnn # youtube_dnn
```
## Tutorial (Reproducing Paper Results)
To let users run every model quickly, we provide sample data under each model directory and tune hyper-parameters such as batch_size so the train & test logs read well on that sample data. To reproduce the results in this readme, adjust batch_size and the other hyper-parameters according to the table below, and use the provided scripts to download and preprocess the corresponding datasets.
| Model | batch_size | thread_num | epoch_num |
| :---: | :---: | :---: | :---: |
| Word2Vec | 100 | 5 | 5 |
| GNN | 100 | 1 | 30 |
| GRU4REC | 500 | 1 | 10 |
### Data Preprocessing
Refer to the data download & preprocessing script under each model directory.
```bash
sh data_prepare.sh
```
### Training
```bash
cd models/recall/gnn # enter the directory of the chosen recall model, gnn as an example
python -m paddlerec.run -m ./config.yaml # after customizing hyper-parameters, point to your own config file
```
### Inference
```
# In the model's config.yaml, set workspace to the absolute path of the current directory
# In the model's config.yaml, set mode to infer_runner
# Example: mode: train_runner -> mode: infer_runner
# In infer_runner, set class to: class: single_infer
# Switch the phase section to the infer setup, following the comments in the config
# After editing config.yaml, run:
python -m paddlerec.run -m ./config.yaml # gnn as an example
```
## Benchmark
### Model Performance
......
......@@ -11,51 +11,70 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
evaluate:
workspace: "paddlerec.models.recall.word2vec"
workspace: "paddlerec.models.recall.word2vec"
evaluate_only: False
evaluate_model_path: ""
reader:
batch_size: 50
class: "{workspace}/w2v_evaluate_reader.py"
test_data_path: "{workspace}/data/test"
word_id_dict_path: "{workspace}/data/dict/word_id_dict.txt"
# list of dataset
dataset:
- name: dataset_train # name of dataset to distinguish different datasets
batch_size: 100
type: DataLoader # or QueueDataset
data_path: "{workspace}/data/train"
word_count_dict_path: "{workspace}/data/dict/word_count_dict.txt"
data_converter: "{workspace}/w2v_reader.py"
- name: dataset_infer # name
batch_size: 50
type: DataLoader # or QueueDataset
data_path: "{workspace}/data/test"
word_id_dict_path: "{workspace}/data/dict/word_id_dict.txt"
data_converter: "{workspace}/w2v_evaluate_reader.py"
train:
trainer:
# for cluster training
strategy: "async"
hyper_parameters:
optimizer:
learning_rate: 1.0
decay_steps: 100000
decay_rate: 0.999
class: sgd
strategy: async
sparse_feature_number: 354051
sparse_feature_dim: 300
with_shuffle_batch: False
neg_num: 5
window_size: 5
# select runner by name
mode: train_runner
# config of each runner.
# runner is a kind of paddle training class, which wraps the train/infer process.
runner:
- name: train_runner
class: single_train
# num of epochs
epochs: 2
workspace: "paddlerec.models.recall.word2vec"
# device to run training or infer
device: cpu
save_checkpoint_interval: 1 # save model interval of epochs
save_inference_interval: 1 # save inference
save_checkpoint_path: "increment" # save checkpoint path
save_inference_path: "inference" # save inference path
save_inference_feed_varnames: [] # feed vars of save inference
save_inference_fetch_varnames: [] # fetch vars of save inference
init_model_path: "" # load model path
fetch_period: 10
- name: infer_runner
class: single_infer
# num of epochs
epochs: 1
# device to run training or infer
device: cpu
init_model_path: "increment/0" # load model path
reader:
batch_size: 100
class: "{workspace}/w2v_reader.py"
train_data_path: "{workspace}/data/train"
word_count_dict_path: "{workspace}/data/dict/word_count_dict.txt"
model:
models: "{workspace}/model.py"
hyper_parameters:
sparse_feature_number: 85
sparse_feature_dim: 300
with_shuffle_batch: False
neg_num: 5
window_size: 5
learning_rate: 1.0
decay_steps: 100000
decay_rate: 0.999
optimizer: sgd
save:
increment:
dirname: "increment"
epoch_interval: 1
save_last: True
inference:
dirname: "inference"
epoch_interval: 1
save_last: True
# runner will run all the phase in each epoch
phase:
- name: phase1
model: "{workspace}/model.py" # user-defined model
dataset_name: dataset_train # select dataset by name
thread_num: 1
#- name: phase2
# model: "{workspace}/model.py" # user-defined model
# dataset_name: dataset_infer # select dataset by name
# thread_num: 1
......@@ -22,16 +22,17 @@ tar xvf 1-billion-word-language-modeling-benchmark-r13output.tar
mv 1-billion-word-language-modeling-benchmark-r13output/training-monolingual.tokenized.shuffled/ raw_data/
# preprocess data
python preprocess.py --build_dict --build_dict_corpus_dir raw_data/training-monolingual.tokenized.shuffled --dict_path raw_data/test_build_dict
python preprocess.py --filter_corpus --dict_path raw_data/test_build_dict --input_corpus_dir raw_data/training-monolingual.tokenized.shuffled --output_corpus_dir raw_data/convert_text8 --min_count 5 --downsample 0.001
mkdir thirdparty
mv raw_data/test_build_dict thirdparty/
mv raw_data/test_build_dict_word_to_id_ thirdparty/
python preprocess.py --build_dict --build_dict_corpus_dir raw_data/training-monolingual.tokenized.shuffled --dict_path raw_data/word_count_dict.txt
python preprocess.py --filter_corpus --dict_path raw_data/word_count_dict.txt --input_corpus_dir raw_data/training-monolingual.tokenized.shuffled --output_corpus_dir raw_data/convert_text8 --min_count 5 --downsample 0.001
mv raw_data/word_count_dict.txt data/dict/
mv raw_data/word_id_dict.txt data/dict/
python preprocess.py --data_resplit --input_corpus_dir=raw_data/convert_text8 --output_corpus_dir=train_data
rm -rf data/train/*
rm -rf data/test/*
python preprocess.py --data_resplit --input_corpus_dir=raw_data/convert_text8 --output_corpus_dir=data/train
# download test data
wget --no-check-certificate https://paddlerec.bj.bcebos.com/word2vec/test_dir.tar
tar xzvf test_dir.tar -C raw_data
mv raw_data/data/test_dir test_data/
mv raw_data/data/test_dir/* data/test/
rm -rf raw_data
......@@ -23,45 +23,50 @@ class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
def input(self):
neg_num = int(
envs.get_global_env("hyper_parameters.neg_num", None,
self._namespace))
self.input_word = fluid.data(
def _init_hyper_parameters(self):
self.is_distributed = True if envs.get_trainer(
) == "CtrTrainer" else False
self.sparse_feature_number = envs.get_global_env(
"hyper_parameters.sparse_feature_number")
self.sparse_feature_dim = envs.get_global_env(
"hyper_parameters.sparse_feature_dim")
self.neg_num = envs.get_global_env("hyper_parameters.neg_num")
self.with_shuffle_batch = envs.get_global_env(
"hyper_parameters.with_shuffle_batch")
self.learning_rate = envs.get_global_env(
"hyper_parameters.optimizer.learning_rate")
self.decay_steps = envs.get_global_env(
"hyper_parameters.optimizer.decay_steps")
self.decay_rate = envs.get_global_env(
"hyper_parameters.optimizer.decay_rate")
def input_data(self, is_infer=False, **kwargs):
if is_infer:
analogy_a = fluid.data(
name="analogy_a", shape=[None], dtype='int64')
analogy_b = fluid.data(
name="analogy_b", shape=[None], dtype='int64')
analogy_c = fluid.data(
name="analogy_c", shape=[None], dtype='int64')
analogy_d = fluid.data(
name="analogy_d", shape=[None], dtype='int64')
return [analogy_a, analogy_b, analogy_c, analogy_d]
input_word = fluid.data(
name="input_word", shape=[None, 1], dtype='int64')
self.true_word = fluid.data(
true_word = fluid.data(
name='true_label', shape=[None, 1], dtype='int64')
self._data_var.append(self.input_word)
self._data_var.append(self.true_word)
with_shuffle_batch = bool(
int(
envs.get_global_env("hyper_parameters.with_shuffle_batch",
None, self._namespace)))
if not with_shuffle_batch:
self.neg_word = fluid.data(
name="neg_label", shape=[None, neg_num], dtype='int64')
self._data_var.append(self.neg_word)
if self.with_shuffle_batch:
return [input_word, true_word]
if self._platform != "LINUX":
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var,
capacity=64,
use_double_buffer=False,
iterable=False)
neg_word = fluid.data(
name="neg_label", shape=[None, self.neg_num], dtype='int64')
return [input_word, true_word, neg_word]
def net(self):
is_distributed = True if envs.get_trainer() == "CtrTrainer" else False
neg_num = int(
envs.get_global_env("hyper_parameters.neg_num", None,
self._namespace))
sparse_feature_number = envs.get_global_env(
"hyper_parameters.sparse_feature_number", None, self._namespace)
sparse_feature_dim = envs.get_global_env(
"hyper_parameters.sparse_feature_dim", None, self._namespace)
with_shuffle_batch = bool(
int(
envs.get_global_env("hyper_parameters.with_shuffle_batch",
None, self._namespace)))
def net(self, inputs, is_infer=False):
if is_infer:
self.infer_net(inputs)
return
def embedding_layer(input,
table_name,
......@@ -71,8 +76,8 @@ class Model(ModelBase):
emb = fluid.embedding(
input=input,
is_sparse=True,
is_distributed=is_distributed,
size=[sparse_feature_number, emb_dim],
is_distributed=self.is_distributed,
size=[self.sparse_feature_number, emb_dim],
param_attr=fluid.ParamAttr(
name=table_name, initializer=initializer_instance), )
if squeeze:
......@@ -80,44 +85,44 @@ class Model(ModelBase):
else:
return emb
init_width = 0.5 / sparse_feature_dim
init_width = 0.5 / self.sparse_feature_dim
emb_initializer = fluid.initializer.Uniform(-init_width, init_width)
emb_w_initializer = fluid.initializer.Constant(value=0.0)
input_emb = embedding_layer(self.input_word, "emb", sparse_feature_dim,
input_emb = embedding_layer(inputs[0], "emb", self.sparse_feature_dim,
emb_initializer, True)
true_emb_w = embedding_layer(self.true_word, "emb_w",
sparse_feature_dim, emb_w_initializer,
True)
true_emb_b = embedding_layer(self.true_word, "emb_b", 1,
true_emb_w = embedding_layer(inputs[1], "emb_w",
self.sparse_feature_dim,
emb_w_initializer, True)
true_emb_b = embedding_layer(inputs[1], "emb_b", 1, emb_w_initializer,
True)
if with_shuffle_batch:
if self.with_shuffle_batch:
neg_emb_w_list = []
for i in range(neg_num):
for i in range(self.neg_num):
                neg_emb_w_list.append(
                    fluid.contrib.layers.shuffle_batch(
                        true_emb_w))  # shuffle true_word
            neg_emb_w_concat = fluid.layers.concat(neg_emb_w_list, axis=0)
            neg_emb_w = fluid.layers.reshape(
                neg_emb_w_concat,
                shape=[-1, self.neg_num, self.sparse_feature_dim])
            neg_emb_b_list = []
            for i in range(self.neg_num):
                neg_emb_b_list.append(
                    fluid.contrib.layers.shuffle_batch(
                        true_emb_b))  # shuffle true_word
            neg_emb_b = fluid.layers.concat(neg_emb_b_list, axis=0)
            neg_emb_b_vec = fluid.layers.reshape(
                neg_emb_b, shape=[-1, self.neg_num])
        else:
            neg_emb_w = embedding_layer(
                inputs[2], "emb_w", self.sparse_feature_dim, emb_w_initializer)
            neg_emb_b = embedding_layer(inputs[2], "emb_b", 1,
                                        emb_w_initializer)
            neg_emb_b_vec = fluid.layers.reshape(
                neg_emb_b, shape=[-1, self.neg_num])
true_logits = fluid.layers.elementwise_add(
fluid.layers.reduce_sum(
......@@ -127,18 +132,22 @@ class Model(ModelBase):
true_emb_b)
        input_emb_re = fluid.layers.reshape(
            input_emb, shape=[-1, 1, self.sparse_feature_dim])
        neg_matmul = fluid.layers.matmul(
            input_emb_re, neg_emb_w, transpose_y=True)
        neg_matmul_re = fluid.layers.reshape(
            neg_matmul, shape=[-1, self.neg_num])
        neg_logits = fluid.layers.elementwise_add(neg_matmul_re, neg_emb_b_vec)
        # NCE loss
        label_ones = fluid.layers.fill_constant(
            shape=[fluid.layers.shape(true_logits)[0], 1],
            value=1.0,
            dtype='float32')
        label_zeros = fluid.layers.fill_constant(
            shape=[fluid.layers.shape(true_logits)[0], self.neg_num],
            value=0.0,
            dtype='float32')
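        # fluid.layers.shape(true_logits)[0] supplies the batch size at run
        # time; the true logit is labeled 1 and each of the neg_num negative
        # logits is labeled 0.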
true_xent = fluid.layers.sigmoid_cross_entropy_with_logits(true_logits,
label_ones)
......@@ -149,7 +158,9 @@ class Model(ModelBase):
true_xent, dim=1),
fluid.layers.reduce_sum(
neg_xent, dim=1))
        avg_cost = fluid.layers.reduce_mean(cost)
        self._cost = avg_cost
global_right_cnt = fluid.layers.create_global_var(
name="global_right_cnt",
persistable=True,
......@@ -164,77 +175,33 @@ class Model(ModelBase):
value=0)
global_right_cnt.stop_gradient = True
global_total_cnt.stop_gradient = True
        self._metrics["LOSS"] = avg_cost
def optimizer(self):
        optimizer = fluid.optimizer.SGD(
            learning_rate=fluid.layers.exponential_decay(
                learning_rate=self.learning_rate,
                decay_steps=self.decay_steps,
                decay_rate=self.decay_rate,
                staircase=True))
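        # With staircase=True the effective rate is
        #   learning_rate * decay_rate ** floor(global_step / decay_steps),
        # with all three values read from the YAML config; a minimal sketch of
        # the assumed block (values illustrative, not from this commit):
        #   hyper_parameters:
        #     optimizer:
        #       learning_rate: 1.0
        #       decay_steps: 100000
        #       decay_rate: 0.999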
return optimizer
    def infer_net(self, inputs):
        def embedding_layer(input, table_name, initializer_instance=None):
            emb = fluid.embedding(
                input=input,
                size=[self.sparse_feature_number, self.sparse_feature_dim],
                param_attr=table_name)
            return emb
        all_label = np.arange(self.sparse_feature_number).reshape(
            self.sparse_feature_number).astype('int32')
self.all_label = fluid.layers.cast(
x=fluid.layers.assign(all_label), dtype='int64')
emb_all_label = embedding_layer(self.all_label, "emb")
        emb_a = embedding_layer(inputs[0], "emb")
        emb_b = embedding_layer(inputs[1], "emb")
        emb_c = embedding_layer(inputs[2], "emb")
target = fluid.layers.elementwise_add(
fluid.layers.elementwise_sub(emb_b, emb_a), emb_c)
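        # Classic analogy arithmetic: target = emb_b - emb_a + emb_c
        # ("man : king :: woman : ?" should land near "queen"); the four
        # highest-scoring candidates under dist are then checked against
        # analogy_d.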
......@@ -245,8 +212,7 @@ class Model(ModelBase):
values, pred_idx = fluid.layers.topk(input=dist, k=4)
        label = fluid.layers.expand(
            fluid.layers.unsqueeze(
                inputs[3], axes=[1]), expand_times=[1, 4])
label_ones = fluid.layers.fill_constant_batch_size_like(
label, shape=[-1, 1], value=1.0, dtype='float32')
right_cnt = fluid.layers.reduce_sum(input=fluid.layers.cast(
......
......@@ -162,7 +162,7 @@ def filter_corpus(args):
if r_value > keep_prob:
continue
write_line += str(idx)
write_line += " "
signal = True
if signal:
write_line = write_line[:-1] + "\n"
......
......@@ -20,10 +20,10 @@ from paddlerec.core.reader import Reader
from paddlerec.core.utils import envs
class TrainReader(Reader):
def init(self):
dict_path = envs.get_global_env(
"dataset.dataset_infer.word_id_dict_path")
self.word_to_id = dict()
self.id_to_word = dict()
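        # Both maps are filled from the vocab file configured at
        # dataset.dataset_infer.word_id_dict_path in the YAML config.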
with io.open(dict_path, 'r', encoding='utf-8') as f:
......@@ -75,6 +75,8 @@ class EvaluateReader(Reader):
def generate_sample(self, line):
def reader():
if ':' in line:
pass
features = self.strip_lines(line.lower(), self.word_to_id)
features = features.split()
yield [('analogy_a', [self.word_to_id[features[0]]]),
......
......@@ -40,14 +40,12 @@ class NumpyRandomInt(object):
class TrainReader(Reader):
def init(self):
dict_path = envs.get_global_env(
"dataset.dataset_train.word_count_dict_path")
self.window_size = envs.get_global_env("hyper_parameters.window_size")
self.neg_num = envs.get_global_env("hyper_parameters.neg_num")
        self.with_shuffle_batch = envs.get_global_env(
            "hyper_parameters.with_shuffle_batch")
self.random_generator = NumpyRandomInt(1, self.window_size + 1)
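        # NumpyRandomInt(1, window_size + 1) draws an effective window in
        # [1, window_size] per center word: word2vec's dynamic-window trick,
        # which weights nearby context words more heavily than distant ones.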
self.cs = None
......
......@@ -63,7 +63,7 @@ def build(dirname):
models_copy = [
'data/*.txt', 'data/*/*.txt', '*.yaml', '*.sh', 'tree/*.npy',
'tree/*.txt', 'data/sample_data/*', 'data/sample_data/train/*',
'data/sample_data/infer/*', 'data/*/*.csv'
]
engine_copy = ['*/*.sh']
......