Commit 05b1b890 authored by M malin10

Merge branch 'develop' of ssh://gitlab.baidu.com:8022/tangwei12/paddlerec into infer_dssm_w2v

File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
......@@ -20,13 +20,13 @@ import os
import copy
from fleetrec.core.engine.engine import Engine
from fleetrec.core.utils import envs
class LocalClusterEngine(Engine):
def start_procs(self):
worker_num = self.envs["worker_num"]
server_num = self.envs["server_num"]
start_port = self.envs["start_port"]
ports = [self.envs["start_port"]]
logs_dir = self.envs["log_dir"]
default_env = os.environ.copy()
......@@ -36,7 +36,13 @@ class LocalClusterEngine(Engine):
current_env.pop("https_proxy", None)
procs = []
log_fns = []
ports = range(start_port, start_port + server_num, 1)
for i in range(server_num - 1):
while True:
new_port = envs.find_free_port()
if new_port not in ports:
ports.append(new_port)
break
user_endpoints = ",".join(["127.0.0.1:" + str(x) for x in ports])
user_endpoints_ips = [x.split(":")[0] for x in user_endpoints.split(",")]
user_endpoints_port = [x.split(":")[1] for x in user_endpoints.split(",")]
......
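For context, a minimal standalone sketch (not the engine itself) of how a free port can be probed and the endpoint string assembled, using only the standard library; names here are illustrative:

import socket
from contextlib import closing

def free_port():
    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
        s.bind(('', 0))  # port 0 asks the OS for any free port
        return s.getsockname()[1]

server_num = 2
ports = [free_port()]
while len(ports) < server_num:
    p = free_port()
    if p not in ports:  # avoid handing out the same port twice
        ports.append(p)

user_endpoints = ",".join("127.0.0.1:%d" % p for p in ports)
print(user_endpoints)  # e.g. "127.0.0.1:53107,127.0.0.1:53110"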
File mode changed from 100644 to 100755
......@@ -19,15 +19,24 @@ import yaml
from fleetrec.core.utils import envs
trainer_abs = os.path.join(os.path.dirname(os.path.abspath(__file__)), "trainers")
trainer_abs = os.path.join(os.path.dirname(
os.path.abspath(__file__)), "trainers")
trainers = {}
def trainer_registry():
trainers["SingleTrainer"] = os.path.join(trainer_abs, "single_trainer.py")
trainers["ClusterTrainer"] = os.path.join(trainer_abs, "cluster_trainer.py")
trainers["CtrCodingTrainer"] = os.path.join(trainer_abs, "ctr_coding_trainer.py")
trainers["CtrModulTrainer"] = os.path.join(trainer_abs, "ctr_modul_trainer.py")
trainers["SingleTrainer"] = os.path.join(
trainer_abs, "single_trainer.py")
trainers["ClusterTrainer"] = os.path.join(
trainer_abs, "cluster_trainer.py")
trainers["CtrCodingTrainer"] = os.path.join(
trainer_abs, "ctr_coding_trainer.py")
trainers["CtrModulTrainer"] = os.path.join(
trainer_abs, "ctr_modul_trainer.py")
trainers["TDMSingleTrainer"] = os.path.join(
trainer_abs, "tdm_single_trainer.py")
trainers["TDMClusterTrainer"] = os.path.join(
trainer_abs, "tdm_cluster_trainer.py")
trainer_registry()
......@@ -46,7 +55,8 @@ class TrainerFactory(object):
if trainer_abs is None:
if not os.path.isfile(train_mode):
raise IOError("trainer {} can not be recognized".format(train_mode))
raise IOError(
"trainer {} can not be recognized".format(train_mode))
trainer_abs = train_mode
train_mode = "UserDefineTrainer"
......
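A hedged sketch of the lookup TrainerFactory performs above: registered names resolve to absolute trainer files, and anything else is treated as a path to a user-defined trainer (the registry path below is illustrative):

import os

# illustrative registry; the real one is filled by trainer_registry() above
trainers = {"SingleTrainer": "/abs/path/trainers/single_trainer.py"}

def resolve(train_mode):
    trainer_abs = trainers.get(train_mode, None)
    if trainer_abs is None:
        if not os.path.isfile(train_mode):
            raise IOError("trainer {} can not be recognized".format(train_mode))
        return train_mode, "UserDefineTrainer"
    return trainer_abs, train_mode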
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
......@@ -43,6 +43,12 @@ class Model(object):
"""
return self._metrics
def custom_preprocess(self):
"""
do something after exe.run(startup_program) and before run()
"""
pass
def get_fetch_period(self):
return self._fetch_interval
......@@ -50,23 +56,30 @@ class Model(object):
name = name.upper()
optimizers = ["SGD", "ADAM", "ADAGRAD"]
if name not in optimizers:
raise ValueError("configured optimizer can only supported SGD/Adam/Adagrad")
raise ValueError(
"configured optimizer can only supported SGD/Adam/Adagrad")
if name == "SGD":
optimizer_i = fluid.optimizer.Adam(lr, lazy_mode=True)
reg = envs.get_global_env(
"hyper_parameters.reg", 0.0001, self._namespace)
optimizer_i = fluid.optimizer.SGD(
lr, regularization=fluid.regularizer.L2DecayRegularizer(reg))
elif name == "ADAM":
optimizer_i = fluid.optimizer.Adam(lr, lazy_mode=True)
elif name == "ADAGRAD":
optimizer_i = fluid.optimizer.Adam(lr, lazy_mode=True)
optimizer_i = fluid.optimizer.Adagrad(lr)
else:
raise ValueError("configured optimizer can only supported SGD/Adam/Adagrad")
raise ValueError(
"configured optimizer can only supported SGD/Adam/Adagrad")
return optimizer_i
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.learning_rate", None, self._namespace)
optimizer = envs.get_global_env("hyper_parameters.optimizer", None, self._namespace)
learning_rate = envs.get_global_env(
"hyper_parameters.learning_rate", None, self._namespace)
optimizer = envs.get_global_env(
"hyper_parameters.optimizer", None, self._namespace)
print(">>>>>>>>>>>.learnig rate: %s" % learning_rate)
return self._build_optimizer(optimizer, learning_rate)
@abc.abstractmethod
......
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
......@@ -95,5 +95,6 @@ def user_define_engine(engine_yaml):
train_dirname = os.path.dirname(train_location)
base_name = os.path.splitext(os.path.basename(train_location))[0]
sys.path.append(train_dirname)
trainer_class = envs.lazy_instance_by_fliename(base_name, "UserDefineTraining")
trainer_class = envs.lazy_instance_by_fliename(
base_name, "UserDefineTraining")
return trainer_class
......@@ -18,6 +18,7 @@ Training use fluid with one node only.
from __future__ import print_function
import os
import paddle.fluid as fluid
from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler import fleet
from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import StrategyFactory
......@@ -39,11 +40,12 @@ class ClusterTrainer(TranspileTrainer):
else:
self.regist_context_processor('uninit', self.instance)
self.regist_context_processor('init_pass', self.init)
if envs.get_platform() == "LINUX":
self.regist_context_processor('startup_pass', self.startup)
if envs.get_platform() == "LINUX" and envs.get_global_env("dataset_class", None, "train.reader") != "DataLoader":
self.regist_context_processor('train_pass', self.dataset_train)
else:
self.regist_context_processor('train_pass', self.dataloader_train)
self.regist_context_processor(
'train_pass', self.dataloader_train)
self.regist_context_processor('terminal_pass', self.terminal)
def build_strategy(self):
......@@ -70,6 +72,11 @@ class ClusterTrainer(TranspileTrainer):
def init(self, context):
self.model.train_net()
optimizer = self.model.optimizer()
optimizer_name = envs.get_global_env(
"hyper_parameters.optimizer", None, "train.model")
if optimizer_name not in ["", "sgd", "SGD", "Sgd"]:
os.environ["FLAGS_communicator_is_sgd_optimizer"] = '0'
strategy = self.build_strategy()
optimizer = fleet.distributed_optimizer(optimizer, strategy)
optimizer.minimize(self.model.get_cost_op())
......@@ -85,16 +92,18 @@ class ClusterTrainer(TranspileTrainer):
if metrics:
self.fetch_vars = metrics.values()
self.fetch_alias = metrics.keys()
context['status'] = 'train_pass'
context['status'] = 'startup_pass'
def server(self, context):
fleet.init_server()
fleet.run_server()
context['is_exit'] = True
def dataloader_train(self, context):
def startup(self, context):
self._exe.run(fleet.startup_program)
context['status'] = 'train_pass'
def dataloader_train(self, context):
fleet.init_worker()
reader = self._get_dataloader()
......@@ -140,7 +149,6 @@ class ClusterTrainer(TranspileTrainer):
context['status'] = 'terminal_pass'
def dataset_train(self, context):
self._exe.run(fleet.startup_program)
fleet.init_worker()
dataset = self._get_dataset()
......
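The processors registered above form a small status-driven state machine; each pass does its work and routes to the next by setting context['status'] (which is why the new startup_pass slots between init_pass and train_pass). A minimal sketch of the loop the Trainer base class is assumed to run, simplified for illustration:

class MiniTrainer(object):
    """Simplified stand-in (assumed) for fleetrec's Trainer status loop."""
    def __init__(self):
        self.processors = {}

    def regist_context_processor(self, status, processor):
        self.processors[status] = processor

    def run(self):
        context = {'status': 'uninit', 'is_exit': False}
        while not context['is_exit']:
            self.processors[context['status']](context)

t = MiniTrainer()
t.regist_context_processor('uninit', lambda c: c.update(status='terminal_pass'))
t.regist_context_processor('terminal_pass', lambda c: c.update(is_exit=True))
t.run()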
......@@ -33,8 +33,8 @@ class SingleTrainer(TranspileTrainer):
def processor_register(self):
self.regist_context_processor('uninit', self.instance)
self.regist_context_processor('init_pass', self.init)
if envs.get_platform() == "LINUX":
self.regist_context_processor('startup_pass', self.startup)
if envs.get_platform() == "LINUX" and envs.get_global_env("dataset_class", None, "train.reader") != "DataLoader":
self.regist_context_processor('train_pass', self.dataset_train)
else:
self.regist_context_processor('train_pass', self.dataloader_train)
......@@ -55,10 +55,13 @@ class SingleTrainer(TranspileTrainer):
if metrics:
self.fetch_vars = metrics.values()
self.fetch_alias = metrics.keys()
context['status'] = 'startup_pass'
def startup(self, context):
self._exe.run(fluid.default_startup_program())
context['status'] = 'train_pass'
def dataloader_train(self, context):
self._exe.run(fluid.default_startup_program())
reader = self._get_dataloader("TRAIN")
epochs = envs.get_global_env("train.epochs")
......@@ -100,8 +103,6 @@ class SingleTrainer(TranspileTrainer):
context['status'] = 'infer_pass'
def dataset_train(self, context):
# run startup program at once
self._exe.run(fluid.default_startup_program())
dataset = self._get_dataset("TRAIN")
epochs = envs.get_global_env("train.epochs")
......
# -*- coding=utf-8 -*-
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Training with fluid on one node only.
"""
from __future__ import print_function
import logging
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler import fleet
from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler.distributed_strategy import StrategyFactory
from paddle.fluid.incubate.fleet.base.role_maker import PaddleCloudRoleMaker
from fleetrec.core.utils import envs
from fleetrec.core.trainers.cluster_trainer import ClusterTrainer
logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger("fluid")
logger.setLevel(logging.INFO)
special_param = ["TDM_Tree_Travel", "TDM_Tree_Layer", "TDM_Tree_Info"]
class TDMClusterTrainer(ClusterTrainer):
def server(self, context):
namespace = "train.startup"
init_model_path = envs.get_global_env(
"cluster.init_model_path", "", namespace)
assert init_model_path != "", "Cluster training must have an init_model for TDM"
fleet.init_server(init_model_path)
logger.info("TDM: load model from {}".format(init_model_path))
fleet.run_server()
context['is_exit'] = True
def startup(self, context):
self._exe.run(fleet.startup_program)
namespace = "train.startup"
load_tree = envs.get_global_env(
"tree.load_tree", True, namespace)
self.tree_layer_path = envs.get_global_env(
"tree.tree_layer_path", "", namespace)
self.tree_travel_path = envs.get_global_env(
"tree.tree_travel_path", "", namespace)
self.tree_info_path = envs.get_global_env(
"tree.tree_info_path", "", namespace)
save_init_model = envs.get_global_env(
"cluster.save_init_model", False, namespace)
init_model_path = envs.get_global_env(
"cluster.init_model_path", "", namespace)
if load_tree:
# Set the plain-text tree structure and data into the Variables of the network.
# NumpyInitialize is not used here because the tree data can be very large, which is a performance risk.
for param_name in special_param:
param_t = fluid.global_scope().find_var(param_name).get_tensor()
param_array = self.tdm_prepare(param_name)
param_t.set(param_array.astype('int32'), self._place)
if save_init_model:
logger.info("Begin Save Init model.")
fluid.io.save_persistables(
executor=self._exe, dirname=init_model_path)
logger.info("End Save Init model.")
context['status'] = 'train_pass'
def tdm_prepare(self, param_name):
if param_name == "TDM_Tree_Travel":
travel_array = self.tdm_travel_prepare()
return travel_array
elif param_name == "TDM_Tree_Layer":
layer_array, _ = self.tdm_layer_prepare()
return layer_array
elif param_name == "TDM_Tree_Info":
info_array = self.tdm_info_prepare()
return info_array
else:
raise " {} is not a special tdm param name".format(param_name)
def tdm_travel_prepare(self):
"""load tdm tree param from npy/list file"""
travel_array = np.load(self.tree_travel_path)
logger.info("TDM Tree leaf node nums: {}".format(
travel_array.shape[0]))
return travel_array
def tdm_layer_prepare(self):
"""load tdm tree param from npy/list file"""
layer_list = []
layer_list_flat = []
with open(self.tree_layer_path, 'r') as fin:
for line in fin.readlines():
l = []
layer = (line.split('\n'))[0].split(',')
for node in layer:
if node:
layer_list_flat.append(node)
l.append(node)
layer_list.append(l)
layer_array = np.array(layer_list_flat)
layer_array = layer_array.reshape([-1, 1])
logger.info("TDM Tree max layer: {}".format(len(layer_list)))
logger.info("TDM Tree layer_node_num_list: {}".format(
[len(i) for i in layer_list]))
return layer_array, layer_list
def tdm_info_prepare(self):
"""load tdm tree param from list file"""
info_array = np.load(self.tree_info_path)
return info_array
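For reference, tdm_layer_prepare above expects a plain-text file with one comma-separated layer of node ids per line; a toy example of that format (file name hypothetical):

with open("tree_layer.txt", "w") as f:
    f.write("1\n2,3\n4,5,6,7\n")
# parsing as above yields layer_list = [['1'], ['2', '3'], ['4', '5', '6', '7']]
# and layer_array of shape [7, 1]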
# -*- coding=utf-8 -*-
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Training with fluid on one node only.
"""
from __future__ import print_function
import logging
import paddle.fluid as fluid
from fleetrec.core.trainers.transpiler_trainer import TranspileTrainer
from fleetrec.core.trainers.single_trainer import SingleTrainer
from fleetrec.core.utils import envs
import numpy as np
logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger("fluid")
logger.setLevel(logging.INFO)
special_param = ["TDM_Tree_Travel", "TDM_Tree_Layer",
"TDM_Tree_Info", "TDM_Tree_Emb"]
class TDMSingleTrainer(SingleTrainer):
def startup(self, context):
namespace = "train.startup"
load_persistables = envs.get_global_env(
"single.load_persistables", False, namespace)
persistables_model_path = envs.get_global_env(
"single.persistables_model_path", "", namespace)
load_tree = envs.get_global_env(
"tree.load_tree", False, namespace)
self.tree_layer_path = envs.get_global_env(
"tree.tree_layer_path", "", namespace)
self.tree_travel_path = envs.get_global_env(
"tree.tree_travel_path", "", namespace)
self.tree_info_path = envs.get_global_env(
"tree.tree_info_path", "", namespace)
self.tree_emb_path = envs.get_global_env(
"tree.tree_emb_path", "", namespace)
save_init_model = envs.get_global_env(
"single.save_init_model", False, namespace)
init_model_path = envs.get_global_env(
"single.init_model_path", "", namespace)
self._exe.run(fluid.default_startup_program())
if load_persistables:
# load parameters from the saved Paddle binary model
fluid.io.load_persistables(
executor=self._exe,
dirname=persistables_model_path,
main_program=fluid.default_main_program())
logger.info("Load persistables from \"{}\"".format(
persistables_model_path))
if load_tree:
# Set the plain-text tree structure and data into the Variables of the network.
# NumpyInitialize is not used here because the tree data can be very large, which is a performance risk.
for param_name in special_param:
param_t = fluid.global_scope().find_var(param_name).get_tensor()
param_array = self.tdm_prepare(param_name)
if param_name == 'TDM_Tree_Emb':
param_t.set(param_array.astype('float32'), self._place)
else:
param_t.set(param_array.astype('int32'), self._place)
if save_init_model:
logger.info("Begin Save Init model.")
fluid.io.save_persistables(
executor=self._exe, dirname=init_model_path)
logger.info("End Save Init model.")
context['status'] = 'train_pass'
def tdm_prepare(self, param_name):
if param_name == "TDM_Tree_Travel":
travel_array = self.tdm_travel_prepare()
return travel_array
elif param_name == "TDM_Tree_Layer":
layer_array, _ = self.tdm_layer_prepare()
return layer_array
elif param_name == "TDM_Tree_Info":
info_array = self.tdm_info_prepare()
return info_array
elif param_name == "TDM_Tree_Emb":
emb_array = self.tdm_emb_prepare()
return emb_array
else:
raise " {} is not a special tdm param name".format(param_name)
def tdm_travel_prepare(self):
"""load tdm tree param from npy/list file"""
travel_array = np.load(self.tree_travel_path)
logger.info("TDM Tree leaf node nums: {}".format(
travel_array.shape[0]))
return travel_array
def tdm_emb_prepare(self):
"""load tdm tree param from npy/list file"""
emb_array = np.load(self.tree_emb_path)
logger.info("TDM Tree node nums from emb: {}".format(
emb_array.shape[0]))
return emb_array
def tdm_layer_prepare(self):
"""load tdm tree param from npy/list file"""
layer_list = []
layer_list_flat = []
with open(self.tree_layer_path, 'r') as fin:
for line in fin.readlines():
l = []
layer = (line.split('\n'))[0].split(',')
for node in layer:
if node:
layer_list_flat.append(node)
l.append(node)
layer_list.append(l)
layer_array = np.array(layer_list_flat)
layer_array = layer_array.reshape([-1, 1])
logger.info("TDM Tree max layer: {}".format(len(layer_list)))
logger.info("TDM Tree layer_node_num_list: {}".format(
[len(i) for i in layer_list]))
return layer_array, layer_list
def tdm_info_prepare(self):
"""load tdm tree param from list file"""
info_array = np.load(self.tree_info_path)
return info_array
......@@ -48,6 +48,12 @@ class TranspileTrainer(Trainer):
reader_class = envs.get_global_env("class", None, namespace)
reader = dataloader_instance.dataloader(reader_class, state, self._config_yaml)
reader_class = envs.lazy_instance_by_fliename(reader_class, "TrainReader")
reader_ins = reader_class(self._config_yaml)
if hasattr(reader_ins, 'generate_batch_from_trainfiles'):
dataloader.set_sample_list_generator(reader)
else:
dataloader.set_sample_generator(reader, batch_size)
return dataloader
......
File mode changed from 100644 to 100755
......@@ -57,4 +57,9 @@ def dataloader(readerclass, train, yaml_file):
values.append(pased[1])
yield values
def gen_batch_reader():
return reader.generate_batch_from_trainfiles(files)
if hasattr(reader, 'generate_batch_from_trainfiles'):
return gen_batch_reader()
return gen_reader
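The two branches above feed the DataLoader differently: a reader that exposes generate_batch_from_trainfiles already yields whole batches (hence set_sample_list_generator), while an ordinary reader yields one sample at a time and is batched by batch_size (set_sample_generator). A toy illustration of the two generator shapes, not the fleetrec API itself:

def sample_gen():
    # one sample per yield; the DataLoader groups these into batches
    for i in range(8):
        yield [('feat_idx', [i]), ('label', [i % 2])]

def batch_gen():
    # a ready-made list of samples per yield, i.e. a whole batch
    yield [[('feat_idx', [i]), ('label', [i % 2])] for i in range(4)]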
File mode changed from 100644 to 100755
......@@ -15,7 +15,8 @@
import os
import copy
import sys
import socket
from contextlib import closing
global_envs = {}
......@@ -170,3 +171,12 @@ def get_platform():
return "DARWIN"
if 'Windows' in plats:
return "WINDOWS"
def find_free_port():
def __free_port():
with closing(socket.socket(socket.AF_INET,
socket.SOCK_STREAM)) as s:
s.bind(('', 0))
return s.getsockname()[1]
new_port = __free_port()
return new_port
......@@ -10,6 +10,8 @@ from fleetrec.core.utils import util
engines = {}
device = ["CPU", "GPU"]
clusters = ["SINGLE", "LOCAL_CLUSTER", "CLUSTER"]
custom_model = ['tdm']
model_name = ""
def engine_registry():
......@@ -28,13 +30,17 @@ def engine_registry():
engines["GPU"] = gpu
def get_engine(engine, device):
def get_engine(args):
device = args.device
d_engine = engines[device]
transpiler = get_transpiler()
engine = args.engine
run_engine = d_engine[transpiler].get(engine, None)
if run_engine is None:
raise ValueError("engine {} can not be supported on device: {}".format(engine, device))
raise ValueError(
"engine {} can not be supported on device: {}".format(engine, device))
return run_engine
......@@ -77,15 +83,21 @@ def set_runtime_envs(cluster_envs, engine_yaml):
print(envs.pretty_print_envs(need_print, ("Runtime Envs", "Value")))
def single_engine(args):
print("use single engine to run model: {}".format(args.model))
def get_trainer_prefix(args):
if model_name in custom_model:
return model_name.upper()
return ""
def single_engine(args):
trainer = get_trainer_prefix(args) + "SingleTrainer"
single_envs = {}
single_envs["train.trainer.trainer"] = "SingleTrainer"
single_envs["train.trainer.trainer"] = trainer
single_envs["train.trainer.threads"] = "2"
single_envs["train.trainer.engine"] = "single"
single_envs["train.trainer.device"] = args.device
single_envs["train.trainer.platform"] = envs.get_platform()
print("use {} engine to run model: {}".format(trainer, args.model))
set_runtime_envs(single_envs, args.model)
trainer = TrainerFactory.create(args.model)
......@@ -93,16 +105,15 @@ def single_engine(args):
def cluster_engine(args):
print("launch cluster engine with cluster to run model: {}".format(args.model))
trainer = get_trainer_prefix(args) + "ClusterTrainer"
cluster_envs = {}
cluster_envs["train.trainer.trainer"] = "ClusterTrainer"
cluster_envs["train.trainer.trainer"] = trainer
cluster_envs["train.trainer.engine"] = "cluster"
cluster_envs["train.trainer.device"] = args.device
cluster_envs["train.trainer.platform"] = envs.get_platform()
print("launch {} engine with cluster to run model: {}".format(trainer, args.model))
set_runtime_envs(cluster_envs, args.model)
trainer = TrainerFactory.create(args.model)
return trainer
......@@ -122,15 +133,15 @@ def cluster_mpi_engine(args):
def local_cluster_engine(args):
print("launch cluster engine with cluster to run model: {}".format(args.model))
from fleetrec.core.engine.local_cluster_engine import LocalClusterEngine
trainer = get_trainer_prefix(args) + "ClusterTrainer"
cluster_envs = {}
cluster_envs["server_num"] = 1
cluster_envs["worker_num"] = 1
cluster_envs["start_port"] = 36001
cluster_envs["start_port"] = envs.find_free_port()
cluster_envs["log_dir"] = "logs"
cluster_envs["train.trainer.trainer"] = "ClusterTrainer"
cluster_envs["train.trainer.trainer"] = trainer
cluster_envs["train.trainer.strategy"] = "async"
cluster_envs["train.trainer.threads"] = "2"
cluster_envs["train.trainer.engine"] = "local_cluster"
......@@ -139,9 +150,9 @@ def local_cluster_engine(args):
cluster_envs["train.trainer.platform"] = envs.get_platform()
cluster_envs["CPU_NUM"] = "2"
print("launch {} engine with cluster to run model: {}".format(trainer, args.model))
set_runtime_envs(cluster_envs, args.model)
launch = LocalClusterEngine(cluster_envs, args.model)
return launch
......@@ -184,8 +195,11 @@ def get_abs_model(model):
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='fleet-rec run')
parser.add_argument("-m", "--model", type=str)
parser.add_argument("-e", "--engine", type=str, choices=["single", "local_cluster", "cluster"])
parser.add_argument("-d", "--device", type=str, choices=["cpu", "gpu"], default="cpu")
parser.add_argument("-e", "--engine", type=str,
choices=["single", "local_cluster", "cluster",
"tdm_single", "tdm_local_cluster", "tdm_cluster"])
parser.add_argument("-d", "--device", type=str,
choices=["cpu", "gpu"], default="cpu")
abs_dir = os.path.dirname(os.path.abspath(__file__))
envs.set_runtime_environs({"PACKAGE_BASE": abs_dir})
......@@ -193,10 +207,11 @@ if __name__ == "__main__":
args = parser.parse_args()
args.engine = args.engine.upper()
args.device = args.device.upper()
model_name = args.model.split('.')[-1]
args.model = get_abs_model(args.model)
engine_registry()
which_engine = get_engine(args.engine, args.device)
which_engine = get_engine(args)
engine = which_engine(args)
engine.run()
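A hedged recap of the dispatch added above: the last segment of the -m argument selects the model name, and custom models get an upper-cased trainer prefix, so tdm models land on the TDM trainers registered earlier:

custom_model = ['tdm']

def get_prefix(model_name):
    return model_name.upper() if model_name in custom_model else ""

assert get_prefix("tdm") + "SingleTrainer" == "TDMSingleTrainer"
assert get_prefix("dcn") + "SingleTrainer" == "SingleTrainer"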
File mode changed from 100644 to 100755
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
train:
trainer:
# for cluster training
strategy: "async"
epochs: 10
workspace: "fleetrec.models.rank.dcn"
reader:
batch_size: 2
class: "{workspace}/criteo_reader.py"
train_data_path: "{workspace}/data/train"
feat_dict_name: "{workspace}/data/vocab"
model:
models: "{workspace}/model.py"
hyper_parameters:
cross_num: 2
dnn_hidden_units: [128, 128]
l2_reg_cross: 0.00005
dnn_use_bn: False
clip_by_norm: 100.0
cat_feat_num: "{workspace}/data/cat_feature_num.txt"
is_sparse: False
is_test: False
num_field: 39
learning_rate: 0.0001
act: "relu"
optimizer: adam
save:
increment:
dirname: "increment"
epoch_interval: 2
save_last: True
inference:
dirname: "inference"
epoch_interval: 4
save_last: True
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import math
import sys
from fleetrec.core.reader import Reader
from fleetrec.core.utils import envs
try:
import cPickle as pickle
except ImportError:
import pickle
from collections import Counter
import os
class TrainReader(Reader):
def init(self):
self.cont_min_ = [0, -3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
self.cont_max_ = [
5775, 257675, 65535, 969, 23159456, 431037, 56311, 6047, 29019, 11,
231, 4008, 7393
]
self.cont_diff_ = [
self.cont_max_[i] - self.cont_min_[i]
for i in range(len(self.cont_min_))
]
self.cont_idx_ = list(range(1, 14))
self.cat_idx_ = list(range(14, 40))
dense_feat_names = ['I' + str(i) for i in range(1, 14)]
sparse_feat_names = ['C' + str(i) for i in range(1, 27)]
target = ['label']
self.label_feat_names = target + dense_feat_names + sparse_feat_names
self.cat_feat_idx_dict_list = [{} for _ in range(26)]
# TODO: set vocabulary dictionary
vocab_dir = envs.get_global_env("feat_dict_name", None, "train.reader")
for i in range(26):
lookup_idx = 1 # remain 0 for default value
for line in open(
os.path.join(vocab_dir, 'C' + str(i + 1) + '.txt')):
self.cat_feat_idx_dict_list[i][line.strip()] = lookup_idx
lookup_idx += 1
def _process_line(self, line):
features = line.rstrip('\n').split('\t')
label_feat_list = [[] for _ in range(40)]
for idx in self.cont_idx_:
if features[idx] == '':
label_feat_list[idx].append(0)
else:
# 0-1 minmax norm
# label_feat_list[idx].append((float(features[idx]) - self.cont_min_[idx - 1]) /
# self.cont_diff_[idx - 1])
# log transform
label_feat_list[idx].append(
math.log(4 + float(features[idx]))
if idx == 2 else math.log(1 + float(features[idx])))
for idx in self.cat_idx_:
if features[idx] == '' or features[
idx] not in self.cat_feat_idx_dict_list[idx - 14]:
label_feat_list[idx].append(0)
else:
label_feat_list[idx].append(self.cat_feat_idx_dict_list[
idx - 14][features[idx]])
label_feat_list[0].append(int(features[0]))
return label_feat_list
def generate_sample(self, line):
"""
Read the data line by line and process it as a dictionary
"""
def data_iter():
label_feat_list = self._process_line(line)
yield list(zip(self.label_feat_names, label_feat_list))
return data_iter
\ No newline at end of file
import os
import sys
import io
LOCAL_PATH = os.path.dirname(os.path.abspath(__file__))
TOOLS_PATH = os.path.join(LOCAL_PATH, "..", "..", "tools")
sys.path.append(TOOLS_PATH)
from fleetrec.tools.tools import download_file_and_uncompress
if __name__ == '__main__':
trainfile = 'train.txt'
url = "https://s3-eu-west-1.amazonaws.com/kaggle-display-advertising-challenge-dataset/dac.tar.gz"
print("download and extract starting...")
download_file_and_uncompress(url)
print("download and extract finished")
count = 0
for _ in io.open(trainfile, 'r', encoding='utf-8'):
count += 1
print("total records: %d" % count)
print("done")
from __future__ import print_function, absolute_import, division
import os
import sys
from collections import Counter
import numpy as np
"""
preprocess Criteo train data, generate extra statistic files for model input.
"""
# input filename
FILENAME = 'train.200000.txt'
# global vars
CAT_FEATURE_NUM = 'cat_feature_num.txt'
INT_FEATURE_MINMAX = 'int_feature_minmax.txt'
VOCAB_DIR = 'vocab'
TRAIN_DIR = 'train'
TEST_VALID_DIR = 'test_valid'
SPLIT_RATIO = 0.9
FREQ_THR = 10
INT_COLUMN_NAMES = ['I' + str(i) for i in range(1, 14)]
CAT_COLUMN_NAMES = ['C' + str(i) for i in range(1, 27)]
def check_statfiles():
"""
check if statistic files of Criteo exists
:return:
"""
statsfiles = [CAT_FEATURE_NUM, INT_FEATURE_MINMAX] + [
os.path.join(VOCAB_DIR, cat_fn + '.txt') for cat_fn in CAT_COLUMN_NAMES
]
if all([os.path.exists(fn) for fn in statsfiles]):
return True
return False
def create_statfiles():
"""
create statistic files of Criteo, including:
min/max of integer features
counts of categorical features
vocab of each categorical feature
:return:
"""
int_minmax_list = [[sys.maxsize, -sys.maxsize]
for _ in range(13)] # count integer feature min max
cat_ct_list = [Counter() for _ in range(26)] # count categorical features
for idx, line in enumerate(open(FILENAME)):
spls = line.rstrip('\n').split('\t')
assert len(spls) == 40
for i in range(13):
if not spls[1 + i]: continue
int_val = int(spls[1 + i])
int_minmax_list[i][0] = min(int_minmax_list[i][0], int_val)
int_minmax_list[i][1] = max(int_minmax_list[i][1], int_val)
for i in range(26):
cat_ct_list[i].update([spls[14 + i]])
# save min max of integer features
with open(INT_FEATURE_MINMAX, 'w') as f:
for name, minmax in zip(INT_COLUMN_NAMES, int_minmax_list):
print("{} {} {}".format(name, minmax[0], minmax[1]), file=f)
# remove '' from all cat_set[i] and filter low freq categorical value
cat_set_list = [set() for i in range(len(cat_ct_list))]
for i, ct in enumerate(cat_ct_list):
if '' in ct: del ct['']
for key in list(ct.keys()):
if ct[key] >= FREQ_THR:
cat_set_list[i].add(key)
del cat_ct_list
# create vocab dir
if not os.path.exists(VOCAB_DIR):
os.makedirs(VOCAB_DIR)
# write vocab file of categorical features
with open(CAT_FEATURE_NUM, 'w') as cat_feat_count_file:
for name, s in zip(CAT_COLUMN_NAMES, cat_set_list):
print('{} {}'.format(name, len(s)), file=cat_feat_count_file)
vocabfile = os.path.join(VOCAB_DIR, name + '.txt')
with open(vocabfile, 'w') as f:
for vocab_val in s:
print(vocab_val, file=f)
def split_data():
"""
split train.txt into train and test_valid files.
:return:
"""
if not os.path.exists(TRAIN_DIR):
os.makedirs(TRAIN_DIR)
if not os.path.exists(TEST_VALID_DIR):
os.makedirs(TEST_VALID_DIR)
fin = open('train.200000.txt', 'r')
data_dir = TRAIN_DIR
fout = open(os.path.join(data_dir, 'part-0'), 'w')
split_idx = int(45840617 * SPLIT_RATIO)
for line_idx, line in enumerate(fin):
if line_idx == split_idx:
fout.close()
data_dir = TEST_VALID_DIR
cur_part_idx = int(line_idx / 200000)
fout = open(
os.path.join(data_dir, 'part-' + str(cur_part_idx)), 'w')
if line_idx % 200000 == 0 and line_idx != 0:
fout.close()
cur_part_idx = int(line_idx / 200000)
fout = open(
os.path.join(data_dir, 'part-' + str(cur_part_idx)), 'w')
fout.write(line)
fout.close()
fin.close()
if __name__ == '__main__':
if not check_statfiles():
print('create statistic files of Criteo...')
create_statfiles()
print('split train.200000.txt...')
split_data()
print('done')
import paddle.fluid as fluid
import math
from fleetrec.core.utils import envs
from fleetrec.core.model import Model as ModelBase
from collections import OrderedDict
class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
def init_network(self):
self.cross_num = envs.get_global_env("hyper_parameters.cross_num", None, self._namespace)
self.dnn_hidden_units = envs.get_global_env("hyper_parameters.dnn_hidden_units", None, self._namespace)
self.l2_reg_cross = envs.get_global_env("hyper_parameters.l2_reg_cross", None, self._namespace)
self.dnn_use_bn = envs.get_global_env("hyper_parameters.dnn_use_bn", None, self._namespace)
self.clip_by_norm = envs.get_global_env("hyper_parameters.clip_by_norm", None, self._namespace)
cat_feat_num = envs.get_global_env("hyper_parameters.cat_feat_num", None, self._namespace)
cat_feat_dims_dict = OrderedDict()
for line in open(cat_feat_num):
spls = line.strip().split()
assert len(spls) == 2
cat_feat_dims_dict[spls[0]] = int(spls[1])
self.cat_feat_dims_dict = cat_feat_dims_dict if cat_feat_dims_dict else OrderedDict(
)
self.is_sparse = envs.get_global_env("hyper_parameters.is_sparse", None, self._namespace)
self.dense_feat_names = ['I' + str(i) for i in range(1, 14)]
self.sparse_feat_names = ['C' + str(i) for i in range(1, 27)]
# {feat_name: dims}
self.feat_dims_dict = OrderedDict(
[(feat_name, 1) for feat_name in self.dense_feat_names])
self.feat_dims_dict.update(self.cat_feat_dims_dict)
self.net_input = None
self.loss = None
def _create_embedding_input(self, data_dict):
# sparse embedding
sparse_emb_dict = OrderedDict((name, fluid.embedding(
input=fluid.layers.cast(
data_dict[name], dtype='int64'),
size=[
self.feat_dims_dict[name] + 1,
6 * int(pow(self.feat_dims_dict[name], 0.25))
],
is_sparse=self.is_sparse)) for name in self.sparse_feat_names)
# combine dense and sparse_emb
dense_input_list = [
data_dict[name] for name in data_dict if name.startswith('I')
]
sparse_emb_list = list(sparse_emb_dict.values())
sparse_input = fluid.layers.concat(sparse_emb_list, axis=-1)
sparse_input = fluid.layers.flatten(sparse_input)
dense_input = fluid.layers.concat(dense_input_list, axis=-1)
dense_input = fluid.layers.flatten(dense_input)
dense_input = fluid.layers.cast(dense_input, 'float32')
net_input = fluid.layers.concat([dense_input, sparse_input], axis=-1)
return net_input
def _deep_net(self, input, hidden_units, use_bn=False, is_test=False):
for units in hidden_units:
input = fluid.layers.fc(input=input, size=units)
if use_bn:
input = fluid.layers.batch_norm(input, is_test=is_test)
input = fluid.layers.relu(input)
return input
def _cross_layer(self, x0, x, prefix):
input_dim = x0.shape[-1]
w = fluid.layers.create_parameter(
[input_dim], dtype='float32', name=prefix + "_w")
b = fluid.layers.create_parameter(
[input_dim], dtype='float32', name=prefix + "_b")
xw = fluid.layers.reduce_sum(x * w, dim=1, keep_dim=True) # (N, 1)
return x0 * xw + b + x, w
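In math, the cross layer above computes x_{l+1} = x_0 * (x_l . w) + b + x_l, with the dot product reduced over the feature dimension. A NumPy sketch with assumed toy shapes:

import numpy as np

N, D = 4, 8                   # batch size, input dim (assumed)
x0 = np.random.rand(N, D)
x = np.random.rand(N, D)
w = np.random.rand(D)
b = np.random.rand(D)

xw = (x * w).sum(axis=1, keepdims=True)  # (N, 1), matches reduce_sum above
x_next = x0 * xw + b + x                 # (N, D), broadcast over the batch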
def _cross_net(self, input, num_cross_layers):
x = x0 = input
l2_reg_cross_list = []
for i in range(num_cross_layers):
x, w = self._cross_layer(x0, x, "cross_layer_{}".format(i))
l2_reg_cross_list.append(self._l2_loss(w))
l2_reg_cross_loss = fluid.layers.reduce_sum(
fluid.layers.concat(
l2_reg_cross_list, axis=-1))
return x, l2_reg_cross_loss
def _l2_loss(self, w):
return fluid.layers.reduce_sum(fluid.layers.square(w))
def train_net(self):
self.init_network()
self.target_input = fluid.data(
name='label', shape=[None, 1], dtype='float32')
data_dict = OrderedDict()
for feat_name in self.feat_dims_dict:
data_dict[feat_name] = fluid.data(
name=feat_name, shape=[None, 1], dtype='float32')
self.net_input = self._create_embedding_input(data_dict)
deep_out = self._deep_net(self.net_input, self.dnn_hidden_units, self.dnn_use_bn, False)
cross_out, l2_reg_cross_loss = self._cross_net(self.net_input,
self.cross_num)
last_out = fluid.layers.concat([deep_out, cross_out], axis=-1)
logit = fluid.layers.fc(last_out, 1)
self.prob = fluid.layers.sigmoid(logit)
self._data_var = [self.target_input] + [
data_dict[dense_name] for dense_name in self.dense_feat_names
] + [data_dict[sparse_name] for sparse_name in self.sparse_feat_names]
# auc
prob_2d = fluid.layers.concat([1 - self.prob, self.prob], 1)
label_int = fluid.layers.cast(self.target_input, 'int64')
auc_var, batch_auc_var, self.auc_states = fluid.layers.auc(
input=prob_2d, label=label_int, slide_steps=0)
self._metrics["AUC"] = auc_var
self._metrics["BATCH_AUC"] = batch_auc_var
# logloss
logloss = fluid.layers.log_loss(self.prob, self.target_input)
self.avg_logloss = fluid.layers.reduce_mean(logloss)
# reg_coeff * l2_reg_cross
l2_reg_cross_loss = self.l2_reg_cross * l2_reg_cross_loss
self.loss = self.avg_logloss + l2_reg_cross_loss
self._cost = self.loss
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.learning_rate", None, self._namespace)
optimizer = fluid.optimizer.Adam(learning_rate, lazy_mode=True)
return optimizer
def infer_net(self, parameter_list):
self.deepfm_net()
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
train:
trainer:
# for cluster training
strategy: "async"
epochs: 10
workspace: "fleetrec.models.rank.deepfm"
reader:
batch_size: 2
class: "{workspace}/criteo_reader.py"
train_data_path: "{workspace}/data/train_data"
feat_dict_name: "{workspace}/data/aid_data/feat_dict_10.pkl2"
model:
models: "{workspace}/model.py"
hyper_parameters:
sparse_feature_number: 1086460
sparse_feature_dim: 9
num_field: 39
fc_sizes: [400, 400, 400]
learning_rate: 0.0001
reg: 0.001
act: "relu"
optimizer: SGD
save:
increment:
dirname: "increment"
epoch_interval: 2
save_last: True
inference:
dirname: "inference"
epoch_interval: 4
save_last: True
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from fleetrec.core.reader import Reader
from fleetrec.core.utils import envs
try:
import cPickle as pickle
except ImportError:
import pickle
class TrainReader(Reader):
def init(self):
self.cont_min_ = [0, -3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
self.cont_max_ = [
5775, 257675, 65535, 969, 23159456, 431037, 56311, 6047, 29019, 46,
231, 4008, 7393
]
self.cont_diff_ = [
self.cont_max_[i] - self.cont_min_[i]
for i in range(len(self.cont_min_))
]
self.continuous_range_ = range(1, 14)
self.categorical_range_ = range(14, 40)
# load preprocessed feature dict
self.feat_dict_name = envs.get_global_env("feat_dict_name", None, "train.reader")
self.feat_dict_ = pickle.load(open(self.feat_dict_name, 'rb'))
def _process_line(self, line):
features = line.rstrip('\n').split('\t')
feat_idx = []
feat_value = []
for idx in self.continuous_range_:
if features[idx] == '':
feat_idx.append(0)
feat_value.append(0.0)
else:
feat_idx.append(self.feat_dict_[idx])
feat_value.append(
(float(features[idx]) - self.cont_min_[idx - 1]) /
self.cont_diff_[idx - 1])
for idx in self.categorical_range_:
if features[idx] == '' or features[idx] not in self.feat_dict_:
feat_idx.append(0)
feat_value.append(0.0)
else:
feat_idx.append(self.feat_dict_[features[idx]])
feat_value.append(1.0)
label = [int(features[0])]
return feat_idx, feat_value, label
def generate_sample(self, line):
"""
Read the data line by line and process it as a dictionary
"""
def data_iter():
feat_idx, feat_value, label = self._process_line(line)
yield [('feat_idx', feat_idx), ('feat_value', feat_value), ('label', label)]
return data_iter
\ No newline at end of file
import os
import shutil
import sys
LOCAL_PATH = os.path.dirname(os.path.abspath(__file__))
TOOLS_PATH = os.path.join(LOCAL_PATH, "..", "..", "tools")
sys.path.append(TOOLS_PATH)
from fleetrec.tools.tools import download_file_and_uncompress, download_file
if __name__ == '__main__':
url = "https://s3-eu-west-1.amazonaws.com/kaggle-display-advertising-challenge-dataset/dac.tar.gz"
url2 = "https://paddlerec.bj.bcebos.com/deepfm%2Ffeat_dict_10.pkl2"
print("download and extract starting...")
download_file_and_uncompress(url)
download_file(url2, "./aid_data/feat_dict_10.pkl2", True)
print("download and extract finished")
print("preprocessing...")
os.system("python preprocess.py")
print("preprocess done")
shutil.rmtree("raw_data")
print("done")
import os
import numpy
from collections import Counter
import shutil
import pickle
def get_raw_data():
if not os.path.isdir('raw_data'):
os.mkdir('raw_data')
fin = open('train.txt', 'r')
fout = open('raw_data/part-0', 'w')
for line_idx, line in enumerate(fin):
if line_idx % 200000 == 0 and line_idx != 0:
fout.close()
cur_part_idx = int(line_idx / 200000)
fout = open('raw_data/part-' + str(cur_part_idx), 'w')
fout.write(line)
fout.close()
fin.close()
def split_data():
split_rate_ = 0.9
dir_train_file_idx_ = 'aid_data/train_file_idx.txt'
filelist_ = [
'raw_data/part-%d' % x for x in range(len(os.listdir('raw_data')))
]
if not os.path.exists(dir_train_file_idx_):
train_file_idx = list(
numpy.random.choice(
len(filelist_), int(len(filelist_) * split_rate_), False))
with open(dir_train_file_idx_, 'w') as fout:
fout.write(str(train_file_idx))
else:
with open(dir_train_file_idx_, 'r') as fin:
train_file_idx = eval(fin.read())
for idx in range(len(filelist_)):
if idx in train_file_idx:
shutil.move(filelist_[idx], 'train_data')
else:
shutil.move(filelist_[idx], 'test_data')
def get_feat_dict():
freq_ = 10
dir_feat_dict_ = 'aid_data/feat_dict_' + str(freq_) + '.pkl2'
continuous_range_ = range(1, 14)
categorical_range_ = range(14, 40)
if not os.path.exists(dir_feat_dict_):
# print('generate a feature dict')
# Count the number of occurrences of discrete features
feat_cnt = Counter()
with open('train.txt', 'r') as fin:
for line_idx, line in enumerate(fin):
if line_idx % 100000 == 0:
print('generating feature dict', line_idx / 45000000)
features = line.rstrip('\n').split('\t')
for idx in categorical_range_:
if features[idx] == '': continue
feat_cnt.update([features[idx]])
# Only retain discrete features with high frequency
dis_feat_set = set()
for feat, ot in feat_cnt.items():
if ot >= freq_:
dis_feat_set.add(feat)
# Create a dictionary for continuous and discrete features
feat_dict = {}
tc = 1
# Continuous features
for idx in continuous_range_:
feat_dict[idx] = tc
tc += 1
for feat in dis_feat_set:
feat_dict[feat] = tc
tc += 1
# Save dictionary
with open(dir_feat_dict_, 'wb') as fout:
pickle.dump(feat_dict, fout, protocol=2)
print('args.num_feat ', len(feat_dict) + 1)
if __name__ == '__main__':
if not os.path.isdir('train_data'):
os.mkdir('train_data')
if not os.path.isdir('test_data'):
os.mkdir('test_data')
if not os.path.isdir('aid_data'):
os.mkdir('aid_data')
get_raw_data()
split_data()
get_feat_dict()
print('Done!')
import paddle.fluid as fluid
import math
from fleetrec.core.utils import envs
from fleetrec.core.model import Model as ModelBase
class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
def deepfm_net(self):
init_value_ = 0.1
is_distributed = True if envs.get_trainer() == "CtrTrainer" else False
sparse_feature_number = envs.get_global_env("hyper_parameters.sparse_feature_number", None, self._namespace)
sparse_feature_dim = envs.get_global_env("hyper_parameters.sparse_feature_dim", None, self._namespace)
# ------------------------- network input --------------------------
num_field = envs.get_global_env("hyper_parameters.num_field", None, self._namespace)
raw_feat_idx = fluid.data(name='feat_idx', shape=[None, num_field], dtype='int64') # None * num_field (default: 39)
raw_feat_value = fluid.data(name='feat_value', shape=[None, num_field], dtype='float32') # None * num_field
self.label = fluid.data(name='label', shape=[None, 1], dtype='float32') # None * 1
feat_idx = fluid.layers.reshape(raw_feat_idx,[-1, 1]) # (None * num_field) * 1
feat_value = fluid.layers.reshape(raw_feat_value, [-1, num_field, 1]) # None * num_field * 1
# ------------------------- set _data_var --------------------------
self._data_var.append(raw_feat_idx)
self._data_var.append(raw_feat_value)
self._data_var.append(self.label)
if self._platform != "LINUX":
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var, capacity=64, use_double_buffer=False, iterable=False)
#------------------------- first order term --------------------------
reg = envs.get_global_env("hyper_parameters.reg", 1e-4, self._namespace)
first_weights_re = fluid.embedding(
input=feat_idx,
is_sparse=True,
is_distributed=is_distributed,
dtype='float32',
size=[sparse_feature_number + 1, 1],
padding_idx=0,
param_attr=fluid.ParamAttr(
initializer=fluid.initializer.TruncatedNormalInitializer(
loc=0.0, scale=init_value_),
regularizer=fluid.regularizer.L1DecayRegularizer(reg)))
first_weights = fluid.layers.reshape(
first_weights_re, shape=[-1, num_field, 1]) # None * num_field * 1
y_first_order = fluid.layers.reduce_sum((first_weights * feat_value), 1)
#------------------------- second order term --------------------------
feat_embeddings_re = fluid.embedding(
input=feat_idx,
is_sparse=True,
is_distributed=is_distributed,
dtype='float32',
size=[sparse_feature_number + 1, sparse_feature_dim],
padding_idx=0,
param_attr=fluid.ParamAttr(
initializer=fluid.initializer.TruncatedNormalInitializer(
loc=0.0, scale=init_value_ / math.sqrt(float(sparse_feature_dim)))))
feat_embeddings = fluid.layers.reshape(
feat_embeddings_re,
shape=[-1, num_field,
sparse_feature_dim]) # None * num_field * embedding_size
feat_embeddings = feat_embeddings * feat_value # None * num_field * embedding_size
# sum_square part
summed_features_emb = fluid.layers.reduce_sum(feat_embeddings,
1) # None * embedding_size
summed_features_emb_square = fluid.layers.square(
summed_features_emb) # None * embedding_size
# square_sum part
squared_features_emb = fluid.layers.square(
feat_embeddings) # None * num_field * embedding_size
squared_sum_features_emb = fluid.layers.reduce_sum(
squared_features_emb, 1) # None * embedding_size
y_second_order = 0.5 * fluid.layers.reduce_sum(
summed_features_emb_square - squared_sum_features_emb, 1,
keep_dim=True) # None * 1
#------------------------- DNN --------------------------
layer_sizes = envs.get_global_env("hyper_parameters.fc_sizes", None, self._namespace)
act = envs.get_global_env("hyper_parameters.act", None, self._namespace)
y_dnn = fluid.layers.reshape(feat_embeddings,
[-1, num_field * sparse_feature_dim])
for s in layer_sizes:
y_dnn = fluid.layers.fc(
input=y_dnn,
size=s,
act=act,
param_attr=fluid.ParamAttr(
initializer=fluid.initializer.TruncatedNormalInitializer(
loc=0.0, scale=init_value_ / math.sqrt(float(10)))),
bias_attr=fluid.ParamAttr(
initializer=fluid.initializer.TruncatedNormalInitializer(
loc=0.0, scale=init_value_)))
y_dnn = fluid.layers.fc(
input=y_dnn,
size=1,
act=None,
param_attr=fluid.ParamAttr(
initializer=fluid.initializer.TruncatedNormalInitializer(
loc=0.0, scale=init_value_)),
bias_attr=fluid.ParamAttr(
initializer=fluid.initializer.TruncatedNormalInitializer(
loc=0.0, scale=init_value_)))
#------------------------- DeepFM --------------------------
self.predict = fluid.layers.sigmoid(y_first_order + y_second_order + y_dnn)
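The sum_square / square_sum blocks above rely on the FM identity sum_{i<j} <e_i, e_j> = 0.5 * ((sum_i e_i)^2 - sum_i e_i^2), summed over the embedding dimensions. A NumPy check with toy shapes:

import numpy as np

e = np.random.rand(5, 3)  # num_field x embedding_size (toy shapes)
pairwise = sum(float(e[i] @ e[j]) for i in range(5) for j in range(i + 1, 5))
identity = 0.5 * float(((e.sum(0) ** 2) - (e ** 2).sum(0)).sum())
assert np.isclose(pairwise, identity)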
def train_net(self):
self.deepfm_net()
#------------------------- Cost(logloss) --------------------------
cost = fluid.layers.log_loss(input=self.predict, label=self.label)
avg_cost = fluid.layers.reduce_sum(cost)
self._cost = avg_cost
#------------------------- Metric(Auc) --------------------------
predict_2d = fluid.layers.concat([1 - self.predict, self.predict], 1)
label_int = fluid.layers.cast(self.label, 'int64')
auc_var, batch_auc_var, _ = fluid.layers.auc(input=predict_2d,
label=label_int,
slide_steps=0)
self._metrics["AUC"] = auc_var
self._metrics["BATCH_AUC"] = batch_auc_var
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.learning_rate", None, self._namespace)
optimizer = fluid.optimizer.Adam(learning_rate, lazy_mode=True)
return optimizer
def infer_net(self, parameter_list):
self.deepfm_net()
\ No newline at end of file
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
train:
trainer:
# for cluster training
strategy: "async"
epochs: 10
workspace: "fleetrec.models.rank.din"
reader:
batch_size: 2
class: "{workspace}/reader.py"
train_data_path: "{workspace}/data/train_data"
dataset_class: "DataLoader"
model:
models: "{workspace}/model.py"
hyper_parameters:
use_DataLoader: True
item_emb_size: 64
cat_emb_size: 64
is_sparse: False
config_path: "data/config.txt"
fc_sizes: [400, 400, 400]
learning_rate: 0.0001
reg: 0.001
act: "sigmoid"
optimizer: SGD
save:
increment:
dirname: "increment"
epoch_interval: 2
save_last: True
inference:
dirname: "inference"
epoch_interval: 4
save_last: True
from __future__ import print_function
import random
import pickle
random.seed(1234)
print("read and process data")
with open('./raw_data/remap.pkl', 'rb') as f:
reviews_df = pickle.load(f)
cate_list = pickle.load(f)
user_count, item_count, cate_count, example_count = pickle.load(f)
train_set = []
test_set = []
for reviewerID, hist in reviews_df.groupby('reviewerID'):
pos_list = hist['asin'].tolist()
def gen_neg():
neg = pos_list[0]
while neg in pos_list:
neg = random.randint(0, item_count - 1)
return neg
neg_list = [gen_neg() for i in range(len(pos_list))]
for i in range(1, len(pos_list)):
hist = pos_list[:i]
if i != len(pos_list) - 1:
train_set.append((reviewerID, hist, pos_list[i], 1))
train_set.append((reviewerID, hist, neg_list[i], 0))
else:
label = (pos_list[i], neg_list[i])
test_set.append((reviewerID, hist, label))
random.shuffle(train_set)
random.shuffle(test_set)
assert len(test_set) == user_count
def print_to_file(data, fout):
for i in range(len(data)):
fout.write(str(data[i]))
if i != len(data) - 1:
fout.write(' ')
else:
fout.write(';')
print("make train data")
with open("paddle_train.txt", "w") as fout:
for line in train_set:
history = line[1]
target = line[2]
label = line[3]
cate = [cate_list[x] for x in history]
print_to_file(history, fout)
print_to_file(cate, fout)
fout.write(str(target) + ";")
fout.write(str(cate_list[target]) + ";")
fout.write(str(label) + "\n")
print("make test data")
with open("paddle_test.txt", "w") as fout:
for line in test_set:
history = line[1]
target = line[2]
cate = [cate_list[x] for x in history]
print_to_file(history, fout)
print_to_file(cate, fout)
fout.write(str(target[0]) + ";")
fout.write(str(cate_list[target[0]]) + ";")
fout.write("1\n")
print_to_file(history, fout)
print_to_file(cate, fout)
fout.write(str(target[1]) + ";")
fout.write(str(cate_list[target[1]]) + ";")
fout.write("0\n")
print("make config data")
with open('config.txt', 'w') as f:
f.write(str(user_count) + "\n")
f.write(str(item_count) + "\n")
f.write(str(cate_count) + "\n")
from __future__ import print_function
import pickle
import pandas as pd
def to_df(file_path):
with open(file_path, 'r') as fin:
df = {}
i = 0
for line in fin:
df[i] = eval(line)
i += 1
df = pd.DataFrame.from_dict(df, orient='index')
return df
print("start to analyse reviews_Electronics_5.json")
reviews_df = to_df('./raw_data/reviews_Electronics_5.json')
with open('./raw_data/reviews.pkl', 'wb') as f:
pickle.dump(reviews_df, f, pickle.HIGHEST_PROTOCOL)
print("start to analyse meta_Electronics.json")
meta_df = to_df('./raw_data/meta_Electronics.json')
meta_df = meta_df[meta_df['asin'].isin(reviews_df['asin'].unique())]
meta_df = meta_df.reset_index(drop=True)
with open('./raw_data/meta.pkl', 'wb') as f:
pickle.dump(meta_df, f, pickle.HIGHEST_PROTOCOL)
#! /bin/bash
set -e
echo "begin download data"
mkdir raw_data
cd raw_data
wget -c http://snap.stanford.edu/data/amazon/productGraph/categoryFiles/reviews_Electronics_5.json.gz
gzip -d reviews_Electronics_5.json.gz
wget -c http://snap.stanford.edu/data/amazon/productGraph/categoryFiles/meta_Electronics.json.gz
gzip -d meta_Electronics.json.gz
echo "download data successfully"
cd ..
python convert_pd.py
python remap_id.py
from __future__ import print_function
import random
import pickle
import numpy as np
random.seed(1234)
with open('./raw_data/reviews.pkl', 'rb') as f:
reviews_df = pickle.load(f)
reviews_df = reviews_df[['reviewerID', 'asin', 'unixReviewTime']]
with open('./raw_data/meta.pkl', 'rb') as f:
meta_df = pickle.load(f)
meta_df = meta_df[['asin', 'categories']]
meta_df['categories'] = meta_df['categories'].map(lambda x: x[-1][-1])
def build_map(df, col_name):
key = sorted(df[col_name].unique().tolist())
m = dict(zip(key, range(len(key))))
df[col_name] = df[col_name].map(lambda x: m[x])
return m, key
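A toy usage of build_map above: it remaps a column to dense integer ids in place and returns the mapping plus the sorted keys (values here are illustrative):

import pandas as pd

df = pd.DataFrame({'asin': ['b', 'a', 'b']})
m, key = build_map(df, 'asin')
# m == {'a': 0, 'b': 1}, key == ['a', 'b'], and df['asin'] becomes [1, 0, 1]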
asin_map, asin_key = build_map(meta_df, 'asin')
cate_map, cate_key = build_map(meta_df, 'categories')
revi_map, revi_key = build_map(reviews_df, 'reviewerID')
user_count, item_count, cate_count, example_count =\
len(revi_map), len(asin_map), len(cate_map), reviews_df.shape[0]
print('user_count: %d\titem_count: %d\tcate_count: %d\texample_count: %d' %
(user_count, item_count, cate_count, example_count))
meta_df = meta_df.sort_values('asin')
meta_df = meta_df.reset_index(drop=True)
reviews_df['asin'] = reviews_df['asin'].map(lambda x: asin_map[x])
reviews_df = reviews_df.sort_values(['reviewerID', 'unixReviewTime'])
reviews_df = reviews_df.reset_index(drop=True)
reviews_df = reviews_df[['reviewerID', 'asin', 'unixReviewTime']]
cate_list = [meta_df['categories'][i] for i in range(len(asin_map))]
cate_list = np.array(cate_list, dtype=np.int32)
with open('./raw_data/remap.pkl', 'wb') as f:
pickle.dump(reviews_df, f, pickle.HIGHEST_PROTOCOL) # uid, iid
pickle.dump(cate_list, f, pickle.HIGHEST_PROTOCOL) # cid of iid line
pickle.dump((user_count, item_count, cate_count, example_count), f,
pickle.HIGHEST_PROTOCOL)
pickle.dump((asin_key, cate_key, revi_key), f, pickle.HIGHEST_PROTOCOL)
3737 19450;288 196;18486;674;1
3647 4342 6855 3805;281 463 558 674;4206;463;1
1805 4309;87 87;21354;556;1
18209 20753;649 241;51924;610;0
13150;351;41455;792;1
35120 40418;157 714;52035;724;0
13515 20363 25356 26891 24200 11694 33378 34483 35370 27311 40689 33319 28819;558 123 61 110 738 692 110 629 714 463 281 142 382;45554;558;1
19254 9021 28156 19193 24602 31171;189 462 140 474 157 614;48895;350;1
4716;194;32497;484;1
43799 47108;368 140;3503;25;0
20554 41800 1582 1951;339 776 694 703;4320;234;0
39713 44272 45136 11687;339 339 339 140;885;168;0
14398 33997;756 347;20438;703;1
29341 25727;142 616;4170;512;0
12197 10212;558 694;31559;24;0
11551;351;53485;436;1
4553;196;7331;158;1
15190 19994 33946 30716 31879 45178 51598 46814;249 498 612 142 746 746 558 174;24353;251;0
4931 2200 8338 23530;785 792 277 523;3525;251;0
8881 13274 12683 14696 27693 1395 44373 59704 27762 54268 30326 11811 45371 51598 55859 56039 57678 47250 2073 38932;479 558 190 708 335 684 339 725 446 446 44 575 280 558 262 197 368 111 749 188;12361;616;1
16297 16797 18629 20922 16727 33946 51165 36796;281 436 462 339 611 612 288 64;34724;288;1
22237;188;40786;637;0
5396 39993 42681 49832 11208 34954 36523 45523 51618;351 339 687 281 708 142 629 656 142;38201;571;0
8881 9029 17043 16620 15021 32706;479 110 110 749 598 251;34941;657;0
53255;444;37953;724;1
1010 4172 8613 11562 11709 13118 2027 15446;674 606 708 436 179 179 692 436;36998;703;0
22357 24305 15222 19254 22914;189 504 113 189 714;18201;398;1
1905;694;23877;347;1
8444 17868;765 712;50732;44;0
42301 26186 38086;142 450 744;61547;714;0
18156 35717 32070 45650 47208 20975 36409 44856 48072 15860 47043 53289 53314 33470 47926;157 281 650 142 749 291 707 714 157 205 388 474 708 498 495;48170;746;1
56219;108;1988;389;0
22907;83;752;175;0
22009 32410 42987 48720 683 1289 2731 4736 6306 8442 8946 9928 11536 14947 15793 16694 21736 25156 25797 25874 26573 30318 33946 35420 1492 5236 5555 6625 8867 9638 11443 20225 25965 27273 29001 35302 42336 43347 36907 2012;317 462 291 142 694 10 574 278 708 281 131 142 367 281 258 345 616 708 111 115 339 113 612 24 368 616 39 197 44 214 558 108 616 558 210 210 142 142 262 351;25540;701;0
20434;196;18056;189;0
628 5461;194 234;43677;351;0
16953 15149 45143 23587 5094 25105 51913 54645;484 281 449 792 524 395 388 731;57655;75;1
13584 7509;234 744;33062;749;1
170 208 77 109 738 742 1118 15349 255 12067 21643 55453;330 559 744 115 558 674 111 351 694 694 746 111;9821;694;1
4970 16672;540 746;25685;666;1
17240 60546;708 629;42110;142;1
31503 31226 50628 22444;142 156 142 203;47812;749;0
2443 1763 3403 4225 8951;25 707 351 177 351;7954;351;1
3748;351;9171;657;1
1755 26204 42716 32991;446 188 497 746;23910;395;1
20637 27122;558 44;19669;301;0
406 872 306 218 883 1372 1705 1709 7774 2376 2879 2881 13329 4992 13594 11106 7131 8631 1736 17585 2568 16896 21971 10296 22361 24108 23300 11793 25351 2648 24593 12692 23883 25345 27129 26321 21627 20738 17784 28785 29281 28366 24723 24319 12083 29882 29974 30443 30428 17072 9783 16700 29421 32253 28830 31299 28792 33931 24973 33112 21717 28339 23978 18649 1841 17635 19696 37448 20862 30492 35736 37450 2633 8675 17412 25960 28389 31032 37157 14555 4996 33388 33393 36237 38946 22793 24337 34963 38819 41165 39551 43019 15570 25129 34593 38385 42915 41407 29907 31289 44229 24267 34975 39462 33274 43251 38302 35502 44056 44675 45233 47690 33472 50149 29409 47183 49188 48192 50628 24103 28313 28358 38882 44330 44346 2019 2484 2675 26396 48143 46039 47722 48559 41719 41720 43920 41983 51235 34964 27287 51915 33586 43630 47258 52137 40954 35120 29572 42405 53559 44900 45761;241 558 395 368 498 110 463 611 558 106 10 112 251 241 48 112 601 674 241 347 733 502 194 119 179 179 578 692 281 115 523 113 281 35 765 196 339 115 90 164 790 708 142 115 342 351 391 281 48 119 74 505 606 68 239 687 687 281 110 281 449 351 38 351 164 176 449 115 70 25 687 115 39 756 35 175 704 119 38 53 115 38 38 142 262 188 614 277 388 615 49 738 106 733 486 666 571 385 708 119 331 463 578 288 142 106 611 611 39 523 388 142 726 702 498 61 142 714 142 654 277 733 603 498 299 97 726 115 637 703 558 74 629 142 142 347 629 746 277 8 49 389 629 408 733 345 157 704 115 398 611 239;49174;368;0
29206 60955;351 684;61590;76;1
8427 9692 4411 3266 18234 22774;746 281 396 651 446 44;23393;351;0
13051 15844 9347 21973 18365 24220 28429 4799 27488 21623 13870 29346 27208 31075 31635 28390 30777 29334 33438 16469 29423 29237 25527 34808 37656 21324 38263 6699 33167 9295 40828 18894;339 342 657 194 20 466 179 225 436 364 707 115 36 523 351 674 694 391 674 500 342 216 707 345 616 495 436 363 395 189 203 766;56816;396;0
5653 18042 21137 17277 23847 25109 21837 17163 22786 27380 20789 27737 30164 36402 37166 38647 31746 38915 38366 11151 43757 38284 29817 41717 41899 43279 47539 37850 39789 43817 11208 53361 29247 51483 39940 50917 53618 44055 48997;593 251 616 110 110 110 110 105 436 558 311 142 603 738 398 766 1 351 142 584 674 597 142 483 351 157 373 142 629 39 708 251 339 142 262 1 113 142 462;13418;558;0
8719 11172;311 217;11707;179;1
14968 8297 22914 5998 20253 41425 42664 46745 51179 33481 46814 55135 53124 61559;463 766 714 486 628 444 281 714 142 242 174 118 714 714;61908;714;1
61119;714;22907;83;0
26172;157;54529;44;0
13830 10377 8193 16072 13543 18741 24205 18281 37272 27784 16658 27884;384 739 558 739 135 347 558 687 498 142 197 746;34463;177;1
20842 11756 22110 30562 30697;189 68 483 776 225;49113;483;0
13646 46782 54138;142 798 142;43698;347;0
36434;241;51537;629;0
44121 35325;397 653;43399;397;1
6438 11107 20073 25026 24434 35533 6318 25028 28352 32359 25734 26280 41466 25192 1909 11753 17770 24301 1728 9693 36444 40256 17961 36780 41093 8788 439 46397 46269 50462 40395 437 2582 4455 12361 14325 22294 26153 26607 29205 29878 33491 38795 41585 45480 51567 54245 19796 52446;356 194 389 89 474 330 347 384 330 90 19 385 177 68 624 68 674 463 624 194 177 389 197 642 239 111 115 113 48 251 554 115 36 163 616 524 84 190 465 398 89 166 113 330 616 449 90 140 330;15142;764;0
1573;540;18294;463;1
9837 13438 13690;351 629 24;26044;351;0
1708 2675 4935 7401 14413 22177 30319 32217 34342 40235 42963 43949 54816;463 115 474 616 474 44 113 279 164 142 616 649 36;31992;115;0
8025 11769 36188 42006;142 262 714 142;8209;142;0
30266;176;44167;692;0
13000 14769 2940 27638 23158;765 27 736 554 112;55050;725;0
32557 18668 43441;765 707 396;44217;681;1
5665 5964 18874;542 746 196;16747;179;0
7014 29912 42468;194 612 558;20800;355;0
8320 9743 1735 442 5216 11568;234 251 241 603 476 649;32738;153;0
533 1447;744 744;17843;744;1
48390 48191;714 714;48864;708;1
9312 16166 12754 21433 28142 7486;215 674 241 115 558 241;38629;48;1
10401 11665 10739;142 364 766;5989;463;0
10408 14363 8807 14947 24701 44676 40914 12241 14906 29247 32347 5834 18291 18313 23375 24075 7020 14307 15891;140 140 749 281 444 388 504 385 196 339 746 351 463 746 197 90 746 576 476;37949;330;1
50194;444;15572;216;0
24021;281;25850;140;1
22185 28726 55777;142 766 351;17;541;1
31776 34767 28854 34769 38022 38667 32917 9094 40879 41634 42252 19865 47983 38818 40131 40690 18915 48539 49619 18554 24836;70 239 113 48 486 541 352 197 347 385 34 476 704 388 385 281 225 474 157 706 53;25602;707;1
10544 15159 23606 33556 46886 55061 2079 27022 40345 43556 3807 28732;642 87 641 113 558 157 564 44 194 26 54 113;51293;272;0
19005 41469 42368 5739 30169 32266 54743 56959 26271;145 482 707 790 101 347 197 368 674;5602;158;0
7166 16886 21083 7328 25545;560 213 87 744 87;32494;321;1
2306;260;30286;179;0
57709 55115;351 483;25035;142;0
16641 35845;153 311;36985;68;1
31144 4107;189 168;50619;142;0
36331 9873 10659 14382 21430 28164;680 197 185 11 115 476;37887;484;1
19519 3748 33772 22436 38789 46337;649 351 210 115 113 115;23980;649;1
30789 37586 42354 26171 15017 28654 44960;142 714 142 483 484 474 157;41552;746;1
52662;576;53627;776;0
12258 15133 15681 5066 6420 13421 6577 29202 38939;216 558 111 570 447 5 111 281 347;7818;558;0
610 1258 2332 7508 10814 10797 11710;543 611 611 653 110 201 179;11495;558;1
12584 2707 1664 25878 25949;790 694 694 142 611;25286;792;1
32423 24223;135 90;2323;399;0
11959;197;15349;351;1
44448 58138 41930 57603 59009 61316 61559 599;339 629 115 388 1 142 714 297;54434;142;0
43441 12617 47970 52144;396 196 142 629;29211;351;1
25327 40258;656 398;40261;142;1
4637;474;59864;687;0
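Each sample line above is semicolon-separated: space-separated history item ids, space-separated history category ids, target item id, target category id, and a 0/1 click label. A minimal parser for this layout (the field interpretation matches TrainReader._process_line further below):

def parse_sample(line):
    # hist_items;hist_cats;target_item;target_cat;label
    hist, cate, target_item, target_cat, label = line.strip().split(';')
    return ([int(i) for i in hist.split()],
            [int(i) for i in cate.split()],
            int(target_item), int(target_cat), float(label))

print(parse_sample("3737 19450;288 196;18486;674;1"))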
import paddle.fluid as fluid
import math
from fleetrec.core.utils import envs
from fleetrec.core.model import Model as ModelBase
class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
def config_read(self, config_path):
with open(config_path, "r") as fin:
user_count = int(fin.readline().strip())
item_count = int(fin.readline().strip())
cat_count = int(fin.readline().strip())
return user_count, item_count, cat_count
    def din_attention(self, hist, target_expand, mask):
        """Activation unit: weight each history item by its relevance to the target."""
        hidden_size = hist.shape[-1]
        # score each position from the history, the tiled target, and their
        # elementwise difference and product
        concat = fluid.layers.concat(
            [hist, target_expand, hist - target_expand, hist * target_expand],
            axis=2)
        atten_fc1 = fluid.layers.fc(name="atten_fc1",
                                    input=concat,
                                    size=80,
                                    act=self.act,
                                    num_flatten_dims=2)
        atten_fc2 = fluid.layers.fc(name="atten_fc2",
                                    input=atten_fc1,
                                    size=40,
                                    act=self.act,
                                    num_flatten_dims=2)
        atten_fc3 = fluid.layers.fc(name="atten_fc3",
                                    input=atten_fc2,
                                    size=1,
                                    num_flatten_dims=2)
        atten_fc3 += mask  # padded positions carry -1e9, so softmax zeroes them out
        atten_fc3 = fluid.layers.transpose(x=atten_fc3, perm=[0, 2, 1])
        atten_fc3 = fluid.layers.scale(x=atten_fc3, scale=hidden_size**-0.5)
        weight = fluid.layers.softmax(atten_fc3)
        out = fluid.layers.matmul(weight, hist)  # weighted sum over the history
        out = fluid.layers.reshape(x=out, shape=[0, hidden_size])
        return out
def train_net(self):
        seq_len = -1  # histories are variable-length
        self.item_emb_size = envs.get_global_env("hyper_parameters.item_emb_size", 64, self._namespace)
        self.cat_emb_size = envs.get_global_env("hyper_parameters.cat_emb_size", 64, self._namespace)
        self.act = envs.get_global_env("hyper_parameters.act", "sigmoid", self._namespace)
        # sparse embedding updates significantly speed up the training process
        self.is_sparse = envs.get_global_env("hyper_parameters.is_sparse", False, self._namespace)
        self.config_path = envs.get_global_env("hyper_parameters.config_path", "data/config.txt", self._namespace)
        self.use_DataLoader = envs.get_global_env("hyper_parameters.use_DataLoader", False, self._namespace)
user_count, item_count, cat_count = self.config_read(self.config_path)
item_emb_attr = fluid.ParamAttr(name="item_emb")
cat_emb_attr = fluid.ParamAttr(name="cat_emb")
hist_item_seq = fluid.data(
name="hist_item_seq", shape=[None, seq_len], dtype="int64")
self._data_var.append(hist_item_seq)
hist_cat_seq = fluid.data(
name="hist_cat_seq", shape=[None, seq_len], dtype="int64")
self._data_var.append(hist_cat_seq)
target_item = fluid.data(name="target_item", shape=[None], dtype="int64")
self._data_var.append(target_item)
target_cat = fluid.data(name="target_cat", shape=[None], dtype="int64")
self._data_var.append(target_cat)
label = fluid.data(name="label", shape=[None, 1], dtype="float32")
self._data_var.append(label)
mask = fluid.data(name="mask", shape=[None, seq_len, 1], dtype="float32")
self._data_var.append(mask)
target_item_seq = fluid.data(
name="target_item_seq", shape=[None, seq_len], dtype="int64")
self._data_var.append(target_item_seq)
target_cat_seq = fluid.data(
name="target_cat_seq", shape=[None, seq_len], dtype="int64")
self._data_var.append(target_cat_seq)
if self.use_DataLoader:
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var, capacity=10000, use_double_buffer=False, iterable=False)
hist_item_emb = fluid.embedding(
input=hist_item_seq,
size=[item_count, self.item_emb_size],
param_attr=item_emb_attr,
is_sparse=self.is_sparse)
hist_cat_emb = fluid.embedding(
input=hist_cat_seq,
size=[cat_count, self.cat_emb_size],
param_attr=cat_emb_attr,
is_sparse=self.is_sparse)
target_item_emb = fluid.embedding(
input=target_item,
size=[item_count, self.item_emb_size],
param_attr=item_emb_attr,
is_sparse=self.is_sparse)
target_cat_emb = fluid.embedding(
input=target_cat,
size=[cat_count, self.cat_emb_size],
param_attr=cat_emb_attr,
is_sparse=self.is_sparse)
target_item_seq_emb = fluid.embedding(
input=target_item_seq,
size=[item_count, self.item_emb_size],
param_attr=item_emb_attr,
is_sparse=self.is_sparse)
target_cat_seq_emb = fluid.embedding(
input=target_cat_seq,
size=[cat_count, self.cat_emb_size],
param_attr=cat_emb_attr,
is_sparse=self.is_sparse)
item_b = fluid.embedding(
input=target_item,
size=[item_count, 1],
param_attr=fluid.initializer.Constant(value=0.0))
hist_seq_concat = fluid.layers.concat([hist_item_emb, hist_cat_emb], axis=2)
target_seq_concat = fluid.layers.concat(
[target_item_seq_emb, target_cat_seq_emb], axis=2)
target_concat = fluid.layers.concat(
[target_item_emb, target_cat_emb], axis=1)
out = self.din_attention(hist_seq_concat, target_seq_concat, mask)
out_fc = fluid.layers.fc(name="out_fc",
input=out,
size=self.item_emb_size + self.cat_emb_size,
num_flatten_dims=1)
embedding_concat = fluid.layers.concat([out_fc, target_concat], axis=1)
fc1 = fluid.layers.fc(name="fc1",
input=embedding_concat,
size=80,
act=self.act)
fc2 = fluid.layers.fc(name="fc2", input=fc1, size=40, act=self.act)
fc3 = fluid.layers.fc(name="fc3", input=fc2, size=1)
logit = fc3 + item_b
loss = fluid.layers.sigmoid_cross_entropy_with_logits(x=logit, label=label)
avg_loss = fluid.layers.mean(loss)
self._cost = avg_loss
self.predict = fluid.layers.sigmoid(logit)
predict_2d = fluid.layers.concat([1 - self.predict, self.predict], 1)
label_int = fluid.layers.cast(label, 'int64')
auc_var, batch_auc_var, _ = fluid.layers.auc(input=predict_2d,
label=label_int,
slide_steps=0)
self._metrics["AUC"] = auc_var
self._metrics["BATCH_AUC"] = batch_auc_var
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.learning_rate", None, self._namespace)
optimizer = fluid.optimizer.Adam(learning_rate, lazy_mode=True)
return optimizer
    def infer_net(self, parameter_list):
        # the original code called self.deepfm_net(), which this model does not
        # define (a copy-paste slip); reuse the training network instead
        self.train_net()
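To make the tensor flow of din_attention easier to follow, here is a shape-level NumPy sketch of the same computation. The three fc layers are replaced by a single random projection, so the numbers are illustrative stand-ins; only the masking, scaling, softmax, and weighted sum mirror the Paddle code above.

import numpy as np

B, T, H = 2, 5, 8  # batch size, history length, hidden size
rng = np.random.default_rng(0)
hist = rng.normal(size=(B, T, H))
target = rng.normal(size=(B, 1, H)).repeat(T, axis=1)  # target tiled over time
lens = np.array([3, 5])  # true history lengths per sample
mask = np.where(np.arange(T)[None, :, None] < lens[:, None, None], 0.0, -1e9)

feats = np.concatenate([hist, target, hist - target, hist * target], axis=2)
w = rng.normal(size=(4 * H, 1))  # stand-in for the atten_fc1..3 MLP
scores = feats @ w + mask  # [B, T, 1]; padded steps get -1e9
scores = scores.transpose(0, 2, 1) * H ** -0.5  # [B, 1, T], scaled
weight = np.exp(scores) / np.exp(scores).sum(-1, keepdims=True)  # softmax
out = (weight @ hist).reshape(B, H)  # attention-pooled history, [B, H]
assert np.allclose(weight[0, 0, 3:], 0.0)  # padding received ~zero weight
print(out.shape)  # (2, 8)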
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from fleetrec.core.reader import Reader
from fleetrec.core.utils import envs
import numpy as np
import os
import random
try:
import cPickle as pickle
except ImportError:
import pickle
class TrainReader(Reader):
def init(self):
self.train_data_path = envs.get_global_env("train_data_path", None, "train.reader")
self.res = []
self.max_len = 0
data_file_list = os.listdir(self.train_data_path)
for i in range(0, len(data_file_list)):
train_data_file = os.path.join(self.train_data_path, data_file_list[i])
with open(train_data_file, "r") as fin:
for line in fin:
line = line.strip().split(';')
hist = line[0].split()
self.max_len = max(self.max_len, len(hist))
        with open("tmp.txt", "w") as fo:
            fo.write(str(self.max_len))
self.batch_size = envs.get_global_env("batch_size", 32, "train.reader")
self.group_size = self.batch_size * 20
def _process_line(self, line):
line = line.strip().split(';')
hist = line[0].split()
hist = [int(i) for i in hist]
cate = line[1].split()
cate = [int(i) for i in cate]
return [hist, cate, [int(line[2])], [int(line[3])], [float(line[4])]]
def generate_sample(self, line):
"""
Read the data line by line and process it as a dictionary
"""
def data_iter():
            yield self._process_line(line)
return data_iter
    def pad_batch_data(self, data, max_len):
        # right-pad every sequence with zeros up to max_len
        res = np.array([x + [0] * (max_len - len(x)) for x in data])
        res = res.astype("int64").reshape([-1, max_len])
        return res
def make_data(self, b):
max_len = max(len(x[0]) for x in b)
item = self.pad_batch_data([x[0] for x in b], max_len)
cat = self.pad_batch_data([x[1] for x in b], max_len)
len_array = [len(x[0]) for x in b]
        mask = np.array(
            [[0] * x + [-1e9] * (max_len - x) for x in len_array]).reshape(
                [-1, max_len, 1])  # -1e9 on padded steps removes them from the softmax
target_item_seq = np.array(
[[x[2]] * max_len for x in b]).astype("int64").reshape([-1, max_len])
target_cat_seq = np.array(
[[x[3]] * max_len for x in b]).astype("int64").reshape([-1, max_len])
res = []
for i in range(len(b)):
res.append([
item[i], cat[i], b[i][2], b[i][3], b[i][4], mask[i],
target_item_seq[i], target_cat_seq[i]
])
return res
    def batch_reader(self, reader, batch_size, group_size):
        def batch_reader():
            bg = []
            for line in reader:
                bg.append(line)
                if len(bg) == group_size:
                    # sort each group by history length so samples in the
                    # same batch need little padding
                    sortb = sorted(bg, key=lambda x: len(x[0]), reverse=False)
                    bg = []
                    for i in range(0, group_size, batch_size):
                        b = sortb[i:i + batch_size]
                        yield self.make_data(b)
            len_bg = len(bg)
            if len_bg != 0:
                sortb = sorted(bg, key=lambda x: len(x[0]), reverse=False)
                bg = []
                remain = len_bg % batch_size
                # the tail that cannot fill a whole batch is dropped
                for i in range(0, len_bg - remain, batch_size):
                    b = sortb[i:i + batch_size]
                    yield self.make_data(b)
        return batch_reader
def base_read(self, file_dir):
res = []
for train_file in file_dir:
with open(train_file, "r") as fin:
for line in fin:
line = line.strip().split(';')
hist = line[0].split()
cate = line[1].split()
res.append([hist, cate, line[2], line[3], float(line[4])])
return res
def generate_batch_from_trainfiles(self, files):
data_set = self.base_read(files)
random.shuffle(data_set)
return self.batch_reader(data_set, self.batch_size, self.batch_size * 20)
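The reader above keeps padding small by buffering group_size samples, sorting the group by history length, and only then slicing off batches, so each batch contains sequences of similar length (the tail that cannot fill a whole batch is dropped). A standalone toy run of the same strategy, with hypothetical data:

import random

def bucketed_batches(samples, batch_size, group_size):
    # mirrors TrainReader.batch_reader: sort each buffered group by length
    bg = []
    for s in samples:
        bg.append(s)
        if len(bg) == group_size:
            bg.sort(key=len)  # similar lengths end up adjacent
            for i in range(0, group_size, batch_size):
                yield bg[i:i + batch_size]
            bg = []
    bg.sort(key=len)
    remain = len(bg) % batch_size  # the short tail batch is dropped
    for i in range(0, len(bg) - remain, batch_size):
        yield bg[i:i + batch_size]

random.seed(0)
data = [[1] * random.randint(1, 10) for _ in range(23)]
for b in bucketed_batches(data, batch_size=4, group_size=8):
    print([len(x) for x in b])  # lengths inside each batch stay close together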
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
train:
trainer:
# for cluster training
strategy: "async"
epochs: 10
workspace: "fleetrec.models.rank.wide_deep"
reader:
batch_size: 2
class: "{workspace}/reader.py"
train_data_path: "{workspace}/data/train_data"
model:
models: "{workspace}/model.py"
hyper_parameters:
hidden1_units: 75
hidden2_units: 50
hidden3_units: 25
learning_rate: 0.0001
reg: 0.001
act: "relu"
optimizer: SGD
save:
increment:
dirname: "increment"
epoch_interval: 2
save_last: True
inference:
dirname: "inference"
epoch_interval: 4
save_last: True
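Every key under hyper_parameters is resolved at graph-build time through envs.get_global_env, with the second argument used as the fallback when the key is missing (see the model further below, which reads hidden1_units, hidden2_units, and hidden3_units this way). A simplified stand-in for the dotted-key lookup, ignoring the namespace argument the real fleetrec helper also takes:

config = {"hyper_parameters": {"hidden1_units": 75, "learning_rate": 0.0001}}

def get_global_env(path, default=None):
    # walk the parsed YAML following the dotted path
    node = config
    for part in path.split("."):
        if not isinstance(node, dict) or part not in node:
            return default
        node = node[part]
    return node

print(get_global_env("hyper_parameters.hidden1_units", 75))  # -> 75
print(get_global_env("hyper_parameters.hidden4_units", 25))  # missing key -> 25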
mkdir train_data
mkdir test_data
mkdir data
train_path="/home/yaoxuefeng/repos/models/models/PaddleRec/ctr/wide_deep/data/adult.data"
test_path="/home/yaoxuefeng/repos/models/models/PaddleRec/ctr/wide_deep/data/adult.test"
train_data_path="/home/yaoxuefeng/repos/models/models/PaddleRec/ctr/wide_deep/train_data/train_data.csv"
test_data_path="/home/yaoxuefeng/repos/models/models/PaddleRec/ctr/wide_deep/test_data/test_data.csv"
#pip install -r requirements.txt
#wget -P data/ https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.data
#wget -P data/ https://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.test
python data_preparation.py --train_path ${train_path} \
--test_path ${test_path} \
--train_data_path ${train_data_path}\
--test_data_path ${test_data_path}
import paddle.fluid as fluid
import math
from fleetrec.core.utils import envs
from fleetrec.core.model import Model as ModelBase
class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
def wide_part(self, data):
out = fluid.layers.fc(input=data,
size=1,
param_attr=fluid.ParamAttr(initializer=fluid.initializer.TruncatedNormal(loc=0.0, scale=1.0 / math.sqrt(data.shape[1])),
regularizer=fluid.regularizer.L2DecayRegularizer(regularization_coeff=1e-4)),
act=None,
name='wide')
return out
def fc(self, data, hidden_units, active, tag):
output = fluid.layers.fc(input=data,
size=hidden_units,
param_attr=fluid.ParamAttr(initializer=fluid.initializer.TruncatedNormal(loc=0.0, scale=1.0 / math.sqrt(data.shape[1]))),
act=active,
name=tag)
return output
def deep_part(self, data, hidden1_units, hidden2_units, hidden3_units):
l1 = self.fc(data, hidden1_units, 'relu', 'l1')
l2 = self.fc(l1, hidden2_units, 'relu', 'l2')
l3 = self.fc(l2, hidden3_units, 'relu', 'l3')
return l3
def train_net(self):
wide_input = fluid.data(name='wide_input', shape=[None, 8], dtype='float32')
deep_input = fluid.data(name='deep_input', shape=[None, 58], dtype='float32')
label = fluid.data(name='label', shape=[None, 1], dtype='float32')
self._data_var.append(wide_input)
self._data_var.append(deep_input)
self._data_var.append(label)
hidden1_units = envs.get_global_env("hyper_parameters.hidden1_units", 75, self._namespace)
hidden2_units = envs.get_global_env("hyper_parameters.hidden2_units", 50, self._namespace)
hidden3_units = envs.get_global_env("hyper_parameters.hidden3_units", 25, self._namespace)
wide_output = self.wide_part(wide_input)
deep_output = self.deep_part(deep_input, hidden1_units, hidden2_units, hidden3_units)
wide_model = fluid.layers.fc(input=wide_output,
size=1,
param_attr=fluid.ParamAttr(initializer=fluid.initializer.TruncatedNormal(loc=0.0, scale=1.0)),
act=None,
name='w_wide')
deep_model = fluid.layers.fc(input=deep_output,
size=1,
param_attr=fluid.ParamAttr(initializer=fluid.initializer.TruncatedNormal(loc=0.0, scale=1.0)),
act=None,
name='w_deep')
prediction = fluid.layers.elementwise_add(wide_model, deep_model)
pred = fluid.layers.sigmoid(fluid.layers.clip(prediction, min=-15.0, max=15.0), name="prediction")
num_seqs = fluid.layers.create_tensor(dtype='int64')
acc = fluid.layers.accuracy(input=pred, label=fluid.layers.cast(x=label, dtype='int64'), total=num_seqs)
auc_var, batch_auc, auc_states = fluid.layers.auc(input=pred, label=fluid.layers.cast(x=label, dtype='int64'))
self._metrics["AUC"] = auc_var
self._metrics["BATCH_AUC"] = batch_auc
self._metrics["ACC"] = acc
cost = fluid.layers.sigmoid_cross_entropy_with_logits(x=prediction, label=label)
avg_cost = fluid.layers.mean(cost)
self._cost = avg_cost
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.learning_rate", None, self._namespace)
optimizer = fluid.optimizer.Adam(learning_rate, lazy_mode=True)
return optimizer
    def infer_net(self, parameter_list):
        # as in the DIN model above, the original called the nonexistent
        # self.deepfm_net(); reuse the training network instead
        self.train_net()
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from fleetrec.core.reader import Reader
from fleetrec.core.utils import envs
try:
import cPickle as pickle
except ImportError:
import pickle
class TrainReader(Reader):
def init(self):
pass
    def _process_line(self, line):
        line = line.strip().split(',')
        features = list(map(float, line))
        wide_feat = features[0:8]  # first 8 columns feed the wide part
        deep_feat = features[8:8 + 58]  # next 58 columns feed the deep part
        label = features[-1]  # the label is the last column
        return wide_feat, deep_feat, [label]
def generate_sample(self, line):
"""
Read the data line by line and process it as a dictionary
"""
def data_iter():
            wide_feat, deep_feat, label = self._process_line(line)
            yield [('wide_input', wide_feat), ('deep_input', deep_feat), ('label', label)]
return data_iter
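Given the slicing in _process_line, every input line must hold 8 + 58 + 1 = 67 comma-separated numbers: 8 wide features, 58 deep features, and the label last. A quick sanity check over a data file (the path is hypothetical):

with open("train_data/train_data.csv") as f:  # hypothetical path
    for lineno, line in enumerate(f, 1):
        n = len(line.strip().split(","))
        assert n == 67, "line %d has %d columns, expected 67" % (lineno, n)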
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
train:
trainer:
# for cluster training
strategy: "async"
epochs: 10
workspace: "fleetrec.models.rank.xdeepfm"
reader:
batch_size: 2
class: "{workspace}/criteo_reader.py"
train_data_path: "{workspace}/data/train_data"
model:
models: "{workspace}/model.py"
hyper_parameters:
layer_sizes_dnn: [10, 10, 10]
layer_sizes_cin: [10, 10]
sparse_feature_number: 1086460
sparse_feature_dim: 9
num_field: 39
fc_sizes: [400, 400, 400]
learning_rate: 0.0001
reg: 0.0001
act: "relu"
optimizer: SGD
save:
increment:
dirname: "increment"
epoch_interval: 2
save_last: True
inference:
dirname: "inference"
epoch_interval: 4
save_last: True
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from fleetrec.core.reader import Reader
from fleetrec.core.utils import envs
try:
import cPickle as pickle
except ImportError:
import pickle
class TrainReader(Reader):
def init(self):
pass
def _process_line(self, line):
features = line.strip('\n').split('\t')
feat_idx = []
feat_value = []
for idx in range(1, 40):
feat_idx.append(int(features[idx]))
feat_value.append(1.0)
label = [int(features[0])]
return feat_idx, feat_value, label
def generate_sample(self, line):
def data_iter():
feat_idx, feat_value, label = self._process_line(line)
yield [('feat_idx', feat_idx), ('feat_value', feat_value), ('label',
label)]
return data_iter
import os
import sys
LOCAL_PATH = os.path.dirname(os.path.abspath(__file__))
TOOLS_PATH = os.path.join(LOCAL_PATH, "..", "..", "tools")
sys.path.append(TOOLS_PATH)
from fleetrec.tools.tools import download_file
if __name__ == '__main__':
url_train = "https://paddlerec.bj.bcebos.com/xdeepfm%2Ftr"
url_test = "https://paddlerec.bj.bcebos.com/xdeepfm%2Fev"
train_dir = "train_data"
test_dir = "test_data"
if not os.path.exists(train_dir):
os.mkdir(train_dir)
if not os.path.exists(test_dir):
os.mkdir(test_dir)
print("download and extract starting...")
download_file(url_train, "./train_data/tr", True)
download_file(url_test, "./test_data/ev", True)
print("download and extract finished")
print("done")
import paddle.fluid as fluid
import math
from fleetrec.core.utils import envs
from fleetrec.core.model import Model as ModelBase
class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
def xdeepfm_net(self):
init_value_ = 0.1
initer = fluid.initializer.TruncatedNormalInitializer(
loc=0.0, scale=init_value_)
        is_distributed = envs.get_trainer() == "CtrTrainer"
sparse_feature_number = envs.get_global_env("hyper_parameters.sparse_feature_number", None, self._namespace)
sparse_feature_dim = envs.get_global_env("hyper_parameters.sparse_feature_dim", None, self._namespace)
# ------------------------- network input --------------------------
num_field = envs.get_global_env("hyper_parameters.num_field", None, self._namespace)
raw_feat_idx = fluid.data(name='feat_idx', shape=[None, num_field], dtype='int64')
raw_feat_value = fluid.data(name='feat_value', shape=[None, num_field], dtype='float32')
self.label = fluid.data(name='label', shape=[None, 1], dtype='float32') # None * 1
feat_idx = fluid.layers.reshape(raw_feat_idx, [-1, 1]) # (None * num_field) * 1
feat_value = fluid.layers.reshape(raw_feat_value, [-1, num_field, 1]) # None * num_field * 1
feat_embeddings = fluid.embedding(
input=feat_idx,
is_sparse=True,
dtype='float32',
size=[sparse_feature_number + 1, sparse_feature_dim],
padding_idx=0,
param_attr=fluid.ParamAttr(initializer=initer))
feat_embeddings = fluid.layers.reshape(
feat_embeddings,
[-1, num_field, sparse_feature_dim]) # None * num_field * embedding_size
feat_embeddings = feat_embeddings * feat_value # None * num_field * embedding_size
# ------------------------- set _data_var --------------------------
self._data_var.append(raw_feat_idx)
self._data_var.append(raw_feat_value)
self._data_var.append(self.label)
if self._platform != "LINUX":
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var, capacity=64, use_double_buffer=False, iterable=False)
# -------------------- linear --------------------
weights_linear = fluid.embedding(
input=feat_idx,
is_sparse=True,
dtype='float32',
size=[sparse_feature_number + 1, 1],
padding_idx=0,
param_attr=fluid.ParamAttr(initializer=initer))
weights_linear = fluid.layers.reshape(
weights_linear, [-1, num_field, 1]) # None * num_field * 1
b_linear = fluid.layers.create_parameter(
shape=[1],
dtype='float32',
default_initializer=fluid.initializer.ConstantInitializer(value=0))
y_linear = fluid.layers.reduce_sum(
(weights_linear * feat_value), 1) + b_linear
# -------------------- CIN --------------------
layer_sizes_cin = envs.get_global_env("hyper_parameters.layer_sizes_cin", None, self._namespace)
Xs = [feat_embeddings]
last_s = num_field
for s in layer_sizes_cin:
# calculate Z^(k+1) with X^k and X^0
X_0 = fluid.layers.reshape(
fluid.layers.transpose(Xs[0], [0, 2, 1]),
[-1, sparse_feature_dim, num_field,
1]) # None, embedding_size, num_field, 1
X_k = fluid.layers.reshape(
fluid.layers.transpose(Xs[-1], [0, 2, 1]),
[-1, sparse_feature_dim, 1, last_s]) # None, embedding_size, 1, last_s
Z_k_1 = fluid.layers.matmul(
X_0, X_k) # None, embedding_size, num_field, last_s
# compresses Z^(k+1) to X^(k+1)
Z_k_1 = fluid.layers.reshape(Z_k_1, [
-1, sparse_feature_dim, last_s * num_field
]) # None, embedding_size, last_s*num_field
Z_k_1 = fluid.layers.transpose(
Z_k_1, [0, 2, 1]) # None, s*num_field, embedding_size
Z_k_1 = fluid.layers.reshape(
Z_k_1, [-1, last_s * num_field, 1, sparse_feature_dim]
) # None, last_s*num_field, 1, embedding_size (None, channal_in, h, w)
X_k_1 = fluid.layers.conv2d(
Z_k_1,
num_filters=s,
filter_size=(1, 1),
act=None,
bias_attr=False,
param_attr=fluid.ParamAttr(
initializer=initer)) # None, s, 1, embedding_size
X_k_1 = fluid.layers.reshape(
X_k_1, [-1, s, sparse_feature_dim]) # None, s, embedding_size
Xs.append(X_k_1)
last_s = s
        # sum pooling over all CIN feature maps
        y_cin = fluid.layers.concat(Xs[1:],
                                    1)  # None, sum(layer_sizes_cin), embedding_size
        y_cin = fluid.layers.reduce_sum(y_cin, -1)  # None, sum(layer_sizes_cin)
y_cin = fluid.layers.fc(input=y_cin,
size=1,
act=None,
param_attr=fluid.ParamAttr(initializer=initer),
bias_attr=None)
y_cin = fluid.layers.reduce_sum(y_cin, dim=-1, keep_dim=True)
# -------------------- DNN --------------------
layer_sizes_dnn = envs.get_global_env("hyper_parameters.layer_sizes_dnn", None, self._namespace)
act = envs.get_global_env("hyper_parameters.act", None, self._namespace)
y_dnn = fluid.layers.reshape(feat_embeddings,
[-1, num_field * sparse_feature_dim])
for s in layer_sizes_dnn:
y_dnn = fluid.layers.fc(input=y_dnn,
size=s,
act=act,
param_attr=fluid.ParamAttr(initializer=initer),
bias_attr=None)
y_dnn = fluid.layers.fc(input=y_dnn,
size=1,
act=None,
param_attr=fluid.ParamAttr(initializer=initer),
bias_attr=None)
# ------------------- xDeepFM ------------------
self.predict = fluid.layers.sigmoid(y_linear + y_cin + y_dnn)
def train_net(self):
self.xdeepfm_net()
cost = fluid.layers.log_loss(input=self.predict, label=self.label, epsilon=0.0000001)
batch_cost = fluid.layers.reduce_mean(cost)
self._cost = batch_cost
# for auc
predict_2d = fluid.layers.concat([1 - self.predict, self.predict], 1)
label_int = fluid.layers.cast(self.label, 'int64')
auc_var, batch_auc_var, _ = fluid.layers.auc(input=predict_2d,
label=label_int,
slide_steps=0)
self._metrics["AUC"] = auc_var
self._metrics["BATCH_AUC"] = batch_auc_var
def optimizer(self):
learning_rate = envs.get_global_env("hyper_parameters.learning_rate", None, self._namespace)
optimizer = fluid.optimizer.Adam(learning_rate, lazy_mode=True)
return optimizer
def infer_net(self, parameter_list):
self.xdeepfm_net()
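The CIN loop above is the densest part of xdeepfm_net, so here is a shape-level NumPy re-derivation of a single layer under illustrative sizes; the random W stands in for the (1, 1) conv2d kernel, which reduces to a plain matrix multiply over the num_field * last_s interaction channels.

import numpy as np

B, F, D = 2, 39, 9  # batch, num_field, sparse_feature_dim (as in the config)
last_s, s = F, 10  # feature-map counts in and out of this layer
rng = np.random.default_rng(0)
X0 = rng.normal(size=(B, F, D))  # X^0: the raw feature embeddings
Xk = rng.normal(size=(B, last_s, D))  # X^k: the previous CIN layer

# Z^(k+1)[b, f, h, d] = X0[b, f, d] * Xk[b, h, d]: outer product per dim d
Z = np.einsum('bfd,bhd->bfhd', X0, Xk).reshape(B, F * last_s, D)

W = rng.normal(size=(s, F * last_s))  # flattened (1, 1) conv kernel
Xk1 = np.einsum('sc,bcd->bsd', W, Z)  # compress channels to [B, s, D]
y = Xk1.sum(axis=-1)  # sum pooling for the y_cin branch, [B, s]
print(Xk1.shape, y.shape)  # (2, 10, 9) (2, 10)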
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
train:
trainer:
# for cluster training
strategy: "async"
epochs: 4
workspace: "fleetrec.models.recall.tdm"
reader:
batch_size: 32
class: "{workspace}/tdm_reader.py"
train_data_path: "{workspace}/data/train"
test_data_path: "{workspace}/data/test"
model:
models: "{workspace}/model.py"
hyper_parameters:
node_emb_size: 64
input_emb_size: 768
neg_sampling_list: [1, 2, 3, 4]
output_positive: True
topK: 1
learning_rate: 0.0001
act: "tanh"
optimizer: ADAM
tree_parameters:
max_layers: 4
node_nums: 26
leaf_node_nums: 13
layer_node_num_list: [2, 4, 7, 12]
child_nums: 2
startup:
tree:
      # For single-machine training it is recommended to load the tree only once,
      # save it as paddle tensors, and warm-start from the saved paddle model afterwards.
      # For distributed training, every trainer must load the tree independently.
load_tree: True
tree_layer_path: "{workspace}/tree/layer_list.txt"
tree_travel_path: "{workspace}/tree/travel_list.npy"
tree_info_path: "{workspace}/tree/tree_info.npy"
tree_emb_path: "{workspace}/tree/tree_emb.npy"
single:
load_persistables: False
persistables_model_path: ""
save_init_model: True
init_model_path: "{workspace}/init_model"
cluster:
init_model_path: "{workspace}/init_model"
save:
increment:
dirname: "increment"
epoch_interval: 2
save_last: True
inference:
dirname: "inference"
epoch_interval: 4
save_last: True
-0.9480544328689575 0.8702829480171204 -0.5691063404083252 0.3169376850128174 0.33546653389930725 0.6142528057098389 0.9836248755455017 0.7275366187095642 0.34716030955314636 -0.4742715358734131 0.7336636781692505 0.04654106870293617 -0.20831291377544403 -0.4481041431427002 -0.525967001914978 -0.9841973781585693 0.9882086515426636 -0.027082571759819984 0.4890778362751007 -0.6382721662521362 0.6899682283401489 -0.38224223256111145 0.9986906051635742 -0.00020712893456220627 -0.9492083787918091 0.7107149958610535 0.9882597923278809 0.26990818977355957 -0.9535472393035889 -0.1559726893901825 0.5885211825370789 0.17909500002861023 -0.8814249038696289 0.22860293090343475 -0.4293985366821289 -0.9652239084243774 -0.9447112679481506 -0.15577830374240875 -0.6805497407913208 0.6221181154251099 0.6404242515563965 0.1448463648557663 -0.5257529020309448 0.12640823423862457 -0.9252819418907166 -0.3268739581108093 0.8049232363700867 0.9740312099456787 0.33457809686660767 -0.8270060420036316 -0.06581008434295654 -0.2529199719429016 0.9999998807907104 -0.999997079372406 0.85847008228302 0.2864536643028259 -0.3738149404525757 -0.9884565472602844 -0.7930119037628174 -0.7500308156013489 0.6861344575881958 -0.999988853931427 0.9873992204666138 -0.08930592238903046 0.21770808100700378 -0.27190539240837097 -0.679567277431488 0.35942506790161133 0.3420848250389099 0.9409869909286499 0.8751437664031982 0.1828843057155609 0.9682096838951111 -0.15853644907474518 -0.1601446568965912 0.018752284348011017 0.4887729287147522 0.9999999403953552 -0.042612675577402115 0.7375731468200684 -0.5218359231948853 -0.24672318994998932 -0.4977828562259674 -0.19678929448127747 0.9999991655349731 0.6342479586601257 -0.0021091371309012175 -0.7912644147872925 -0.8245441913604736 -0.8503287434577942 0.24679483473300934 -0.13345639407634735 -0.4827110767364502 -0.5528470873832703 0.8379515409469604 0.3365992307662964 -0.23395371437072754 0.30137789249420166 0.6554356217384338 0.7143795490264893 -0.9463644027709961 0.9353510737419128 0.9723365306854248 0.9699011445045471 -0.5872727036476135 -0.1483442187309265 0.31902876496315 -0.7438213229179382 -0.267783522605896 0.6152546405792236 0.1536981612443924 -0.34291231632232666 -0.7694793939590454 -0.034810300916433334 -0.9282379150390625 -0.07565831393003464 -0.6177279949188232 0.9257068037986755 -0.5235797762870789 0.5636262893676758 -0.3359975516796112 0.43728208541870117 -0.8021860718727112 0.19429314136505127 -0.4344169795513153 0.8767723441123962 0.9269178509712219 0.051557812839746475 0.24033737182617188 0.30773890018463135 0.20496289432048798 0.25821128487586975 0.19531913101673126 0.9219141602516174 0.731987714767456 -0.147042915225029 0.16378480195999146 0.632792592048645 0.8717026710510254 -0.48667508363723755 -0.9421618580818176 0.9999619126319885 -0.805202066898346 -0.17740759253501892 -0.8062586784362793 0.8192204236984253 0.16358403861522675 -0.4860736131668091 -0.15365929901599884 -0.24501872062683105 -0.4635009169578552 0.8367763757705688 0.8025020360946655 0.9206703901290894 -0.6961565017700195 0.9824270009994507 0.6540120244026184 -0.9255840182304382 -0.9892961382865906 0.5214773416519165 -0.9747877717018127 0.5705400705337524 -0.6537265777587891 -0.778024435043335 -0.5822799205780029 -0.3444381654262543 -0.9259541630744934 -0.7785751819610596 0.9946136474609375 -0.13341118395328522 0.9719166159629822 -0.6909202337265015 0.4316543638706207 -0.998206377029419 -0.6213840246200562 -0.8628668189048767 -0.9943762421607971 -0.34595680236816406 -0.5426640510559082 
-0.9999998211860657 0.9983428120613098 -0.3715406060218811 0.6625471711158752 0.991619348526001 -0.49132609367370605 -0.1220390647649765 -0.37494972348213196 0.1248563677072525 -0.7821731567382812 -0.9299596548080444 0.2194000482559204 -0.8428259491920471 0.970793604850769 0.6422125101089478 0.7525355219841003 -0.7346519827842712 0.9936985969543457 0.9903855919837952 -0.6798727512359619 -0.958869457244873 -0.17401373386383057 0.2149842083454132 -0.33881470561027527 0.44143083691596985 -0.19633857905864716 -0.28943580389022827 0.7924275398254395 -0.12080565094947815 -0.9177014827728271 0.2820487320423126 -0.02769811823964119 -0.10200914740562439 -0.06917869299650192 0.9082639217376709 0.9715079665184021 0.226842001080513 0.7940313816070557 -0.895995557308197 0.8971980214118958 -0.8616856336593628 -0.9642254710197449 0.7317101359367371 0.22378572821617126 0.21944323182106018 -0.10572226345539093 0.8330612778663635 0.8941041827201843 -0.15057823061943054 0.9597658514976501 -0.8842217326164246 -0.9673177003860474 0.2529902458190918 -0.5620787143707275 -0.3615722060203552 0.6900137066841125 -0.5109211206436157 -0.44103994965553284 -0.0687171071767807 0.7434810996055603 -0.182602658867836 0.39662590622901917 -0.3907196819782257 -0.897222638130188 0.3724368214607239 -0.4301374554634094 0.2966064214706421 0.8454129099845886 -0.028251996263861656 -0.5071316957473755 0.6883982419967651 0.6440052390098572 -0.20861011743545532 0.04759825021028519 0.40223830938339233 0.9302568435668945 -0.4137932360172272 0.842984676361084 0.5772297382354736 0.8739673495292664 0.30998045206069946 -0.12564733624458313 -0.2287536859512329 -0.5028448700904846 -0.024959489703178406 0.9076481461524963 -0.27138394117355347 -0.07135224342346191 0.545727550983429 -0.9364309310913086 -0.9002553820610046 0.8205761313438416 -0.7223145961761475 -0.5490404367446899 -0.06743834167718887 0.9957395195960999 0.2774718701839447 -0.9589726328849792 -0.5507056713104248 0.5633119940757751 0.19945119321346283 0.5139588117599487 -0.29365769028663635 0.2589624226093292 0.8205932378768921 -0.4676002562046051 0.76975417137146 -0.8745505809783936 0.7822840213775635 0.06313923001289368 -0.3085383176803589 0.7778911590576172 0.43714478611946106 -0.7621023058891296 -0.8691548109054565 0.9862700700759888 0.5158051252365112 0.9535803198814392 -0.9357631802558899 -0.09678655117750168 0.46401405334472656 0.37646034359931946 0.2493618130683899 -0.37036871910095215 0.44625458121299744 -0.6171474456787109 -0.9426791667938232 0.9215719699859619 0.6411372423171997 0.9866154193878174 0.9951940774917603 -0.5669248700141907 0.11575046926736832 -0.5614780187606812 -0.7779515981674194 0.8369724750518799 -0.9703425765037537 0.40080270171165466 0.11768914014101028 0.7245428562164307 0.2860548496246338 0.04602481797337532 0.9351804256439209 -0.07499267160892487 -0.8416668176651001 -0.5895882844924927 0.13477011024951935 0.6291114091873169 0.6402739882469177 0.9226657152175903 0.5935531854629517 -0.9426186680793762 -0.12135419249534607 -0.7278366684913635 -0.32659876346588135 -0.24795940518379211 -0.4225684702396393 0.8498977422714233 0.2410791963338852 0.4565314054489136 0.6556909084320068 0.9958739280700684 0.6878873705863953 0.35973143577575684 -0.2159281224012375 0.918936550617218 0.9210938215255737 0.7355884313583374 0.03672586381435394 -0.9918020963668823 0.13276700675487518 -0.819240391254425 -0.6741007566452026 -0.3729737102985382 0.962843656539917 0.801899254322052 -1.3634562492370605e-05 0.9992533326148987 -0.18473781645298004 0.484240859746933 
-0.7103394865989685 -0.4507419168949127 0.5676354169845581 -0.7187457084655762 -0.007040190510451794 -0.7229022979736328 -0.7384819984436035 -0.0011260672472417355 0.47381356358528137 -0.6711698174476624 0.4395555257797241 -0.5639545321464539 0.9019620418548584 -0.26680004596710205 0.06668909639120102 0.45266470313072205 0.3126644492149353 0.21646928787231445 0.7008609771728516 0.8514515161514282 0.8900784254074097 -0.1714429408311844 -0.26488712430000305 0.822838306427002 0.7984057664871216 0.6937839984893799 -0.8676029443740845 -0.912756085395813 0.9952932000160217 0.6609575748443604 0.26621559262275696 -0.9338613748550415 -0.36709266901016235 0.7006247639656067 -0.13456419110298157 -0.9860835075378418 -0.0030525538604706526 0.7741578817367554 -0.9949805736541748 0.9944281578063965 -0.2773399353027344 -0.27521568536758423 0.873822808265686 -0.8505438566207886 0.9219252467155457 0.9506142139434814 -0.9931663274765015 0.2094208300113678 0.8989949226379395 -0.8263533711433411 0.7351751327514648 0.5883669853210449 0.5063994526863098 -0.010385761968791485 -0.16071544587612152 0.5572603344917297 0.9975792169570923 -0.37277889251708984 0.2969944179058075 0.4745742082595825 0.46699199080467224 0.8417081832885742 -0.9797468781471252 -0.9320610761642456 0.9814757108688354 -0.9080472588539124 -0.9693748950958252 -0.8820312023162842 0.6830160617828369 0.4663272798061371 -0.41925233602523804 0.9973465800285339 0.43139731884002686 -0.9547752141952515 -0.5317192077636719 -0.43856996297836304 -0.3189813494682312 -0.6222522258758545 0.8899905681610107 -0.17611898481845856 0.6160949468612671 -0.6759617328643799 0.8942022323608398 -0.9842967987060547 -0.785462498664856 -0.2826366126537323 -0.7371346354484558 -0.22471989691257477 0.2516322731971741 -0.8836060762405396 -0.2880946099758148 -0.17259182035923004 -0.6435271501541138 0.5081992149353027 0.9502476453781128 0.08592165261507034 0.7430248856544495 0.8532432317733765 -0.8040454387664795 0.39988598227500916 0.0879754051566124 -0.8446376919746399 0.9410332441329956 0.539093017578125 -0.9971553087234497 -0.34493348002433777 -0.6404770612716675 -0.9350262880325317 -0.30913853645324707 -0.5400170087814331 -0.3888579308986664 0.3861994743347168 0.9555003046989441 -0.045868881046772 -0.7948362827301025 -0.9847419857978821 0.28334569931030273 0.9829130172729492 0.9022071957588196 0.8460508584976196 0.8827956914901733 0.5217266082763672 0.8388376235961914 -0.4737439751625061 -0.7367866039276123 0.6091421842575073 0.6935133934020996 0.3292695879936218 0.6819717884063721 0.6288567781448364 -0.9852819442749023 -0.9325888156890869 0.7079160809516907 -0.2522243857383728 0.29061344265937805 0.4168351888656616 -0.17085932195186615 -0.9821330308914185 -0.08980102837085724 0.1705685257911682 0.4749109447002411 0.4352322518825531 -0.7320473194122314 0.1749090999364853 -0.6090183258056641 -0.9999996423721313 0.11408602446317673 0.636742115020752 -0.9999560713768005 -0.528277575969696 -0.04669971019029617 0.16914376616477966 0.9975010752677917 -0.27113035321235657 -0.91581791639328 -0.5396955013275146 -0.6812576651573181 0.06048664078116417 0.8771249055862427 -0.28968605399131775 0.9606621265411377 0.3088063597679138 0.7697410583496094 0.8498889803886414 -0.9196180105209351 0.4746979773044586 0.3220598101615906 -0.05602428317070007 0.025225158780813217 0.849922239780426 -0.5332156419754028 0.8905873894691467 -0.9448485374450684 0.5047239065170288 -0.42437779903411865 0.34286928176879883 0.16571670770645142 -0.762528657913208 0.9356507658958435 -0.7949631214141846 
0.2949289083480835 0.9999998211860657 -0.6150386333465576 -0.25548219680786133 0.33827531337738037 -0.07054266333580017 -0.19729135930538177 0.1573161482810974 -0.05042427405714989 0.7093042135238647 -0.5755212903022766 0.2684229016304016 -0.47712960839271545 0.38820967078208923 -0.9981969594955444 -0.8153560757637024 -0.4216618835926056 -0.9716876745223999 -0.815239667892456 0.5604946613311768 0.9999800324440002 -0.2431851029396057 -0.8518420457839966 0.8317279815673828 -0.457424134016037 -0.9970026016235352 0.07626709342002869 -0.7131580114364624 0.8536381125450134 -0.6067755222320557 0.9662473201751709 -0.5714900493621826 0.8362305164337158 -0.980530321598053 0.70802903175354 -0.37646859884262085 0.32210078835487366 0.7595421671867371 -0.9303385019302368 0.9706170558929443 0.4516674876213074 -0.5156293511390686 -0.976844847202301 0.7429302930831909 -0.6604652404785156 0.9990907907485962 0.7750779390335083 -0.19849209487438202 -0.9612411856651306 0.30241459608078003 -0.6022388339042664 0.3015974462032318 0.7366891503334045 0.3670116066932678 0.5230567455291748 -0.6088293790817261 -0.4208856225013733 0.5257665514945984 0.9806308150291443 0.9732798933982849 0.17695142328739166 0.9426453113555908 -0.46514642238616943 0.03982577100396156 -0.3363747298717499 -0.6221197843551636 -0.0697956532239914 -0.5869790315628052 -0.9999690651893616 -0.4498049020767212 0.8669120073318481 -0.858860194683075 -0.6967154741287231 -0.5750628709793091 -0.7230195999145508 0.7416847348213196 0.9926733374595642 -0.8363938331604004 0.28877055644989014 0.5237901210784912 0.48778727650642395 0.18297745287418365 -0.6391410827636719 0.09669660031795502 -0.2378515601158142 0.39897438883781433 -0.9066296815872192 -0.4131527543067932 0.04222756251692772 -0.165339395403862 0.3569805324077606 0.9393270611763 -0.09812773019075394 0.3301991820335388 0.4748114049434662 -0.5020018815994263 0.3381231129169464 -0.45781758427619934 0.6777403354644775 -0.6713153123855591 -0.997413694858551 0.21004293859004974 0.2904956340789795 -0.14372330904006958 -0.7342061996459961 -0.3686257004737854 -0.9125393629074097 -0.35528478026390076 0.5280635356903076 0.8971065878868103 -0.38137340545654297 -0.518653154373169 0.9467587471008301 -0.6497949957847595 0.901367723941803 0.8000989556312561 -0.23680521547794342 -0.37908563017845154 0.5199007987976074 -0.6792205572128296 -0.7659827470779419 0.7048740386962891 -0.9803741574287415 -0.9400724172592163 0.9975001215934753 -0.49684441089630127 0.6033856272697449 0.8939706683158875 0.9930545091629028 0.9207314848899841 0.9822189807891846 0.14152127504348755 -0.4911538064479828 0.778936505317688 -0.9736407399177551 -0.1471155285835266 -0.09989407658576965 0.8127882480621338 0.9998739361763 0.9998676180839539 -0.2282087206840515 -0.9569512009620667 -0.6656563878059387 0.2089112401008606 -0.47332215309143066 0.9998306035995483 0.9494611620903015 -0.059000447392463684 -0.7915271520614624 -0.9219574928283691 -0.006159229204058647 0.37470048666000366 -0.894044041633606 -0.9407564401626587 0.9027561545372009 0.33179017901420593 0.8828270435333252 0.5471377372741699 0.5865803956985474 0.42496320605278015 0.22990956902503967 -0.4480423331260681 0.5281573534011841 -0.7434720396995544 0.4271245300769806 0.0095954155549407 0.9575615525245667 0.3099426329135895 -0.8156846761703491 0.09816870093345642 -0.4230731427669525 0.7467312216758728 0.9663402438163757 -0.9331629276275635 0.11204610764980316 -0.012516902759671211 -0.8933897018432617 0.7708922624588013 0.9589368104934692 0.47833549976348877 -0.9270305037498474 
-0.8773804306983948 0.37763848900794983 -0.591926097869873 -0.8916716575622559 -0.16091710329055786 0.11995232850313187 -0.5159960985183716 -0.005725824274122715 0.9832568168640137 -0.5129912495613098 -0.8562396168708801 0.539741039276123 -0.7333481311798096 -0.8360357284545898 -0.22443562746047974 0.5057839155197144 -0.9373593926429749 0.3069112002849579 -0.3884895443916321 -0.9998724460601807 -0.5230441093444824 -0.7358835935592651 -0.011600411497056484 0.29529768228530884 0.8183577656745911 -0.2606428265571594 0.35493770241737366 0.015006400644779205 0.5407224893569946 0.7436368465423584 -0.3411730229854584 -0.7252484560012817 0.8832615613937378 -0.9372369647026062 -0.9267153739929199 0.3185942471027374 -0.0641348659992218 0.9999977350234985 -0.08007889240980148 -0.33639591932296753 0.2531973123550415 0.7970564365386963 -0.2841281294822693 -0.4326792359352112 -0.04391402751207352 -0.5352795124053955 -0.9972627758979797 0.9397293329238892
[data dump elided: rows of whitespace-separated floating-point values in (-1, 1), consistent with tanh-activated embedding/inference output vectors from the infer_dssm_w2v branch]
-0.8055002093315125 -0.7524464130401611 0.2210594266653061 -0.38541194796562195 -0.3698371350765228 0.6299501061439514 -0.6869086027145386 -0.4702903926372528 -0.4191085994243622 0.9057010412216187 -0.607915997505188 -0.036469489336013794 -0.27041947841644287 0.3512585461139679 -0.49292874336242676 -0.22840671241283417 0.04365191236138344 -0.9999017119407654 0.38923022150993347 -0.3467789888381958 0.5747507810592651 0.788312554359436 0.44300952553749084 0.8981963992118835 0.9795095324516296 -0.11671499907970428 0.2651389241218567 0.41584068536758423 0.9303041696548462 -0.2807850241661072 0.9072498083114624 -0.9851317405700684 -0.9749690890312195 0.7063748836517334 -0.3555915951728821 0.9999745488166809 0.03417248651385307 0.6930094957351685 -0.23265019059181213 0.5357322096824646 -0.04481179639697075 -0.6367021799087524 -0.7651946544647217 0.46259042620658875 -0.9989505410194397 -0.4908497631549835
0.7807494401931763 0.7809333801269531 0.7848272323608398 -0.6071844100952148 -0.9883114099502563 0.8268844485282898 0.9800326228141785 -0.6646305918693542 -0.9632079601287842 0.45085129141807556 0.9945095181465149 0.7101765275001526 -0.6694371700286865 0.3264409303665161 -0.019075851887464523 -0.755631148815155 0.9992819428443909 0.5331296920776367 0.4606398046016693 0.1787686049938202 -0.9999513030052185 0.9793780446052551 0.4728037416934967 0.7673900127410889 0.8933066725730896 -0.6747622489929199 -0.42694059014320374 0.49756544828414917 0.7433912754058838 -0.4173305630683899 0.9662639498710632 -0.6886231303215027 0.9064551591873169 -0.1952122002840042 -0.969974160194397 0.0016672745114192367 -0.6520066261291504 -0.7829684019088745 0.9516867995262146 0.17054983973503113 -0.8992577195167542 -0.08344247937202454 -0.8964132070541382 0.8992326259613037 0.7776856422424316 -0.04225875437259674 -0.9397880434989929 -0.44174742698669434 0.1866929978132248 -0.8951548337936401 -0.8358204364776611 -0.3841249644756317 0.9999993443489075 -0.9999621510505676 -0.339434415102005 -0.3704259991645813 -0.34867173433303833 -0.5526336431503296 -0.6394872665405273 -0.21825645864009857 0.7334787845611572 -0.9999725222587585 0.9999985098838806 -0.8639466762542725 -0.6595171093940735 0.9660065770149231 -0.7975364327430725 0.778401255607605 0.967434823513031 0.8165250420570374 0.7224107980728149 -0.16651038825511932 0.9506092071533203 -0.12815015017986298 -0.06003856658935547 -0.966255247592926 -0.969335675239563 0.9999991059303284 0.7526621222496033 0.6881328225135803 -0.45171067118644714 -0.9996581673622131 0.8796321749687195 -0.7747440934181213 0.9999997019767761 -0.9867618680000305 -0.8368546962738037 -0.2351432740688324 -0.14764095842838287 -0.9661152958869934 0.9907602667808533 -0.8117325305938721 -0.5769038796424866 0.47021013498306274 -0.30499738454818726 -0.5162541270256042 0.934349000453949 0.945207417011261 0.8880209922790527 0.9502005577087402 0.4290374219417572 -0.9520120620727539 -0.8751878142356873 0.2586744427680969 -0.9876563549041748 -0.8449591398239136 0.12614589929580688 0.9900839328765869 0.4181566834449768 0.7852931022644043 0.7283384799957275 0.8561723232269287 0.5834828019142151 0.9020965695381165 -0.8011603951454163 -0.9921838045120239 0.2838417589664459 -0.4890616536140442 -0.30518990755081177 0.9764720797538757 0.2376204878091812 0.8650728464126587 0.9562292695045471 0.7760542035102844 -0.8016288876533508 -0.7407200336456299 -0.5609173774719238 -0.5420851111412048 0.9367965459823608 -0.5363600254058838 0.3522815704345703 -0.7679665684700012 0.9963011741638184 0.9064530730247498 -0.9661626219749451 -0.26031309366226196 -0.5235435962677002 -0.3306257724761963 -0.9344596862792969 -0.4643157720565796 0.887833297252655 0.9999988079071045 0.25621503591537476 -0.922317624092102 -0.5313804149627686 0.5216445326805115 0.4025707244873047 -0.09623442590236664 -0.6337611675262451 0.9987528324127197 -0.2243904173374176 0.9999975562095642 0.8795535564422607 0.3420840799808502 0.22333137691020966 0.9967681765556335 -0.022946672514081 0.5385688543319702 -0.920645534992218 -0.9167963266372681 -0.07202639430761337 -0.9927797913551331 0.9571864604949951 -0.003817889839410782 0.1848444640636444 -0.8039121031761169 0.29607370495796204 -0.9771929979324341 0.6506907939910889 0.07309292256832123 0.9862523078918457 0.3934731185436249 0.716391384601593 -0.9432490468025208 -0.9997708201408386 -0.2935563027858734 0.9669514894485474 -0.6592175960540771 -0.12127827852964401 -0.9999999403953552 -0.5835727453231812 
0.7432948350906372 -0.16586649417877197 -0.6583302021026611 -0.5632277727127075 0.23081834614276886 -0.861813485622406 0.47825273871421814 0.9302090406417847 -0.8058942556381226 -0.8178507089614868 -0.9341305494308472 0.6797914505004883 0.3928544223308563 0.33617669343948364 -0.4745149314403534 0.8745993375778198 0.9932746887207031 -0.9466976523399353 0.8075168132781982 0.9505585432052612 -0.01120378915220499 -0.3018272817134857 0.8425800800323486 0.33145278692245483 -0.23329590260982513 0.4371531009674072 0.8366189002990723 0.6558314561843872 -0.9839299917221069 0.13808085024356842 -0.7952492237091064 -0.7875534296035767 -0.6156893372535706 -0.6254180073738098 0.5739796757698059 0.6992193460464478 -0.6954901814460754 -0.5498892068862915 0.9202524423599243 0.40436437726020813 0.8577019572257996 -0.8301336169242859 0.6136289834976196 -0.2054271399974823 0.9954891800880432 -0.8496402502059937 -0.12289099395275116 -0.5206649303436279 0.8148974180221558 -0.41090989112854004 0.36459431052207947 -0.9808284640312195 0.866303563117981 -0.9996837377548218 0.42869532108306885 -0.20188100636005402 -0.978351891040802 -0.8250664472579956 -0.6117265224456787 0.031815290451049805 -0.3263826072216034 -0.21450582146644592 0.9953648447990417 0.5090211033821106 0.9572229385375977 0.9870596528053284 0.8277152180671692 -0.7373942136764526 -0.1054060086607933 0.6975263357162476 0.7935209274291992 0.8710335493087769 -0.15379397571086884 0.4305143356323242 -0.6640900373458862 0.9914087057113647 0.7962601780891418 -0.5966415405273438 0.5287249088287354 0.57735276222229 -0.8427956104278564 -0.9553767442703247 -0.3013526201248169 0.313721239566803 -0.9943503737449646 0.8675937652587891 0.7692195177078247 0.7985422015190125 0.5214242339134216 -0.3139992654323578 -0.19937412440776825 -0.9536923766136169 0.9999545812606812 0.9980261921882629 0.16550491750240326 0.9950502514839172 0.9242883324623108 0.8044170141220093 -0.784490704536438 -0.5459022521972656 0.8158356547355652 0.7221397161483765 -0.8361477851867676 -0.9036083221435547 -0.8154321908950806 -0.6089887619018555 -0.804524838924408 -0.9272236824035645 0.09715887159109116 -0.9640277028083801 -0.9920601844787598 -0.3315610885620117 -0.5246645212173462 -0.5789972543716431 -0.7105439901351929 0.5615383386611938 0.8950800895690918 0.4626496732234955 -0.9615793228149414 0.2720894515514374 0.8862379193305969 -0.7552317380905151 0.7227339744567871 0.7446775436401367 -0.10721531510353088 -0.5084713697433472 -0.8002328276634216 0.8555522561073303 0.3305860161781311 -0.9826071858406067 -0.9575358629226685 -0.3885861337184906 -0.11805698275566101 0.013333975337445736 0.9612985253334045 -0.9683430194854736 -0.6023399829864502 0.323371946811676 0.7261687517166138 0.9023948311805725 0.579271674156189 -0.7082117199897766 0.9178897738456726 0.5497199296951294 -0.7558343410491943 0.0799226462841034 0.1253412812948227 -0.5954004526138306 0.7726684212684631 0.28015586733818054 -0.9242373704910278 -0.8954138159751892 -0.6333010792732239 -0.4886329174041748 -0.32730427384376526 -0.5145993828773499 -0.17884811758995056 -0.8756799697875977 0.7790016531944275 0.922269880771637 0.9890215992927551 0.7620134949684143 -0.11370346695184708 -0.7115720510482788 -0.9437072277069092 -0.8585702776908875 0.09446628391742706 0.17423632740974426 0.5724895596504211 -0.29230982065200806 -0.8764363527297974 0.737256646156311 0.9745979309082031 0.7766638994216919 0.5740008354187012 0.9652376174926758 0.5956985950469971 0.9104105234146118 -0.9846939444541931 -0.9874125123023987 0.7674770355224609 
-0.6278111934661865 0.42978763580322266 -0.36748048663139343 0.98074871301651 0.6371504068374634 0.8660079836845398 0.7330803871154785 0.6282758712768555 -0.06011238694190979 0.9590619206428528 0.4981462061405182 -0.3050402104854584 -0.7222942113876343 0.9255295991897583 0.1922173649072647 -0.5685547590255737 -0.3513019382953644 -0.9245710968971252 -0.94679856300354 0.873347282409668 -0.8485638499259949 0.4540597200393677 -0.6172357797622681 -0.9680294990539551 0.8621605038642883 0.9999852180480957 0.12103135883808136 0.9861083626747131 0.9952923059463501 -0.09018319100141525 -0.9212624430656433 0.9467319846153259 -0.5535407066345215 0.8134970664978027 -0.8934110403060913 -0.9908460974693298 0.8138691186904907 0.7816593647003174 0.09782538563013077 0.9482041597366333 -0.7071301341056824 -0.2583058178424835 -0.060822367668151855 0.8507344722747803 -0.2532612681388855 0.40220221877098083 -0.7167752981185913 -0.9328767657279968 0.6748438477516174 0.15437695384025574 0.9795252084732056 0.9849043488502502 -0.8769189715385437 0.9963489174842834 0.9874557852745056 0.3407696783542633 0.9932191371917725 -0.4800136983394623 -0.45757025480270386 -0.9694917798042297 -0.8413001894950867 0.3705117702484131 -0.5680527091026306 -0.699642539024353 0.9987561106681824 0.7245868444442749 0.920759379863739 -0.38453343510627747 -0.7207469940185547 0.1465045064687729 -0.8422682285308838 0.1658090502023697 -0.8423430919647217 -0.019439665600657463 0.6161633729934692 -0.9638157486915588 0.9999524354934692 -0.9666372537612915 -0.48981815576553345 -0.7546089887619019 -0.47433173656463623 0.7836153507232666 0.9849879741668701 -0.6780228614807129 -0.8985952734947205 -0.0930199846625328 0.9839240908622742 0.9510113000869751 -0.03296511992812157 0.14083150029182434 -0.2718427777290344 0.9629216194152832 0.9446461796760559 0.9679654240608215 -0.9049450159072876 0.2435135543346405 0.14887656271457672 0.8165917992591858 -0.9779908061027527 0.035376179963350296 -0.018890276551246643 -0.5795116424560547 -0.8651928305625916 -0.5491433143615723 0.22335445880889893 -0.04062357172369957 0.9534841775894165 0.6171145439147949 0.9901906847953796 -0.42080482840538025 -0.0005313511355780065 -0.7834076881408691 0.8666591644287109 -0.29012754559516907 0.9893029928207397 0.9147478938102722 -0.011305534280836582 0.4919678866863251 0.9606536626815796 0.9677324295043945 0.5133556127548218 0.9975011348724365 -0.16520299017429352 0.9329929947853088 -0.8032835721969604 0.2559090852737427 0.40325304865837097 0.9861091375350952 -0.6525470018386841 0.720916748046875 -0.33647072315216064 -0.4946942627429962 -0.8048444986343384 -0.9644500017166138 -0.25505921244621277 -0.9447284936904907 -0.4662947654724121 -0.9479032754898071 -0.7960237264633179 0.7716381549835205 0.5662903785705566 0.4069715738296509 -0.9999995827674866 -0.7058547735214233 -0.9328874349594116 -0.9999957084655762 -0.3920696973800659 -0.1750658005475998 -0.8787025213241577 0.9841864109039307 -0.3443329632282257 0.9253692626953125 0.33349621295928955 0.7580519318580627 0.7387676239013672 -0.9037484526634216 0.7133350372314453 -0.9846031069755554 -0.9238222241401672 -0.7252442240715027 0.8105571269989014 0.9052258133888245 -0.6414041519165039 0.3428402543067932 0.7736756801605225 -0.9720767736434937 0.8311645984649658 0.6599589586257935 0.21182158589363098 -0.8761662244796753 0.7617878913879395 0.8478502035140991 -0.23552869260311127 0.38555842638015747 0.5474547147750854 0.8698747754096985 -0.8125086426734924 -0.742965579032898 0.9999986886978149 -0.4047011137008667 
0.9943925142288208 -0.14337214827537537 -0.7257510423660278 0.7257179617881775 0.9007732272148132 0.3550495207309723 0.8157641887664795 -0.9486923217773438 -0.7450476884841919 0.11530313640832901 -0.9361270666122437 -0.987384557723999 -0.6603449583053589 0.029994778335094452 -0.9882314205169678 0.9712017178535461 0.11234806478023529 0.999961793422699 -0.3808753788471222 0.8785080313682556 -0.930540919303894 0.1798119992017746 -0.011514483019709587 -0.36315158009529114 0.7162957787513733 0.8844559788703918 -0.8435156345367432 0.9928496479988098 0.2674400210380554 -0.9267610311508179 -0.8102846145629883 -0.6851086616516113 0.8827716112136841 0.5388311147689819 0.9174622297286987 0.9546645879745483 0.8786671757698059 -0.8033220767974854 -0.7793790698051453 -0.7825308442115784 0.11747173219919205 0.3636033535003662 0.9991230368614197 -0.46437519788742065 0.8610716462135315 -0.9436357617378235 -0.9886301755905151 -0.8246070146560669 0.9663519859313965 0.5658964514732361 0.8026878237724304 -0.4018450081348419 -0.12987340986728668 0.4789738059043884 0.27306467294692993 -0.2718333303928375 0.010732677765190601 -0.9101426601409912 0.7948753833770752 -0.22188718616962433 -0.3962440490722656 0.9644924998283386 -0.6612288951873779 -0.2566451132297516 0.5217751264572144 -0.9999096393585205 0.9918709397315979 -0.0968332439661026 -0.3178311288356781 0.7880996465682983 0.9652309417724609 -0.9270269870758057 0.3050156235694885 0.9221760034561157 0.012495370581746101 -0.6968399882316589 0.9058359861373901 -0.8368300199508667 0.8125592470169067 -0.8332525491714478 0.9595077037811279 -0.9645699858665466 0.1328885853290558 -0.366748571395874 -0.9993916153907776 -0.12308798730373383 0.6657741665840149 0.37586450576782227 0.9947840571403503 -0.7076055407524109 0.17749017477035522 0.9742070436477661 -0.6709648370742798 -0.9339616894721985 -0.6475658416748047 0.9600319266319275 -0.1799367517232895 -0.999325692653656 -0.827728271484375 -0.5889943242073059 -0.8771181106567383 -0.8483433723449707 0.15153053402900696 0.028920846059918404 -0.9231660962104797 -0.09088404476642609 -0.2142404317855835 -0.9942392110824585 0.39192792773246765 -0.7513214349746704 0.5389680862426758 -0.634513258934021 0.7760187983512878 -0.8624922037124634 -0.670456051826477 0.07246758788824081 -0.9022791981697083 0.7337125539779663 0.44884374737739563 -0.721455991268158 0.9012424349784851 0.9762679934501648 -0.9201392531394958 0.8586233854293823 -0.5041422843933105 -0.9836832880973816 0.4736953377723694 -0.6298403739929199 -0.2916460633277893 0.27372533082962036 0.3551899492740631 -0.13247227668762207 -0.6059690713882446 -0.7467349767684937 -0.9210512638092041 0.9999974966049194 0.9997711181640625 0.44099268317222595 0.863728940486908 0.5954385995864868 0.27172377705574036 -0.3895365595817566 0.9952741861343384 -0.9016956686973572 -0.12610238790512085 0.943328857421875 -0.8442933559417725 0.9396188259124756 0.3722359836101532 0.5049495697021484 -0.9877894520759583 -0.2335072159767151 -0.2580098807811737 -0.4923427700996399 -0.8311837315559387 0.32886168360710144 -0.9017153978347778 0.4470977187156677 0.6842589378356934 -0.42923682928085327 0.8410854339599609 0.16204655170440674 -0.7616226673126221 0.8115532398223877 0.9862496852874756 -0.9860538840293884 -0.8652103543281555 0.9996868371963501 0.8166418075561523 0.8826515674591064 -0.7662855386734009 -0.8337327241897583 0.6976772546768188 -0.7739385366439819 -0.019563285633921623 -0.9601554870605469 -0.045341912657022476 -0.9640174508094788 -0.8368509411811829 0.9642446637153625 
0.7508050203323364 0.9199525117874146 -0.9937981963157654 -0.7127553224563599 -0.43917879462242126 -0.9752092957496643 0.8486047983169556 0.2082393318414688 -0.6387321949005127 0.04577837139368057 -0.16804274916648865 0.9421594142913818 -0.8966966271400452 0.9817973971366882 -0.9882621765136719 0.5570027828216553 0.4841572046279907 -0.9994178414344788 -0.8764650821685791 0.598757266998291 -0.3358069062232971 -0.5725694894790649 -0.9938691854476929 -0.3252944350242615 -0.9682091474533081 0.7310398817062378 -0.9965400695800781 0.9825366735458374 0.8817487359046936 -0.9987205266952515 0.6235256195068359 0.7938828468322754 -0.876288652420044 0.526809811592102 -0.8088257312774658 0.9999255537986755 0.9998545050621033 -0.9784322381019592 0.6106517314910889 -0.36646729707717896 0.5337097644805908 -0.37442547082901 0.21689358353614807 -0.6329466104507446 -0.9999113082885742 -0.8756048679351807
-0.2502939701080322 -0.9634893536567688 -0.9632988572120667 0.5902067422866821 0.15801580250263214 0.9907017946243286 0.7667999267578125 0.02879064716398716 0.998364269733429 -0.9803722500801086 -0.9371600151062012 -0.6785478591918945 0.5527103543281555 0.8156415224075317 -0.277105450630188 -0.9629749655723572 0.930778443813324 -0.09116171300411224 -0.33187198638916016 0.7712274193763733 0.9692374467849731 -0.0859764814376831 0.9199544191360474 -0.36025986075401306 -0.9000489711761475 -0.6903029680252075 -0.7793629169464111 0.1420748084783554 -0.8041236996650696 0.6781104803085327 -0.8291195631027222 -0.6699893474578857 0.9031834602355957 0.04301099851727486 0.1910187155008316 -0.6386851072311401 -0.8189338445663452 -0.345770925283432 -0.46651649475097656 -0.555850088596344 0.9404999017715454 -0.8447884917259216 -0.858500599861145 0.5171996355056763 -0.6904634833335876 0.25662627816200256 -0.9776427745819092 0.7853582501411438 0.9828592538833618 -0.3148001730442047 0.22342528402805328 -0.9310557842254639 1.0 -0.999991238117218 -0.9518335461616516 -0.8629559874534607 -0.9480292797088623 -0.7256567478179932 -0.8752232193946838 -0.21854880452156067 -0.5261622071266174 -0.9999917149543762 0.9998739957809448 0.17103618383407593 -0.28120407462120056 -0.9053770899772644 -0.7886658906936646 0.8227619528770447 -0.7847462892532349 0.6483637690544128 -0.8992343544960022 -0.8085388541221619 0.6527299880981445 0.8608200550079346 -0.8927769064903259 -0.997256338596344 0.9704058766365051 0.9999999403953552 -0.8895114064216614 -0.6098159551620483 0.9289858937263489 -0.9960821270942688 -0.6886312961578369 -0.7463172674179077 0.9999998807907104 0.9636603593826294 0.1040673479437828 -0.8758856058120728 -0.7556024789810181 -0.036974240094423294 -0.8757975697517395 -0.7942827343940735 -0.07007627934217453 -0.35238343477249146 0.9524179100990295 -0.7825778722763062 -0.994912326335907 -0.9561590552330017 0.49477478861808777 -0.9979046583175659 -0.7003147602081299 0.8581991791725159 -0.9944299459457397 0.7271066904067993 -0.17722156643867493 -0.9899269342422485 -0.20352177321910858 -0.9760158658027649 0.26194095611572266 -0.7994361519813538 -0.25438499450683594 -0.7757953405380249 -0.6238867044448853 0.0003211498260498047 0.2194092571735382 -0.28752589225769043 0.12427463382482529 0.9956806302070618 -0.9534074664115906 0.9874403476715088 0.8600199222564697 0.9983980655670166 -0.9395011067390442 0.06694522500038147 -0.6976804733276367 0.9819597005844116 0.20153126120567322 0.9880075454711914 -0.9038437604904175 0.27912309765815735 -0.7093797326087952 0.46518105268478394 0.4161478877067566 0.9781110286712646 0.9913612604141235 0.1757201850414276 0.3251155614852905 -0.5369092226028442 0.4104755222797394 0.8664208650588989 -0.9512521624565125 0.9999997019767761 0.9959427714347839 0.17816530168056488 -0.43889743089675903 0.5132170915603638 0.944280743598938 0.12309522926807404 -0.059386249631643295 -0.8784735202789307 0.6889968514442444 0.9999678730964661 0.5522308349609375 0.80478435754776 -0.6268811225891113 0.9243713617324829 -0.941194474697113 -0.7341949343681335 0.9494633674621582 -0.5532263517379761 -0.9874575734138489 0.6334222555160522 -0.9655497670173645 -0.9883921146392822 0.8310916423797607 0.8352119326591492 0.512945830821991 -0.4335310161113739 -0.6379265785217285 0.9545102715492249 0.738932728767395 0.9131490588188171 -0.5051528215408325 0.6609350442886353 0.9837526082992554 0.1609075665473938 0.3986389935016632 0.9329327940940857 0.3719547986984253 -0.9999999403953552 0.28287652134895325 
0.9849554300308228 0.6659762859344482 0.8760385513305664 0.15640684962272644 -0.02914176508784294 -0.7337304353713989 -0.7685533761978149 0.13838626444339752 -0.8301816582679749 0.916691780090332 0.06822635978460312 0.9893552660942078 0.1897776573896408 -0.35285764932632446 0.7250109910964966 0.9875606894493103 0.9749919772148132 0.6109304428100586 -0.6661586761474609 0.4616193473339081 -0.436176598072052 -0.5968761444091797 -0.040147747844457626 -0.008451547473669052 0.47847113013267517 0.7454614639282227 -0.22900687158107758 -0.8553386330604553 0.9486101865768433 0.8403457999229431 0.9675399661064148 -0.37951502203941345 0.5927628874778748 0.9837657809257507 -0.42123010754585266 -0.960385262966156 -0.8527063131332397 0.18526747822761536 -0.928067147731781 0.9460742473602295 0.08482146263122559 0.7049294114112854 -0.9278892278671265 -0.43950358033180237 -0.9075995683670044 -0.6275699734687805 0.09443486481904984 0.9980204701423645 -0.8853088021278381 0.6235230565071106 0.7944888472557068 0.9682819247245789 -0.5946915149688721 0.9006720781326294 0.3917408287525177 0.7896034717559814 0.9631827473640442 0.15413156151771545 -0.7857275009155273 -0.6823006868362427 -0.7926655411720276 0.43552663922309875 0.866523027420044 -0.35603225231170654 -0.04064704850316048 0.9737449884414673 -0.4958438277244568 0.9528944492340088 -0.9596176743507385 0.9609999656677246 -0.7550796270370483 0.29771459102630615 -0.7908090353012085 -0.7590465545654297 0.21580708026885986 0.804183840751648 0.6886999011039734 0.7361898422241211 -0.7421896457672119 0.4865095317363739 0.8329172730445862 -0.9366014003753662 0.8082515001296997 -0.5334666967391968 0.23915351927280426 0.31475967168807983 0.029831349849700928 -0.5808992385864258 0.1157037690281868 0.11263842135667801 0.294581800699234 -0.9857633113861084 0.9489922523498535 0.8445797562599182 0.028673818334937096 0.5974017381668091 0.9477564096450806 0.6596319675445557 0.5280523300170898 0.5511701703071594 -0.9655473828315735 -0.8792679309844971 0.9825934767723083 -0.8904612064361572 -0.5396844148635864 -0.9616283178329468 0.9828808903694153 -0.9261184930801392 0.902932345867157 0.9452766180038452 0.8555386066436768 0.23348510265350342 -0.8038383722305298 0.8186485767364502 0.8456225395202637 0.04083576425909996 -0.94280606508255 0.5827187299728394 -0.9142638444900513 0.854181706905365 -0.9499456882476807 -0.1307660937309265 0.945023775100708 0.9784804582595825 0.0783640518784523 -0.13432496786117554 -0.6010252237319946 0.9941303133964539 0.3913993239402771 0.9956752061843872 0.05020180344581604 -0.3768174350261688 -0.4998316466808319 -0.818805456161499 -0.6212863922119141 0.912856936454773 -0.7926127910614014 0.8923601508140564 -0.9519331455230713 -0.639839231967926 0.9255098700523376 -0.4446180760860443 0.45268914103507996 -0.938728928565979 0.43138357996940613 0.18259403109550476 0.979846715927124 0.9714068174362183 0.21146735548973083 0.9928025007247925 -0.9758580923080444 -0.9862937331199646 -0.7602846622467041 0.7442213296890259 0.6285433769226074 0.4356366991996765 0.7617231607437134 0.631608247756958 -0.2598046064376831 0.9986166954040527 -0.5727266669273376 -0.705337643623352 -0.7088004350662231 0.6534388661384583 -0.5403648614883423 0.8998308181762695 0.7939690351486206 -0.9625373482704163 0.11938898265361786 -0.3250166177749634 -0.3726489841938019 -0.7146178483963013 -0.8459115624427795 0.9697593450546265 0.23128247261047363 0.9812241792678833 0.5665310025215149 0.8863301873207092 0.8975650072097778 0.4597451984882355 -0.49781545996665955 -0.7892791628837585 
-0.9945904016494751 -0.9078750610351562 0.6966606974601746 -0.17292644083499908 0.6677415370941162 -0.8259009122848511 -0.755035936832428 -0.9865690469741821 0.9307860732078552 -0.5958082675933838 -0.0740463137626648 0.7659851312637329 -0.6434762477874756 0.46418067812919617 -0.7648252248764038 0.573094367980957 0.8078795671463013 -0.0021716097835451365 -0.9160340428352356 -0.9702160954475403 0.45406538248062134 -0.6224499940872192 -0.15801729261875153 0.4170084297657013 0.9996286034584045 0.6429963111877441 0.8944070935249329 -0.7547435164451599 -0.3529503643512726 0.7490553855895996 -0.2818097472190857 -0.8991180658340454 0.1822495013475418 -0.5428511500358582 -0.9679393172264099 0.08953043818473816 -0.6530358791351318 -0.5580017566680908 -0.9546536207199097 0.3791951835155487 0.8528791666030884 -0.24240167438983917 -0.982670247554779 -0.13964059948921204 -0.6932003498077393 -0.9783222675323486 -0.4199105501174927 -0.8351958990097046 0.9288907647132874 -0.9804887771606445 -0.9534738659858704 -0.7406210899353027 0.9948621988296509 -0.17431402206420898 -0.8928470611572266 -0.23554879426956177 0.8318343758583069 0.9830493330955505 -0.4205126464366913 -0.7489580512046814 -0.9766085743904114 0.47341376543045044 -0.9511124491691589 -0.724612832069397 0.611627995967865 0.028979526832699776 -0.9924225807189941 0.931044340133667 -0.5799760818481445 -0.9540749192237854 -0.99144047498703 0.08123979717493057 0.28133320808410645 0.10748211294412613 0.37873876094818115 0.9973751306533813 -0.6364577412605286 0.1472124606370926 0.8936796188354492 -0.9795525670051575 -0.5198628902435303 -0.7055835723876953 0.5524407625198364 0.24619172513484955 -0.4975165128707886 -0.8774174451828003 -0.9825239181518555 0.43629419803619385 0.5309603214263916 0.925503671169281 -0.8982830047607422 -0.20901860296726227 0.8135626316070557 0.5048647522926331 0.8294052481651306 0.26349806785583496 -0.9860852360725403 -0.9707793593406677 0.9880129098892212 0.31445226073265076 -0.9558255076408386 0.3870449960231781 0.17870284616947174 -0.7944450974464417 -0.9509660601615906 -0.9370601177215576 0.4009605646133423 0.9464813470840454 -0.4371497929096222 0.4144664704799652 0.8927626609802246 -0.9985642433166504 0.8520667552947998 0.7670815587043762 0.6479965448379517 0.9330150485038757 -0.2898632884025574 0.6774880290031433 0.7364181280136108 0.19639432430267334 -0.9943643808364868 0.2581656575202942 0.5577967762947083 0.5677429437637329 0.5967514514923096 -0.12183478474617004 -0.9668306112289429 -0.9991179704666138 -0.8622018694877625 -0.047397177666425705 0.6356521248817444 -0.4605136215686798 0.8226050138473511 -0.670883059501648 -0.9549254775047302 -0.4601966142654419 0.9303790926933289 0.4827311336994171 -0.9937250018119812 0.1606941521167755 0.9377372860908508 -0.9999996423721313 -0.8308320641517639 0.2798956334590912 -0.9999984502792358 -0.3603830635547638 -0.7471845746040344 -0.2700479328632355 0.7845940589904785 0.966568112373352 -0.36122098565101624 -0.7067813873291016 0.5087525844573975 -0.9182990193367004 0.3714163303375244 -0.0885954275727272 -0.27081701159477234 0.47105395793914795 -0.7082878351211548 -0.9401963353157043 0.7995955944061279 0.3270319998264313 0.9057329893112183 -0.901710569858551 0.9689652919769287 0.6403845548629761 0.04858876392245293 0.5578827857971191 -0.6592111587524414 0.284487783908844 -0.24718745052814484 0.7687339782714844 0.5893727540969849 0.0863502249121666 0.9950193762779236 -0.8597585558891296 -0.6554334163665771 0.9999998211860657 -0.3880792260169983 0.9156172275543213 0.285840779542923 
-0.9270466566085815 0.8164900541305542 0.6619182825088501 -0.36672377586364746 0.759937047958374 0.8134632110595703 0.26887306571006775 -0.17175501585006714 -0.2535313665866852 -0.9778375625610352 -0.5057269930839539 -0.956106424331665 0.3229678273200989 0.9809422492980957 -0.586409330368042 0.9999639391899109 -0.40366238355636597 0.3766025900840759 0.033155933022499084 -0.261234313249588 -0.9268378615379333 -0.7092396020889282 -0.9536502361297607 0.9981741905212402 -0.17854666709899902 0.9331968426704407 0.9534326791763306 0.5819794535636902 0.5848144292831421 -0.11549679189920425 -0.7819046974182129 -0.42210009694099426 -0.7533897161483765 -0.6286212205886841 -0.8276692032814026 -0.23807011544704437 -0.4306662678718567 -0.9991287589073181 -0.240756556391716 -0.0005988478078506887 0.9629310965538025 -0.9185731410980225 0.7371004819869995 -0.6513579487800598 0.9214019179344177 0.9366753101348877 -0.7471462488174438 -0.3675635755062103 -0.18420352041721344 0.3555600941181183 -0.7348456978797913 -0.6351917386054993 -0.2660657465457916 0.9758744239807129 -0.7982471585273743 0.6113643646240234 -0.9618870615959167 0.5655254125595093 -0.30118802189826965 0.1993705779314041 -0.8063910007476807 -0.1725052148103714 -0.2878594696521759 -0.9999948143959045 0.9761792421340942 0.9874940514564514 -0.7988324165344238 -0.15299424529075623 -0.7406113147735596 0.402820885181427 0.3843536078929901 -0.9864781498908997 0.38720130920410156 -0.9806818962097168 -0.38258934020996094 -0.24654516577720642 -0.9816989302635193 0.3840196132659912 -0.4441032409667969 0.6476260423660278 -0.8313809633255005 -0.7332901358604431 0.7849768996238708 -0.20255963504314423 -0.7464265823364258 -0.9861447215080261 -0.9762078523635864 0.9588484168052673 -0.5118088722229004 0.3528783321380615 0.7655091285705566 0.16963502764701843 -0.5107733607292175 -0.5451539158821106 -0.5550466775894165 -0.9998894333839417 -0.913801908493042 0.7009520530700684 -0.7328718900680542 -0.997110903263092 0.7785009741783142 0.18805846571922302 -0.9828037023544312 -0.2799728810787201 -0.3090417683124542 0.23711098730564117 -0.27162790298461914 -0.9433026313781738 0.06094720587134361 0.9988588094711304 0.3074067234992981 -0.9992307424545288 -0.766521155834198 0.4348919093608856 0.7491681575775146 0.9141254425048828 0.7085188627243042 -0.8185033798217773 0.787716805934906 0.9742542505264282 -0.03591028228402138 -0.7604814171791077 0.9780426025390625 -0.901568591594696 0.8841342329978943 -0.7871425151824951 0.8094896674156189 0.8709625005722046 -0.9420090913772583 -0.9789988994598389 -0.7871628403663635 0.7416465878486633 0.8420606851577759 0.9998115301132202 0.9999821186065674 0.6393448114395142 -0.9028769731521606 -0.6478551626205444 -0.7290658354759216 0.6435273885726929 0.9998735785484314 0.27724286913871765 -0.4209710359573364 -0.3304124176502228 -0.7581268548965454 0.2880440950393677 0.7957229018211365 -0.56244295835495 -0.8662517666816711 0.7385362982749939 -0.5321605801582336 0.9243930578231812 -0.479604035615921 -0.5642569065093994 0.9405125975608826 0.30011916160583496 -0.3029158115386963 0.37555328011512756 -0.9079699516296387 -0.8532777428627014 0.02958505041897297 0.947649359703064 -0.5095466375350952 0.6423648595809937 0.9684116244316101 -0.4246603846549988 0.8399147987365723 0.9957089424133301 0.4849952161312103 0.7816721796989441 -0.7797890901565552 0.7145076990127563 0.9637534618377686 0.9724130630493164 -0.39605388045310974 -0.15004566311836243 0.3713921904563904 0.9572835564613342 -0.9399983286857605 -0.7929733991622925 0.6025581359863281 
-0.31515204906463623 -0.8839957118034363 0.24643675982952118 0.23751136660575867 -0.09755172580480576 0.29592880606651306 0.016975952312350273 0.6255501508712769 -0.47454240918159485 0.7044162750244141 -0.995098888874054 0.3306816816329956 -0.3950900733470917 0.3761078715324402 -0.999992847442627 0.44764596223831177 -0.8472167253494263 0.061285894364118576 0.7112894058227539 0.8324491381645203 -0.9000381827354431 -0.4119378924369812 -0.9479144811630249 -0.9464640021324158 0.13360416889190674 -0.7693668603897095 -0.9864420294761658 -0.9311437606811523 -0.08926058560609818 -0.8227984309196472 -0.6227233409881592 0.24536454677581787 0.9999821782112122 -0.9031978845596313 0.7187200784683228 -0.39630645513534546 -0.8853139281272888 -0.872791588306427 0.6166980266571045 -0.9674311280250549 -0.6406309604644775 -0.9998189210891724 -0.8571261167526245
-0.6948354244232178 0.9732574224472046 0.9311883449554443 -0.7858173847198486 -0.1708567589521408 -0.7886245250701904 -0.9932862520217896 0.14447875320911407 -0.933377206325531 0.7883217930793762 -0.30588939785957336 -0.622637152671814 -0.8875053524971008 -0.7839375734329224 -0.9028559923171997 -0.4639471471309662 0.8901640772819519 0.5319080948829651 -0.8827970623970032 -0.3590719997882843 0.9663285613059998 -0.9859030842781067 -0.9521229863166809 0.3455599546432495 0.6579225063323975 -0.9570293426513672 -0.7858947515487671 0.18125995993614197 -0.672072172164917 -0.04747874289751053 0.8292375802993774 -0.3580743670463562 -0.674774706363678 -0.8303489089012146 0.24162141978740692 0.7585294246673584 0.9149515628814697 0.201643168926239 -0.3041767179965973 0.24491249024868011 0.9632174968719482 -0.5465357303619385 0.5316792726516724 0.851737916469574 -0.27945852279663086 0.9455931186676025 -0.9158831834793091 -0.3183008134365082 -0.37826892733573914 0.4113815128803253 -0.5016819834709167 -0.9829010367393494 0.9999997615814209 -0.9999996423721313 -0.9868731498718262 0.7955707311630249 -0.23260381817817688 0.9232708215713501 0.8065171241760254 0.18524877727031708 -0.8443308472633362 -0.999821662902832 0.9999837875366211 0.9344598650932312 -0.7438681721687317 -0.059985361993312836 0.9880996942520142 -0.4906754791736603 -0.9541328549385071 0.1768958866596222 0.7426964044570923 -0.7915881872177124 0.3993636965751648 -0.3193826675415039 0.18202891945838928 -0.96065354347229 0.9561842679977417 0.9999996423721313 0.9915330410003662 0.9939414858818054 -0.6744449734687805 -0.999985933303833 -0.8012964725494385 -0.8825491666793823 0.9999991059303284 -0.8973557353019714 0.8201801180839539 -0.8504270315170288 -0.6049198508262634 0.9510065317153931 -0.9202731847763062 0.031998954713344574 -0.21962112188339233 0.8112547993659973 -0.9721460938453674 0.5213773846626282 0.4557180404663086 0.43956929445266724 -0.3752538561820984 -0.888515830039978 -0.14559964835643768 -0.25498855113983154 -0.8791975378990173 -0.21637055277824402 -0.5850264430046082 0.9907403588294983 -0.9756900668144226 -0.5901891589164734 0.5827773809432983 0.9887291789054871 -0.7827034592628479 -0.9980276823043823 -0.9446069598197937 0.35799640417099 0.8971256017684937 0.8345569372177124 0.9872134327888489 0.8446744680404663 -0.8980662226676941 0.9017423391342163 0.9874477386474609 0.47976911067962646 0.6087790727615356 -0.4231085181236267 0.774627685546875 -0.12663979828357697 -0.7913788557052612 -0.7522008419036865 0.9502325057983398 0.3865489065647125 -0.7678744196891785 -0.4341343939304352 -0.5380924940109253 -0.726986289024353 -0.3732304871082306 0.8505754470825195 -0.7128199934959412 -0.9635716080665588 0.4313960373401642 0.5905431509017944 -0.9593453407287598 0.9999517202377319 -0.047423139214515686 -0.8779793977737427 0.434285432100296 -0.6804378032684326 -0.931864857673645 0.8917952179908752 0.8410132527351379 0.3137333393096924 0.9844134449958801 0.9999803304672241 -0.2545738220214844 0.10795438289642334 0.7380183935165405 -0.1922210454940796 -0.9992724657058716 0.2085256576538086 0.9968580603599548 0.8848001956939697 0.0259757898747921 0.5025284290313721 0.992143452167511 -0.5813031196594238 0.9789150953292847 -0.9946166276931763 0.6038780212402344 0.9996442794799805 -0.017833450809121132 0.8502570390701294 0.8844924569129944 -0.6693323850631714 -0.39559367299079895 -0.3787575662136078 0.9988001585006714 -0.9707586765289307 -0.9598936438560486 -0.1848759651184082 0.9661588668823242 -0.9999997615814209 -0.9590307474136353 
-0.6820422410964966 0.4535543918609619 0.9307982325553894 0.9047855138778687 -0.6549271941184998 0.2075624018907547 -0.9440926909446716 -0.6806420683860779 0.9284923076629639 0.9101653099060059 0.23280151188373566 -0.9731412529945374 0.1066470816731453 -0.9772707223892212 -0.14288340508937836 0.9985343813896179 0.7728769779205322 -0.6649512648582458 -0.7307573556900024 0.19919191300868988 0.37470269203186035 -0.6060610413551331 -0.2415405660867691 -0.9203264713287354 0.4053385555744171 -0.3029453158378601 -0.8317978382110596 -0.999924898147583 0.24592821300029755 -0.9926044940948486 -0.9735519289970398 -0.5635195374488831 0.7840177416801453 0.5001472234725952 0.9757433533668518 0.9667981863021851 -0.7450947761535645 0.8964029550552368 -0.7041819095611572 0.7007695436477661 -0.8855934143066406 0.8163792490959167 -0.7969844341278076 0.04006282985210419 0.35888972878456116 0.9655510187149048 0.9817612171173096 0.9132975935935974 -0.9997045397758484 0.9113511443138123 -0.8830175399780273 0.09559375792741776 -0.9661237597465515 -0.304609477519989 0.9458105564117432 0.5648765563964844 -0.6648879647254944 0.9787008166313171 0.7120909094810486 -0.9972509145736694 0.8816545605659485 0.3100334107875824 0.8709144592285156 -0.6982417702674866 -0.9905568361282349 -0.9622977375984192 0.920478343963623 0.24093274772167206 0.9852604866027832 0.9789227843284607 -0.9947898387908936 -0.9508786201477051 -0.9655733108520508 -0.521461546421051 -0.21184229850769043 -0.8942467570304871 -0.9649451375007629 0.5330729484558105 0.9528126120567322 -0.8233392238616943 0.7421345114707947 -0.33229169249534607 0.8950197696685791 -0.26295748353004456 -0.4722614288330078 0.27211815118789673 0.9947291016578674 -0.9564356803894043 -0.862645149230957 -0.578421950340271 0.01888084225356579 -0.6204273700714111 0.998664915561676 0.5113611221313477 0.9815801978111267 -0.9602653980255127 0.8365262150764465 -0.99765944480896 0.48085644841194153 0.07749468833208084 -0.9084720015525818 -0.9944719672203064 0.502585768699646 -0.9561976790428162 0.45684701204299927 -0.23952245712280273 0.34437739849090576 0.8960708975791931 0.7386347055435181 -0.19567790627479553 0.9850279092788696 0.36336809396743774 -0.6700283288955688 -0.48619765043258667 -0.21119756996631622 -0.9970929026603699 -0.714845597743988 0.9656341075897217 0.39625152945518494 -0.9443367719650269 -0.8631765246391296 -0.9060354232788086 0.9710288047790527 -0.9920799136161804 0.06527334451675415 0.9498118162155151 -0.44056224822998047 -0.9946985840797424 0.4582473635673523 0.9441670179367065 -0.8536892533302307 -0.5737138986587524 -0.9486724138259888 -0.6344839930534363 -0.806442141532898 -0.5241833329200745 0.9605574607849121 -0.6567226648330688 0.43025532364845276 -0.9368002414703369 -0.8930459022521973 0.1257985532283783 0.7199602723121643 0.8835282921791077 -0.46741315722465515 -0.5364854335784912 0.05011725798249245 -0.998184323310852 -0.5692845582962036 0.1308913230895996 -0.3187679052352905 0.991157591342926 -0.9211637377738953 -0.9780207872390747 0.012948987074196339 0.47846928238868713 0.6582932472229004 -0.6939226388931274 -0.7497844696044922 -0.7995192408561707 -0.9462617635726929 -0.9061853885650635 0.17881344258785248 -0.5288814306259155 -0.6702637672424316 -0.32280564308166504 -0.6556403040885925 -0.6820844411849976 -0.9417030215263367 -0.6969585418701172 0.7356280088424683 -0.8193172216415405 -0.9636833071708679 -0.39386528730392456 -0.98227459192276 -0.1732473224401474 0.7424246072769165 0.8631245493888855 0.8446297645568848 0.661628246307373 -0.1000562533736229 
0.11912868916988373 -0.930314838886261 -0.6681265830993652 -0.5127339959144592 -0.9631960988044739 -0.9287228584289551 -0.6243830919265747 -0.22383268177509308 -0.610403299331665 0.9881303310394287 -0.6077278852462769 0.8018290996551514 0.8733826279640198 -0.6252200603485107 -0.8111875653266907 -0.9845139384269714 0.12918084859848022 0.30810144543647766 0.6917240023612976 -0.20337128639221191 -0.9010823965072632 0.04584202542901039 0.2769416272640228 0.6831340789794922 0.9643599987030029 -0.0738343670964241 -0.5285485982894897 -0.9959489107131958 -0.1677824854850769 0.707878828048706 -0.15005159378051758 -0.17830532789230347 -0.5197514295578003 -0.3991250693798065 0.8705835938453674 0.47301262617111206 -0.9609432220458984 -0.0012202406069263816 -0.48982784152030945 -0.9584190845489502 -0.5874254703521729 0.9851765036582947 -0.17956501245498657 0.2695736587047577 0.9936065673828125 -0.7271949648857117 0.13919365406036377 -0.7349790930747986 0.09224888682365417 -0.12987907230854034 -0.1956334114074707 0.9768327474594116 -0.6800012588500977 0.942849338054657 0.9983114004135132 -0.7158735990524292 0.7762774229049683 -0.6979033946990967 -0.9544880390167236 0.998313307762146 0.8387253284454346 -0.5979381799697876 0.9732781648635864 -0.9962338805198669 -0.5768777132034302 0.3111934959888458 -0.9806811809539795 0.5490792989730835 -0.9704998135566711 -0.8568569421768188 -0.8300834894180298 -0.3802473247051239 -0.9993224143981934 -0.9809104204177856 -0.7844351530075073 -0.9103618264198303 0.9961984753608704 0.8415830135345459 -0.35632240772247314 -0.9203447103500366 -0.9801372289657593 -0.8487303256988525 0.9992119669914246 0.7882379293441772 0.9637508988380432 -0.1181139349937439 -0.9939842820167542 -0.8367786407470703 -0.9275078177452087 0.9280791878700256 0.9394950866699219 -0.9754412174224854 -0.6457056403160095 0.7501323223114014 -0.5168886184692383 -0.9999563694000244 -0.6729233264923096 0.5930507183074951 0.05113082751631737 0.9960532188415527 0.9823747277259827 -0.9990538358688354 -0.7985106706619263 0.05654827505350113 -0.7989218831062317 0.3099343776702881 0.8054788112640381 0.8857780694961548 -0.9624959826469421 0.8352431058883667 -0.06078970804810524 0.9216829538345337 -0.9951520562171936 0.7656525373458862 -0.9963393211364746 0.9664700627326965 0.7959586381912231 -0.4494437575340271 0.9335331916809082 -0.9699562191963196 -0.9869033098220825 0.7453972101211548 0.7504234910011292 0.6420954465866089 -0.8767012357711792 0.6829721927642822 -0.46038520336151123 -0.9401053786277771 -0.8108955025672913 0.3895309269428253 -0.043038345873355865 -0.7580053806304932 -0.9920161962509155 0.9893329739570618 -0.44796523451805115 0.2987746596336365 -0.3825763761997223 0.05380028486251831 0.8753480911254883 0.25779908895492554 0.07436936348676682 0.9361664652824402 -0.9999999403953552 -0.39665791392326355 0.5413482785224915 -0.999697744846344 -0.9277567267417908 -0.48668399453163147 -0.6297008991241455 0.8848432898521423 0.4952944815158844 0.5058706402778625 0.1635206788778305 -0.5322824716567993 0.8232720494270325 0.3039889931678772 0.9493817687034607 0.8442288041114807 0.9470855593681335 -0.7164357304573059 0.36469516158103943 -0.15955296158790588 -0.593268871307373 -0.29146885871887207 0.36576467752456665 0.9600762128829956 0.878381073474884 0.8451375961303711 -0.8712663054466248 -0.42040160298347473 0.8785325288772583 0.6331701278686523 0.29876819252967834 0.30036741495132446 0.6164238452911377 0.8946568965911865 0.9142873287200928 0.43431198596954346 0.9999943375587463 -0.34061968326568604 
0.9794090986251831 -0.04311942309141159 0.18999598920345306 0.9359302520751953 0.9057926535606384 -0.8814203143119812 -0.9611800312995911 0.9764112830162048 -0.9912668466567993 0.47883597016334534 -0.06473343819379807 -0.9889794588088989 0.5528304576873779 -0.9547997713088989 -0.8244439363479614 -0.9952574372291565 -0.9660494923591614 0.9999552369117737 -0.6872098445892334 0.9861253499984741 0.9922997355461121 0.9460961818695068 0.4440912902355194 0.46768224239349365 0.7275794744491577 0.5213912129402161 0.5511922836303711 -0.97222900390625 0.9985610842704773 0.14595158398151398 -0.9811967015266418 0.0736195519566536 0.47067877650260925 -0.17075499892234802 -0.9872300624847412 -0.6368799805641174 0.5988640785217285 0.48444119095802307 -0.8980672359466553 0.199056476354599 0.9557385444641113 0.99920254945755 0.9717817306518555 -0.3221698999404907 -0.08047066628932953 0.9504983425140381 -0.5628020763397217 -0.9229995012283325 -0.8803328275680542 -0.9216068387031555 0.6505997180938721 -0.6446037292480469 -0.1280864179134369 0.2951313555240631 0.9585732817649841 0.9949502348899841 -0.852258563041687 -0.37014898657798767 -0.39054757356643677 0.7164759635925293 -0.6389856934547424 0.6884057521820068 0.2077908217906952 -0.9545412063598633 0.6417083740234375 -0.99980628490448 -0.26535528898239136 0.0892522856593132 0.9341683387756348 -0.0009151434060186148 -0.42770475149154663 -0.9884812235832214 -0.6072179675102234 -0.5270208120346069 0.9425084590911865 0.9129794239997864 -0.9973864555358887 0.00977707002311945 0.9901169538497925 0.363246351480484 0.8630298376083374 0.5341669917106628 -0.8144526481628418 0.22669292986392975 0.8257372379302979 0.915549635887146 -0.026453392580151558 0.21849936246871948 -0.7647435069084167 0.7439509034156799 -0.6903718709945679 0.7866302132606506 0.7685999274253845 -0.7653053998947144 0.9392025470733643 -0.9974916577339172 -0.7670237421989441 -0.9958370923995972 0.2660253942012787 0.17121198773384094 -0.7630748748779297 0.1404159963130951 0.28270646929740906 0.5100486278533936 0.9434935450553894 -0.9872164726257324 -0.8071261644363403 0.9836534261703491 -0.9872904419898987 -0.10579302906990051 -0.01833653450012207 0.9848008155822754 0.19323410093784332 -0.9993048310279846 0.8751274347305298 0.8476707935333252 0.8673160672187805 0.6595625281333923 -0.9998824596405029 -0.9721536636352539 0.8864133954048157 0.9890181422233582 -0.8982598781585693 0.9430252313613892 0.5790783166885376 -0.9993761777877808 -0.9971609711647034 -0.9625687003135681 0.560340166091919 -0.8910725116729736 0.7720995545387268 -0.8514610528945923 -0.4364594519138336 0.8298332095146179 0.9319897890090942 0.9994664788246155 0.9484378099441528 -0.30762597918510437 0.5726138353347778 0.4902816712856293 -0.7953743934631348 0.7889729738235474 0.9796252846717834 0.9606612920761108 0.9814045429229736 -0.9939137101173401 -0.4945312738418579 0.7467591166496277 0.882141649723053 0.9975063800811768 -0.952223002910614 -0.8224234580993652 0.5417365431785583 0.481628954410553 0.7405650019645691 -0.7457913160324097 -0.18199411034584045 0.4535927176475525 0.8968894481658936 0.7240932583808899 0.9417133927345276 -0.9681783318519592 0.4786064624786377 0.6915328502655029 -0.7572372555732727 0.9977691173553467 -0.5883580446243286 0.9964455962181091 -0.9507564902305603 0.8241196870803833 -0.15832020342350006 0.7933403849601746 -0.9890522360801697 0.6826678514480591 0.7259552478790283 -0.16273419559001923 0.7531813383102417 0.9046143889427185 0.43225550651550293 0.9558872580528259 0.9971199035644531 0.13319064676761627 
0.32739877700805664 -0.514929473400116 -0.972601592540741 -0.046106137335300446 -0.9004654884338379 -0.9158536195755005 0.45708733797073364 0.6393623352050781 -0.12532886862754822 -0.9291535019874573 -0.02173035778105259 -0.9139922261238098 0.41317129135131836 0.806341826915741 -0.7051169872283936 -0.9990842342376709 0.625900149345398 0.6285387277603149 -0.09035518765449524 0.9491853713989258 -0.9992695450782776 -0.31432515382766724 -0.8343636989593506 -0.9862086176872253 0.948631763458252 0.4833240211009979 -0.8521089553833008 -0.9991685748100281 -0.68143230676651 0.5300217866897583 -0.6733797788619995 0.9047862887382507 -0.9522925019264221 0.9999980330467224 0.7091704607009888 0.12478862702846527 -0.005288400687277317 -0.7668251991271973 0.6937593221664429 -0.7995538711547852 0.6073207855224609 -0.7013571858406067 -0.9999743700027466 0.9239199161529541
[demo data file: rows of space-separated float embedding vectors omitted — source diff too large to display; view the blob instead]
# -*- coding=utf-8 -*-
"""
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
import paddle.fluid as fluid
import math
from fleetrec.core.utils import envs
from fleetrec.core.model import Model as ModelBase
class Model(ModelBase):
def __init__(self, config):
ModelBase.__init__(self, config)
# tree meta hyper parameters
self.max_layers = envs.get_global_env(
"tree_parameters.max_layers", 4, self._namespace)
self.node_nums = envs.get_global_env(
"tree_parameters.node_nums", 26, self._namespace)
self.leaf_node_nums = envs.get_global_env(
"tree_parameters.leaf_node_nums", 13, self._namespace)
self.output_positive = envs.get_global_env(
"tree_parameters.output_positive", True, self._namespace)
self.layer_node_num_list = envs.get_global_env(
"tree_parameters.layer_node_num_list", [
2, 4, 7, 12], self._namespace)
        self.child_nums = envs.get_global_env(
            "tree_parameters.child_nums", 2, self._namespace)
self.tree_layer_init_path = envs.get_global_env(
"tree_parameters.tree_layer_init_path", None, self._namespace)
        # model training hyper parameters
self.node_emb_size = envs.get_global_env(
"hyper_parameters.node_emb_size", 64, self._namespace)
self.input_emb_size = envs.get_global_env(
"hyper_parameters.input_emb_size", 768, self._namespace)
self.act = envs.get_global_env(
"hyper_parameters.act", "tanh", self._namespace)
self.neg_sampling_list = envs.get_global_env(
"hyper_parameters.neg_sampling_list", [
1, 2, 3, 4], self._namespace)
        # model inference hyper parameters
        self.topK = envs.get_global_env(
            "hyper_parameters.topK", 1, self._namespace)
self.batch_size = envs.get_global_env(
"batch_size", 32, "train.reader")
def train_net(self):
self.train_input()
self.tdm_net()
self.create_info()
self.avg_loss()
self.metrics()
def infer_net(self):
self.infer_input()
self.create_first_layer()
self.tdm_infer_net()
""" -------- Train network detail ------- """
def train_input(self):
input_emb = fluid.data(
name="input_emb",
shape=[None, self.input_emb_size],
dtype="float32",
)
self._data_var.append(input_emb)
item_label = fluid.data(
name="item_label",
shape=[None, 1],
dtype="int64",
)
self._data_var.append(item_label)
if self._platform != "LINUX":
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var, capacity=64, use_double_buffer=False, iterable=False)
def tdm_net(self):
"""
tdm训练网络的主要流程部分
"""
is_distributed = True if envs.get_trainer() == "CtrTrainer" else False
input_emb = self._data_var[0]
item_label = self._data_var[1]
        # Negative sampling over the given tree, based on the input item's positive samples
        # sample_nodes: the sampled node_ids, containing both positive and negative samples
        # sample_label: the positive/negative labels corresponding to the sampled node_ids
        # sample_mask: labels for the padding added to keep tensor dimensions aligned;
        #              a value of 0 marks a padded dummy node_id
sample_nodes, sample_label, sample_mask = fluid.contrib.layers.tdm_sampler(
x=item_label,
neg_samples_num_list=self.neg_sampling_list,
layer_node_num_list=self.layer_node_num_list,
leaf_node_num=self.leaf_node_nums,
tree_travel_attr=fluid.ParamAttr(name="TDM_Tree_Travel"),
tree_layer_attr=fluid.ParamAttr(name="TDM_Tree_Layer"),
output_positive=self.output_positive,
output_list=True,
seed=0,
tree_dtype='int64',
dtype='int64'
)
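        # For example (assuming the defaults above): with output_positive=True
        # and neg_sampling_list=[1, 2, 3, 4], layer i samples 1 positive plus
        # neg_sampling_list[i] negatives, so sample_nodes[i], sample_label[i]
        # and sample_mask[i] each carry neg_sampling_list[i] + 1 entries per
        # instance, matching the reshape below.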
        # Look up the embedding of each sampled node
sample_nodes_emb = [
fluid.embedding(
input=sample_nodes[i],
is_sparse=True,
size=[self.node_nums, self.node_emb_size],
param_attr=fluid.ParamAttr(
name="TDM_Tree_Emb")
) for i in range(self.max_layers)
]
        # Reshape here for the subsequent layer-wise classifier training
sample_nodes_emb = [
fluid.layers.reshape(sample_nodes_emb[i],
[-1, self.neg_sampling_list[i] +
self.output_positive, self.node_emb_size]
) for i in range(self.max_layers)
]
        # Transform the input_emb so its dimension matches node_emb
input_trans_emb = self.input_trans_layer(input_emb)
        # Classifier backbone: a separate classifier is trained for each tree layer
layer_classifier_res = self.classifier_layer(
input_trans_emb, sample_nodes_emb)
        # Final discriminative FC: the node classification results of all layers
        # are judged together under the same criterion.
        # Since the tree is very likely unbalanced, some items are not on the last
        # layer, so this mechanism ensures every item has a chance to be recalled.
tdm_fc = fluid.layers.fc(input=layer_classifier_res,
size=2,
act=None,
num_flatten_dims=2,
param_attr=fluid.ParamAttr(
name="tdm.cls_fc.weight"),
bias_attr=fluid.ParamAttr(name="tdm.cls_fc.bias"))
        # Flatten the logits so the loss of the whole network is computed together
tdm_fc_re = fluid.layers.reshape(tdm_fc, [-1, 2])
        # To weight each layer's loss differently, skip the concat here:
        # the per-layer losses can be computed separately and multiplied by their weights
sample_label = fluid.layers.concat(sample_label, axis=1)
labels_reshape = fluid.layers.reshape(sample_label, [-1, 1])
labels_reshape.stop_gradient = True
        # Compute the overall loss and obtain the softmax output
cost, softmax_prob = fluid.layers.softmax_with_cross_entropy(
logits=tdm_fc_re, label=labels_reshape, return_softmax=True)
        # Use the mask to filter out the loss of dummy (padded) nodes
sample_mask = fluid.layers.concat(sample_mask, axis=1)
mask_reshape = fluid.layers.reshape(sample_mask, [-1, 1])
mask_index = fluid.layers.where(mask_reshape != 0)
mask_index.stop_gradient = True
self.mask_cost = fluid.layers.gather_nd(cost, mask_index)
softmax_prob = fluid.layers.unsqueeze(input=softmax_prob, axes=[1])
self.mask_prob = fluid.layers.gather_nd(softmax_prob, mask_index)
self.mask_label = fluid.layers.gather_nd(labels_reshape, mask_index)
self._predict = self.mask_prob
def create_info(self):
fluid.default_startup_program().global_block().create_var(
name="TDM_Tree_Info",
dtype=fluid.core.VarDesc.VarType.INT32,
shape=[self.node_nums, 3 + self.child_nums],
persistable=True,
initializer=fluid.initializer.ConstantInitializer(0))
fluid.default_main_program().global_block().create_var(
name="TDM_Tree_Info",
dtype=fluid.core.VarDesc.VarType.INT32,
shape=[self.node_nums, 3 + self.child_nums],
persistable=True)
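        # Assumed row layout of TDM_Tree_Info, inferred from its use below
        # (tdm_infer_net reads column 0 as item_id, and tdm_child reads the
        # trailing child_nums columns as child ids):
        #   [item_id, layer_id, parent_id, child_id_0, ..., child_id_{child_nums-1}]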
def avg_loss(self):
avg_cost = fluid.layers.reduce_mean(self.mask_cost)
self._cost = avg_cost
def metrics(self):
auc, batch_auc, _ = fluid.layers.auc(input=self._predict,
label=self.mask_label,
num_thresholds=2 ** 12,
slide_steps=20)
self._metrics["AUC"] = auc
self._metrics["BATCH_AUC"] = batch_auc
self._metrics["BATCH_LOSS"] = self._cost
def input_trans_layer(self, input_emb):
"""
输入侧训练组网
"""
# 将input映射到与node相同的维度
input_fc_out = fluid.layers.fc(
input=input_emb,
size=self.node_emb_size,
act=None,
param_attr=fluid.ParamAttr(name="trans.input_fc.weight"),
bias_attr=fluid.ParamAttr(name="trans.input_fc.bias"),
)
        # Map input_emb into the representation space of each tree layer
input_layer_fc_out = [
fluid.layers.fc(
input=input_fc_out,
size=self.node_emb_size,
act=self.act,
param_attr=fluid.ParamAttr(
name="trans.layer_fc.weight." + str(i)),
bias_attr=fluid.ParamAttr(name="trans.layer_fc.bias."+str(i)),
) for i in range(self.max_layers)
]
return input_layer_fc_out
def _expand_layer(self, input_layer, node, layer_idx):
        # Expand the input so its count matches the nodes;
        # any other broadcast-style op would work as well.
        # Compatible with both the training and the inference network.
input_layer_unsequeeze = fluid.layers.unsqueeze(
input=input_layer, axes=[1])
if not isinstance(node, list):
input_layer_expand = fluid.layers.expand(
input_layer_unsequeeze, expand_times=[1, node.shape[1], 1])
else:
input_layer_expand = fluid.layers.expand(
input_layer_unsequeeze, expand_times=[1, node[layer_idx].shape[1], 1])
return input_layer_expand
def classifier_layer(self, input, node):
        # Expand the input so its dimensions match the nodes
input_expand = [
self._expand_layer(input[i], node, i) for i in range(self.max_layers)
]
        # Concat input_emb with node_emb and run them through the classifier FC
input_node_concat = [
fluid.layers.concat(
input=[input_expand[i], node[i]],
axis=2) for i in range(self.max_layers)
]
hidden_states_fc = [
fluid.layers.fc(
input=input_node_concat[i],
size=self.node_emb_size,
num_flatten_dims=2,
act=self.act,
param_attr=fluid.ParamAttr(
name="cls.concat_fc.weight."+str(i)),
bias_attr=fluid.ParamAttr(name="cls.concat_fc.bias."+str(i))
) for i in range(self.max_layers)
]
        # If the nodes of all layers are scored together for the loss, concat here:
        # the classifier results are concatenated along the batch axis rather than
        # per layer, giving a shape like [batch_size, total_node_num, node_emb_size]
hidden_states_concat = fluid.layers.concat(hidden_states_fc, axis=1)
return hidden_states_concat
""" -------- Infer network detail ------- """
def infer_input(self):
input_emb = fluid.layers.data(
name="input_emb",
shape=[self.input_emb_size],
dtype="float32",
)
self._data_var.append(input_emb)
if self._platform != "LINUX":
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=self._data_var, capacity=64, use_double_buffer=False, iterable=False)
def get_layer_list(self):
"""get layer list from layer_list.txt"""
layer_list = []
with open(self.tree_layer_init_path, 'r') as fin:
for line in fin.readlines():
l = []
layer = (line.split('\n'))[0].split(',')
for node in layer:
if node:
l.append(node)
layer_list.append(l)
return layer_list
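    # e.g. a layer_list.txt containing
    #   1,2
    #   3,4,5,6
    #   7,8,9,10,11,12,13
    # parses to [['1', '2'], ['3', '4', '5', '6'], ['7', ..., '13']];
    # node ids stay strings here, and create_first_layer casts them later.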
def create_first_layer(self):
"""decide which layer to start infer"""
        self.layer_list = self.get_layer_list()
first_layer_id = 0
for idx, layer_node in enumerate(self.layer_node_num_list):
if layer_node >= self.topK:
first_layer_id = idx
break
first_layer_node = self.layer_list[first_layer_id]
self.first_layer_idx = first_layer_id
        node_list = []
        mask_list = []
        for id in first_layer_node:
            node_list.append(fluid.layers.fill_constant(
                [self.batch_size, 1], value=int(id), dtype='int64'))
            mask_list.append(fluid.layers.fill_constant(
                [self.batch_size, 1], value=0, dtype='int64'))
self.first_layer_node = fluid.layers.concat(node_list, axis=1)
self.first_layer_node_mask = fluid.layers.concat(mask_list, axis=1)
    def tdm_infer_net(self):
        """
        Main inference flow. Inference starts from an upper layer (the exact
        starting layer idx is decided by the tree structure and the topK value):
        1. Run each layer's classifier in turn to get the prob of the nodes fed
           into the current layer.
        2. Take the topK nodes by prob and use their children as the next
           layer's input.
        3. Repeat steps 1 and 2 through all layers, collecting each layer's
           selected results.
        4. Take the leaf nodes from the collected results and run one final
           topK over them to produce the recall output.
        """
input_emb = self._data_var[0]
node_score = []
node_list = []
current_layer_node = self.first_layer_node
current_layer_node_mask = self.first_layer_node_mask
        input_trans_emb = self.input_fc_infer(input_emb)
for layer_idx in range(self.first_layer_idx, self.max_layers):
            # Determine how many nodes the current layer needs to score
if layer_idx == self.first_layer_idx:
current_layer_node_num = self.first_layer_node.shape[1]
else:
current_layer_node_num = current_layer_node.shape[1] * \
current_layer_node.shape[2]
current_layer_node = fluid.layers.reshape(
current_layer_node, [-1, current_layer_node_num])
current_layer_node_mask = fluid.layers.reshape(
current_layer_node_mask, [-1, current_layer_node_num])
node_emb = fluid.embedding(
input=current_layer_node,
                size=[self.node_nums, self.node_emb_size],
param_attr=fluid.ParamAttr(name="TDM_Tree_Emb"))
input_fc_out = self.layer_fc_infer(
input_trans_emb, layer_idx)
            # Run through this layer's classifier
layer_classifier_res = self.classifier_layer_infer(input_fc_out,
node_emb,
layer_idx)
            # Run through the final discriminative classifier
tdm_fc = fluid.layers.fc(input=layer_classifier_res,
size=2,
act=None,
num_flatten_dims=2,
param_attr=fluid.ParamAttr(
name="tdm.cls_fc.weight"),
bias_attr=fluid.ParamAttr(name="tdm.cls_fc.bias"))
prob = fluid.layers.softmax(tdm_fc)
positive_prob = fluid.layers.slice(
prob, axes=[2], starts=[1], ends=[2])
prob_re = fluid.layers.reshape(
positive_prob, [-1, current_layer_node_num])
            # Filter out the invalid nodes produced by padding (node_id=0)
            node_zero_mask = fluid.layers.cast(current_layer_node, 'bool')
            node_zero_mask = fluid.layers.cast(node_zero_mask, 'float32')
prob_re = prob_re * node_zero_mask
            # Take the topK of this layer's classification results and keep the
            # corresponding scores and node_ids
k = self.topK
if current_layer_node_num < self.topK:
k = current_layer_node_num
_, topk_i = fluid.layers.topk(prob_re, k)
            # The index_sample op gathers values from a tensor at the given indices;
            # for Paddle versions > 2.0, call paddle.index_sample instead
top_node = fluid.contrib.layers.index_sample(
current_layer_node, topk_i)
            prob_re_mask = prob_re * current_layer_node_mask  # filter out non-leaf nodes
topk_value = fluid.contrib.layers.index_sample(
prob_re_mask, topk_i)
node_score.append(topk_value)
node_list.append(top_node)
            # Take the children of this layer's topK nodes as the next layer's input
if layer_idx < self.max_layers - 1:
                # The tdm_child op returns the children and child_mask of its input;
                # child_mask=1 if the child is a leaf node, 0 otherwise
current_layer_node, current_layer_node_mask = \
fluid.contrib.layers.tdm_child(x=top_node,
node_nums=self.node_nums,
child_nums=self.child_nums,
param_attr=fluid.ParamAttr(
name="TDM_Tree_Info"),
dtype='int64')
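                # e.g. with child_nums=2 and top_node of shape [-1, k], tdm_child
                # returns current_layer_node of shape [-1, k, 2]; the reshape at
                # the top of the next iteration flattens it back to [-1, k * 2].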
total_node_score = fluid.layers.concat(node_score, axis=1)
total_node = fluid.layers.concat(node_list, axis=1)
        # Since the tree may be unbalanced, take the topK over the leaf nodes of all layers
res_score, res_i = fluid.layers.topk(total_node_score, self.topK)
res_layer_node = fluid.contrib.layers.index_sample(total_node, res_i)
res_node = fluid.layers.reshape(res_layer_node, [-1, self.topK, 1])
        # Use the Tree_info data to convert node_ids into item_ids
tree_info = fluid.default_main_program().global_block().var("TDM_Tree_Info")
res_node_emb = fluid.layers.gather_nd(tree_info, res_node)
res_item = fluid.layers.slice(
res_node_emb, axes=[2], starts=[0], ends=[1])
self.res_item_re = fluid.layers.reshape(res_item, [-1, self.topK])
def input_fc_infer(self, input_emb):
"""
输入侧预测组网第一部分,将input转换为node同维度
"""
# 组网与训练时保持一致
input_fc_out = fluid.layers.fc(
input=input_emb,
size=self.node_emb_size,
act=None,
param_attr=fluid.ParamAttr(name="trans.input_fc.weight"),
bias_attr=fluid.ParamAttr(name="trans.input_fc.bias"),
)
return input_fc_out
def layer_fc_infer(self, input_fc_out, layer_idx):
"""
输入侧预测组网第二部分,将input映射到不同层次的向量空间
"""
# 组网与训练保持一致,通过layer_idx指定不同层的FC
input_layer_fc_out = fluid.layers.fc(
input=input_fc_out,
size=self.node_emb_size,
act=self.act,
param_attr=fluid.ParamAttr(
name="trans.layer_fc.weight." + str(layer_idx)),
bias_attr=fluid.ParamAttr(
name="trans.layer_fc.bias."+str(layer_idx)),
)
return input_layer_fc_out
def classifier_layer_infer(self, input, node, layer_idx):
        # Simplified classifier for the inference network; layer_idx selects each layer's classifier.
        # The input must likewise be expanded to match the node dimensions
input_expand = self._expand_layer(input, node, layer_idx)
        # Same concat logic as the training network
input_node_concat = fluid.layers.concat(
input=[input_expand, node], axis=2)
        # The param_attr names select the FC of the corresponding layer
hidden_states_fc = fluid.layers.fc(
input=input_node_concat,
size=self.node_emb_size,
num_flatten_dims=2,
act=self.act,
param_attr=fluid.ParamAttr(
name="cls.concat_fc.weight."+str(layer_idx)),
bias_attr=fluid.ParamAttr(name="cls.concat_fc.bias."+str(layer_idx)))
return hidden_states_fc
# -*- coding=utf8 -*-
"""
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
from __future__ import print_function
from fleetrec.core.reader import Reader
class TrainReader(Reader):
def init(self):
pass
def generate_sample(self, line):
"""
Read the data line by line and process it as a dictionary
"""
def reader():
"""
This function needs to be implemented by the user, based on data format
"""
            features = (line.strip('\n')).split('\t')
            input_emb = list(map(float, features[0].split(' ')))
            item_label = [int(features[1])]
            feature_name = ["input_emb", "item_label"]
            yield list(zip(feature_name, [input_emb] + [item_label]))
return reader
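# For illustration (a made-up input line, not part of the original file):
# a data line "0.1 0.2 0.3\t5" is split on '\t' into the embedding string and
# the label, producing one sample equivalent to
#   [("input_emb", [0.1, 0.2, 0.3]), ("item_label", [5])]
# which the framework feeds to the input_emb / item_label variables declared
# in the model's train_input().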
1,2
3,4,5,6
7,8,9,10,11,12,13
14,15,16,17,18,19,20,21,22,23,24,25
\ No newline at end of file
# Quick Start
## Environment Setup
Fleet-Rec is a toolkit built on PaddlePaddle distributed training for rapid development, debugging and deployment of models and training modes, making PaddlePaddle distributed training easier to use.
<p align="center">
<img align="center" src="doc/imgs/logo.png">
<p>
[![License](https://img.shields.io/badge/license-Apache%202-red.svg)](LICENSE)
[![Version](https://img.shields.io/github/v/release/PaddlePaddle/Paddle.svg)](https://github.com/PaddlePaddle/PaddleRec/releases)
PaddleRec is a one-stop, out-of-the-box toolkit for search and recommendation models, born from the PaddlePaddle ecosystem. Whether you are a beginner, developer or researcher, PaddleRec lets you conveniently complete the whole workflow from research and training through to inference deployment. PaddleRec provides full-pipeline solutions for semantic understanding, recall, pre-ranking, ranking and multi-task learning in search and recommendation tasks.
PaddleRec is centered on its built-in models and has the following features:
- [Easy to get started, works out of the box](https://www.paddlepaddle.org.cn)
- [Flexible configuration, custom tuning](https://www.paddlepaddle.org.cn)
- [Distributed training, large-scale sparsity](https://www.paddlepaddle.org.cn)
- [Fast deployment, one-click launch](https://www.paddlepaddle.org.cn)
<p align="center">
<img align="center" src="doc/imgs/coding-gif.png">
<p>
# Table of Contents
* [Features](#features)
* [Supported Models](#supported-models)
* [Documentation](#documentation)
* [Getting Started](#getting-started)
* [Requirements](#requirements)
* [Installation](#installation)
* [Quick Start](#quick-start)
* [FAQ](#faq)
* [Advanced Tutorials](#advanced-tutorials)
* [Custom Datasets and Readers](#custom-datasets-and-readers)
* [Model Tuning](#model-tuning)
* [Single-machine Training](#single-machine-training)
* [Distributed Training](#distributed-training)
* [Inference Deployment](#inference-deployment)
* [Release History](#release-history)
* [Releases](#releases)
* [Benchmark](#benchmark)
* [License](#license)
* [Contributing](#contributing)
* [Improving the PaddleRec Framework](#improving-the-paddlerec-framework)
* [Adding Models to PaddleRec](#adding-models-to-paddlerec)
# Features
- Easy to get started, works out of the box
- Flexible configuration, custom tuning
- Distributed training, large-scale sparsity
- Fast deployment, one-click launch
# Supported Models
| Category | Model | Single-machine CPU | Single-machine GPU | Distributed CPU | Distributed GPU | Custom Dataset | Server Deployment |
| :------------------: | :--------------------: | :---------: | :---------: | :-----------: | :-----------: | :----------: | :--------: |
| ContentUnderstanding | [Text-Classification]() | ✓ | x | ✓ | x | ✓ | ✓ |
| ContentUnderstanding | [TagSpace]() | ✓ | x | ✓ | x | ✓ | ✓ |
| Recall | [Word2Vec]() | ✓ | x | ✓ | x | ✓ | ✓ |
| Recall | [TDM]() | ✓ | x | ✓ | x | ✓ | ✓ |
| Rank | [CTR-Dnn]() | ✓ | x | ✓ | x | ✓ | ✓ |
| Rank | [DeepFm]() | ✓ | x | ✓ | x | ✓ | ✓ |
| Rerank | [ListWise]() | ✓ | x | ✓ | x | ✓ | ✓ |
| MultiTask | [MMOE]() | ✓ | x | ✓ | x | ✓ | ✓ |
| MultiTask | [ESMM]() | ✓ | x | ✓ | x | ✓ | ✓ |
| Match | [DSSM]() | ✓ | x | ✓ | x | ✓ | ✓ |
| Match | [Multiview-Simnet]() | ✓ | x | ✓ | x | ✓ | ✓ |
# Documentation
## Getting Started
### Requirements
* Python >= 2.7
* PaddlePaddle >= 1.7.2
* OS: Windows/Mac/Linux
### Installation
- Method 1: install directly from the PyPI source
```bash
python -m pip install fleet-rec
```
- Method 2: install from source
* Install PaddlePaddle. **Note: the latest version of PaddlePaddle is required <currently only Linux is supported>.**
```bash
python -m pip install paddlepaddle -i https://mirror.baidu.com/pypi/simple
```
* Install Fleet-Rec from source
```
git clone https://github.com/seiriosPlus/FleetRec/
cd FleetRec
python setup.py install
```
### Quick Start
#### Using the ctr-dnn example
The framework ships with several built-in models; with a simple command you can start single-machine training or local 1x1 simulated distributed training using a built-in model.
##### Single-machine training
```bash
cd FleetRec
......@@ -44,7 +113,7 @@ python -m fleetrec.run \
-e single
```
##### Local simulated distributed training
```bash
cd FleetRec
......@@ -55,7 +124,7 @@ python -m fleetrec.run \
-e local_cluster
```
##### Submitting distributed training to a cluster <requires a pre-configured cluster environment; this command does not include the submission client>
```bash
cd FleetRec
......@@ -66,5 +135,21 @@ python -m fleetrec.run \
-e cluster
```
### FAQ
More user documentation and secondary-development documentation is on the way.
\ No newline at end of file
## Advanced Tutorials
### Custom Datasets and Readers
### Model Tuning
### Single-machine Training
### Distributed Training
### Inference Deployment
# Release History
## Releases
## Benchmark
# License
This project is released under the [Apache 2.0 license](LICENSE).
# Contributing
## Improving the PaddleRec Framework
## Adding Models to PaddleRec
......@@ -39,7 +39,7 @@ def build(dirname):
packages = find_packages(dirname, include=('fleetrec.*'))
package_dir = {'': dirname}
package_data = {}
need_copy = ['data/*/*.txt', '*.yaml']
need_copy = ['data/*/*.txt', '*.yaml', 'tree/*.npy', 'tree/*.txt']
for package in packages:
if package.startswith("fleetrec.models."):
package_data[package] = need_copy
......
import os
import time
import shutil
import requests
import sys
import tarfile
import zipfile
import platform
import functools
lasttime = time.time()
FLUSH_INTERVAL = 0.1
LOCAL_PATH = os.path.dirname(os.path.abspath(__file__))
def get_platform():
return platform.platform()
def is_windows():
return get_platform().lower().startswith("windows")
def progress(msg, end=False):
    # Throttled single-line progress printer; avoids shadowing the builtin str
    global lasttime
    if end:
        msg += "\n"
        lasttime = 0
    if time.time() - lasttime >= FLUSH_INTERVAL:
        sys.stdout.write("\r%s" % msg)
        lasttime = time.time()
        sys.stdout.flush()
def download_file(url, savepath, print_progress):
r = requests.get(url, stream=True)
total_length = r.headers.get('content-length')
if total_length is None:
with open(savepath, 'wb') as f:
shutil.copyfileobj(r.raw, f)
else:
with open(savepath, 'wb') as f:
dl = 0
total_length = int(total_length)
starttime = time.time()
if print_progress:
print("Downloading %s" % os.path.basename(savepath))
for data in r.iter_content(chunk_size=4096):
dl += len(data)
f.write(data)
if print_progress:
done = int(50 * dl / total_length)
progress("[%-50s] %.2f%%" %
('=' * done, float(100 * dl) / total_length))
if print_progress:
progress("[%-50s] %.2f%%" % ('=' * 50, 100), end=True)
def _uncompress_file(filepath, extrapath, delete_file, print_progress):
if print_progress:
print("Uncompress %s" % os.path.basename(filepath))
if filepath.endswith("zip"):
handler = _uncompress_file_zip
elif filepath.endswith("tgz"):
handler = _uncompress_file_tar
else:
handler = functools.partial(_uncompress_file_tar, mode="r")
for total_num, index, rootpath in handler(filepath, extrapath):
if print_progress:
done = int(50 * float(index) / total_num)
progress("[%-50s] %.2f%%" %
('=' * done, float(100 * index) / total_num))
if print_progress:
progress("[%-50s] %.2f%%" % ('=' * 50, 100), end=True)
if delete_file:
os.remove(filepath)
return rootpath
def _uncompress_file_zip(filepath, extrapath):
files = zipfile.ZipFile(filepath, 'r')
filelist = files.namelist()
rootpath = filelist[0]
total_num = len(filelist)
for index, file in enumerate(filelist):
files.extract(file, extrapath)
yield total_num, index, rootpath
files.close()
yield total_num, index, rootpath
def _uncompress_file_tar(filepath, extrapath, mode="r:gz"):
files = tarfile.open(filepath, mode)
filelist = files.getnames()
total_num = len(filelist)
rootpath = filelist[0]
for index, file in enumerate(filelist):
files.extract(file, extrapath)
yield total_num, index, rootpath
files.close()
yield total_num, index, rootpath
def download_file_and_uncompress(url,
savepath=None,
savename=None,
extrapath=None,
print_progress=True,
cover=False,
delete_file=False):
if savepath is None:
savepath = "."
if extrapath is None:
extrapath = "."
if savename is None:
savename = url.split("/")[-1]
savepath = os.path.join(savepath, savename)
if cover:
if os.path.exists(savepath):
shutil.rmtree(savepath)
if not os.path.exists(savepath):
download_file(url, savepath, print_progress)
_ = _uncompress_file(savepath, extrapath, delete_file, print_progress)
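# A minimal usage sketch (the URL below is hypothetical, purely for
# illustration): download an archive next to this module, unpack it in
# place, and delete the archive afterwards.
if __name__ == "__main__":
    download_file_and_uncompress(
        url="https://example.com/data/demo_tree.tgz",
        savepath=LOCAL_PATH,
        extrapath=LOCAL_PATH,
        delete_file=True)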