From dcbf07c050b86cc0de8eebc41110804630be536d Mon Sep 17 00:00:00 2001 From: xiexionghang Date: Wed, 4 Mar 2020 15:38:10 +0800 Subject: [PATCH] depend on paddle with bcloud --- .../package/format_nets/all_slot.dict | 409 ----- .../package/format_nets/clear_ssd.sh | 38 - .../package/format_nets/config.py | 40 - .../package/format_nets/data_generate_base.py | 358 ---- .../package/format_nets/feed/layer.py | 163 -- .../package/format_nets/feed/layer_model.py | 54 - .../package/format_nets/feed/model.layers | 22 - .../package/format_nets/feed/test.py | 1 - .../fleet_desc_combinejoincommon.prototxt | 1466 ---------------- .../format_nets/format_newcate_hotnews.awk | 21 - .../package/format_nets/ins_weight.py | 122 -- .../jingpai_fleet_desc_new.prototxt | 1504 ----------------- .../package/format_nets/model.layers | 22 - .../package/format_nets/model_new.py | 188 --- .../package/format_nets/model_new_jc.py | 166 -- .../format_nets/my_data_generator_str.py | 89 - .../old_join_common_startup_program.bin | Bin 120992 -> 0 bytes .../old_join_common_train_program.bin | Bin 1847327 -> 0 bytes .../old_program/old_update_main_program.bin | Bin 1874534 -> 0 bytes .../old_update_startup_program.bin | Bin 48630 -> 0 bytes .../package/format_nets/old_slot/slot | 408 ----- .../package/format_nets/old_slot/slot_common | 99 -- .../package/format_nets/old_slot/to.py | 5 - .../package/format_nets/reqi_fleet_desc | 1461 ---------------- .../format_nets/scripts/xbox_compressor_mf.py | 162 -- .../scripts/xbox_decompressor_mf.awk | 52 - .../package/format_nets/slot/slot | 407 ----- .../package/format_nets/slot/slot_common | 99 -- .../package/format_nets/tmp/slot/slot | 408 ----- .../package/format_nets/tmp/slot/slot_common | 99 -- .../package/format_nets/tmp/slot/to.py | 5 - .../package/format_nets/trainer_online.py | 593 ------- .../format_nets/trainer_online_local.py | 500 ------ .../package/format_nets/util.bak.py | 135 -- .../news_jingpai/package/format_nets/util.py | 286 ---- 35 files changed, 9382 deletions(-) delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/all_slot.dict delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/clear_ssd.sh delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/config.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/data_generate_base.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/feed/layer.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/feed/layer_model.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/feed/model.layers delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/feed/test.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/fleet_desc_combinejoincommon.prototxt delete mode 100755 feed/feed_deploy/news_jingpai/package/format_nets/format_newcate_hotnews.awk delete mode 100755 feed/feed_deploy/news_jingpai/package/format_nets/ins_weight.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/jingpai_fleet_desc_new.prototxt delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/model.layers delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/model_new.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/model_new_jc.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/my_data_generator_str.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/old_program/old_join_common_startup_program.bin delete 
mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/old_program/old_join_common_train_program.bin delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/old_program/old_update_main_program.bin delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/old_program/old_update_startup_program.bin delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/old_slot/slot delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/old_slot/slot_common delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/old_slot/to.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/reqi_fleet_desc delete mode 100755 feed/feed_deploy/news_jingpai/package/format_nets/scripts/xbox_compressor_mf.py delete mode 100755 feed/feed_deploy/news_jingpai/package/format_nets/scripts/xbox_decompressor_mf.awk delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/slot/slot delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/slot/slot_common delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/slot delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/slot_common delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/to.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/trainer_online.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/trainer_online_local.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/util.bak.py delete mode 100644 feed/feed_deploy/news_jingpai/package/format_nets/util.py diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/all_slot.dict b/feed/feed_deploy/news_jingpai/package/format_nets/all_slot.dict deleted file mode 100644 index 8ad76f38..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/all_slot.dict +++ /dev/null @@ -1,409 +0,0 @@ -6048 -6002 -6145 -6202 -6201 -6121 -6738 -6119 -6146 -6120 -6147 -6122 -6123 -6118 -6142 -6143 -6008 -6148 -6151 -6127 -6144 -6094 -6083 -6952 -6739 -6150 -6109 -6003 -6099 -6149 -6129 -6203 -6153 -6152 -6128 -6106 -6251 -7082 -7515 -6951 -6949 -7080 -6066 -7507 -6186 -6007 -7514 -6125 -7506 -10001 -6006 -7023 -6085 -10000 -6098 -6250 -6110 -6124 -6090 -6082 -6067 -6101 -6004 -6191 -7075 -6948 -6157 -6126 -6188 -7077 -6070 -6111 -6087 -6103 -6107 -6194 -6156 -6005 -6247 -6814 -6158 -7122 -6058 -6189 -7058 -6059 -6115 -7079 -7081 -6833 -7024 -6108 -13342 -13345 -13412 -13343 -13350 -13346 -13409 -6009 -6011 -6012 -6013 -6014 -6015 -6019 -6023 -6024 -6027 -6029 -6031 -6050 -6060 -6068 -6069 -6089 -6095 -6105 -6112 -6130 -6131 -6132 -6134 -6161 -6162 -6163 -6166 -6182 -6183 -6185 -6190 -6212 -6213 -6231 -6233 -6234 -6236 -6238 -6239 -6240 -6241 -6242 -6243 -6244 -6245 -6354 -7002 -7005 -7008 -7010 -7013 -7015 -7019 -7020 -7045 -7046 -7048 -7049 -7052 -7054 -7056 -7064 -7066 -7076 -7078 -7083 -7084 -7085 -7086 -7087 -7088 -7089 -7090 -7099 -7100 -7101 -7102 -7103 -7104 -7105 -7109 -7124 -7126 -7136 -7142 -7143 -7144 -7145 -7146 -7147 -7148 -7150 -7151 -7152 -7153 -7154 -7155 -7156 -7157 -7047 -7050 -6257 -6259 -6260 -6261 -7170 -7185 -7186 -6751 -6755 -6757 -6759 -6760 -6763 -6764 -6765 -6766 -6767 -6768 -6769 -6770 -7502 -7503 -7504 -7505 -7510 -7511 -7512 -7513 -6806 -6807 -6808 -6809 -6810 -6811 -6812 -6813 -6815 -6816 -6817 -6819 -6823 -6828 -6831 -6840 -6845 -6875 -6879 -6881 -6888 -6889 -6947 -6950 -6956 -6957 -6959 -10006 -10008 -10009 -10010 -10011 -10016 -10017 -10018 -10019 -10020 -10021 
-10022 -10023 -10024 -10029 -10030 -10031 -10032 -10033 -10034 -10035 -10036 -10037 -10038 -10039 -10040 -10041 -10042 -10044 -10045 -10046 -10051 -10052 -10053 -10054 -10055 -10056 -10057 -10060 -10066 -10069 -6820 -6821 -6822 -13333 -13334 -13335 -13336 -13337 -13338 -13339 -13340 -13341 -13351 -13352 -13353 -13359 -13361 -13362 -13363 -13366 -13367 -13368 -13369 -13370 -13371 -13375 -13376 -5700 -5702 -13400 -13401 -13402 -13403 -13404 -13406 -13407 -13408 -13410 -13417 -13418 -13419 -13420 -13422 -13425 -13427 -13428 -13429 -13430 -13431 -13433 -13434 -13436 -13437 -13326 -13330 -13331 -5717 -13442 -13451 -13452 -13455 -13456 -13457 -13458 -13459 -13460 -13461 -13462 -13463 -13464 -13465 -13466 -13467 -13468 -1104 -1106 -1107 -1108 -1109 -1110 -1111 -1112 -1113 -1114 -1115 -1116 -1117 -1119 -1120 -1121 -1122 -1123 -1124 -1125 -1126 -1127 -1128 -1129 -13812 -13813 -6740 -1490 -32915 -32950 -32952 -32953 -32954 -33077 -33085 -33086 -12345 -23456 diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/clear_ssd.sh b/feed/feed_deploy/news_jingpai/package/format_nets/clear_ssd.sh deleted file mode 100644 index a26c21a0..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/clear_ssd.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!bash - -function check_appid_valid() { - appid="$1" - num=`echo "${appid}" |awk -F '-' '{print NF}'` - if [ $num -ne 4 ];then - return 1 - fi - return 0 -} - -function appid_running_num() { - appid="$1" - proc_num=`ps -ef |grep "${appid}"|grep -v grep|wc -l` - if [ $? -ne 0 ];then - #if failed, return 1, avoid - return 1 - fi - return ${proc_num} -} - -work_dir="$1" -base_dir=`echo "${work_dir}" |awk -F 'app-user-' '{print $1}'` -database_list=`find ${base_dir} -type d -name 'database'` -for element in ${database_list[@]} -do - app_id=`echo "$element"|awk -F 'app-user-' '{print $2}' |awk -F '/' '{print "app-user-"$1}'` - check_appid_valid "${app_id}" - if [ $? -ne 0 ];then - continue - fi - appid_running_num "${app_id}" - if [ $? 
-eq 0 ];then - echo "remove ${element}" - rm -rf ${element} - fi -done - diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/config.py b/feed/feed_deploy/news_jingpai/package/format_nets/config.py deleted file mode 100644 index 185c6842..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/config.py +++ /dev/null @@ -1,40 +0,0 @@ -dataset_type="InMemoryDataset" -batch_size=32 -thread_num=12 -shuffle_thread=12 -preload_thread=12 -join_common_thread=16 -update_thread=12 -fs_name="afs://xingtian.afs.baidu.com:9902" -fs_ugi="mlarch_pro,proisvip" -train_data_path=["afs:/user/feed/mlarch/samplejoin/mondr_shoubai_dnn_master/feasign"] -init_model_path="" -days="{20191201..20191231} {20200101..20200131} {20200201..20200228} {20200301..20200331}" -hours="{0..23}" -split_interval=5 -split_per_pass=2 -is_data_hourly_placed=False -save_first_base=False -output_path="afs:/user/feed/mlarch/model/feed_muye_news_paddle" -pipe_command="./read_feasign | python/bin/python ins_weight.py | awk -f format_newcate_hotnews.awk | ./parse_feasign all_slot.dict" -save_xbox_before_update=True -check_exist_seconds=30 -checkpoint_per_pass=36 -save_delta_frequency=6 -prefetch=True -write_stdout_frequency=10 - -need_reqi_changeslot=True -hdfs_dnn_plugin_path="afs:/user/feed/mlarch/sequence_generator/wuzhihua02/xujiaqi/test_combinejoincommon_0918_amd/20191006/base/dnn_plugin" -reqi_dnn_plugin_day=20191006 -reqi_dnn_plugin_pass=0 - -task_name="feed_production_shoubai_video_ctr_fsort_session_cut" -nodes=119 -node_memory=100000 -mpi_server="yq01-hpc-lvliang01-smart-master.dmop.baidu.com" -mpi_queue="feed5" -mpi_priority="very_high" -smart_client_home="/home/work/xiexionghang/news_paddle_online/smart_client/" -local_hadoop_home="/home/work/xiexionghang/news_paddle_online/hadoop-client/hadoop" -sparse_table_storage="ssd" diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/data_generate_base.py b/feed/feed_deploy/news_jingpai/package/format_nets/data_generate_base.py deleted file mode 100644 index 7abce3bd..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/data_generate_base.py +++ /dev/null @@ -1,358 +0,0 @@ -# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import sys - -__all__ = ['MultiSlotDataGenerator'] - - -class DataGenerator(object): - """ - DataGenerator is a general Base class for user to inherit - A user who wants to define his/her own python processing logic - with paddle.fluid.dataset should inherit this class. 
- """ - - def __init__(self): - self._proto_info = None - self.batch_size_ = 32 - - def _set_line_limit(self, line_limit): - if not isinstance(line_limit, int): - raise ValueError("line_limit%s must be in int type" % - type(line_limit)) - if line_limit < 1: - raise ValueError("line_limit can not less than 1") - self._line_limit = line_limit - - def set_batch(self, batch_size): - ''' - Set batch size of current DataGenerator - This is necessary only if a user wants to define generator_batch - - Example: - - .. code-block:: python - import paddle.fluid.incubate.data_generator as dg - class MyData(dg.DataGenerator): - - def generate_sample(self, line): - def local_iter(): - int_words = [int(x) for x in line.split()] - yield ("words", int_words) - return local_iter - - def generate_batch(self, samples): - def local_iter(): - for s in samples: - yield ("words", s[1].extend([s[1][0]])) - mydata = MyData() - mydata.set_batch(128) - - ''' - self.batch_size_ = batch_size - - def run_from_memory(self): - ''' - This function generator data from memory, it is usually used for - debug and benchmarking - - Example: - .. code-block:: python - import paddle.fluid.incubate.data_generator as dg - class MyData(dg.DataGenerator): - - def generate_sample(self, line): - def local_iter(): - yield ("words", [1, 2, 3, 4]) - return local_iter - - mydata = MyData() - mydata.run_from_memory() - ''' - batch_samples = [] - line_iter = self.generate_sample(None) - for user_parsed_line in line_iter(): - if user_parsed_line == None: - continue - batch_samples.append(user_parsed_line) - if len(batch_samples) == self.batch_size_: - batch_iter = self.generate_batch(batch_samples) - for sample in batch_iter(): - sys.stdout.write(self._gen_str(sample)) - batch_samples = [] - if len(batch_samples) > 0: - batch_iter = self.generate_batch(batch_samples) - for sample in batch_iter(): - sys.stdout.write(self._gen_str(sample)) - - - def run_from_stdin(self): - ''' - This function reads the data row from stdin, parses it with the - process function, and further parses the return value of the - process function with the _gen_str function. The parsed data will - be wrote to stdout and the corresponding protofile will be - generated. - - Example: - - .. code-block:: python - import paddle.fluid.incubate.data_generator as dg - class MyData(dg.DataGenerator): - - def generate_sample(self, line): - def local_iter(): - int_words = [int(x) for x in line.split()] - yield ("words", [int_words]) - return local_iter - - mydata = MyData() - mydata.run_from_stdin() - - ''' - batch_samples = [] - for line in sys.stdin: - line_iter = self.generate_sample(line) - for user_parsed_line in line_iter(): - if user_parsed_line == None: - continue - batch_samples.append(user_parsed_line) - if len(batch_samples) == self.batch_size_: - batch_iter = self.generate_batch(batch_samples) - for sample in batch_iter(): - sys.stdout.write(self._gen_str(sample)) - batch_samples = [] - if len(batch_samples) > 0: - batch_iter = self.generate_batch(batch_samples) - for sample in batch_iter(): - sys.stdout.write(self._gen_str(sample)) - - def _gen_str(self, line): - ''' - Further processing the output of the process() function rewritten by - user, outputting data that can be directly read by the datafeed,and - updating proto_info infomation. - - Args: - line(str): the output of the process() function rewritten by user. - - Returns: - Return a string data that can be read directly by the datafeed. 
- ''' - raise NotImplementedError( - "pls use MultiSlotDataGenerator or PairWiseDataGenerator") - - def generate_sample(self, line): - ''' - This function needs to be overridden by the user to process the - original data row into a list or tuple. - - Args: - line(str): the original data row - - Returns: - Returns the data processed by the user. - The data format is list or tuple: - [(name, [feasign, ...]), ...] - or ((name, [feasign, ...]), ...) - - For example: - [("words", [1926, 08, 17]), ("label", [1])] - or (("words", [1926, 08, 17]), ("label", [1])) - - Note: - The type of feasigns must be in int or float. Once the float - element appears in the feasign, the type of that slot will be - processed into a float. - - Example: - - .. code-block:: python - import paddle.fluid.incubate.data_generator as dg - class MyData(dg.DataGenerator): - - def generate_sample(self, line): - def local_iter(): - int_words = [int(x) for x in line.split()] - yield ("words", [int_words]) - return local_iter - - ''' - raise NotImplementedError( - "Please rewrite this function to return a list or tuple: " + - "[(name, [feasign, ...]), ...] or ((name, [feasign, ...]), ...)") - - def generate_batch(self, samples): - ''' - This function needs to be overridden by the user to process the - generated samples from generate_sample(self, str) function - It is usually used as batch processing when a user wants to - do preprocessing on a batch of samples, e.g. padding according to - the max length of a sample in the batch - - Args: - samples(list tuple): generated sample from generate_sample - - Returns: - a python generator, the same format as return value of generate_sample - - Example: - - .. code-block:: python - import paddle.fluid.incubate.data_generator as dg - class MyData(dg.DataGenerator): - - def generate_sample(self, line): - def local_iter(): - int_words = [int(x) for x in line.split()] - yield ("words", int_words) - return local_iter - - def generate_batch(self, samples): - def local_iter(): - for s in samples: - yield ("words", s[1].extend([s[1][0]])) - mydata = MyData() - mydata.set_batch(128) - ''' - - def local_iter(): - for sample in samples: - yield sample - - return local_iter - - -class MultiSlotDataGenerator(DataGenerator): - - def _gen_str(self, line): - ''' - Further processing the output of the process() function rewritten by - user, outputting data that can be directly read by the MultiSlotDataFeed, - and updating proto_info infomation. - - The input line will be in this format: - >>> [(name, [feasign, ...]), ...] - >>> or ((name, [feasign, ...]), ...) - The output will be in this format: - >>> [ids_num id1 id2 ...] ... - The proto_info will be in this format: - >>> [(name, type), ...] - - For example, if the input is like this: - >>> [("words", [1926, 08, 17]), ("label", [1])] - >>> or (("words", [1926, 08, 17]), ("label", [1])) - the output will be: - >>> 3 1234 2345 3456 1 1 - the proto_info will be: - >>> [("words", "uint64"), ("label", "uint64")] - - Args: - line(str): the output of the process() function rewritten by user. - - Returns: - Return a string data that can be read directly by the MultiSlotDataFeed. 
- ''' - if not isinstance(line, list) and not isinstance(line, tuple): - raise ValueError( - "the output of process() must be in list or tuple type") - output = "" - - for index, item in enumerate(line): - name, elements = item - if output: - output += " " - out_str = [] - out_str.append(str(len(elements))) - out_str.extend(elements) - output += " ".join(out_str) - return output + "\n" - - if self._proto_info is None: - self._proto_info = [] - for index, item in enumerate(line): - name, elements = item - ''' - if not isinstance(name, str): - raise ValueError("name%s must be in str type" % type(name)) - if not isinstance(elements, list): - raise ValueError("elements%s must be in list type" % - type(elements)) - if not elements: - raise ValueError( - "the elements of each field can not be empty, you need padding it in process()." - ) - self._proto_info.append((name, "uint64")) - if output: - output += " " - output += str(len(elements)) - for elem in elements: - if isinstance(elem, float): - self._proto_info[-1] = (name, "float") - elif not isinstance(elem, int) and not isinstance(elem, - long): - raise ValueError( - "the type of element%s must be in int or float" % - type(elem)) - output += " " + str(elem) - ''' - if output: - output += " " - out_str = [] - out_str.append(str(len(elements))) - out_str.extend(elements) - output += " ".join(out_str) - else: - if len(line) != len(self._proto_info): - raise ValueError( - "the complete field set of two given line are inconsistent.") - for index, item in enumerate(line): - name, elements = item - ''' - if not isinstance(name, str): - raise ValueError("name%s must be in str type" % type(name)) - if not isinstance(elements, list): - raise ValueError("elements%s must be in list type" % - type(elements)) - if not elements: - raise ValueError( - "the elements of each field can not be empty, you need padding it in process()." - ) - if name != self._proto_info[index][0]: - raise ValueError( - "the field name of two given line are not match: require<%s>, get<%s>." 
-                        % (self._proto_info[index][0], name))
-                '''
-                if output:
-                    output += " "
-                out_str = []
-                out_str.append(str(len(elements)))
-                #out_str.extend([str(x) for x in elements])
-                out_str.extend(elements)
-                output += " ".join(out_str)
-                '''
-                for elem in elements:
-                    if self._proto_info[index][1] != "float":
-                        if isinstance(elem, float):
-                            self._proto_info[index] = (name, "float")
-                        elif not isinstance(elem, int) and not isinstance(elem,
-                                                                          long):
-                            raise ValueError(
-                                "the type of element%s must be in int or float"
-                                % type(elem))
-                    output += " " + str(elem)
-                '''
-        return output + "\n"
diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/feed/layer.py b/feed/feed_deploy/news_jingpai/package/format_nets/feed/layer.py
deleted file mode 100644
index c0563f42..00000000
--- a/feed/feed_deploy/news_jingpai/package/format_nets/feed/layer.py
+++ /dev/null
@@ -1,163 +0,0 @@
-import paddle.fluid as fluid
-from abc import ABCMeta, abstractmethod
-
-class Layer(object):
-    __metaclass__=ABCMeta
-
-    def __init__(self, config):
-        pass
-
-    def generate(self, mode, param):
-        if mode == 'fluid':
-            return self.generate_fluid(param)
-        elif mode == 'tensorflow':
-            return self.generate_tensorflow(param)
-        print ('unsupport this mode: ' + mode)
-        return None,None
-
-    @abstractmethod
-    def generate_fluid(self, param):
-        pass
-
-    @abstractmethod
-    def generate_tensorflow(self, param):
-        pass
-
-class EmbeddingInputLayer(Layer):
-    def __init__(self, config):
-        self._cvm = config['cvm']
-        self._name = config['name']
-        self._slots = config['slots']
-        self._mf_dim = config['mf_dim']
-        self._backward = config['backward']
-        self._emb_dim = self._mf_dim
-        if self._cvm:
-            self._emb_dim = self._mf_dim + 2  #append show ctr
-        self._emb_layers = []
-
-    def generate_fluid(self, param):
-        show_clk = fluid.layers.concat(
-            [param['layer']['show'], param['layer']['click']], axis=1)
-        show_clk.stop_gradient = True
-        for slot in self._slots:
-            l = fluid.layers.data(name=slot, shape=[1], dtype="int64", lod_level=1)
-            emb = fluid.layers.embedding(input=l, size=[10, self._mf_dim + 2], is_sparse = True, is_distributed=True, param_attr=fluid.ParamAttr(name="embedding"))
-            emb = fluid.layers.sequence_pool(input=emb, pool_type='sum')
-            emb = fluid.layers.continuous_value_model(emb, show_clk, self._cvm)
-            self._emb_layers.append(emb)
-        output = fluid.layers.concat(input=self._emb_layers, axis=1, name=self._name)
-        return output, None
-
-class LabelInputLayer(Layer):
-    def __init__(self, config):
-        self._name = config['name']
-        self._dim = config.get('dim', 1)
-        self._data_type = config.get('data_type', "int64")
-        self._label_idx = config['label_idx']
-
-    def generate_fluid(self, param):
-        output = fluid.layers.data(name=self._name, shape=[-1, self._dim], dtype=self._data_type, lod_level=0, append_batch_size=False)
-        return output, None
-
-class TagInputLayer(Layer):
-    def __init__(self, config):
-        self._name = config['name']
-        self._tag = config['tag']
-        self._dim = config.get('dim', 1)
-        self._data_type = config['data_type']
-
-    def generate_fluid(self, param):
-        output = fluid.layers.data(name=self._name, shape=[-1, self._dim], dtype=self._data_type, lod_level=0, append_batch_size=False, stop_gradient=True)
-        return output, None
-
-class ParamLayer(Layer):
-    def __init__(self, config):
-        self._name = config['name']
-        self._coln = config['coln']
-        self._init_range = config.get('init_range', 1)
-        self._data_type = config['data_type']
-        self._config = config
-
-    def generate_fluid(self, param):
-        return self._config, None
-
-class NormalizetionLayer(Layer):
-    def __init__(self, config):
-        self._name = config['name']
-        self._input = config['input']
-
-    def generate_fluid(self, param):
-        input_layer = param['layer'][self._input[0]]
-        if len(self._input) > 0:
-            input_list=[ param['layer'][i] for i in self._input ]
-            input_layer = fluid.layers.concat(input=input_list, axis=1)
-        bn = fluid.layers.data_norm(input=input_layer, name=self._name, epsilon=1e-4, param_attr={
-            "batch_size":1e4,
-            "batch_sum_default":0.0,
-            "batch_square":1e4})
-        inference_param = [ self._name + '.batch_size', self._name + '.batch_sum', self._name + '.batch_square_sum' ]
-        return bn, {'inference_param' : inference_param}
-
-class NeuralLayer(Layer):
-    def __init__(self, config):
-        self._name = config['name']
-        self._param = config['param']
-        self._input = config['input']
-        self._bias = config.get('bias', True)
-        self._act_func = config.get('act_func', None)
-
-    def generate_fluid(self, param):
-        param_layer = param['layer'][self._param]
-        input_layer = param['layer'][self._input[0]]
-        if len(self._input) > 0:
-            input_list=[ param['layer'][i] for i in self._input ]
-            input_layer = fluid.layers.concat(input=input_list, axis=1)
-        input_coln = input_layer.shape[1]
-        scale = param_layer['init_range'] / (input_coln ** 0.5)
-        bias = None
-        if self._bias:
-            bias = fluid.ParamAttr(learning_rate=1.0, initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=scale))
-        fc = fluid.layers.fc(
-            name = self._name,
-            input = input_layer,
-            size = param_layer['coln'],
-            act = self._act_func,
-            param_attr = \
-                fluid.ParamAttr(learning_rate=1.0, \
-                    initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=scale)),
-            bias_attr = bias)
-        inference_param = [self._name + '.w_0', self._name + '.b_0']
-        return fc, {'inference_param' : inference_param}
-
-class SigmoidLossLayer(Layer):
-    def __init__(self, config):
-        self._name = config['name']
-        self._label = config['label']
-        self._input = config['input']
-        self._weight = config.get('weight', None)
-        self._bound = config.get('bound', [-15.0, 15.0])
-        self._extend_output = {}
-
-    def generate_fluid(self, param):
-        input_layer = param['layer'][self._input[0]]
-        label_layer = param['layer'][self._label]
-        output = fluid.layers.clip(input_layer, min=self._bound[0], max=self._bound[1], name=self._name)
-        norm = fluid.layers.sigmoid(input=output, name=self._name)
-        output = fluid.layers.log_loss(input=norm, label=label_layer)
-        if self._weight:
-            weight_layer = param['layer'][self._weight]
-            output = fluid.layers.elementwise_mul(output, weight_layer)
-        output = fluid.layers.mean(x=output)
-
-        #For AUC
-        binary_predict = fluid.layers.concat(
-            input=[fluid.layers.elementwise_sub(fluid.layers.ceil(norm), norm), norm], axis=1)
-        self._extend_output['auc'], self._extend_output['batch_auc'], [self._extend_output['batch_stat_pos'], \
-            self._extend_output['batch_stat_neg'], self._extend_output['stat_pos'], self._extend_output['stat_neg']] = \
-            fluid.layers.auc(input=binary_predict, label=label_layer, curve='ROC', num_thresholds=4096)
-
-        self._extend_output['sqrerr'], self._extend_output['abserr'], self._extend_output['prob'], self._extend_output['q'], \
-            self._extend_output['pos'], self._extend_output['total'] = \
-            fluid.contrib.layers.ctr_metric_bundle(norm, fluid.layers.cast(x=label_layer, dtype='float32'))
-
-        return norm, self._extend_output
diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/feed/layer_model.py b/feed/feed_deploy/news_jingpai/package/format_nets/feed/layer_model.py
deleted file
mode 100644 index 2fbc72a6..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/feed/layer_model.py +++ /dev/null @@ -1,54 +0,0 @@ -import os -import copy -import yaml -import layer_model -import paddle.fluid as fluid - -mode='fluid' -f = open('model.layers', 'r') - - -build_nodes = yaml.safe_load(f.read()) - - -build_param = {'layer': {}, 'inner_layer':{}, 'layer_extend': {}, 'model': {}} -build_phase = ['input', 'param', 'layer'] -inference_layer = ['ctr_output'] -inference_meta = {'dependency':{}, 'params': {}} -for layer in build_nodes['layer']: - build_param['inner_layer'][layer['name']] = layer - -def get_dependency(layer_graph, dest_layer): - dependency_list = [] - if dest_layer in layer_graph: - dependencys = copy.deepcopy(layer_graph[dest_layer]['input']) - dependency_list = copy.deepcopy(dependencys) - for dependency in dependencys: - dependency_list = dependency_list + get_dependency(layer_graph, dependency) - return list(set(dependency_list)) - -# build train model -if mode == 'fluid': - build_param['model']['train_program'] = fluid.Program() - build_param['model']['startup_program'] = fluid.Program() - with fluid.program_guard(build_param['model']['train_program'], build_param['model']['startup_program']): - with fluid.unique_name.guard(): - for phase in build_phase: - for node in build_nodes[phase]: - exec("""layer=layer_model.{}(node)""".format(node['class'])) - layer_output, extend_output = layer.generate(mode, build_param) - build_param['layer'][node['name']] = layer_output - build_param['layer_extend'][node['name']] = extend_output - -# build inference model -for layer in inference_layer: - inference_meta['param'][layer] = [] - inference_meta['dependency'][layer] = get_dependency(build_param['inner_layer'], layer) - for node in build_nodes['layer']: - if node['name'] not in inference_meta['dependency'][layer]: - continue - if 'inference_param' in build_param['layer_extend'][node['name']]: - inference_meta['param'][layer] += build_param['layer_extend'][node['name']]['inference_param'] - print(inference_meta['param'][layer]) - - diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/feed/model.layers b/feed/feed_deploy/news_jingpai/package/format_nets/feed/model.layers deleted file mode 100644 index 72502c5b..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/feed/model.layers +++ /dev/null @@ -1,22 +0,0 @@ -input : - - { name : embedding_input, class: EmbeddingLayer, backward: false, cvm: true, mf_dim: 10, slots: [ ]} - - { name : label_target, class: label, backward: false } - - { name : ins_sample_weight, class: tag, backward: false } - - { name : label_with_pred_target, class: label, backward: false } -summary : - - { name : base_summary } -param : - - { name : h1_param, class : param_layer, init_range : 1, coln:511, scale_by_rown : true} - - { name : h2_param, class : param_layer, init_range : 1, coln:255, scale_by_rown : true} - - { name : h3_param, class : param_layer, init_range : 1, coln:127, scale_by_rown : true} - - { name : h4_param, class : param_layer, init_range : 1, coln:127, scale_by_rown : true} - - { name : h5_param, class : param_layer, init_range : 1, coln:127, scale_by_rown : true} - - { name : h6_param, class : param_layer, init_range : 1, coln:1, scale_by_rown : true} -layer : - - { name : base_input_norm, class : normalization_layer, input : [embedding_input], summary : base_summary} - - { name : h1, class : neural_layer, input : [base_input_norm], param : h1_param, bias: true, act_func : relu} - - { name : h2, 
class : neural_layer, input : [h1], param : h2_param, bias : true, act_func : relu} - - { name : h3, class : neural_layer, input : [h2], param : h3_param, bias : true, act_func : relu} - - { name : h4, class : neural_layer, input : [h3], param : h4_param, bias : true, act_func : relu} - - { name : h5, class : neural_layer, input : [h4], param : h5_param, bias : true, act_func : relu} - - { name : ctr_output, class : neural_layer, input : [h5], param : h6_param, bias : true, act_func : sig_moid} diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/feed/test.py b/feed/feed_deploy/news_jingpai/package/format_nets/feed/test.py deleted file mode 100644 index 8b137891..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/feed/test.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/fleet_desc_combinejoincommon.prototxt b/feed/feed_deploy/news_jingpai/package/format_nets/fleet_desc_combinejoincommon.prototxt deleted file mode 100644 index e29be5c4..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/fleet_desc_combinejoincommon.prototxt +++ /dev/null @@ -1,1466 +0,0 @@ -server_param { - downpour_server_param { - downpour_table_param { - table_id: 0 - table_class: "DownpourSparseTable" - shard_num: 1950 - accessor { - accessor_class: "DownpourCtrAccessor" - sparse_sgd_param { - learning_rate: 0.05 - initial_g2sum: 3.0 - initial_range: 0.0001 - weight_bounds: -10.0 - weight_bounds: 10.0 - } - fea_dim: 11 - embedx_dim: 8 - embedx_threshold: 10 - downpour_accessor_param { - nonclk_coeff: 0.1 - click_coeff: 1 - base_threshold: 1.5 - delta_threshold: 0.25 - delta_keep_days: 16 - delete_after_unseen_days: 30 - show_click_decay_rate: 0.98 - delete_threshold: 0.8 - } - table_accessor_save_param { - param: 1 - converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)" - deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)" - } - table_accessor_save_param { - param: 2 - converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)" - deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)" - } - } - type: PS_SPARSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 1 - table_class: "DownpourDenseTable" - accessor { - accessor_class: "DownpourDenseValueAccessor" - dense_sgd_param { - name: "adam" - adam { - learning_rate: 5e-06 - avg_decay_rate: 0.999993 - ada_decay_rate: 0.9999 - ada_epsilon: 1e-08 - mom_decay_rate: 0.99 - } - naive { - learning_rate: 0.0002 - } - } - fea_dim: 3405365 - } - type: PS_DENSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 2 - table_class: "DownpourDenseDoubleTable" - accessor { - accessor_class: "DownpourDenseValueDoubleAccessor" - dense_sgd_param { - name: "summarydouble" - summary { - summary_decay_rate: 0.999999 - } - } - fea_dim: 16731 - } - type: PS_DENSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 3 - table_class: "DownpourDenseTable" - accessor { - accessor_class: "DownpourDenseValueAccessor" - dense_sgd_param { - name: "adam" - adam { - learning_rate: 5e-06 - avg_decay_rate: 0.999993 - ada_decay_rate: 0.9999 - ada_epsilon: 1e-08 - mom_decay_rate: 0.99 - } - naive { - learning_rate: 0.0002 - } - } - fea_dim: 2072615 - } - type: PS_DENSE_TABLE - compress_in_save: true - } - service_param { - server_class: "DownpourBrpcPsServer" - client_class: "DownpourBrpcPsClient" - service_class: "DownpourPsService" - start_server_port: 0 - server_thread_num: 12 - } - } -} 
-trainer_param { - dense_table { - table_id: 1 - - dense_variable_name: "join_0.w_0" - dense_variable_name: "join_0.b_0" - dense_variable_name: "join_1.w_0" - dense_variable_name: "join_1.b_0" - dense_variable_name: "join_2.w_0" - dense_variable_name: "join_2.b_0" - dense_variable_name: "join_3.w_0" - dense_variable_name: "join_3.b_0" - dense_variable_name: "join_4.w_0" - dense_variable_name: "join_4.b_0" - dense_variable_name: "join_5.w_0" - dense_variable_name: "join_5.b_0" - dense_variable_name: "join_6.w_0" - dense_variable_name: "join_6.b_0" - dense_variable_name: "join_7.w_0" - dense_variable_name: "join_7.b_0" - - dense_variable_name: "common_0.w_0" - dense_variable_name: "common_0.b_0" - dense_variable_name: "common_1.w_0" - dense_variable_name: "common_1.b_0" - dense_variable_name: "common_2.w_0" - dense_variable_name: "common_2.b_0" - dense_variable_name: "common_3.w_0" - dense_variable_name: "common_3.b_0" - dense_variable_name: "common_4.w_0" - dense_variable_name: "common_4.b_0" - dense_variable_name: "common_5.w_0" - dense_variable_name: "common_5.b_0" - dense_variable_name: "common_6.w_0" - dense_variable_name: "common_6.b_0" - dense_variable_name: "common_7.w_0" - dense_variable_name: "common_7.b_0" - - dense_gradient_variable_name: "join_0.w_0@GRAD" - dense_gradient_variable_name: "join_0.b_0@GRAD" - dense_gradient_variable_name: "join_1.w_0@GRAD" - dense_gradient_variable_name: "join_1.b_0@GRAD" - dense_gradient_variable_name: "join_2.w_0@GRAD" - dense_gradient_variable_name: "join_2.b_0@GRAD" - dense_gradient_variable_name: "join_3.w_0@GRAD" - dense_gradient_variable_name: "join_3.b_0@GRAD" - dense_gradient_variable_name: "join_4.w_0@GRAD" - dense_gradient_variable_name: "join_4.b_0@GRAD" - dense_gradient_variable_name: "join_5.w_0@GRAD" - dense_gradient_variable_name: "join_5.b_0@GRAD" - dense_gradient_variable_name: "join_6.w_0@GRAD" - dense_gradient_variable_name: "join_6.b_0@GRAD" - dense_gradient_variable_name: "join_7.w_0@GRAD" - dense_gradient_variable_name: "join_7.b_0@GRAD" - - dense_gradient_variable_name: "common_0.w_0@GRAD" - dense_gradient_variable_name: "common_0.b_0@GRAD" - dense_gradient_variable_name: "common_1.w_0@GRAD" - dense_gradient_variable_name: "common_1.b_0@GRAD" - dense_gradient_variable_name: "common_2.w_0@GRAD" - dense_gradient_variable_name: "common_2.b_0@GRAD" - dense_gradient_variable_name: "common_3.w_0@GRAD" - dense_gradient_variable_name: "common_3.b_0@GRAD" - dense_gradient_variable_name: "common_4.w_0@GRAD" - dense_gradient_variable_name: "common_4.b_0@GRAD" - dense_gradient_variable_name: "common_5.w_0@GRAD" - dense_gradient_variable_name: "common_5.b_0@GRAD" - dense_gradient_variable_name: "common_6.w_0@GRAD" - dense_gradient_variable_name: "common_6.b_0@GRAD" - dense_gradient_variable_name: "common_7.w_0@GRAD" - dense_gradient_variable_name: "common_7.b_0@GRAD" - } - dense_table { - table_id: 2 - dense_variable_name: "join.batch_size" - dense_variable_name: "join.batch_sum" - dense_variable_name: "join.batch_square_sum" - - dense_variable_name: "common.batch_size" - dense_variable_name: "common.batch_sum" - dense_variable_name: "common.batch_square_sum" - - dense_gradient_variable_name: "join.batch_size@GRAD" - dense_gradient_variable_name: "join.batch_sum@GRAD" - dense_gradient_variable_name: "join.batch_square_sum@GRAD" - - dense_gradient_variable_name: "common.batch_size@GRAD" - dense_gradient_variable_name: "common.batch_sum@GRAD" - dense_gradient_variable_name: "common.batch_square_sum@GRAD" - } - dense_table { - table_id: 3 - 
dense_variable_name: "fc_0.w_0" - dense_variable_name: "fc_0.b_0" - dense_variable_name: "fc_1.w_0" - dense_variable_name: "fc_1.b_0" - dense_variable_name: "fc_2.w_0" - dense_variable_name: "fc_2.b_0" - dense_variable_name: "fc_3.w_0" - dense_variable_name: "fc_3.b_0" - dense_variable_name: "fc_4.w_0" - dense_variable_name: "fc_4.b_0" - dense_variable_name: "fc_5.w_0" - dense_variable_name: "fc_5.b_0" - dense_gradient_variable_name: "fc_0.w_0@GRAD" - dense_gradient_variable_name: "fc_0.b_0@GRAD" - dense_gradient_variable_name: "fc_1.w_0@GRAD" - dense_gradient_variable_name: "fc_1.b_0@GRAD" - dense_gradient_variable_name: "fc_2.w_0@GRAD" - dense_gradient_variable_name: "fc_2.b_0@GRAD" - dense_gradient_variable_name: "fc_3.w_0@GRAD" - dense_gradient_variable_name: "fc_3.b_0@GRAD" - dense_gradient_variable_name: "fc_4.w_0@GRAD" - dense_gradient_variable_name: "fc_4.b_0@GRAD" - dense_gradient_variable_name: "fc_5.w_0@GRAD" - dense_gradient_variable_name: "fc_5.b_0@GRAD" - } - sparse_table { - table_id: 0 - slot_key: "6048" - slot_key: "6002" - slot_key: "6145" - slot_key: "6202" - slot_key: "6201" - slot_key: "6121" - slot_key: "6738" - slot_key: "6119" - slot_key: "6146" - slot_key: "6120" - slot_key: "6147" - slot_key: "6122" - slot_key: "6123" - slot_key: "6118" - slot_key: "6142" - slot_key: "6143" - slot_key: "6008" - slot_key: "6148" - slot_key: "6151" - slot_key: "6127" - slot_key: "6144" - slot_key: "6094" - slot_key: "6083" - slot_key: "6952" - slot_key: "6739" - slot_key: "6150" - slot_key: "6109" - slot_key: "6003" - slot_key: "6099" - slot_key: "6149" - slot_key: "6129" - slot_key: "6203" - slot_key: "6153" - slot_key: "6152" - slot_key: "6128" - slot_key: "6106" - slot_key: "6251" - slot_key: "7082" - slot_key: "7515" - slot_key: "6951" - slot_key: "6949" - slot_key: "7080" - slot_key: "6066" - slot_key: "7507" - slot_key: "6186" - slot_key: "6007" - slot_key: "7514" - slot_key: "6125" - slot_key: "7506" - slot_key: "10001" - slot_key: "6006" - slot_key: "7023" - slot_key: "6085" - slot_key: "10000" - slot_key: "6098" - slot_key: "6250" - slot_key: "6110" - slot_key: "6124" - slot_key: "6090" - slot_key: "6082" - slot_key: "6067" - slot_key: "6101" - slot_key: "6004" - slot_key: "6191" - slot_key: "7075" - slot_key: "6948" - slot_key: "6157" - slot_key: "6126" - slot_key: "6188" - slot_key: "7077" - slot_key: "6070" - slot_key: "6111" - slot_key: "6087" - slot_key: "6103" - slot_key: "6107" - slot_key: "6194" - slot_key: "6156" - slot_key: "6005" - slot_key: "6247" - slot_key: "6814" - slot_key: "6158" - slot_key: "7122" - slot_key: "6058" - slot_key: "6189" - slot_key: "7058" - slot_key: "6059" - slot_key: "6115" - slot_key: "7079" - slot_key: "7081" - slot_key: "6833" - slot_key: "7024" - slot_key: "6108" - slot_key: "13342" - slot_key: "13345" - slot_key: "13412" - slot_key: "13343" - slot_key: "13350" - slot_key: "13346" - slot_key: "13409" - slot_key: "6009" - slot_key: "6011" - slot_key: "6012" - slot_key: "6013" - slot_key: "6014" - slot_key: "6015" - slot_key: "6019" - slot_key: "6023" - slot_key: "6024" - slot_key: "6027" - slot_key: "6029" - slot_key: "6031" - slot_key: "6050" - slot_key: "6060" - slot_key: "6068" - slot_key: "6069" - slot_key: "6089" - slot_key: "6095" - slot_key: "6105" - slot_key: "6112" - slot_key: "6130" - slot_key: "6131" - slot_key: "6132" - slot_key: "6134" - slot_key: "6161" - slot_key: "6162" - slot_key: "6163" - slot_key: "6166" - slot_key: "6182" - slot_key: "6183" - slot_key: "6185" - slot_key: "6190" - slot_key: "6212" - slot_key: "6213" - 
slot_key: "6231" - slot_key: "6233" - slot_key: "6234" - slot_key: "6236" - slot_key: "6238" - slot_key: "6239" - slot_key: "6240" - slot_key: "6241" - slot_key: "6242" - slot_key: "6243" - slot_key: "6244" - slot_key: "6245" - slot_key: "6354" - slot_key: "7002" - slot_key: "7005" - slot_key: "7008" - slot_key: "7010" - slot_key: "7012" - slot_key: "7013" - slot_key: "7015" - slot_key: "7016" - slot_key: "7017" - slot_key: "7018" - slot_key: "7019" - slot_key: "7020" - slot_key: "7045" - slot_key: "7046" - slot_key: "7048" - slot_key: "7049" - slot_key: "7052" - slot_key: "7054" - slot_key: "7056" - slot_key: "7064" - slot_key: "7066" - slot_key: "7076" - slot_key: "7078" - slot_key: "7083" - slot_key: "7084" - slot_key: "7085" - slot_key: "7086" - slot_key: "7087" - slot_key: "7088" - slot_key: "7089" - slot_key: "7090" - slot_key: "7099" - slot_key: "7100" - slot_key: "7101" - slot_key: "7102" - slot_key: "7103" - slot_key: "7104" - slot_key: "7105" - slot_key: "7109" - slot_key: "7124" - slot_key: "7126" - slot_key: "7136" - slot_key: "7142" - slot_key: "7143" - slot_key: "7144" - slot_key: "7145" - slot_key: "7146" - slot_key: "7147" - slot_key: "7148" - slot_key: "7150" - slot_key: "7151" - slot_key: "7152" - slot_key: "7153" - slot_key: "7154" - slot_key: "7155" - slot_key: "7156" - slot_key: "7157" - slot_key: "7047" - slot_key: "7050" - slot_key: "6253" - slot_key: "6254" - slot_key: "6255" - slot_key: "6256" - slot_key: "6257" - slot_key: "6259" - slot_key: "6260" - slot_key: "6261" - slot_key: "7170" - slot_key: "7185" - slot_key: "7186" - slot_key: "6751" - slot_key: "6755" - slot_key: "6757" - slot_key: "6759" - slot_key: "6760" - slot_key: "6763" - slot_key: "6764" - slot_key: "6765" - slot_key: "6766" - slot_key: "6767" - slot_key: "6768" - slot_key: "6769" - slot_key: "6770" - slot_key: "7502" - slot_key: "7503" - slot_key: "7504" - slot_key: "7505" - slot_key: "7510" - slot_key: "7511" - slot_key: "7512" - slot_key: "7513" - slot_key: "6806" - slot_key: "6807" - slot_key: "6808" - slot_key: "6809" - slot_key: "6810" - slot_key: "6811" - slot_key: "6812" - slot_key: "6813" - slot_key: "6815" - slot_key: "6816" - slot_key: "6817" - slot_key: "6819" - slot_key: "6823" - slot_key: "6828" - slot_key: "6831" - slot_key: "6840" - slot_key: "6845" - slot_key: "6875" - slot_key: "6879" - slot_key: "6881" - slot_key: "6888" - slot_key: "6889" - slot_key: "6947" - slot_key: "6950" - slot_key: "6956" - slot_key: "6957" - slot_key: "6959" - slot_key: "10006" - slot_key: "10008" - slot_key: "10009" - slot_key: "10010" - slot_key: "10011" - slot_key: "10016" - slot_key: "10017" - slot_key: "10018" - slot_key: "10019" - slot_key: "10020" - slot_key: "10021" - slot_key: "10022" - slot_key: "10023" - slot_key: "10024" - slot_key: "10029" - slot_key: "10030" - slot_key: "10031" - slot_key: "10032" - slot_key: "10033" - slot_key: "10034" - slot_key: "10035" - slot_key: "10036" - slot_key: "10037" - slot_key: "10038" - slot_key: "10039" - slot_key: "10040" - slot_key: "10041" - slot_key: "10042" - slot_key: "10044" - slot_key: "10045" - slot_key: "10046" - slot_key: "10051" - slot_key: "10052" - slot_key: "10053" - slot_key: "10054" - slot_key: "10055" - slot_key: "10056" - slot_key: "10057" - slot_key: "10060" - slot_key: "10066" - slot_key: "10069" - slot_key: "6820" - slot_key: "6821" - slot_key: "6822" - slot_key: "13333" - slot_key: "13334" - slot_key: "13335" - slot_key: "13336" - slot_key: "13337" - slot_key: "13338" - slot_key: "13339" - slot_key: "13340" - slot_key: "13341" - 
slot_key: "13351" - slot_key: "13352" - slot_key: "13353" - slot_key: "13359" - slot_key: "13361" - slot_key: "13362" - slot_key: "13363" - slot_key: "13366" - slot_key: "13367" - slot_key: "13368" - slot_key: "13369" - slot_key: "13370" - slot_key: "13371" - slot_key: "13375" - slot_key: "13376" - slot_key: "5700" - slot_key: "5702" - slot_key: "13400" - slot_key: "13401" - slot_key: "13402" - slot_key: "13403" - slot_key: "13404" - slot_key: "13406" - slot_key: "13407" - slot_key: "13408" - slot_key: "13410" - slot_key: "13417" - slot_key: "13418" - slot_key: "13419" - slot_key: "13420" - slot_key: "13422" - slot_key: "13425" - slot_key: "13427" - slot_key: "13428" - slot_key: "13429" - slot_key: "13430" - slot_key: "13431" - slot_key: "13433" - slot_key: "13434" - slot_key: "13436" - slot_key: "13437" - slot_key: "13326" - slot_key: "13330" - slot_key: "13331" - slot_key: "5717" - slot_key: "13442" - slot_key: "13451" - slot_key: "13452" - slot_key: "13455" - slot_key: "13456" - slot_key: "13457" - slot_key: "13458" - slot_key: "13459" - slot_key: "13460" - slot_key: "13461" - slot_key: "13462" - slot_key: "13463" - slot_key: "13464" - slot_key: "13465" - slot_key: "13466" - slot_key: "13467" - slot_key: "13468" - slot_key: "1104" - slot_key: "1106" - slot_key: "1107" - slot_key: "1108" - slot_key: "1109" - slot_key: "1110" - slot_key: "1111" - slot_key: "1112" - slot_key: "1113" - slot_key: "1114" - slot_key: "1115" - slot_key: "1116" - slot_key: "1117" - slot_key: "1119" - slot_key: "1120" - slot_key: "1121" - slot_key: "1122" - slot_key: "1123" - slot_key: "1124" - slot_key: "1125" - slot_key: "1126" - slot_key: "1127" - slot_key: "1128" - slot_key: "1129" - slot_key: "13812" - slot_key: "13813" - slot_key: "6740" - slot_key: "1490" - slot_key: "1491" - slot_value: "embedding_0.tmp_0" - slot_value: "embedding_1.tmp_0" - slot_value: "embedding_2.tmp_0" - slot_value: "embedding_3.tmp_0" - slot_value: "embedding_4.tmp_0" - slot_value: "embedding_5.tmp_0" - slot_value: "embedding_6.tmp_0" - slot_value: "embedding_7.tmp_0" - slot_value: "embedding_8.tmp_0" - slot_value: "embedding_9.tmp_0" - slot_value: "embedding_10.tmp_0" - slot_value: "embedding_11.tmp_0" - slot_value: "embedding_12.tmp_0" - slot_value: "embedding_13.tmp_0" - slot_value: "embedding_14.tmp_0" - slot_value: "embedding_15.tmp_0" - slot_value: "embedding_16.tmp_0" - slot_value: "embedding_17.tmp_0" - slot_value: "embedding_18.tmp_0" - slot_value: "embedding_19.tmp_0" - slot_value: "embedding_20.tmp_0" - slot_value: "embedding_21.tmp_0" - slot_value: "embedding_22.tmp_0" - slot_value: "embedding_23.tmp_0" - slot_value: "embedding_24.tmp_0" - slot_value: "embedding_25.tmp_0" - slot_value: "embedding_26.tmp_0" - slot_value: "embedding_27.tmp_0" - slot_value: "embedding_28.tmp_0" - slot_value: "embedding_29.tmp_0" - slot_value: "embedding_30.tmp_0" - slot_value: "embedding_31.tmp_0" - slot_value: "embedding_32.tmp_0" - slot_value: "embedding_33.tmp_0" - slot_value: "embedding_34.tmp_0" - slot_value: "embedding_35.tmp_0" - slot_value: "embedding_36.tmp_0" - slot_value: "embedding_37.tmp_0" - slot_value: "embedding_38.tmp_0" - slot_value: "embedding_39.tmp_0" - slot_value: "embedding_40.tmp_0" - slot_value: "embedding_41.tmp_0" - slot_value: "embedding_42.tmp_0" - slot_value: "embedding_43.tmp_0" - slot_value: "embedding_44.tmp_0" - slot_value: "embedding_45.tmp_0" - slot_value: "embedding_46.tmp_0" - slot_value: "embedding_47.tmp_0" - slot_value: "embedding_48.tmp_0" - slot_value: "embedding_49.tmp_0" - slot_value: 
"embedding_50.tmp_0" - slot_value: "embedding_51.tmp_0" - slot_value: "embedding_52.tmp_0" - slot_value: "embedding_53.tmp_0" - slot_value: "embedding_54.tmp_0" - slot_value: "embedding_55.tmp_0" - slot_value: "embedding_56.tmp_0" - slot_value: "embedding_57.tmp_0" - slot_value: "embedding_58.tmp_0" - slot_value: "embedding_59.tmp_0" - slot_value: "embedding_60.tmp_0" - slot_value: "embedding_61.tmp_0" - slot_value: "embedding_62.tmp_0" - slot_value: "embedding_63.tmp_0" - slot_value: "embedding_64.tmp_0" - slot_value: "embedding_65.tmp_0" - slot_value: "embedding_66.tmp_0" - slot_value: "embedding_67.tmp_0" - slot_value: "embedding_68.tmp_0" - slot_value: "embedding_69.tmp_0" - slot_value: "embedding_70.tmp_0" - slot_value: "embedding_71.tmp_0" - slot_value: "embedding_72.tmp_0" - slot_value: "embedding_73.tmp_0" - slot_value: "embedding_74.tmp_0" - slot_value: "embedding_75.tmp_0" - slot_value: "embedding_76.tmp_0" - slot_value: "embedding_77.tmp_0" - slot_value: "embedding_78.tmp_0" - slot_value: "embedding_79.tmp_0" - slot_value: "embedding_80.tmp_0" - slot_value: "embedding_81.tmp_0" - slot_value: "embedding_82.tmp_0" - slot_value: "embedding_83.tmp_0" - slot_value: "embedding_84.tmp_0" - slot_value: "embedding_85.tmp_0" - slot_value: "embedding_86.tmp_0" - slot_value: "embedding_87.tmp_0" - slot_value: "embedding_88.tmp_0" - slot_value: "embedding_89.tmp_0" - slot_value: "embedding_90.tmp_0" - slot_value: "embedding_91.tmp_0" - slot_value: "embedding_92.tmp_0" - slot_value: "embedding_93.tmp_0" - slot_value: "embedding_94.tmp_0" - slot_value: "embedding_95.tmp_0" - slot_value: "embedding_96.tmp_0" - slot_value: "embedding_97.tmp_0" - slot_value: "embedding_98.tmp_0" - slot_value: "embedding_99.tmp_0" - slot_value: "embedding_100.tmp_0" - slot_value: "embedding_101.tmp_0" - slot_value: "embedding_102.tmp_0" - slot_value: "embedding_103.tmp_0" - slot_value: "embedding_104.tmp_0" - slot_value: "embedding_105.tmp_0" - slot_value: "embedding_106.tmp_0" - slot_value: "embedding_107.tmp_0" - slot_value: "embedding_108.tmp_0" - slot_value: "embedding_109.tmp_0" - slot_value: "embedding_110.tmp_0" - slot_value: "embedding_111.tmp_0" - slot_value: "embedding_112.tmp_0" - slot_value: "embedding_113.tmp_0" - slot_value: "embedding_114.tmp_0" - slot_value: "embedding_115.tmp_0" - slot_value: "embedding_116.tmp_0" - slot_value: "embedding_117.tmp_0" - slot_value: "embedding_118.tmp_0" - slot_value: "embedding_119.tmp_0" - slot_value: "embedding_120.tmp_0" - slot_value: "embedding_121.tmp_0" - slot_value: "embedding_122.tmp_0" - slot_value: "embedding_123.tmp_0" - slot_value: "embedding_124.tmp_0" - slot_value: "embedding_125.tmp_0" - slot_value: "embedding_126.tmp_0" - slot_value: "embedding_127.tmp_0" - slot_value: "embedding_128.tmp_0" - slot_value: "embedding_129.tmp_0" - slot_value: "embedding_130.tmp_0" - slot_value: "embedding_131.tmp_0" - slot_value: "embedding_132.tmp_0" - slot_value: "embedding_133.tmp_0" - slot_value: "embedding_134.tmp_0" - slot_value: "embedding_135.tmp_0" - slot_value: "embedding_136.tmp_0" - slot_value: "embedding_137.tmp_0" - slot_value: "embedding_138.tmp_0" - slot_value: "embedding_139.tmp_0" - slot_value: "embedding_140.tmp_0" - slot_value: "embedding_141.tmp_0" - slot_value: "embedding_142.tmp_0" - slot_value: "embedding_143.tmp_0" - slot_value: "embedding_144.tmp_0" - slot_value: "embedding_145.tmp_0" - slot_value: "embedding_146.tmp_0" - slot_value: "embedding_147.tmp_0" - slot_value: "embedding_148.tmp_0" - slot_value: "embedding_149.tmp_0" - slot_value: 
"embedding_150.tmp_0" - slot_value: "embedding_151.tmp_0" - slot_value: "embedding_152.tmp_0" - slot_value: "embedding_153.tmp_0" - slot_value: "embedding_154.tmp_0" - slot_value: "embedding_155.tmp_0" - slot_value: "embedding_156.tmp_0" - slot_value: "embedding_157.tmp_0" - slot_value: "embedding_158.tmp_0" - slot_value: "embedding_159.tmp_0" - slot_value: "embedding_160.tmp_0" - slot_value: "embedding_161.tmp_0" - slot_value: "embedding_162.tmp_0" - slot_value: "embedding_163.tmp_0" - slot_value: "embedding_164.tmp_0" - slot_value: "embedding_165.tmp_0" - slot_value: "embedding_166.tmp_0" - slot_value: "embedding_167.tmp_0" - slot_value: "embedding_168.tmp_0" - slot_value: "embedding_169.tmp_0" - slot_value: "embedding_170.tmp_0" - slot_value: "embedding_171.tmp_0" - slot_value: "embedding_172.tmp_0" - slot_value: "embedding_173.tmp_0" - slot_value: "embedding_174.tmp_0" - slot_value: "embedding_175.tmp_0" - slot_value: "embedding_176.tmp_0" - slot_value: "embedding_177.tmp_0" - slot_value: "embedding_178.tmp_0" - slot_value: "embedding_179.tmp_0" - slot_value: "embedding_180.tmp_0" - slot_value: "embedding_181.tmp_0" - slot_value: "embedding_182.tmp_0" - slot_value: "embedding_183.tmp_0" - slot_value: "embedding_184.tmp_0" - slot_value: "embedding_185.tmp_0" - slot_value: "embedding_186.tmp_0" - slot_value: "embedding_187.tmp_0" - slot_value: "embedding_188.tmp_0" - slot_value: "embedding_189.tmp_0" - slot_value: "embedding_190.tmp_0" - slot_value: "embedding_191.tmp_0" - slot_value: "embedding_192.tmp_0" - slot_value: "embedding_193.tmp_0" - slot_value: "embedding_194.tmp_0" - slot_value: "embedding_195.tmp_0" - slot_value: "embedding_196.tmp_0" - slot_value: "embedding_197.tmp_0" - slot_value: "embedding_198.tmp_0" - slot_value: "embedding_199.tmp_0" - slot_value: "embedding_200.tmp_0" - slot_value: "embedding_201.tmp_0" - slot_value: "embedding_202.tmp_0" - slot_value: "embedding_203.tmp_0" - slot_value: "embedding_204.tmp_0" - slot_value: "embedding_205.tmp_0" - slot_value: "embedding_206.tmp_0" - slot_value: "embedding_207.tmp_0" - slot_value: "embedding_208.tmp_0" - slot_value: "embedding_209.tmp_0" - slot_value: "embedding_210.tmp_0" - slot_value: "embedding_211.tmp_0" - slot_value: "embedding_212.tmp_0" - slot_value: "embedding_213.tmp_0" - slot_value: "embedding_214.tmp_0" - slot_value: "embedding_215.tmp_0" - slot_value: "embedding_216.tmp_0" - slot_value: "embedding_217.tmp_0" - slot_value: "embedding_218.tmp_0" - slot_value: "embedding_219.tmp_0" - slot_value: "embedding_220.tmp_0" - slot_value: "embedding_221.tmp_0" - slot_value: "embedding_222.tmp_0" - slot_value: "embedding_223.tmp_0" - slot_value: "embedding_224.tmp_0" - slot_value: "embedding_225.tmp_0" - slot_value: "embedding_226.tmp_0" - slot_value: "embedding_227.tmp_0" - slot_value: "embedding_228.tmp_0" - slot_value: "embedding_229.tmp_0" - slot_value: "embedding_230.tmp_0" - slot_value: "embedding_231.tmp_0" - slot_value: "embedding_232.tmp_0" - slot_value: "embedding_233.tmp_0" - slot_value: "embedding_234.tmp_0" - slot_value: "embedding_235.tmp_0" - slot_value: "embedding_236.tmp_0" - slot_value: "embedding_237.tmp_0" - slot_value: "embedding_238.tmp_0" - slot_value: "embedding_239.tmp_0" - slot_value: "embedding_240.tmp_0" - slot_value: "embedding_241.tmp_0" - slot_value: "embedding_242.tmp_0" - slot_value: "embedding_243.tmp_0" - slot_value: "embedding_244.tmp_0" - slot_value: "embedding_245.tmp_0" - slot_value: "embedding_246.tmp_0" - slot_value: "embedding_247.tmp_0" - slot_value: "embedding_248.tmp_0" - 
slot_value: "embedding_249.tmp_0" - slot_value: "embedding_250.tmp_0" - slot_value: "embedding_251.tmp_0" - slot_value: "embedding_252.tmp_0" - slot_value: "embedding_253.tmp_0" - slot_value: "embedding_254.tmp_0" - slot_value: "embedding_255.tmp_0" - slot_value: "embedding_256.tmp_0" - slot_value: "embedding_257.tmp_0" - slot_value: "embedding_258.tmp_0" - slot_value: "embedding_259.tmp_0" - slot_value: "embedding_260.tmp_0" - slot_value: "embedding_261.tmp_0" - slot_value: "embedding_262.tmp_0" - slot_value: "embedding_263.tmp_0" - slot_value: "embedding_264.tmp_0" - slot_value: "embedding_265.tmp_0" - slot_value: "embedding_266.tmp_0" - slot_value: "embedding_267.tmp_0" - slot_value: "embedding_268.tmp_0" - slot_value: "embedding_269.tmp_0" - slot_value: "embedding_270.tmp_0" - slot_value: "embedding_271.tmp_0" - slot_value: "embedding_272.tmp_0" - slot_value: "embedding_273.tmp_0" - slot_value: "embedding_274.tmp_0" - slot_value: "embedding_275.tmp_0" - slot_value: "embedding_276.tmp_0" - slot_value: "embedding_277.tmp_0" - slot_value: "embedding_278.tmp_0" - slot_value: "embedding_279.tmp_0" - slot_value: "embedding_280.tmp_0" - slot_value: "embedding_281.tmp_0" - slot_value: "embedding_282.tmp_0" - slot_value: "embedding_283.tmp_0" - slot_value: "embedding_284.tmp_0" - slot_value: "embedding_285.tmp_0" - slot_value: "embedding_286.tmp_0" - slot_value: "embedding_287.tmp_0" - slot_value: "embedding_288.tmp_0" - slot_value: "embedding_289.tmp_0" - slot_value: "embedding_290.tmp_0" - slot_value: "embedding_291.tmp_0" - slot_value: "embedding_292.tmp_0" - slot_value: "embedding_293.tmp_0" - slot_value: "embedding_294.tmp_0" - slot_value: "embedding_295.tmp_0" - slot_value: "embedding_296.tmp_0" - slot_value: "embedding_297.tmp_0" - slot_value: "embedding_298.tmp_0" - slot_value: "embedding_299.tmp_0" - slot_value: "embedding_300.tmp_0" - slot_value: "embedding_301.tmp_0" - slot_value: "embedding_302.tmp_0" - slot_value: "embedding_303.tmp_0" - slot_value: "embedding_304.tmp_0" - slot_value: "embedding_305.tmp_0" - slot_value: "embedding_306.tmp_0" - slot_value: "embedding_307.tmp_0" - slot_value: "embedding_308.tmp_0" - slot_value: "embedding_309.tmp_0" - slot_value: "embedding_310.tmp_0" - slot_value: "embedding_311.tmp_0" - slot_value: "embedding_312.tmp_0" - slot_value: "embedding_313.tmp_0" - slot_value: "embedding_314.tmp_0" - slot_value: "embedding_315.tmp_0" - slot_value: "embedding_316.tmp_0" - slot_value: "embedding_317.tmp_0" - slot_value: "embedding_318.tmp_0" - slot_value: "embedding_319.tmp_0" - slot_value: "embedding_320.tmp_0" - slot_value: "embedding_321.tmp_0" - slot_value: "embedding_322.tmp_0" - slot_value: "embedding_323.tmp_0" - slot_value: "embedding_324.tmp_0" - slot_value: "embedding_325.tmp_0" - slot_value: "embedding_326.tmp_0" - slot_value: "embedding_327.tmp_0" - slot_value: "embedding_328.tmp_0" - slot_value: "embedding_329.tmp_0" - slot_value: "embedding_330.tmp_0" - slot_value: "embedding_331.tmp_0" - slot_value: "embedding_332.tmp_0" - slot_value: "embedding_333.tmp_0" - slot_value: "embedding_334.tmp_0" - slot_value: "embedding_335.tmp_0" - slot_value: "embedding_336.tmp_0" - slot_value: "embedding_337.tmp_0" - slot_value: "embedding_338.tmp_0" - slot_value: "embedding_339.tmp_0" - slot_value: "embedding_340.tmp_0" - slot_value: "embedding_341.tmp_0" - slot_value: "embedding_342.tmp_0" - slot_value: "embedding_343.tmp_0" - slot_value: "embedding_344.tmp_0" - slot_value: "embedding_345.tmp_0" - slot_value: "embedding_346.tmp_0" - slot_value: 
"embedding_347.tmp_0" - slot_value: "embedding_348.tmp_0" - slot_value: "embedding_349.tmp_0" - slot_value: "embedding_350.tmp_0" - slot_value: "embedding_351.tmp_0" - slot_value: "embedding_352.tmp_0" - slot_value: "embedding_353.tmp_0" - slot_value: "embedding_354.tmp_0" - slot_value: "embedding_355.tmp_0" - slot_value: "embedding_356.tmp_0" - slot_value: "embedding_357.tmp_0" - slot_value: "embedding_358.tmp_0" - slot_value: "embedding_359.tmp_0" - slot_value: "embedding_360.tmp_0" - slot_value: "embedding_361.tmp_0" - slot_value: "embedding_362.tmp_0" - slot_value: "embedding_363.tmp_0" - slot_value: "embedding_364.tmp_0" - slot_value: "embedding_365.tmp_0" - slot_value: "embedding_366.tmp_0" - slot_value: "embedding_367.tmp_0" - slot_value: "embedding_368.tmp_0" - slot_value: "embedding_369.tmp_0" - slot_value: "embedding_370.tmp_0" - slot_value: "embedding_371.tmp_0" - slot_value: "embedding_372.tmp_0" - slot_value: "embedding_373.tmp_0" - slot_value: "embedding_374.tmp_0" - slot_value: "embedding_375.tmp_0" - slot_value: "embedding_376.tmp_0" - slot_value: "embedding_377.tmp_0" - slot_value: "embedding_378.tmp_0" - slot_value: "embedding_379.tmp_0" - slot_value: "embedding_380.tmp_0" - slot_value: "embedding_381.tmp_0" - slot_value: "embedding_382.tmp_0" - slot_value: "embedding_383.tmp_0" - slot_value: "embedding_384.tmp_0" - slot_value: "embedding_385.tmp_0" - slot_value: "embedding_386.tmp_0" - slot_value: "embedding_387.tmp_0" - slot_value: "embedding_388.tmp_0" - slot_value: "embedding_389.tmp_0" - slot_value: "embedding_390.tmp_0" - slot_value: "embedding_391.tmp_0" - slot_value: "embedding_392.tmp_0" - slot_value: "embedding_393.tmp_0" - slot_value: "embedding_394.tmp_0" - slot_value: "embedding_395.tmp_0" - slot_value: "embedding_396.tmp_0" - slot_value: "embedding_397.tmp_0" - slot_value: "embedding_398.tmp_0" - slot_value: "embedding_399.tmp_0" - slot_value: "embedding_400.tmp_0" - slot_value: "embedding_401.tmp_0" - slot_value: "embedding_402.tmp_0" - slot_value: "embedding_403.tmp_0" - slot_value: "embedding_404.tmp_0" - slot_value: "embedding_405.tmp_0" - slot_value: "embedding_406.tmp_0" - slot_value: "embedding_407.tmp_0" - slot_gradient: "embedding_0.tmp_0@GRAD" - slot_gradient: "embedding_1.tmp_0@GRAD" - slot_gradient: "embedding_2.tmp_0@GRAD" - slot_gradient: "embedding_3.tmp_0@GRAD" - slot_gradient: "embedding_4.tmp_0@GRAD" - slot_gradient: "embedding_5.tmp_0@GRAD" - slot_gradient: "embedding_6.tmp_0@GRAD" - slot_gradient: "embedding_7.tmp_0@GRAD" - slot_gradient: "embedding_8.tmp_0@GRAD" - slot_gradient: "embedding_9.tmp_0@GRAD" - slot_gradient: "embedding_10.tmp_0@GRAD" - slot_gradient: "embedding_11.tmp_0@GRAD" - slot_gradient: "embedding_12.tmp_0@GRAD" - slot_gradient: "embedding_13.tmp_0@GRAD" - slot_gradient: "embedding_14.tmp_0@GRAD" - slot_gradient: "embedding_15.tmp_0@GRAD" - slot_gradient: "embedding_16.tmp_0@GRAD" - slot_gradient: "embedding_17.tmp_0@GRAD" - slot_gradient: "embedding_18.tmp_0@GRAD" - slot_gradient: "embedding_19.tmp_0@GRAD" - slot_gradient: "embedding_20.tmp_0@GRAD" - slot_gradient: "embedding_21.tmp_0@GRAD" - slot_gradient: "embedding_22.tmp_0@GRAD" - slot_gradient: "embedding_23.tmp_0@GRAD" - slot_gradient: "embedding_24.tmp_0@GRAD" - slot_gradient: "embedding_25.tmp_0@GRAD" - slot_gradient: "embedding_26.tmp_0@GRAD" - slot_gradient: "embedding_27.tmp_0@GRAD" - slot_gradient: "embedding_28.tmp_0@GRAD" - slot_gradient: "embedding_29.tmp_0@GRAD" - slot_gradient: "embedding_30.tmp_0@GRAD" - slot_gradient: "embedding_31.tmp_0@GRAD" - 
slot_gradient: "embedding_32.tmp_0@GRAD" - slot_gradient: "embedding_33.tmp_0@GRAD" - slot_gradient: "embedding_34.tmp_0@GRAD" - slot_gradient: "embedding_35.tmp_0@GRAD" - slot_gradient: "embedding_36.tmp_0@GRAD" - slot_gradient: "embedding_37.tmp_0@GRAD" - slot_gradient: "embedding_38.tmp_0@GRAD" - slot_gradient: "embedding_39.tmp_0@GRAD" - slot_gradient: "embedding_40.tmp_0@GRAD" - slot_gradient: "embedding_41.tmp_0@GRAD" - slot_gradient: "embedding_42.tmp_0@GRAD" - slot_gradient: "embedding_43.tmp_0@GRAD" - slot_gradient: "embedding_44.tmp_0@GRAD" - slot_gradient: "embedding_45.tmp_0@GRAD" - slot_gradient: "embedding_46.tmp_0@GRAD" - slot_gradient: "embedding_47.tmp_0@GRAD" - slot_gradient: "embedding_48.tmp_0@GRAD" - slot_gradient: "embedding_49.tmp_0@GRAD" - slot_gradient: "embedding_50.tmp_0@GRAD" - slot_gradient: "embedding_51.tmp_0@GRAD" - slot_gradient: "embedding_52.tmp_0@GRAD" - slot_gradient: "embedding_53.tmp_0@GRAD" - slot_gradient: "embedding_54.tmp_0@GRAD" - slot_gradient: "embedding_55.tmp_0@GRAD" - slot_gradient: "embedding_56.tmp_0@GRAD" - slot_gradient: "embedding_57.tmp_0@GRAD" - slot_gradient: "embedding_58.tmp_0@GRAD" - slot_gradient: "embedding_59.tmp_0@GRAD" - slot_gradient: "embedding_60.tmp_0@GRAD" - slot_gradient: "embedding_61.tmp_0@GRAD" - slot_gradient: "embedding_62.tmp_0@GRAD" - slot_gradient: "embedding_63.tmp_0@GRAD" - slot_gradient: "embedding_64.tmp_0@GRAD" - slot_gradient: "embedding_65.tmp_0@GRAD" - slot_gradient: "embedding_66.tmp_0@GRAD" - slot_gradient: "embedding_67.tmp_0@GRAD" - slot_gradient: "embedding_68.tmp_0@GRAD" - slot_gradient: "embedding_69.tmp_0@GRAD" - slot_gradient: "embedding_70.tmp_0@GRAD" - slot_gradient: "embedding_71.tmp_0@GRAD" - slot_gradient: "embedding_72.tmp_0@GRAD" - slot_gradient: "embedding_73.tmp_0@GRAD" - slot_gradient: "embedding_74.tmp_0@GRAD" - slot_gradient: "embedding_75.tmp_0@GRAD" - slot_gradient: "embedding_76.tmp_0@GRAD" - slot_gradient: "embedding_77.tmp_0@GRAD" - slot_gradient: "embedding_78.tmp_0@GRAD" - slot_gradient: "embedding_79.tmp_0@GRAD" - slot_gradient: "embedding_80.tmp_0@GRAD" - slot_gradient: "embedding_81.tmp_0@GRAD" - slot_gradient: "embedding_82.tmp_0@GRAD" - slot_gradient: "embedding_83.tmp_0@GRAD" - slot_gradient: "embedding_84.tmp_0@GRAD" - slot_gradient: "embedding_85.tmp_0@GRAD" - slot_gradient: "embedding_86.tmp_0@GRAD" - slot_gradient: "embedding_87.tmp_0@GRAD" - slot_gradient: "embedding_88.tmp_0@GRAD" - slot_gradient: "embedding_89.tmp_0@GRAD" - slot_gradient: "embedding_90.tmp_0@GRAD" - slot_gradient: "embedding_91.tmp_0@GRAD" - slot_gradient: "embedding_92.tmp_0@GRAD" - slot_gradient: "embedding_93.tmp_0@GRAD" - slot_gradient: "embedding_94.tmp_0@GRAD" - slot_gradient: "embedding_95.tmp_0@GRAD" - slot_gradient: "embedding_96.tmp_0@GRAD" - slot_gradient: "embedding_97.tmp_0@GRAD" - slot_gradient: "embedding_98.tmp_0@GRAD" - slot_gradient: "embedding_99.tmp_0@GRAD" - slot_gradient: "embedding_100.tmp_0@GRAD" - slot_gradient: "embedding_101.tmp_0@GRAD" - slot_gradient: "embedding_102.tmp_0@GRAD" - slot_gradient: "embedding_103.tmp_0@GRAD" - slot_gradient: "embedding_104.tmp_0@GRAD" - slot_gradient: "embedding_105.tmp_0@GRAD" - slot_gradient: "embedding_106.tmp_0@GRAD" - slot_gradient: "embedding_107.tmp_0@GRAD" - slot_gradient: "embedding_108.tmp_0@GRAD" - slot_gradient: "embedding_109.tmp_0@GRAD" - slot_gradient: "embedding_110.tmp_0@GRAD" - slot_gradient: "embedding_111.tmp_0@GRAD" - slot_gradient: "embedding_112.tmp_0@GRAD" - slot_gradient: "embedding_113.tmp_0@GRAD" - slot_gradient: 
"embedding_114.tmp_0@GRAD" - slot_gradient: "embedding_115.tmp_0@GRAD" - slot_gradient: "embedding_116.tmp_0@GRAD" - slot_gradient: "embedding_117.tmp_0@GRAD" - slot_gradient: "embedding_118.tmp_0@GRAD" - slot_gradient: "embedding_119.tmp_0@GRAD" - slot_gradient: "embedding_120.tmp_0@GRAD" - slot_gradient: "embedding_121.tmp_0@GRAD" - slot_gradient: "embedding_122.tmp_0@GRAD" - slot_gradient: "embedding_123.tmp_0@GRAD" - slot_gradient: "embedding_124.tmp_0@GRAD" - slot_gradient: "embedding_125.tmp_0@GRAD" - slot_gradient: "embedding_126.tmp_0@GRAD" - slot_gradient: "embedding_127.tmp_0@GRAD" - slot_gradient: "embedding_128.tmp_0@GRAD" - slot_gradient: "embedding_129.tmp_0@GRAD" - slot_gradient: "embedding_130.tmp_0@GRAD" - slot_gradient: "embedding_131.tmp_0@GRAD" - slot_gradient: "embedding_132.tmp_0@GRAD" - slot_gradient: "embedding_133.tmp_0@GRAD" - slot_gradient: "embedding_134.tmp_0@GRAD" - slot_gradient: "embedding_135.tmp_0@GRAD" - slot_gradient: "embedding_136.tmp_0@GRAD" - slot_gradient: "embedding_137.tmp_0@GRAD" - slot_gradient: "embedding_138.tmp_0@GRAD" - slot_gradient: "embedding_139.tmp_0@GRAD" - slot_gradient: "embedding_140.tmp_0@GRAD" - slot_gradient: "embedding_141.tmp_0@GRAD" - slot_gradient: "embedding_142.tmp_0@GRAD" - slot_gradient: "embedding_143.tmp_0@GRAD" - slot_gradient: "embedding_144.tmp_0@GRAD" - slot_gradient: "embedding_145.tmp_0@GRAD" - slot_gradient: "embedding_146.tmp_0@GRAD" - slot_gradient: "embedding_147.tmp_0@GRAD" - slot_gradient: "embedding_148.tmp_0@GRAD" - slot_gradient: "embedding_149.tmp_0@GRAD" - slot_gradient: "embedding_150.tmp_0@GRAD" - slot_gradient: "embedding_151.tmp_0@GRAD" - slot_gradient: "embedding_152.tmp_0@GRAD" - slot_gradient: "embedding_153.tmp_0@GRAD" - slot_gradient: "embedding_154.tmp_0@GRAD" - slot_gradient: "embedding_155.tmp_0@GRAD" - slot_gradient: "embedding_156.tmp_0@GRAD" - slot_gradient: "embedding_157.tmp_0@GRAD" - slot_gradient: "embedding_158.tmp_0@GRAD" - slot_gradient: "embedding_159.tmp_0@GRAD" - slot_gradient: "embedding_160.tmp_0@GRAD" - slot_gradient: "embedding_161.tmp_0@GRAD" - slot_gradient: "embedding_162.tmp_0@GRAD" - slot_gradient: "embedding_163.tmp_0@GRAD" - slot_gradient: "embedding_164.tmp_0@GRAD" - slot_gradient: "embedding_165.tmp_0@GRAD" - slot_gradient: "embedding_166.tmp_0@GRAD" - slot_gradient: "embedding_167.tmp_0@GRAD" - slot_gradient: "embedding_168.tmp_0@GRAD" - slot_gradient: "embedding_169.tmp_0@GRAD" - slot_gradient: "embedding_170.tmp_0@GRAD" - slot_gradient: "embedding_171.tmp_0@GRAD" - slot_gradient: "embedding_172.tmp_0@GRAD" - slot_gradient: "embedding_173.tmp_0@GRAD" - slot_gradient: "embedding_174.tmp_0@GRAD" - slot_gradient: "embedding_175.tmp_0@GRAD" - slot_gradient: "embedding_176.tmp_0@GRAD" - slot_gradient: "embedding_177.tmp_0@GRAD" - slot_gradient: "embedding_178.tmp_0@GRAD" - slot_gradient: "embedding_179.tmp_0@GRAD" - slot_gradient: "embedding_180.tmp_0@GRAD" - slot_gradient: "embedding_181.tmp_0@GRAD" - slot_gradient: "embedding_182.tmp_0@GRAD" - slot_gradient: "embedding_183.tmp_0@GRAD" - slot_gradient: "embedding_184.tmp_0@GRAD" - slot_gradient: "embedding_185.tmp_0@GRAD" - slot_gradient: "embedding_186.tmp_0@GRAD" - slot_gradient: "embedding_187.tmp_0@GRAD" - slot_gradient: "embedding_188.tmp_0@GRAD" - slot_gradient: "embedding_189.tmp_0@GRAD" - slot_gradient: "embedding_190.tmp_0@GRAD" - slot_gradient: "embedding_191.tmp_0@GRAD" - slot_gradient: "embedding_192.tmp_0@GRAD" - slot_gradient: "embedding_193.tmp_0@GRAD" - slot_gradient: "embedding_194.tmp_0@GRAD" - 
slot_gradient: "embedding_195.tmp_0@GRAD" - slot_gradient: "embedding_196.tmp_0@GRAD" - slot_gradient: "embedding_197.tmp_0@GRAD" - slot_gradient: "embedding_198.tmp_0@GRAD" - slot_gradient: "embedding_199.tmp_0@GRAD" - slot_gradient: "embedding_200.tmp_0@GRAD" - slot_gradient: "embedding_201.tmp_0@GRAD" - slot_gradient: "embedding_202.tmp_0@GRAD" - slot_gradient: "embedding_203.tmp_0@GRAD" - slot_gradient: "embedding_204.tmp_0@GRAD" - slot_gradient: "embedding_205.tmp_0@GRAD" - slot_gradient: "embedding_206.tmp_0@GRAD" - slot_gradient: "embedding_207.tmp_0@GRAD" - slot_gradient: "embedding_208.tmp_0@GRAD" - slot_gradient: "embedding_209.tmp_0@GRAD" - slot_gradient: "embedding_210.tmp_0@GRAD" - slot_gradient: "embedding_211.tmp_0@GRAD" - slot_gradient: "embedding_212.tmp_0@GRAD" - slot_gradient: "embedding_213.tmp_0@GRAD" - slot_gradient: "embedding_214.tmp_0@GRAD" - slot_gradient: "embedding_215.tmp_0@GRAD" - slot_gradient: "embedding_216.tmp_0@GRAD" - slot_gradient: "embedding_217.tmp_0@GRAD" - slot_gradient: "embedding_218.tmp_0@GRAD" - slot_gradient: "embedding_219.tmp_0@GRAD" - slot_gradient: "embedding_220.tmp_0@GRAD" - slot_gradient: "embedding_221.tmp_0@GRAD" - slot_gradient: "embedding_222.tmp_0@GRAD" - slot_gradient: "embedding_223.tmp_0@GRAD" - slot_gradient: "embedding_224.tmp_0@GRAD" - slot_gradient: "embedding_225.tmp_0@GRAD" - slot_gradient: "embedding_226.tmp_0@GRAD" - slot_gradient: "embedding_227.tmp_0@GRAD" - slot_gradient: "embedding_228.tmp_0@GRAD" - slot_gradient: "embedding_229.tmp_0@GRAD" - slot_gradient: "embedding_230.tmp_0@GRAD" - slot_gradient: "embedding_231.tmp_0@GRAD" - slot_gradient: "embedding_232.tmp_0@GRAD" - slot_gradient: "embedding_233.tmp_0@GRAD" - slot_gradient: "embedding_234.tmp_0@GRAD" - slot_gradient: "embedding_235.tmp_0@GRAD" - slot_gradient: "embedding_236.tmp_0@GRAD" - slot_gradient: "embedding_237.tmp_0@GRAD" - slot_gradient: "embedding_238.tmp_0@GRAD" - slot_gradient: "embedding_239.tmp_0@GRAD" - slot_gradient: "embedding_240.tmp_0@GRAD" - slot_gradient: "embedding_241.tmp_0@GRAD" - slot_gradient: "embedding_242.tmp_0@GRAD" - slot_gradient: "embedding_243.tmp_0@GRAD" - slot_gradient: "embedding_244.tmp_0@GRAD" - slot_gradient: "embedding_245.tmp_0@GRAD" - slot_gradient: "embedding_246.tmp_0@GRAD" - slot_gradient: "embedding_247.tmp_0@GRAD" - slot_gradient: "embedding_248.tmp_0@GRAD" - slot_gradient: "embedding_249.tmp_0@GRAD" - slot_gradient: "embedding_250.tmp_0@GRAD" - slot_gradient: "embedding_251.tmp_0@GRAD" - slot_gradient: "embedding_252.tmp_0@GRAD" - slot_gradient: "embedding_253.tmp_0@GRAD" - slot_gradient: "embedding_254.tmp_0@GRAD" - slot_gradient: "embedding_255.tmp_0@GRAD" - slot_gradient: "embedding_256.tmp_0@GRAD" - slot_gradient: "embedding_257.tmp_0@GRAD" - slot_gradient: "embedding_258.tmp_0@GRAD" - slot_gradient: "embedding_259.tmp_0@GRAD" - slot_gradient: "embedding_260.tmp_0@GRAD" - slot_gradient: "embedding_261.tmp_0@GRAD" - slot_gradient: "embedding_262.tmp_0@GRAD" - slot_gradient: "embedding_263.tmp_0@GRAD" - slot_gradient: "embedding_264.tmp_0@GRAD" - slot_gradient: "embedding_265.tmp_0@GRAD" - slot_gradient: "embedding_266.tmp_0@GRAD" - slot_gradient: "embedding_267.tmp_0@GRAD" - slot_gradient: "embedding_268.tmp_0@GRAD" - slot_gradient: "embedding_269.tmp_0@GRAD" - slot_gradient: "embedding_270.tmp_0@GRAD" - slot_gradient: "embedding_271.tmp_0@GRAD" - slot_gradient: "embedding_272.tmp_0@GRAD" - slot_gradient: "embedding_273.tmp_0@GRAD" - slot_gradient: "embedding_274.tmp_0@GRAD" - slot_gradient: 
"embedding_275.tmp_0@GRAD" - slot_gradient: "embedding_276.tmp_0@GRAD" - slot_gradient: "embedding_277.tmp_0@GRAD" - slot_gradient: "embedding_278.tmp_0@GRAD" - slot_gradient: "embedding_279.tmp_0@GRAD" - slot_gradient: "embedding_280.tmp_0@GRAD" - slot_gradient: "embedding_281.tmp_0@GRAD" - slot_gradient: "embedding_282.tmp_0@GRAD" - slot_gradient: "embedding_283.tmp_0@GRAD" - slot_gradient: "embedding_284.tmp_0@GRAD" - slot_gradient: "embedding_285.tmp_0@GRAD" - slot_gradient: "embedding_286.tmp_0@GRAD" - slot_gradient: "embedding_287.tmp_0@GRAD" - slot_gradient: "embedding_288.tmp_0@GRAD" - slot_gradient: "embedding_289.tmp_0@GRAD" - slot_gradient: "embedding_290.tmp_0@GRAD" - slot_gradient: "embedding_291.tmp_0@GRAD" - slot_gradient: "embedding_292.tmp_0@GRAD" - slot_gradient: "embedding_293.tmp_0@GRAD" - slot_gradient: "embedding_294.tmp_0@GRAD" - slot_gradient: "embedding_295.tmp_0@GRAD" - slot_gradient: "embedding_296.tmp_0@GRAD" - slot_gradient: "embedding_297.tmp_0@GRAD" - slot_gradient: "embedding_298.tmp_0@GRAD" - slot_gradient: "embedding_299.tmp_0@GRAD" - slot_gradient: "embedding_300.tmp_0@GRAD" - slot_gradient: "embedding_301.tmp_0@GRAD" - slot_gradient: "embedding_302.tmp_0@GRAD" - slot_gradient: "embedding_303.tmp_0@GRAD" - slot_gradient: "embedding_304.tmp_0@GRAD" - slot_gradient: "embedding_305.tmp_0@GRAD" - slot_gradient: "embedding_306.tmp_0@GRAD" - slot_gradient: "embedding_307.tmp_0@GRAD" - slot_gradient: "embedding_308.tmp_0@GRAD" - slot_gradient: "embedding_309.tmp_0@GRAD" - slot_gradient: "embedding_310.tmp_0@GRAD" - slot_gradient: "embedding_311.tmp_0@GRAD" - slot_gradient: "embedding_312.tmp_0@GRAD" - slot_gradient: "embedding_313.tmp_0@GRAD" - slot_gradient: "embedding_314.tmp_0@GRAD" - slot_gradient: "embedding_315.tmp_0@GRAD" - slot_gradient: "embedding_316.tmp_0@GRAD" - slot_gradient: "embedding_317.tmp_0@GRAD" - slot_gradient: "embedding_318.tmp_0@GRAD" - slot_gradient: "embedding_319.tmp_0@GRAD" - slot_gradient: "embedding_320.tmp_0@GRAD" - slot_gradient: "embedding_321.tmp_0@GRAD" - slot_gradient: "embedding_322.tmp_0@GRAD" - slot_gradient: "embedding_323.tmp_0@GRAD" - slot_gradient: "embedding_324.tmp_0@GRAD" - slot_gradient: "embedding_325.tmp_0@GRAD" - slot_gradient: "embedding_326.tmp_0@GRAD" - slot_gradient: "embedding_327.tmp_0@GRAD" - slot_gradient: "embedding_328.tmp_0@GRAD" - slot_gradient: "embedding_329.tmp_0@GRAD" - slot_gradient: "embedding_330.tmp_0@GRAD" - slot_gradient: "embedding_331.tmp_0@GRAD" - slot_gradient: "embedding_332.tmp_0@GRAD" - slot_gradient: "embedding_333.tmp_0@GRAD" - slot_gradient: "embedding_334.tmp_0@GRAD" - slot_gradient: "embedding_335.tmp_0@GRAD" - slot_gradient: "embedding_336.tmp_0@GRAD" - slot_gradient: "embedding_337.tmp_0@GRAD" - slot_gradient: "embedding_338.tmp_0@GRAD" - slot_gradient: "embedding_339.tmp_0@GRAD" - slot_gradient: "embedding_340.tmp_0@GRAD" - slot_gradient: "embedding_341.tmp_0@GRAD" - slot_gradient: "embedding_342.tmp_0@GRAD" - slot_gradient: "embedding_343.tmp_0@GRAD" - slot_gradient: "embedding_344.tmp_0@GRAD" - slot_gradient: "embedding_345.tmp_0@GRAD" - slot_gradient: "embedding_346.tmp_0@GRAD" - slot_gradient: "embedding_347.tmp_0@GRAD" - slot_gradient: "embedding_348.tmp_0@GRAD" - slot_gradient: "embedding_349.tmp_0@GRAD" - slot_gradient: "embedding_350.tmp_0@GRAD" - slot_gradient: "embedding_351.tmp_0@GRAD" - slot_gradient: "embedding_352.tmp_0@GRAD" - slot_gradient: "embedding_353.tmp_0@GRAD" - slot_gradient: "embedding_354.tmp_0@GRAD" - slot_gradient: "embedding_355.tmp_0@GRAD" - 
slot_gradient: "embedding_356.tmp_0@GRAD" - slot_gradient: "embedding_357.tmp_0@GRAD" - slot_gradient: "embedding_358.tmp_0@GRAD" - slot_gradient: "embedding_359.tmp_0@GRAD" - slot_gradient: "embedding_360.tmp_0@GRAD" - slot_gradient: "embedding_361.tmp_0@GRAD" - slot_gradient: "embedding_362.tmp_0@GRAD" - slot_gradient: "embedding_363.tmp_0@GRAD" - slot_gradient: "embedding_364.tmp_0@GRAD" - slot_gradient: "embedding_365.tmp_0@GRAD" - slot_gradient: "embedding_366.tmp_0@GRAD" - slot_gradient: "embedding_367.tmp_0@GRAD" - slot_gradient: "embedding_368.tmp_0@GRAD" - slot_gradient: "embedding_369.tmp_0@GRAD" - slot_gradient: "embedding_370.tmp_0@GRAD" - slot_gradient: "embedding_371.tmp_0@GRAD" - slot_gradient: "embedding_372.tmp_0@GRAD" - slot_gradient: "embedding_373.tmp_0@GRAD" - slot_gradient: "embedding_374.tmp_0@GRAD" - slot_gradient: "embedding_375.tmp_0@GRAD" - slot_gradient: "embedding_376.tmp_0@GRAD" - slot_gradient: "embedding_377.tmp_0@GRAD" - slot_gradient: "embedding_378.tmp_0@GRAD" - slot_gradient: "embedding_379.tmp_0@GRAD" - slot_gradient: "embedding_380.tmp_0@GRAD" - slot_gradient: "embedding_381.tmp_0@GRAD" - slot_gradient: "embedding_382.tmp_0@GRAD" - slot_gradient: "embedding_383.tmp_0@GRAD" - slot_gradient: "embedding_384.tmp_0@GRAD" - slot_gradient: "embedding_385.tmp_0@GRAD" - slot_gradient: "embedding_386.tmp_0@GRAD" - slot_gradient: "embedding_387.tmp_0@GRAD" - slot_gradient: "embedding_388.tmp_0@GRAD" - slot_gradient: "embedding_389.tmp_0@GRAD" - slot_gradient: "embedding_390.tmp_0@GRAD" - slot_gradient: "embedding_391.tmp_0@GRAD" - slot_gradient: "embedding_392.tmp_0@GRAD" - slot_gradient: "embedding_393.tmp_0@GRAD" - slot_gradient: "embedding_394.tmp_0@GRAD" - slot_gradient: "embedding_395.tmp_0@GRAD" - slot_gradient: "embedding_396.tmp_0@GRAD" - slot_gradient: "embedding_397.tmp_0@GRAD" - slot_gradient: "embedding_398.tmp_0@GRAD" - slot_gradient: "embedding_399.tmp_0@GRAD" - slot_gradient: "embedding_400.tmp_0@GRAD" - slot_gradient: "embedding_401.tmp_0@GRAD" - slot_gradient: "embedding_402.tmp_0@GRAD" - slot_gradient: "embedding_403.tmp_0@GRAD" - slot_gradient: "embedding_404.tmp_0@GRAD" - slot_gradient: "embedding_405.tmp_0@GRAD" - slot_gradient: "embedding_406.tmp_0@GRAD" - slot_gradient: "embedding_407.tmp_0@GRAD" - } - skip_op: "lookup_table" - skip_op: "lookup_table_grad" -} -fs_client_param { - uri: "afs://xingtian.afs.baidu.com:9902" - user: "mlarch" - passwd: "Fv1M87" - hadoop_bin: "$HADOOP_HOME/bin/hadoop" -} diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/format_newcate_hotnews.awk b/feed/feed_deploy/news_jingpai/package/format_nets/format_newcate_hotnews.awk deleted file mode 100755 index 7820d405..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/format_newcate_hotnews.awk +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/awk -f -{ - if ($1 !~ /^([0-9a-zA-Z])+$/ || $2 !~ /^([0-9])+$/ || $3 !~ /^([0-9])+$/) { - next; - } - show = $2; - clk = $3; - if (clk > show) { - clk = show; - } - for (i = 0; i < clk; i++) { - $2 = "1"; - $3 = "1"; - print $0; - } - for (i = 0; i < show - clk; i++) { - $2 = "1"; - $3 = "0"; - print $0; - } -} diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/ins_weight.py b/feed/feed_deploy/news_jingpai/package/format_nets/ins_weight.py deleted file mode 100755 index 8b4d87c3..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/ins_weight.py +++ /dev/null @@ -1,122 +0,0 @@ -#!/usr/bin/python -import sys -import re -import math - -del_text_slot = True -g_ratio = 1 -w_ratio = 0.01 
-slots_str = "6048 6145 6202 6201 6121 6119 6146 6120 6147 6122 6123 6118 6142 6143 6008 6148 6151 6127 6144 6150 6109 6003 6096 6149 6129 6203 6153 6152 6128 6106 6251 7082 7515 7080 6066 7507 6186 6007 7514 6054 6125 7506 10001 6006 6080 7023 6085 10000 6250 6110 6124 6090 6082 6067 7516 6101 6004 6191 6188 6070 6194 6247 6814 7512 10007 6058 6189 6059 7517 10005 7510 7024 7502 7503 6183 7511 6060 6806 7504 6185 6810 6248 10004 6815 6182 10068 6069 6073 6196 6816 7513 6071 6809 6072 6817 6190 7505 6813 6192 6807 6808 6195 6826 6184 6197 6068 6812 7107 6811 6823 6824 6819 6818 6821 6822 6820 6094 6083 6952 6099 6951 6949 6098 7075 6948 6157 6126 7077 6111 6087 6103 6107 6156 6005 6158 7122 6155 7058 6115 7079 7081 6833 6108 6840 6837 7147 7129 6097 6231 6957 7145 6956 7143 6130 7149 7142 6212 6827 7144 6089 6161 7055 6233 6105 7057 6237 6828 6850 6163 7124 6354 6162 7146 6830 7123 6160 6235 7056 6081 6841 6132 6954 6131 6236 6831 6845 6832 6953 6839 6950 7125 7054 6138 6166 6076 6851 6353 7076 7148 6858 6842 6860 7126 6829 6835 7078 6866 6869 6871 7052 6134 6855 6947 6862 6215 6852 7128 6092 6112 6213 6232 6863 6113 6165 6214 6216 6873 6865 6870 6077 6234 6861 6164 6217 7127 6218 6962 7053 7051 6961 6002 6738 6739 10105 7064 6751 6770 7100 6014 6765 6755 10021 10022 6010 10056 6011 6756 10055 6768 10024 6023 10003 6769 10002 6767 6759 10018 6024 6064 6012 6050 10042 6168 6253 10010 10020 6015 6018 10033 10041 10039 10031 10016 6764 7083 7152 7066 6171 7150 7085 6255 10044 10008 7102 6167 6240 6238 6095 10017 10046 6019 6031 6763 6256 6169 6254 10034 7108 7186 6257 10019 6757 10040 6025 7019 7086 10029 10011 7104 6261 6013 6766 10106 7105 7153 7089 6057 7134 7151 7045 7005 7008 7101 6035 7137 10023 6036 6172 7099 7087 6239 7185 6170 10006 6243 6350 7103 7090 7157 6259 7171 6875 7084 7154 6242 6260 7155 7017 7048 7156 6959 7047 10053 7135 6244 7136 10030 7063 6760 7016 7065 7179 6881 7018 6876 10081 10052 10054 10038 6886 10069 7004 10051 7007 7109 10057 6029 6888 10009 6889 7021 10047 6245 6878 10067 6879 6884 7180 7182 10071 7002 6880 6890 6887 10061 6027 6877 6892 10060 6893 7050 10036 7049 10012 10025 7012 7183 10058 7181 10086 6891 6258 6894 6883 7046 6037 7106 10043 10048 10045 10087 6885 10013 10028 7187 10037 10035 10050 6895 7011 7170 7172 10026 10063 10095 10082 10084 6960 10092 10075 6038 7010 7015 10015 10027 10064 7184 10014 10059 7013 7020 10072 10066 10080 6896 10083 10090 6039 10049 7164 7165 10091 10099 6963 7166 10079 10103 7006 7009 7169 6034 7028 7029 7030 7034 7035 7036 7040 7041 7042 10032 6009 6241 7003 7014 7088 13326 13330 13331 13352 13353 6198" -slot_whitelist = slots_str.split(" ") - -def calc_ins_weight(params, label): - """calc ins weight""" - global g_ratio - global w_ratio - slots = [] - s_clk_num = 0 - s_show_num = 0 - active = 0 - attclk_num = 0 - attshow_num = 0 - attclk_avg = 0 - for items in params: - if len(items) != 2: - continue - slot_name = items[0] - slot_val = items[1] - if slot_name not in slots: - slots.append(slot_name) - if slot_name == "session_click_num": - s_clk_num = int(slot_val) - if slot_name == "session_show_num": - s_show_num = int(slot_val) - if slot_name == "activity": - active = float(slot_val) / 10000.0 - w = 1 - # for inactive user - if active >= 0 and active < 0.4 and s_show_num >=0 and s_show_num < 20: - w = math.log(w_ratio * (420 - (active * 50 + 1) * (s_show_num + 1)) + math.e) - if label == "0": - w = 1 + (w - 1) * g_ratio - return w - -def filter_whitelist_slot(tmp_line): - terms = tmp_line.split() - line = "%s %s %s" % 
(terms[0], terms[1], terms[2]) - for item in terms[3:]: - feasign = item.split(':') - if len(feasign) == 2 and \ - feasign[1] in slot_whitelist: - line = "%s %s" %(line, item) - return line - -def get_sample_type(line): - # vertical_type = 20 - # if line.find("13038012583501790:6738") > 0: - # return 30 - # vertical_type = 0/5/1/2/9/11/13/16/29/-1 - if (line.find("7408512894065610:6738") > 0) or \ - (line.find("8815887816424655:6738") > 0) or \ - (line.find("7689987878537419:6738") > 0) or \ - (line.find("7971462863009228:6738") > 0) or \ - (line.find("9941787754311891:6738") > 0) or \ - (line.find("10504737723255509:6738") > 0) or \ - (line.find("11067687692199127:6738") > 0) or \ - (line.find("11912112645614554:6738") > 0) or \ - (line.find("15571287443748071:6738") > 0) or \ - (line.find("7127025017546227:6738") > 0): - return 20 - return -1 - -def main(): - """ins adjust""" - global del_text_slot - for l in sys.stdin: - l = l.rstrip("\n") - items = l.split(" ") - if len(items) < 3: - continue - label = items[2] - lines = l.split("\t") - line = lines[0] - # streaming ins include all ins, sample_type only handle NEWS ins - sample_type = -1 - if 'NEWS' in l: - sample_type = get_sample_type(line) - #line = filter_whitelist_slot(tmp_line) - if len(lines) >= 4: - if 'VIDEO' in lines[3]: - continue - params = lines[2] - params = params.split(" ") - m = [tuple(i.split(":")) for i in params] - if m is None or len(m) == 0: - if sample_type > 0: - print "%s $%s *1" % (line, sample_type) - else: - print "%s *1" % line - sys.stdout.flush() - continue - weight = calc_ins_weight(m, label) - if sample_type > 0: - print "%s $%s *%s" % (line, sample_type, weight) - else: - print "%s *%s" % (line, weight) - sys.stdout.flush() - else: - if sample_type > 0: - print "%s $%s *1" % (line, sample_type) - else: - print "%s *1" % line - sys.stdout.flush() - -if __name__ == "__main__": - if len(sys.argv) > 1: - if sys.argv[1] == "0": - del_text_slot = False - if len(sys.argv) > 2: - g_ratio = float(sys.argv[2]) - if len(sys.argv) > 3: - w_ratio = float(sys.argv[3]) - main() diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/jingpai_fleet_desc_new.prototxt b/feed/feed_deploy/news_jingpai/package/format_nets/jingpai_fleet_desc_new.prototxt deleted file mode 100644 index baf86c34..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/jingpai_fleet_desc_new.prototxt +++ /dev/null @@ -1,1504 +0,0 @@ -server_param { - downpour_server_param { - downpour_table_param { - table_id: 0 - table_class: "DownpourSparseTable" - shard_num: 1950 - accessor { - accessor_class: "DownpourCtrAccessor" - sparse_sgd_param { - learning_rate: 0.05 - initial_g2sum: 3.0 - initial_range: 0.0001 - weight_bounds: -10.0 - weight_bounds: 10.0 - } - fea_dim: 11 - embedx_dim: 8 - embedx_threshold: 10 - downpour_accessor_param { - nonclk_coeff: 0.1 - click_coeff: 1 - base_threshold: 1.5 - delta_threshold: 0.25 - delta_keep_days: 16 - delete_after_unseen_days: 30 - show_click_decay_rate: 0.98 - delete_threshold: 0.8 - } - table_accessor_save_param { - param: 1 - converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)" - deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)" - } - table_accessor_save_param { - param: 2 - converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)" - deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)" - } - } - type: PS_SPARSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 1 - table_class: 
"DownpourDenseTable" - accessor { - accessor_class: "DownpourDenseValueAccessor" - dense_sgd_param { - name: "adam" - adam { - learning_rate: 5e-06 - avg_decay_rate: 0.999993 - ada_decay_rate: 0.9999 - ada_epsilon: 1e-08 - mom_decay_rate: 0.99 - } - naive { - learning_rate: 0.0002 - } - } - fea_dim: 2571127 - } - type: PS_DENSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 2 - table_class: "DownpourDenseDoubleTable" - accessor { - accessor_class: "DownpourDenseValueDoubleAccessor" - dense_sgd_param { - name: "summarydouble" - summary { - summary_decay_rate: 0.999999 - } - } - fea_dim: 13464 - } - type: PS_DENSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 3 - table_class: "DownpourDenseTable" - accessor { - accessor_class: "DownpourDenseValueAccessor" - dense_sgd_param { - name: "adam" - adam { - learning_rate: 5e-06 - avg_decay_rate: 0.999993 - ada_decay_rate: 0.9999 - ada_epsilon: 1e-08 - mom_decay_rate: 0.99 - } - naive { - learning_rate: 0.0002 - } - } - fea_dim: 834238 - } - type: PS_DENSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 4 - table_class: "DownpourDenseDoubleTable" - accessor { - accessor_class: "DownpourDenseValueDoubleAccessor" - dense_sgd_param { - name: "summarydouble" - summary { - summary_decay_rate: 0.999999 - } - } - fea_dim: 3267 - } - type: PS_DENSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 5 - table_class: "DownpourDenseTable" - accessor { - accessor_class: "DownpourDenseValueAccessor" - dense_sgd_param { - name: "adam" - adam { - learning_rate: 5e-06 - avg_decay_rate: 0.999993 - ada_decay_rate: 0.9999 - ada_epsilon: 1e-08 - mom_decay_rate: 0.99 - } - naive { - learning_rate: 0.0002 - } - } - fea_dim: 2072615 - } - type: PS_DENSE_TABLE - compress_in_save: true - } - service_param { - server_class: "DownpourBrpcPsServer" - client_class: "DownpourBrpcPsClient" - service_class: "DownpourPsService" - start_server_port: 0 - server_thread_num: 12 - } - } -} -trainer_param { - dense_table { - table_id: 1 - dense_variable_name: "fc_0.w_0" - dense_variable_name: "fc_0.b_0" - dense_variable_name: "fc_1.w_0" - dense_variable_name: "fc_1.b_0" - dense_variable_name: "fc_2.w_0" - dense_variable_name: "fc_2.b_0" - dense_variable_name: "fc_3.w_0" - dense_variable_name: "fc_3.b_0" - dense_variable_name: "fc_4.w_0" - dense_variable_name: "fc_4.b_0" - dense_variable_name: "fc_5.w_0" - dense_variable_name: "fc_5.b_0" - dense_variable_name: "fc_6.w_0" - dense_variable_name: "fc_6.b_0" - dense_variable_name: "fc_7.w_0" - dense_variable_name: "fc_7.b_0" - dense_gradient_variable_name: "fc_0.w_0@GRAD" - dense_gradient_variable_name: "fc_0.b_0@GRAD" - dense_gradient_variable_name: "fc_1.w_0@GRAD" - dense_gradient_variable_name: "fc_1.b_0@GRAD" - dense_gradient_variable_name: "fc_2.w_0@GRAD" - dense_gradient_variable_name: "fc_2.b_0@GRAD" - dense_gradient_variable_name: "fc_3.w_0@GRAD" - dense_gradient_variable_name: "fc_3.b_0@GRAD" - dense_gradient_variable_name: "fc_4.w_0@GRAD" - dense_gradient_variable_name: "fc_4.b_0@GRAD" - dense_gradient_variable_name: "fc_5.w_0@GRAD" - dense_gradient_variable_name: "fc_5.b_0@GRAD" - dense_gradient_variable_name: "fc_6.w_0@GRAD" - dense_gradient_variable_name: "fc_6.b_0@GRAD" - dense_gradient_variable_name: "fc_7.w_0@GRAD" - dense_gradient_variable_name: "fc_7.b_0@GRAD" - } - dense_table { - table_id: 2 - dense_variable_name: "bn6048.batch_size" - dense_variable_name: "bn6048.batch_sum" - dense_variable_name: "bn6048.batch_square_sum" - 
dense_gradient_variable_name: "bn6048.batch_size@GRAD" - dense_gradient_variable_name: "bn6048.batch_sum@GRAD" - dense_gradient_variable_name: "bn6048.batch_square_sum@GRAD" - } - dense_table { - table_id: 3 - dense_variable_name: "fc_0.w_0" - dense_variable_name: "fc_0.b_0" - dense_variable_name: "fc_1.w_0" - dense_variable_name: "fc_1.b_0" - dense_variable_name: "fc_2.w_0" - dense_variable_name: "fc_2.b_0" - dense_variable_name: "fc_3.w_0" - dense_variable_name: "fc_3.b_0" - dense_variable_name: "fc_4.w_0" - dense_variable_name: "fc_4.b_0" - dense_variable_name: "fc_5.w_0" - dense_variable_name: "fc_5.b_0" - dense_variable_name: "fc_6.w_0" - dense_variable_name: "fc_6.b_0" - dense_variable_name: "fc_7.w_0" - dense_variable_name: "fc_7.b_0" - dense_gradient_variable_name: "fc_0.w_0@GRAD" - dense_gradient_variable_name: "fc_0.b_0@GRAD" - dense_gradient_variable_name: "fc_1.w_0@GRAD" - dense_gradient_variable_name: "fc_1.b_0@GRAD" - dense_gradient_variable_name: "fc_2.w_0@GRAD" - dense_gradient_variable_name: "fc_2.b_0@GRAD" - dense_gradient_variable_name: "fc_3.w_0@GRAD" - dense_gradient_variable_name: "fc_3.b_0@GRAD" - dense_gradient_variable_name: "fc_4.w_0@GRAD" - dense_gradient_variable_name: "fc_4.b_0@GRAD" - dense_gradient_variable_name: "fc_5.w_0@GRAD" - dense_gradient_variable_name: "fc_5.b_0@GRAD" - dense_gradient_variable_name: "fc_6.w_0@GRAD" - dense_gradient_variable_name: "fc_6.b_0@GRAD" - dense_gradient_variable_name: "fc_7.w_0@GRAD" - dense_gradient_variable_name: "fc_7.b_0@GRAD" - } - dense_table { - table_id: 4 - dense_variable_name: "bn6048.batch_size" - dense_variable_name: "bn6048.batch_sum" - dense_variable_name: "bn6048.batch_square_sum" - dense_gradient_variable_name: "bn6048.batch_size@GRAD" - dense_gradient_variable_name: "bn6048.batch_sum@GRAD" - dense_gradient_variable_name: "bn6048.batch_square_sum@GRAD" - } - dense_table { - table_id: 5 - dense_variable_name: "fc_0.w_0" - dense_variable_name: "fc_0.b_0" - dense_variable_name: "fc_1.w_0" - dense_variable_name: "fc_1.b_0" - dense_variable_name: "fc_2.w_0" - dense_variable_name: "fc_2.b_0" - dense_variable_name: "fc_3.w_0" - dense_variable_name: "fc_3.b_0" - dense_variable_name: "fc_4.w_0" - dense_variable_name: "fc_4.b_0" - dense_variable_name: "fc_5.w_0" - dense_variable_name: "fc_5.b_0" - dense_gradient_variable_name: "fc_0.w_0@GRAD" - dense_gradient_variable_name: "fc_0.b_0@GRAD" - dense_gradient_variable_name: "fc_1.w_0@GRAD" - dense_gradient_variable_name: "fc_1.b_0@GRAD" - dense_gradient_variable_name: "fc_2.w_0@GRAD" - dense_gradient_variable_name: "fc_2.b_0@GRAD" - dense_gradient_variable_name: "fc_3.w_0@GRAD" - dense_gradient_variable_name: "fc_3.b_0@GRAD" - dense_gradient_variable_name: "fc_4.w_0@GRAD" - dense_gradient_variable_name: "fc_4.b_0@GRAD" - dense_gradient_variable_name: "fc_5.w_0@GRAD" - dense_gradient_variable_name: "fc_5.b_0@GRAD" - } - sparse_table { - table_id: 0 - slot_key: "6048" - slot_key: "6002" - slot_key: "6145" - slot_key: "6202" - slot_key: "6201" - slot_key: "6121" - slot_key: "6738" - slot_key: "6119" - slot_key: "6146" - slot_key: "6120" - slot_key: "6147" - slot_key: "6122" - slot_key: "6123" - slot_key: "6118" - slot_key: "6142" - slot_key: "6143" - slot_key: "6008" - slot_key: "6148" - slot_key: "6151" - slot_key: "6127" - slot_key: "6144" - slot_key: "6094" - slot_key: "6083" - slot_key: "6952" - slot_key: "6739" - slot_key: "6150" - slot_key: "6109" - slot_key: "6003" - slot_key: "6099" - slot_key: "6149" - slot_key: "6129" - slot_key: "6203" - slot_key: "6153" - 
slot_key: "6152" - slot_key: "6128" - slot_key: "6106" - slot_key: "6251" - slot_key: "7082" - slot_key: "7515" - slot_key: "6951" - slot_key: "6949" - slot_key: "7080" - slot_key: "6066" - slot_key: "7507" - slot_key: "6186" - slot_key: "6007" - slot_key: "7514" - slot_key: "6125" - slot_key: "7506" - slot_key: "10001" - slot_key: "6006" - slot_key: "7023" - slot_key: "6085" - slot_key: "10000" - slot_key: "6098" - slot_key: "6250" - slot_key: "6110" - slot_key: "6124" - slot_key: "6090" - slot_key: "6082" - slot_key: "6067" - slot_key: "6101" - slot_key: "6004" - slot_key: "6191" - slot_key: "7075" - slot_key: "6948" - slot_key: "6157" - slot_key: "6126" - slot_key: "6188" - slot_key: "7077" - slot_key: "6070" - slot_key: "6111" - slot_key: "6087" - slot_key: "6103" - slot_key: "6107" - slot_key: "6194" - slot_key: "6156" - slot_key: "6005" - slot_key: "6247" - slot_key: "6814" - slot_key: "6158" - slot_key: "7122" - slot_key: "6058" - slot_key: "6189" - slot_key: "7058" - slot_key: "6059" - slot_key: "6115" - slot_key: "7079" - slot_key: "7081" - slot_key: "6833" - slot_key: "7024" - slot_key: "6108" - slot_key: "13342" - slot_key: "13345" - slot_key: "13412" - slot_key: "13343" - slot_key: "13350" - slot_key: "13346" - slot_key: "13409" - slot_key: "6009" - slot_key: "6011" - slot_key: "6012" - slot_key: "6013" - slot_key: "6014" - slot_key: "6015" - slot_key: "6019" - slot_key: "6023" - slot_key: "6024" - slot_key: "6027" - slot_key: "6029" - slot_key: "6031" - slot_key: "6050" - slot_key: "6060" - slot_key: "6068" - slot_key: "6069" - slot_key: "6089" - slot_key: "6095" - slot_key: "6105" - slot_key: "6112" - slot_key: "6130" - slot_key: "6131" - slot_key: "6132" - slot_key: "6134" - slot_key: "6161" - slot_key: "6162" - slot_key: "6163" - slot_key: "6166" - slot_key: "6182" - slot_key: "6183" - slot_key: "6185" - slot_key: "6190" - slot_key: "6212" - slot_key: "6213" - slot_key: "6231" - slot_key: "6233" - slot_key: "6234" - slot_key: "6236" - slot_key: "6238" - slot_key: "6239" - slot_key: "6240" - slot_key: "6241" - slot_key: "6242" - slot_key: "6243" - slot_key: "6244" - slot_key: "6245" - slot_key: "6354" - slot_key: "7002" - slot_key: "7005" - slot_key: "7008" - slot_key: "7010" - slot_key: "7012" - slot_key: "7013" - slot_key: "7015" - slot_key: "7016" - slot_key: "7017" - slot_key: "7018" - slot_key: "7019" - slot_key: "7020" - slot_key: "7045" - slot_key: "7046" - slot_key: "7048" - slot_key: "7049" - slot_key: "7052" - slot_key: "7054" - slot_key: "7056" - slot_key: "7064" - slot_key: "7066" - slot_key: "7076" - slot_key: "7078" - slot_key: "7083" - slot_key: "7084" - slot_key: "7085" - slot_key: "7086" - slot_key: "7087" - slot_key: "7088" - slot_key: "7089" - slot_key: "7090" - slot_key: "7099" - slot_key: "7100" - slot_key: "7101" - slot_key: "7102" - slot_key: "7103" - slot_key: "7104" - slot_key: "7105" - slot_key: "7109" - slot_key: "7124" - slot_key: "7126" - slot_key: "7136" - slot_key: "7142" - slot_key: "7143" - slot_key: "7144" - slot_key: "7145" - slot_key: "7146" - slot_key: "7147" - slot_key: "7148" - slot_key: "7150" - slot_key: "7151" - slot_key: "7152" - slot_key: "7153" - slot_key: "7154" - slot_key: "7155" - slot_key: "7156" - slot_key: "7157" - slot_key: "7047" - slot_key: "7050" - slot_key: "6253" - slot_key: "6254" - slot_key: "6255" - slot_key: "6256" - slot_key: "6257" - slot_key: "6259" - slot_key: "6260" - slot_key: "6261" - slot_key: "7170" - slot_key: "7185" - slot_key: "7186" - slot_key: "6751" - slot_key: "6755" - slot_key: "6757" - slot_key: 
"6759" - slot_key: "6760" - slot_key: "6763" - slot_key: "6764" - slot_key: "6765" - slot_key: "6766" - slot_key: "6767" - slot_key: "6768" - slot_key: "6769" - slot_key: "6770" - slot_key: "7502" - slot_key: "7503" - slot_key: "7504" - slot_key: "7505" - slot_key: "7510" - slot_key: "7511" - slot_key: "7512" - slot_key: "7513" - slot_key: "6806" - slot_key: "6807" - slot_key: "6808" - slot_key: "6809" - slot_key: "6810" - slot_key: "6811" - slot_key: "6812" - slot_key: "6813" - slot_key: "6815" - slot_key: "6816" - slot_key: "6817" - slot_key: "6819" - slot_key: "6823" - slot_key: "6828" - slot_key: "6831" - slot_key: "6840" - slot_key: "6845" - slot_key: "6875" - slot_key: "6879" - slot_key: "6881" - slot_key: "6888" - slot_key: "6889" - slot_key: "6947" - slot_key: "6950" - slot_key: "6956" - slot_key: "6957" - slot_key: "6959" - slot_key: "10006" - slot_key: "10008" - slot_key: "10009" - slot_key: "10010" - slot_key: "10011" - slot_key: "10016" - slot_key: "10017" - slot_key: "10018" - slot_key: "10019" - slot_key: "10020" - slot_key: "10021" - slot_key: "10022" - slot_key: "10023" - slot_key: "10024" - slot_key: "10029" - slot_key: "10030" - slot_key: "10031" - slot_key: "10032" - slot_key: "10033" - slot_key: "10034" - slot_key: "10035" - slot_key: "10036" - slot_key: "10037" - slot_key: "10038" - slot_key: "10039" - slot_key: "10040" - slot_key: "10041" - slot_key: "10042" - slot_key: "10044" - slot_key: "10045" - slot_key: "10046" - slot_key: "10051" - slot_key: "10052" - slot_key: "10053" - slot_key: "10054" - slot_key: "10055" - slot_key: "10056" - slot_key: "10057" - slot_key: "10060" - slot_key: "10066" - slot_key: "10069" - slot_key: "6820" - slot_key: "6821" - slot_key: "6822" - slot_key: "13333" - slot_key: "13334" - slot_key: "13335" - slot_key: "13336" - slot_key: "13337" - slot_key: "13338" - slot_key: "13339" - slot_key: "13340" - slot_key: "13341" - slot_key: "13351" - slot_key: "13352" - slot_key: "13353" - slot_key: "13359" - slot_key: "13361" - slot_key: "13362" - slot_key: "13363" - slot_key: "13366" - slot_key: "13367" - slot_key: "13368" - slot_key: "13369" - slot_key: "13370" - slot_key: "13371" - slot_key: "13375" - slot_key: "13376" - slot_key: "5700" - slot_key: "5702" - slot_key: "13400" - slot_key: "13401" - slot_key: "13402" - slot_key: "13403" - slot_key: "13404" - slot_key: "13406" - slot_key: "13407" - slot_key: "13408" - slot_key: "13410" - slot_key: "13417" - slot_key: "13418" - slot_key: "13419" - slot_key: "13420" - slot_key: "13422" - slot_key: "13425" - slot_key: "13427" - slot_key: "13428" - slot_key: "13429" - slot_key: "13430" - slot_key: "13431" - slot_key: "13433" - slot_key: "13434" - slot_key: "13436" - slot_key: "13437" - slot_key: "13326" - slot_key: "13330" - slot_key: "13331" - slot_key: "5717" - slot_key: "13442" - slot_key: "13451" - slot_key: "13452" - slot_key: "13455" - slot_key: "13456" - slot_key: "13457" - slot_key: "13458" - slot_key: "13459" - slot_key: "13460" - slot_key: "13461" - slot_key: "13462" - slot_key: "13463" - slot_key: "13464" - slot_key: "13465" - slot_key: "13466" - slot_key: "13467" - slot_key: "13468" - slot_key: "1104" - slot_key: "1106" - slot_key: "1107" - slot_key: "1108" - slot_key: "1109" - slot_key: "1110" - slot_key: "1111" - slot_key: "1112" - slot_key: "1113" - slot_key: "1114" - slot_key: "1115" - slot_key: "1116" - slot_key: "1117" - slot_key: "1119" - slot_key: "1120" - slot_key: "1121" - slot_key: "1122" - slot_key: "1123" - slot_key: "1124" - slot_key: "1125" - slot_key: "1126" - slot_key: "1127" 
- slot_key: "1128" - slot_key: "1129" - slot_key: "13812" - slot_key: "13813" - slot_key: "6740" - slot_key: "1490" - slot_key: "1491" - slot_value: "embedding_0.tmp_0" - slot_value: "embedding_1.tmp_0" - slot_value: "embedding_2.tmp_0" - slot_value: "embedding_3.tmp_0" - slot_value: "embedding_4.tmp_0" - slot_value: "embedding_5.tmp_0" - slot_value: "embedding_6.tmp_0" - slot_value: "embedding_7.tmp_0" - slot_value: "embedding_8.tmp_0" - slot_value: "embedding_9.tmp_0" - slot_value: "embedding_10.tmp_0" - slot_value: "embedding_11.tmp_0" - slot_value: "embedding_12.tmp_0" - slot_value: "embedding_13.tmp_0" - slot_value: "embedding_14.tmp_0" - slot_value: "embedding_15.tmp_0" - slot_value: "embedding_16.tmp_0" - slot_value: "embedding_17.tmp_0" - slot_value: "embedding_18.tmp_0" - slot_value: "embedding_19.tmp_0" - slot_value: "embedding_20.tmp_0" - slot_value: "embedding_21.tmp_0" - slot_value: "embedding_22.tmp_0" - slot_value: "embedding_23.tmp_0" - slot_value: "embedding_24.tmp_0" - slot_value: "embedding_25.tmp_0" - slot_value: "embedding_26.tmp_0" - slot_value: "embedding_27.tmp_0" - slot_value: "embedding_28.tmp_0" - slot_value: "embedding_29.tmp_0" - slot_value: "embedding_30.tmp_0" - slot_value: "embedding_31.tmp_0" - slot_value: "embedding_32.tmp_0" - slot_value: "embedding_33.tmp_0" - slot_value: "embedding_34.tmp_0" - slot_value: "embedding_35.tmp_0" - slot_value: "embedding_36.tmp_0" - slot_value: "embedding_37.tmp_0" - slot_value: "embedding_38.tmp_0" - slot_value: "embedding_39.tmp_0" - slot_value: "embedding_40.tmp_0" - slot_value: "embedding_41.tmp_0" - slot_value: "embedding_42.tmp_0" - slot_value: "embedding_43.tmp_0" - slot_value: "embedding_44.tmp_0" - slot_value: "embedding_45.tmp_0" - slot_value: "embedding_46.tmp_0" - slot_value: "embedding_47.tmp_0" - slot_value: "embedding_48.tmp_0" - slot_value: "embedding_49.tmp_0" - slot_value: "embedding_50.tmp_0" - slot_value: "embedding_51.tmp_0" - slot_value: "embedding_52.tmp_0" - slot_value: "embedding_53.tmp_0" - slot_value: "embedding_54.tmp_0" - slot_value: "embedding_55.tmp_0" - slot_value: "embedding_56.tmp_0" - slot_value: "embedding_57.tmp_0" - slot_value: "embedding_58.tmp_0" - slot_value: "embedding_59.tmp_0" - slot_value: "embedding_60.tmp_0" - slot_value: "embedding_61.tmp_0" - slot_value: "embedding_62.tmp_0" - slot_value: "embedding_63.tmp_0" - slot_value: "embedding_64.tmp_0" - slot_value: "embedding_65.tmp_0" - slot_value: "embedding_66.tmp_0" - slot_value: "embedding_67.tmp_0" - slot_value: "embedding_68.tmp_0" - slot_value: "embedding_69.tmp_0" - slot_value: "embedding_70.tmp_0" - slot_value: "embedding_71.tmp_0" - slot_value: "embedding_72.tmp_0" - slot_value: "embedding_73.tmp_0" - slot_value: "embedding_74.tmp_0" - slot_value: "embedding_75.tmp_0" - slot_value: "embedding_76.tmp_0" - slot_value: "embedding_77.tmp_0" - slot_value: "embedding_78.tmp_0" - slot_value: "embedding_79.tmp_0" - slot_value: "embedding_80.tmp_0" - slot_value: "embedding_81.tmp_0" - slot_value: "embedding_82.tmp_0" - slot_value: "embedding_83.tmp_0" - slot_value: "embedding_84.tmp_0" - slot_value: "embedding_85.tmp_0" - slot_value: "embedding_86.tmp_0" - slot_value: "embedding_87.tmp_0" - slot_value: "embedding_88.tmp_0" - slot_value: "embedding_89.tmp_0" - slot_value: "embedding_90.tmp_0" - slot_value: "embedding_91.tmp_0" - slot_value: "embedding_92.tmp_0" - slot_value: "embedding_93.tmp_0" - slot_value: "embedding_94.tmp_0" - slot_value: "embedding_95.tmp_0" - slot_value: "embedding_96.tmp_0" - slot_value: "embedding_97.tmp_0" 
- slot_value: "embedding_98.tmp_0" - slot_value: "embedding_99.tmp_0" - slot_value: "embedding_100.tmp_0" - slot_value: "embedding_101.tmp_0" - slot_value: "embedding_102.tmp_0" - slot_value: "embedding_103.tmp_0" - slot_value: "embedding_104.tmp_0" - slot_value: "embedding_105.tmp_0" - slot_value: "embedding_106.tmp_0" - slot_value: "embedding_107.tmp_0" - slot_value: "embedding_108.tmp_0" - slot_value: "embedding_109.tmp_0" - slot_value: "embedding_110.tmp_0" - slot_value: "embedding_111.tmp_0" - slot_value: "embedding_112.tmp_0" - slot_value: "embedding_113.tmp_0" - slot_value: "embedding_114.tmp_0" - slot_value: "embedding_115.tmp_0" - slot_value: "embedding_116.tmp_0" - slot_value: "embedding_117.tmp_0" - slot_value: "embedding_118.tmp_0" - slot_value: "embedding_119.tmp_0" - slot_value: "embedding_120.tmp_0" - slot_value: "embedding_121.tmp_0" - slot_value: "embedding_122.tmp_0" - slot_value: "embedding_123.tmp_0" - slot_value: "embedding_124.tmp_0" - slot_value: "embedding_125.tmp_0" - slot_value: "embedding_126.tmp_0" - slot_value: "embedding_127.tmp_0" - slot_value: "embedding_128.tmp_0" - slot_value: "embedding_129.tmp_0" - slot_value: "embedding_130.tmp_0" - slot_value: "embedding_131.tmp_0" - slot_value: "embedding_132.tmp_0" - slot_value: "embedding_133.tmp_0" - slot_value: "embedding_134.tmp_0" - slot_value: "embedding_135.tmp_0" - slot_value: "embedding_136.tmp_0" - slot_value: "embedding_137.tmp_0" - slot_value: "embedding_138.tmp_0" - slot_value: "embedding_139.tmp_0" - slot_value: "embedding_140.tmp_0" - slot_value: "embedding_141.tmp_0" - slot_value: "embedding_142.tmp_0" - slot_value: "embedding_143.tmp_0" - slot_value: "embedding_144.tmp_0" - slot_value: "embedding_145.tmp_0" - slot_value: "embedding_146.tmp_0" - slot_value: "embedding_147.tmp_0" - slot_value: "embedding_148.tmp_0" - slot_value: "embedding_149.tmp_0" - slot_value: "embedding_150.tmp_0" - slot_value: "embedding_151.tmp_0" - slot_value: "embedding_152.tmp_0" - slot_value: "embedding_153.tmp_0" - slot_value: "embedding_154.tmp_0" - slot_value: "embedding_155.tmp_0" - slot_value: "embedding_156.tmp_0" - slot_value: "embedding_157.tmp_0" - slot_value: "embedding_158.tmp_0" - slot_value: "embedding_159.tmp_0" - slot_value: "embedding_160.tmp_0" - slot_value: "embedding_161.tmp_0" - slot_value: "embedding_162.tmp_0" - slot_value: "embedding_163.tmp_0" - slot_value: "embedding_164.tmp_0" - slot_value: "embedding_165.tmp_0" - slot_value: "embedding_166.tmp_0" - slot_value: "embedding_167.tmp_0" - slot_value: "embedding_168.tmp_0" - slot_value: "embedding_169.tmp_0" - slot_value: "embedding_170.tmp_0" - slot_value: "embedding_171.tmp_0" - slot_value: "embedding_172.tmp_0" - slot_value: "embedding_173.tmp_0" - slot_value: "embedding_174.tmp_0" - slot_value: "embedding_175.tmp_0" - slot_value: "embedding_176.tmp_0" - slot_value: "embedding_177.tmp_0" - slot_value: "embedding_178.tmp_0" - slot_value: "embedding_179.tmp_0" - slot_value: "embedding_180.tmp_0" - slot_value: "embedding_181.tmp_0" - slot_value: "embedding_182.tmp_0" - slot_value: "embedding_183.tmp_0" - slot_value: "embedding_184.tmp_0" - slot_value: "embedding_185.tmp_0" - slot_value: "embedding_186.tmp_0" - slot_value: "embedding_187.tmp_0" - slot_value: "embedding_188.tmp_0" - slot_value: "embedding_189.tmp_0" - slot_value: "embedding_190.tmp_0" - slot_value: "embedding_191.tmp_0" - slot_value: "embedding_192.tmp_0" - slot_value: "embedding_193.tmp_0" - slot_value: "embedding_194.tmp_0" - slot_value: "embedding_195.tmp_0" - slot_value: 
"embedding_196.tmp_0" - slot_value: "embedding_197.tmp_0" - slot_value: "embedding_198.tmp_0" - slot_value: "embedding_199.tmp_0" - slot_value: "embedding_200.tmp_0" - slot_value: "embedding_201.tmp_0" - slot_value: "embedding_202.tmp_0" - slot_value: "embedding_203.tmp_0" - slot_value: "embedding_204.tmp_0" - slot_value: "embedding_205.tmp_0" - slot_value: "embedding_206.tmp_0" - slot_value: "embedding_207.tmp_0" - slot_value: "embedding_208.tmp_0" - slot_value: "embedding_209.tmp_0" - slot_value: "embedding_210.tmp_0" - slot_value: "embedding_211.tmp_0" - slot_value: "embedding_212.tmp_0" - slot_value: "embedding_213.tmp_0" - slot_value: "embedding_214.tmp_0" - slot_value: "embedding_215.tmp_0" - slot_value: "embedding_216.tmp_0" - slot_value: "embedding_217.tmp_0" - slot_value: "embedding_218.tmp_0" - slot_value: "embedding_219.tmp_0" - slot_value: "embedding_220.tmp_0" - slot_value: "embedding_221.tmp_0" - slot_value: "embedding_222.tmp_0" - slot_value: "embedding_223.tmp_0" - slot_value: "embedding_224.tmp_0" - slot_value: "embedding_225.tmp_0" - slot_value: "embedding_226.tmp_0" - slot_value: "embedding_227.tmp_0" - slot_value: "embedding_228.tmp_0" - slot_value: "embedding_229.tmp_0" - slot_value: "embedding_230.tmp_0" - slot_value: "embedding_231.tmp_0" - slot_value: "embedding_232.tmp_0" - slot_value: "embedding_233.tmp_0" - slot_value: "embedding_234.tmp_0" - slot_value: "embedding_235.tmp_0" - slot_value: "embedding_236.tmp_0" - slot_value: "embedding_237.tmp_0" - slot_value: "embedding_238.tmp_0" - slot_value: "embedding_239.tmp_0" - slot_value: "embedding_240.tmp_0" - slot_value: "embedding_241.tmp_0" - slot_value: "embedding_242.tmp_0" - slot_value: "embedding_243.tmp_0" - slot_value: "embedding_244.tmp_0" - slot_value: "embedding_245.tmp_0" - slot_value: "embedding_246.tmp_0" - slot_value: "embedding_247.tmp_0" - slot_value: "embedding_248.tmp_0" - slot_value: "embedding_249.tmp_0" - slot_value: "embedding_250.tmp_0" - slot_value: "embedding_251.tmp_0" - slot_value: "embedding_252.tmp_0" - slot_value: "embedding_253.tmp_0" - slot_value: "embedding_254.tmp_0" - slot_value: "embedding_255.tmp_0" - slot_value: "embedding_256.tmp_0" - slot_value: "embedding_257.tmp_0" - slot_value: "embedding_258.tmp_0" - slot_value: "embedding_259.tmp_0" - slot_value: "embedding_260.tmp_0" - slot_value: "embedding_261.tmp_0" - slot_value: "embedding_262.tmp_0" - slot_value: "embedding_263.tmp_0" - slot_value: "embedding_264.tmp_0" - slot_value: "embedding_265.tmp_0" - slot_value: "embedding_266.tmp_0" - slot_value: "embedding_267.tmp_0" - slot_value: "embedding_268.tmp_0" - slot_value: "embedding_269.tmp_0" - slot_value: "embedding_270.tmp_0" - slot_value: "embedding_271.tmp_0" - slot_value: "embedding_272.tmp_0" - slot_value: "embedding_273.tmp_0" - slot_value: "embedding_274.tmp_0" - slot_value: "embedding_275.tmp_0" - slot_value: "embedding_276.tmp_0" - slot_value: "embedding_277.tmp_0" - slot_value: "embedding_278.tmp_0" - slot_value: "embedding_279.tmp_0" - slot_value: "embedding_280.tmp_0" - slot_value: "embedding_281.tmp_0" - slot_value: "embedding_282.tmp_0" - slot_value: "embedding_283.tmp_0" - slot_value: "embedding_284.tmp_0" - slot_value: "embedding_285.tmp_0" - slot_value: "embedding_286.tmp_0" - slot_value: "embedding_287.tmp_0" - slot_value: "embedding_288.tmp_0" - slot_value: "embedding_289.tmp_0" - slot_value: "embedding_290.tmp_0" - slot_value: "embedding_291.tmp_0" - slot_value: "embedding_292.tmp_0" - slot_value: "embedding_293.tmp_0" - slot_value: "embedding_294.tmp_0" - 
slot_value: "embedding_295.tmp_0" - slot_value: "embedding_296.tmp_0" - slot_value: "embedding_297.tmp_0" - slot_value: "embedding_298.tmp_0" - slot_value: "embedding_299.tmp_0" - slot_value: "embedding_300.tmp_0" - slot_value: "embedding_301.tmp_0" - slot_value: "embedding_302.tmp_0" - slot_value: "embedding_303.tmp_0" - slot_value: "embedding_304.tmp_0" - slot_value: "embedding_305.tmp_0" - slot_value: "embedding_306.tmp_0" - slot_value: "embedding_307.tmp_0" - slot_value: "embedding_308.tmp_0" - slot_value: "embedding_309.tmp_0" - slot_value: "embedding_310.tmp_0" - slot_value: "embedding_311.tmp_0" - slot_value: "embedding_312.tmp_0" - slot_value: "embedding_313.tmp_0" - slot_value: "embedding_314.tmp_0" - slot_value: "embedding_315.tmp_0" - slot_value: "embedding_316.tmp_0" - slot_value: "embedding_317.tmp_0" - slot_value: "embedding_318.tmp_0" - slot_value: "embedding_319.tmp_0" - slot_value: "embedding_320.tmp_0" - slot_value: "embedding_321.tmp_0" - slot_value: "embedding_322.tmp_0" - slot_value: "embedding_323.tmp_0" - slot_value: "embedding_324.tmp_0" - slot_value: "embedding_325.tmp_0" - slot_value: "embedding_326.tmp_0" - slot_value: "embedding_327.tmp_0" - slot_value: "embedding_328.tmp_0" - slot_value: "embedding_329.tmp_0" - slot_value: "embedding_330.tmp_0" - slot_value: "embedding_331.tmp_0" - slot_value: "embedding_332.tmp_0" - slot_value: "embedding_333.tmp_0" - slot_value: "embedding_334.tmp_0" - slot_value: "embedding_335.tmp_0" - slot_value: "embedding_336.tmp_0" - slot_value: "embedding_337.tmp_0" - slot_value: "embedding_338.tmp_0" - slot_value: "embedding_339.tmp_0" - slot_value: "embedding_340.tmp_0" - slot_value: "embedding_341.tmp_0" - slot_value: "embedding_342.tmp_0" - slot_value: "embedding_343.tmp_0" - slot_value: "embedding_344.tmp_0" - slot_value: "embedding_345.tmp_0" - slot_value: "embedding_346.tmp_0" - slot_value: "embedding_347.tmp_0" - slot_value: "embedding_348.tmp_0" - slot_value: "embedding_349.tmp_0" - slot_value: "embedding_350.tmp_0" - slot_value: "embedding_351.tmp_0" - slot_value: "embedding_352.tmp_0" - slot_value: "embedding_353.tmp_0" - slot_value: "embedding_354.tmp_0" - slot_value: "embedding_355.tmp_0" - slot_value: "embedding_356.tmp_0" - slot_value: "embedding_357.tmp_0" - slot_value: "embedding_358.tmp_0" - slot_value: "embedding_359.tmp_0" - slot_value: "embedding_360.tmp_0" - slot_value: "embedding_361.tmp_0" - slot_value: "embedding_362.tmp_0" - slot_value: "embedding_363.tmp_0" - slot_value: "embedding_364.tmp_0" - slot_value: "embedding_365.tmp_0" - slot_value: "embedding_366.tmp_0" - slot_value: "embedding_367.tmp_0" - slot_value: "embedding_368.tmp_0" - slot_value: "embedding_369.tmp_0" - slot_value: "embedding_370.tmp_0" - slot_value: "embedding_371.tmp_0" - slot_value: "embedding_372.tmp_0" - slot_value: "embedding_373.tmp_0" - slot_value: "embedding_374.tmp_0" - slot_value: "embedding_375.tmp_0" - slot_value: "embedding_376.tmp_0" - slot_value: "embedding_377.tmp_0" - slot_value: "embedding_378.tmp_0" - slot_value: "embedding_379.tmp_0" - slot_value: "embedding_380.tmp_0" - slot_value: "embedding_381.tmp_0" - slot_value: "embedding_382.tmp_0" - slot_value: "embedding_383.tmp_0" - slot_value: "embedding_384.tmp_0" - slot_value: "embedding_385.tmp_0" - slot_value: "embedding_386.tmp_0" - slot_value: "embedding_387.tmp_0" - slot_value: "embedding_388.tmp_0" - slot_value: "embedding_389.tmp_0" - slot_value: "embedding_390.tmp_0" - slot_value: "embedding_391.tmp_0" - slot_value: "embedding_392.tmp_0" - slot_value: 
"embedding_393.tmp_0" - slot_value: "embedding_394.tmp_0" - slot_value: "embedding_395.tmp_0" - slot_value: "embedding_396.tmp_0" - slot_value: "embedding_397.tmp_0" - slot_value: "embedding_398.tmp_0" - slot_value: "embedding_399.tmp_0" - slot_value: "embedding_400.tmp_0" - slot_value: "embedding_401.tmp_0" - slot_value: "embedding_402.tmp_0" - slot_value: "embedding_403.tmp_0" - slot_value: "embedding_404.tmp_0" - slot_value: "embedding_405.tmp_0" - slot_value: "embedding_406.tmp_0" - slot_value: "embedding_407.tmp_0" - slot_gradient: "embedding_0.tmp_0@GRAD" - slot_gradient: "embedding_1.tmp_0@GRAD" - slot_gradient: "embedding_2.tmp_0@GRAD" - slot_gradient: "embedding_3.tmp_0@GRAD" - slot_gradient: "embedding_4.tmp_0@GRAD" - slot_gradient: "embedding_5.tmp_0@GRAD" - slot_gradient: "embedding_6.tmp_0@GRAD" - slot_gradient: "embedding_7.tmp_0@GRAD" - slot_gradient: "embedding_8.tmp_0@GRAD" - slot_gradient: "embedding_9.tmp_0@GRAD" - slot_gradient: "embedding_10.tmp_0@GRAD" - slot_gradient: "embedding_11.tmp_0@GRAD" - slot_gradient: "embedding_12.tmp_0@GRAD" - slot_gradient: "embedding_13.tmp_0@GRAD" - slot_gradient: "embedding_14.tmp_0@GRAD" - slot_gradient: "embedding_15.tmp_0@GRAD" - slot_gradient: "embedding_16.tmp_0@GRAD" - slot_gradient: "embedding_17.tmp_0@GRAD" - slot_gradient: "embedding_18.tmp_0@GRAD" - slot_gradient: "embedding_19.tmp_0@GRAD" - slot_gradient: "embedding_20.tmp_0@GRAD" - slot_gradient: "embedding_21.tmp_0@GRAD" - slot_gradient: "embedding_22.tmp_0@GRAD" - slot_gradient: "embedding_23.tmp_0@GRAD" - slot_gradient: "embedding_24.tmp_0@GRAD" - slot_gradient: "embedding_25.tmp_0@GRAD" - slot_gradient: "embedding_26.tmp_0@GRAD" - slot_gradient: "embedding_27.tmp_0@GRAD" - slot_gradient: "embedding_28.tmp_0@GRAD" - slot_gradient: "embedding_29.tmp_0@GRAD" - slot_gradient: "embedding_30.tmp_0@GRAD" - slot_gradient: "embedding_31.tmp_0@GRAD" - slot_gradient: "embedding_32.tmp_0@GRAD" - slot_gradient: "embedding_33.tmp_0@GRAD" - slot_gradient: "embedding_34.tmp_0@GRAD" - slot_gradient: "embedding_35.tmp_0@GRAD" - slot_gradient: "embedding_36.tmp_0@GRAD" - slot_gradient: "embedding_37.tmp_0@GRAD" - slot_gradient: "embedding_38.tmp_0@GRAD" - slot_gradient: "embedding_39.tmp_0@GRAD" - slot_gradient: "embedding_40.tmp_0@GRAD" - slot_gradient: "embedding_41.tmp_0@GRAD" - slot_gradient: "embedding_42.tmp_0@GRAD" - slot_gradient: "embedding_43.tmp_0@GRAD" - slot_gradient: "embedding_44.tmp_0@GRAD" - slot_gradient: "embedding_45.tmp_0@GRAD" - slot_gradient: "embedding_46.tmp_0@GRAD" - slot_gradient: "embedding_47.tmp_0@GRAD" - slot_gradient: "embedding_48.tmp_0@GRAD" - slot_gradient: "embedding_49.tmp_0@GRAD" - slot_gradient: "embedding_50.tmp_0@GRAD" - slot_gradient: "embedding_51.tmp_0@GRAD" - slot_gradient: "embedding_52.tmp_0@GRAD" - slot_gradient: "embedding_53.tmp_0@GRAD" - slot_gradient: "embedding_54.tmp_0@GRAD" - slot_gradient: "embedding_55.tmp_0@GRAD" - slot_gradient: "embedding_56.tmp_0@GRAD" - slot_gradient: "embedding_57.tmp_0@GRAD" - slot_gradient: "embedding_58.tmp_0@GRAD" - slot_gradient: "embedding_59.tmp_0@GRAD" - slot_gradient: "embedding_60.tmp_0@GRAD" - slot_gradient: "embedding_61.tmp_0@GRAD" - slot_gradient: "embedding_62.tmp_0@GRAD" - slot_gradient: "embedding_63.tmp_0@GRAD" - slot_gradient: "embedding_64.tmp_0@GRAD" - slot_gradient: "embedding_65.tmp_0@GRAD" - slot_gradient: "embedding_66.tmp_0@GRAD" - slot_gradient: "embedding_67.tmp_0@GRAD" - slot_gradient: "embedding_68.tmp_0@GRAD" - slot_gradient: "embedding_69.tmp_0@GRAD" - slot_gradient: 
"embedding_70.tmp_0@GRAD" - slot_gradient: "embedding_71.tmp_0@GRAD" - slot_gradient: "embedding_72.tmp_0@GRAD" - slot_gradient: "embedding_73.tmp_0@GRAD" - slot_gradient: "embedding_74.tmp_0@GRAD" - slot_gradient: "embedding_75.tmp_0@GRAD" - slot_gradient: "embedding_76.tmp_0@GRAD" - slot_gradient: "embedding_77.tmp_0@GRAD" - slot_gradient: "embedding_78.tmp_0@GRAD" - slot_gradient: "embedding_79.tmp_0@GRAD" - slot_gradient: "embedding_80.tmp_0@GRAD" - slot_gradient: "embedding_81.tmp_0@GRAD" - slot_gradient: "embedding_82.tmp_0@GRAD" - slot_gradient: "embedding_83.tmp_0@GRAD" - slot_gradient: "embedding_84.tmp_0@GRAD" - slot_gradient: "embedding_85.tmp_0@GRAD" - slot_gradient: "embedding_86.tmp_0@GRAD" - slot_gradient: "embedding_87.tmp_0@GRAD" - slot_gradient: "embedding_88.tmp_0@GRAD" - slot_gradient: "embedding_89.tmp_0@GRAD" - slot_gradient: "embedding_90.tmp_0@GRAD" - slot_gradient: "embedding_91.tmp_0@GRAD" - slot_gradient: "embedding_92.tmp_0@GRAD" - slot_gradient: "embedding_93.tmp_0@GRAD" - slot_gradient: "embedding_94.tmp_0@GRAD" - slot_gradient: "embedding_95.tmp_0@GRAD" - slot_gradient: "embedding_96.tmp_0@GRAD" - slot_gradient: "embedding_97.tmp_0@GRAD" - slot_gradient: "embedding_98.tmp_0@GRAD" - slot_gradient: "embedding_99.tmp_0@GRAD" - slot_gradient: "embedding_100.tmp_0@GRAD" - slot_gradient: "embedding_101.tmp_0@GRAD" - slot_gradient: "embedding_102.tmp_0@GRAD" - slot_gradient: "embedding_103.tmp_0@GRAD" - slot_gradient: "embedding_104.tmp_0@GRAD" - slot_gradient: "embedding_105.tmp_0@GRAD" - slot_gradient: "embedding_106.tmp_0@GRAD" - slot_gradient: "embedding_107.tmp_0@GRAD" - slot_gradient: "embedding_108.tmp_0@GRAD" - slot_gradient: "embedding_109.tmp_0@GRAD" - slot_gradient: "embedding_110.tmp_0@GRAD" - slot_gradient: "embedding_111.tmp_0@GRAD" - slot_gradient: "embedding_112.tmp_0@GRAD" - slot_gradient: "embedding_113.tmp_0@GRAD" - slot_gradient: "embedding_114.tmp_0@GRAD" - slot_gradient: "embedding_115.tmp_0@GRAD" - slot_gradient: "embedding_116.tmp_0@GRAD" - slot_gradient: "embedding_117.tmp_0@GRAD" - slot_gradient: "embedding_118.tmp_0@GRAD" - slot_gradient: "embedding_119.tmp_0@GRAD" - slot_gradient: "embedding_120.tmp_0@GRAD" - slot_gradient: "embedding_121.tmp_0@GRAD" - slot_gradient: "embedding_122.tmp_0@GRAD" - slot_gradient: "embedding_123.tmp_0@GRAD" - slot_gradient: "embedding_124.tmp_0@GRAD" - slot_gradient: "embedding_125.tmp_0@GRAD" - slot_gradient: "embedding_126.tmp_0@GRAD" - slot_gradient: "embedding_127.tmp_0@GRAD" - slot_gradient: "embedding_128.tmp_0@GRAD" - slot_gradient: "embedding_129.tmp_0@GRAD" - slot_gradient: "embedding_130.tmp_0@GRAD" - slot_gradient: "embedding_131.tmp_0@GRAD" - slot_gradient: "embedding_132.tmp_0@GRAD" - slot_gradient: "embedding_133.tmp_0@GRAD" - slot_gradient: "embedding_134.tmp_0@GRAD" - slot_gradient: "embedding_135.tmp_0@GRAD" - slot_gradient: "embedding_136.tmp_0@GRAD" - slot_gradient: "embedding_137.tmp_0@GRAD" - slot_gradient: "embedding_138.tmp_0@GRAD" - slot_gradient: "embedding_139.tmp_0@GRAD" - slot_gradient: "embedding_140.tmp_0@GRAD" - slot_gradient: "embedding_141.tmp_0@GRAD" - slot_gradient: "embedding_142.tmp_0@GRAD" - slot_gradient: "embedding_143.tmp_0@GRAD" - slot_gradient: "embedding_144.tmp_0@GRAD" - slot_gradient: "embedding_145.tmp_0@GRAD" - slot_gradient: "embedding_146.tmp_0@GRAD" - slot_gradient: "embedding_147.tmp_0@GRAD" - slot_gradient: "embedding_148.tmp_0@GRAD" - slot_gradient: "embedding_149.tmp_0@GRAD" - slot_gradient: "embedding_150.tmp_0@GRAD" - slot_gradient: 
"embedding_151.tmp_0@GRAD" - slot_gradient: "embedding_152.tmp_0@GRAD" - slot_gradient: "embedding_153.tmp_0@GRAD" - slot_gradient: "embedding_154.tmp_0@GRAD" - slot_gradient: "embedding_155.tmp_0@GRAD" - slot_gradient: "embedding_156.tmp_0@GRAD" - slot_gradient: "embedding_157.tmp_0@GRAD" - slot_gradient: "embedding_158.tmp_0@GRAD" - slot_gradient: "embedding_159.tmp_0@GRAD" - slot_gradient: "embedding_160.tmp_0@GRAD" - slot_gradient: "embedding_161.tmp_0@GRAD" - slot_gradient: "embedding_162.tmp_0@GRAD" - slot_gradient: "embedding_163.tmp_0@GRAD" - slot_gradient: "embedding_164.tmp_0@GRAD" - slot_gradient: "embedding_165.tmp_0@GRAD" - slot_gradient: "embedding_166.tmp_0@GRAD" - slot_gradient: "embedding_167.tmp_0@GRAD" - slot_gradient: "embedding_168.tmp_0@GRAD" - slot_gradient: "embedding_169.tmp_0@GRAD" - slot_gradient: "embedding_170.tmp_0@GRAD" - slot_gradient: "embedding_171.tmp_0@GRAD" - slot_gradient: "embedding_172.tmp_0@GRAD" - slot_gradient: "embedding_173.tmp_0@GRAD" - slot_gradient: "embedding_174.tmp_0@GRAD" - slot_gradient: "embedding_175.tmp_0@GRAD" - slot_gradient: "embedding_176.tmp_0@GRAD" - slot_gradient: "embedding_177.tmp_0@GRAD" - slot_gradient: "embedding_178.tmp_0@GRAD" - slot_gradient: "embedding_179.tmp_0@GRAD" - slot_gradient: "embedding_180.tmp_0@GRAD" - slot_gradient: "embedding_181.tmp_0@GRAD" - slot_gradient: "embedding_182.tmp_0@GRAD" - slot_gradient: "embedding_183.tmp_0@GRAD" - slot_gradient: "embedding_184.tmp_0@GRAD" - slot_gradient: "embedding_185.tmp_0@GRAD" - slot_gradient: "embedding_186.tmp_0@GRAD" - slot_gradient: "embedding_187.tmp_0@GRAD" - slot_gradient: "embedding_188.tmp_0@GRAD" - slot_gradient: "embedding_189.tmp_0@GRAD" - slot_gradient: "embedding_190.tmp_0@GRAD" - slot_gradient: "embedding_191.tmp_0@GRAD" - slot_gradient: "embedding_192.tmp_0@GRAD" - slot_gradient: "embedding_193.tmp_0@GRAD" - slot_gradient: "embedding_194.tmp_0@GRAD" - slot_gradient: "embedding_195.tmp_0@GRAD" - slot_gradient: "embedding_196.tmp_0@GRAD" - slot_gradient: "embedding_197.tmp_0@GRAD" - slot_gradient: "embedding_198.tmp_0@GRAD" - slot_gradient: "embedding_199.tmp_0@GRAD" - slot_gradient: "embedding_200.tmp_0@GRAD" - slot_gradient: "embedding_201.tmp_0@GRAD" - slot_gradient: "embedding_202.tmp_0@GRAD" - slot_gradient: "embedding_203.tmp_0@GRAD" - slot_gradient: "embedding_204.tmp_0@GRAD" - slot_gradient: "embedding_205.tmp_0@GRAD" - slot_gradient: "embedding_206.tmp_0@GRAD" - slot_gradient: "embedding_207.tmp_0@GRAD" - slot_gradient: "embedding_208.tmp_0@GRAD" - slot_gradient: "embedding_209.tmp_0@GRAD" - slot_gradient: "embedding_210.tmp_0@GRAD" - slot_gradient: "embedding_211.tmp_0@GRAD" - slot_gradient: "embedding_212.tmp_0@GRAD" - slot_gradient: "embedding_213.tmp_0@GRAD" - slot_gradient: "embedding_214.tmp_0@GRAD" - slot_gradient: "embedding_215.tmp_0@GRAD" - slot_gradient: "embedding_216.tmp_0@GRAD" - slot_gradient: "embedding_217.tmp_0@GRAD" - slot_gradient: "embedding_218.tmp_0@GRAD" - slot_gradient: "embedding_219.tmp_0@GRAD" - slot_gradient: "embedding_220.tmp_0@GRAD" - slot_gradient: "embedding_221.tmp_0@GRAD" - slot_gradient: "embedding_222.tmp_0@GRAD" - slot_gradient: "embedding_223.tmp_0@GRAD" - slot_gradient: "embedding_224.tmp_0@GRAD" - slot_gradient: "embedding_225.tmp_0@GRAD" - slot_gradient: "embedding_226.tmp_0@GRAD" - slot_gradient: "embedding_227.tmp_0@GRAD" - slot_gradient: "embedding_228.tmp_0@GRAD" - slot_gradient: "embedding_229.tmp_0@GRAD" - slot_gradient: "embedding_230.tmp_0@GRAD" - slot_gradient: "embedding_231.tmp_0@GRAD" - 
slot_gradient: "embedding_232.tmp_0@GRAD" - slot_gradient: "embedding_233.tmp_0@GRAD" - slot_gradient: "embedding_234.tmp_0@GRAD" - slot_gradient: "embedding_235.tmp_0@GRAD" - slot_gradient: "embedding_236.tmp_0@GRAD" - slot_gradient: "embedding_237.tmp_0@GRAD" - slot_gradient: "embedding_238.tmp_0@GRAD" - slot_gradient: "embedding_239.tmp_0@GRAD" - slot_gradient: "embedding_240.tmp_0@GRAD" - slot_gradient: "embedding_241.tmp_0@GRAD" - slot_gradient: "embedding_242.tmp_0@GRAD" - slot_gradient: "embedding_243.tmp_0@GRAD" - slot_gradient: "embedding_244.tmp_0@GRAD" - slot_gradient: "embedding_245.tmp_0@GRAD" - slot_gradient: "embedding_246.tmp_0@GRAD" - slot_gradient: "embedding_247.tmp_0@GRAD" - slot_gradient: "embedding_248.tmp_0@GRAD" - slot_gradient: "embedding_249.tmp_0@GRAD" - slot_gradient: "embedding_250.tmp_0@GRAD" - slot_gradient: "embedding_251.tmp_0@GRAD" - slot_gradient: "embedding_252.tmp_0@GRAD" - slot_gradient: "embedding_253.tmp_0@GRAD" - slot_gradient: "embedding_254.tmp_0@GRAD" - slot_gradient: "embedding_255.tmp_0@GRAD" - slot_gradient: "embedding_256.tmp_0@GRAD" - slot_gradient: "embedding_257.tmp_0@GRAD" - slot_gradient: "embedding_258.tmp_0@GRAD" - slot_gradient: "embedding_259.tmp_0@GRAD" - slot_gradient: "embedding_260.tmp_0@GRAD" - slot_gradient: "embedding_261.tmp_0@GRAD" - slot_gradient: "embedding_262.tmp_0@GRAD" - slot_gradient: "embedding_263.tmp_0@GRAD" - slot_gradient: "embedding_264.tmp_0@GRAD" - slot_gradient: "embedding_265.tmp_0@GRAD" - slot_gradient: "embedding_266.tmp_0@GRAD" - slot_gradient: "embedding_267.tmp_0@GRAD" - slot_gradient: "embedding_268.tmp_0@GRAD" - slot_gradient: "embedding_269.tmp_0@GRAD" - slot_gradient: "embedding_270.tmp_0@GRAD" - slot_gradient: "embedding_271.tmp_0@GRAD" - slot_gradient: "embedding_272.tmp_0@GRAD" - slot_gradient: "embedding_273.tmp_0@GRAD" - slot_gradient: "embedding_274.tmp_0@GRAD" - slot_gradient: "embedding_275.tmp_0@GRAD" - slot_gradient: "embedding_276.tmp_0@GRAD" - slot_gradient: "embedding_277.tmp_0@GRAD" - slot_gradient: "embedding_278.tmp_0@GRAD" - slot_gradient: "embedding_279.tmp_0@GRAD" - slot_gradient: "embedding_280.tmp_0@GRAD" - slot_gradient: "embedding_281.tmp_0@GRAD" - slot_gradient: "embedding_282.tmp_0@GRAD" - slot_gradient: "embedding_283.tmp_0@GRAD" - slot_gradient: "embedding_284.tmp_0@GRAD" - slot_gradient: "embedding_285.tmp_0@GRAD" - slot_gradient: "embedding_286.tmp_0@GRAD" - slot_gradient: "embedding_287.tmp_0@GRAD" - slot_gradient: "embedding_288.tmp_0@GRAD" - slot_gradient: "embedding_289.tmp_0@GRAD" - slot_gradient: "embedding_290.tmp_0@GRAD" - slot_gradient: "embedding_291.tmp_0@GRAD" - slot_gradient: "embedding_292.tmp_0@GRAD" - slot_gradient: "embedding_293.tmp_0@GRAD" - slot_gradient: "embedding_294.tmp_0@GRAD" - slot_gradient: "embedding_295.tmp_0@GRAD" - slot_gradient: "embedding_296.tmp_0@GRAD" - slot_gradient: "embedding_297.tmp_0@GRAD" - slot_gradient: "embedding_298.tmp_0@GRAD" - slot_gradient: "embedding_299.tmp_0@GRAD" - slot_gradient: "embedding_300.tmp_0@GRAD" - slot_gradient: "embedding_301.tmp_0@GRAD" - slot_gradient: "embedding_302.tmp_0@GRAD" - slot_gradient: "embedding_303.tmp_0@GRAD" - slot_gradient: "embedding_304.tmp_0@GRAD" - slot_gradient: "embedding_305.tmp_0@GRAD" - slot_gradient: "embedding_306.tmp_0@GRAD" - slot_gradient: "embedding_307.tmp_0@GRAD" - slot_gradient: "embedding_308.tmp_0@GRAD" - slot_gradient: "embedding_309.tmp_0@GRAD" - slot_gradient: "embedding_310.tmp_0@GRAD" - slot_gradient: "embedding_311.tmp_0@GRAD" - slot_gradient: 
"embedding_312.tmp_0@GRAD" - slot_gradient: "embedding_313.tmp_0@GRAD" - slot_gradient: "embedding_314.tmp_0@GRAD" - slot_gradient: "embedding_315.tmp_0@GRAD" - slot_gradient: "embedding_316.tmp_0@GRAD" - slot_gradient: "embedding_317.tmp_0@GRAD" - slot_gradient: "embedding_318.tmp_0@GRAD" - slot_gradient: "embedding_319.tmp_0@GRAD" - slot_gradient: "embedding_320.tmp_0@GRAD" - slot_gradient: "embedding_321.tmp_0@GRAD" - slot_gradient: "embedding_322.tmp_0@GRAD" - slot_gradient: "embedding_323.tmp_0@GRAD" - slot_gradient: "embedding_324.tmp_0@GRAD" - slot_gradient: "embedding_325.tmp_0@GRAD" - slot_gradient: "embedding_326.tmp_0@GRAD" - slot_gradient: "embedding_327.tmp_0@GRAD" - slot_gradient: "embedding_328.tmp_0@GRAD" - slot_gradient: "embedding_329.tmp_0@GRAD" - slot_gradient: "embedding_330.tmp_0@GRAD" - slot_gradient: "embedding_331.tmp_0@GRAD" - slot_gradient: "embedding_332.tmp_0@GRAD" - slot_gradient: "embedding_333.tmp_0@GRAD" - slot_gradient: "embedding_334.tmp_0@GRAD" - slot_gradient: "embedding_335.tmp_0@GRAD" - slot_gradient: "embedding_336.tmp_0@GRAD" - slot_gradient: "embedding_337.tmp_0@GRAD" - slot_gradient: "embedding_338.tmp_0@GRAD" - slot_gradient: "embedding_339.tmp_0@GRAD" - slot_gradient: "embedding_340.tmp_0@GRAD" - slot_gradient: "embedding_341.tmp_0@GRAD" - slot_gradient: "embedding_342.tmp_0@GRAD" - slot_gradient: "embedding_343.tmp_0@GRAD" - slot_gradient: "embedding_344.tmp_0@GRAD" - slot_gradient: "embedding_345.tmp_0@GRAD" - slot_gradient: "embedding_346.tmp_0@GRAD" - slot_gradient: "embedding_347.tmp_0@GRAD" - slot_gradient: "embedding_348.tmp_0@GRAD" - slot_gradient: "embedding_349.tmp_0@GRAD" - slot_gradient: "embedding_350.tmp_0@GRAD" - slot_gradient: "embedding_351.tmp_0@GRAD" - slot_gradient: "embedding_352.tmp_0@GRAD" - slot_gradient: "embedding_353.tmp_0@GRAD" - slot_gradient: "embedding_354.tmp_0@GRAD" - slot_gradient: "embedding_355.tmp_0@GRAD" - slot_gradient: "embedding_356.tmp_0@GRAD" - slot_gradient: "embedding_357.tmp_0@GRAD" - slot_gradient: "embedding_358.tmp_0@GRAD" - slot_gradient: "embedding_359.tmp_0@GRAD" - slot_gradient: "embedding_360.tmp_0@GRAD" - slot_gradient: "embedding_361.tmp_0@GRAD" - slot_gradient: "embedding_362.tmp_0@GRAD" - slot_gradient: "embedding_363.tmp_0@GRAD" - slot_gradient: "embedding_364.tmp_0@GRAD" - slot_gradient: "embedding_365.tmp_0@GRAD" - slot_gradient: "embedding_366.tmp_0@GRAD" - slot_gradient: "embedding_367.tmp_0@GRAD" - slot_gradient: "embedding_368.tmp_0@GRAD" - slot_gradient: "embedding_369.tmp_0@GRAD" - slot_gradient: "embedding_370.tmp_0@GRAD" - slot_gradient: "embedding_371.tmp_0@GRAD" - slot_gradient: "embedding_372.tmp_0@GRAD" - slot_gradient: "embedding_373.tmp_0@GRAD" - slot_gradient: "embedding_374.tmp_0@GRAD" - slot_gradient: "embedding_375.tmp_0@GRAD" - slot_gradient: "embedding_376.tmp_0@GRAD" - slot_gradient: "embedding_377.tmp_0@GRAD" - slot_gradient: "embedding_378.tmp_0@GRAD" - slot_gradient: "embedding_379.tmp_0@GRAD" - slot_gradient: "embedding_380.tmp_0@GRAD" - slot_gradient: "embedding_381.tmp_0@GRAD" - slot_gradient: "embedding_382.tmp_0@GRAD" - slot_gradient: "embedding_383.tmp_0@GRAD" - slot_gradient: "embedding_384.tmp_0@GRAD" - slot_gradient: "embedding_385.tmp_0@GRAD" - slot_gradient: "embedding_386.tmp_0@GRAD" - slot_gradient: "embedding_387.tmp_0@GRAD" - slot_gradient: "embedding_388.tmp_0@GRAD" - slot_gradient: "embedding_389.tmp_0@GRAD" - slot_gradient: "embedding_390.tmp_0@GRAD" - slot_gradient: "embedding_391.tmp_0@GRAD" - slot_gradient: "embedding_392.tmp_0@GRAD" - 
slot_gradient: "embedding_393.tmp_0@GRAD" - slot_gradient: "embedding_394.tmp_0@GRAD" - slot_gradient: "embedding_395.tmp_0@GRAD" - slot_gradient: "embedding_396.tmp_0@GRAD" - slot_gradient: "embedding_397.tmp_0@GRAD" - slot_gradient: "embedding_398.tmp_0@GRAD" - slot_gradient: "embedding_399.tmp_0@GRAD" - slot_gradient: "embedding_400.tmp_0@GRAD" - slot_gradient: "embedding_401.tmp_0@GRAD" - slot_gradient: "embedding_402.tmp_0@GRAD" - slot_gradient: "embedding_403.tmp_0@GRAD" - slot_gradient: "embedding_404.tmp_0@GRAD" - slot_gradient: "embedding_405.tmp_0@GRAD" - slot_gradient: "embedding_406.tmp_0@GRAD" - slot_gradient: "embedding_407.tmp_0@GRAD" - } - skip_op: "lookup_table" - skip_op: "lookup_table_grad" -} -fs_client_param { - uri: "afs://xingtian.afs.baidu.com:9902" - user: "mlarch" - passwd: "Fv1M87" - hadoop_bin: "$HADOOP_HOME/bin/hadoop" -} diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/model.layers b/feed/feed_deploy/news_jingpai/package/format_nets/model.layers deleted file mode 100644 index 72502c5b..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/model.layers +++ /dev/null @@ -1,22 +0,0 @@ -input : - - { name : embedding_input, class: EmbeddingLayer, backward: false, cvm: true, mf_dim: 10, slots: [ ]} - - { name : label_target, class: label, backward: false } - - { name : ins_sample_weight, class: tag, backward: false } - - { name : label_with_pred_target, class: label, backward: false } -summary : - - { name : base_summary } -param : - - { name : h1_param, class : param_layer, init_range : 1, coln:511, scale_by_rown : true} - - { name : h2_param, class : param_layer, init_range : 1, coln:255, scale_by_rown : true} - - { name : h3_param, class : param_layer, init_range : 1, coln:127, scale_by_rown : true} - - { name : h4_param, class : param_layer, init_range : 1, coln:127, scale_by_rown : true} - - { name : h5_param, class : param_layer, init_range : 1, coln:127, scale_by_rown : true} - - { name : h6_param, class : param_layer, init_range : 1, coln:1, scale_by_rown : true} -layer : - - { name : base_input_norm, class : normalization_layer, input : [embedding_input], summary : base_summary} - - { name : h1, class : neural_layer, input : [base_input_norm], param : h1_param, bias: true, act_func : relu} - - { name : h2, class : neural_layer, input : [h1], param : h2_param, bias : true, act_func : relu} - - { name : h3, class : neural_layer, input : [h2], param : h3_param, bias : true, act_func : relu} - - { name : h4, class : neural_layer, input : [h3], param : h4_param, bias : true, act_func : relu} - - { name : h5, class : neural_layer, input : [h4], param : h5_param, bias : true, act_func : relu} - - { name : ctr_output, class : neural_layer, input : [h5], param : h6_param, bias : true, act_func : sig_moid} diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/model_new.py b/feed/feed_deploy/news_jingpai/package/format_nets/model_new.py deleted file mode 100644 index 172ed804..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/model_new.py +++ /dev/null @@ -1,188 +0,0 @@ - -import paddle.fluid as fluid -from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet - -class Model(object): - def __init__(self, slot_file_name, all_slot_file, use_cvm, ins_tag, is_update_model): - self._slot_file_name = slot_file_name - self._use_cvm = use_cvm - self._dict_dim = 10 # it's fake - self._emb_dim = 9 + 2 - self._init_range = 0.2 - self._all_slot_file = all_slot_file - self._not_use_slots = [] - self._not_use_slotemb = [] 
-        self._all_slots = []
-        self._ins_tag_value = ins_tag
-        self._is_update_model = is_update_model
-        self._train_program = fluid.Program()
-        self._startup_program = fluid.Program()
-        self.save_vars = []
-        with fluid.program_guard(self._train_program, self._startup_program):
-            with fluid.unique_name.guard():
-                self.show = fluid.layers.data(name="show", shape=[-1, 1], dtype="int64", lod_level=0, append_batch_size=False)
-                self.label = fluid.layers.data(name="click", shape=[-1, 1], dtype="int64", lod_level=0, append_batch_size=False)
-                self.ins_weight = fluid.layers.data(
-                    name="12345",
-                    shape=[-1, 1],
-                    dtype="float32",
-                    lod_level=0,
-                    append_batch_size=False,
-                    stop_gradient=True)
-                self.ins_tag = fluid.layers.data(
-                    name="23456",
-                    shape=[-1, 1],
-                    dtype="int64",
-                    lod_level=0,
-                    append_batch_size=False,
-                    stop_gradient=True)
-                self.slots = []
-                self.slots_name = []
-                self.embs = []
-
-
-                if self._ins_tag_value != 0:
-                    self.x3_ts = fluid.layers.create_global_var(shape=[1,1], value=self._ins_tag_value, dtype='int64', persistable=True, force_cpu=True, name='X3')
-                    self.x3_ts.stop_gradient=True
-                    self.label_after_filter, self.filter_loss = fluid.layers.filter_by_instag(self.label, self.ins_tag, self.x3_ts, True)
-                    self.label_after_filter.stop_gradient=True
-                    self.show_after_filter, _ = fluid.layers.filter_by_instag(self.show, self.ins_tag, self.x3_ts, True)
-                    self.show_after_filter.stop_gradient=True
-                    self.ins_weight_after_filter, _ = fluid.layers.filter_by_instag(self.ins_weight, self.ins_tag, self.x3_ts, True)
-                    self.ins_weight_after_filter.stop_gradient=True
-
-                for line in open(self._slot_file_name, 'r'):
-                    slot = line.strip()
-                    self.slots_name.append(slot)
-
-                self.all_slots_name = []
-                for line in open(self._all_slot_file, 'r'):
-                    self.all_slots_name.append(line.strip())
-                for i in self.all_slots_name:
-                    if i == self.ins_weight.name or i == self.ins_tag.name:
-                        pass
-                    elif i not in self.slots_name:
-                        pass
-                    else:
-                        l = fluid.layers.data(name=i, shape=[1], dtype="int64", lod_level=1)
-                        emb = fluid.layers.embedding(input=l, size=[self._dict_dim, self._emb_dim], is_sparse = True, is_distributed=True, param_attr=fluid.ParamAttr(name="embedding"))
-                        self.slots.append(l)
-                        self.embs.append(emb)
-
-                if self._ins_tag_value != 0:
-                    self.emb = self.slot_net(self.slots, self.label_after_filter)
-                else:
-                    self.emb = self.slot_net(self.slots, self.label)
-
-                self.similarity_norm = fluid.layers.sigmoid(fluid.layers.clip(self.emb, min=-15.0, max=15.0), name="similarity_norm")
-
-                if self._ins_tag_value != 0:
-                    self.cost = fluid.layers.log_loss(input=self.similarity_norm, label=fluid.layers.cast(x=self.label_after_filter, dtype='float32'))
-                else:
-                    self.cost = fluid.layers.log_loss(input=self.similarity_norm, label=fluid.layers.cast(x=self.label, dtype='float32'))
-
-                if self._ins_tag_value != 0:
-                    self.cost = fluid.layers.elementwise_mul(self.cost, self.ins_weight_after_filter)
-                else:
-                    self.cost = fluid.layers.elementwise_mul(self.cost, self.ins_weight)
-
-                if self._ins_tag_value != 0:
-                    self.cost = fluid.layers.elementwise_mul(self.cost, self.filter_loss)
-
-                self.avg_cost = fluid.layers.mean(x=self.cost)
-
-                binary_predict = fluid.layers.concat(
-                    input=[fluid.layers.elementwise_sub(fluid.layers.ceil(self.similarity_norm), self.similarity_norm), self.similarity_norm], axis=1)
-
-                if self._ins_tag_value != 0:
-                    self.auc, batch_auc, [self.batch_stat_pos, self.batch_stat_neg, self.stat_pos, self.stat_neg] = \
-                        fluid.layers.auc(input=binary_predict, label=self.label_after_filter, curve='ROC', num_thresholds=4096)
-                    self.sqrerr, self.abserr, self.prob, self.q, self.pos, self.total = \
-                        fluid.contrib.layers.ctr_metric_bundle(self.similarity_norm, fluid.layers.cast(x=self.label_after_filter, dtype='float32'))
-
-                    #self.precise_ins_num = fluid.layers.create_global_var(persistable=True, dtype='float32', shape=[1])
-                    #batch_ins_num = fluid.layers.reduce_sum(self.filter_loss)
-                    #self.precise_ins_num = fluid.layers.elementwise_add(batch_ins_num, self.precise_ins_num)
-
-                else:
-                    self.auc, batch_auc, [self.batch_stat_pos, self.batch_stat_neg, self.stat_pos, self.stat_neg] = \
-                        fluid.layers.auc(input=binary_predict, label=self.label, curve='ROC', num_thresholds=4096)
-                    self.sqrerr, self.abserr, self.prob, self.q, self.pos, self.total = \
-                        fluid.contrib.layers.ctr_metric_bundle(self.similarity_norm, fluid.layers.cast(x=self.label, dtype='float32'))
-
-
-
-        self.tmp_train_program = fluid.Program()
-        self.tmp_startup_program = fluid.Program()
-        with fluid.program_guard(self.tmp_train_program, self.tmp_startup_program):
-            with fluid.unique_name.guard():
-                self._all_slots = [self.show, self.label]
-                self._merge_slots = []
-                for i in self.all_slots_name:
-                    if i == self.ins_weight.name:
-                        self._all_slots.append(self.ins_weight)
-                    elif i == self.ins_tag.name:
-                        self._all_slots.append(self.ins_tag)
-                    else:
-                        l = fluid.layers.data(name=i, shape=[1], dtype="int64", lod_level=1)
-                        self._all_slots.append(l)
-                        self._merge_slots.append(l)
-
-
-
-
-    def slot_net(self, slots, label, lr_x=1.0):
-        input_data = []
-        cvms = []
-
-        cast_label = fluid.layers.cast(label, dtype='float32')
-        cast_label.stop_gradient = True
-        ones = fluid.layers.fill_constant_batch_size_like(input=label, shape=[-1, 1], dtype="float32", value=1)
-        show_clk = fluid.layers.cast(fluid.layers.concat([ones, cast_label], axis=1), dtype='float32')
-        show_clk.stop_gradient = True
-
-        for index in range(len(slots)):
-            input_data.append(slots[index])
-            emb = self.embs[index]
-            bow = fluid.layers.sequence_pool(input=emb, pool_type='sum')
-            cvm = fluid.layers.continuous_value_model(bow, show_clk, self._use_cvm)
-            cvms.append(cvm)
-
-        concat = None
-        if self._ins_tag_value != 0:
-            concat = fluid.layers.concat(cvms, axis=1)
-            concat, _ = fluid.layers.filter_by_instag(concat, self.ins_tag, self.x3_ts, False)
-        else:
-            concat = fluid.layers.concat(cvms, axis=1)
-        bn = concat
-        if self._use_cvm:
-            bn = fluid.layers.data_norm(input=concat, name="bn6048", epsilon=1e-4,
-                param_attr={
-                    "batch_size":1e4,
-                    "batch_sum_default":0.0,
-                    "batch_square":1e4})
-        self.save_vars.append(bn)
-        fc_layers_input = [bn]
-        if self._is_update_model:
-            fc_layers_size = [511, 255, 127, 127, 127, 1]
-        else:
-            fc_layers_size = [511, 255, 255, 127, 127, 127, 127, 1]
-        fc_layers_act = ["relu"] * (len(fc_layers_size) - 1) + [None]
-        scales_tmp = [bn.shape[1]] + fc_layers_size
-        scales = []
-        for i in range(len(scales_tmp)):
-            scales.append(self._init_range / (scales_tmp[i] ** 0.5))
-        for i in range(len(fc_layers_size)):
-            fc = fluid.layers.fc(
-                input = fc_layers_input[-1],
-                size = fc_layers_size[i],
-                act = fc_layers_act[i],
-                param_attr = \
-                    fluid.ParamAttr(learning_rate=lr_x, \
-                        initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=1.0 * scales[i])),
-                bias_attr = \
-                    fluid.ParamAttr(learning_rate=lr_x, \
-                        initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=1.0 * scales[i])))
-            fc_layers_input.append(fc)
-            self.save_vars.append(fc)
-        return fc_layers_input[-1]
diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/model_new_jc.py b/feed/feed_deploy/news_jingpai/package/format_nets/model_new_jc.py
deleted file mode 100644
index 31802b4a..00000000
--- a/feed/feed_deploy/news_jingpai/package/format_nets/model_new_jc.py
+++ /dev/null
@@ -1,166 +0,0 @@
-
-import paddle.fluid as fluid
-from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet
-
-class ModelJoinCommon(object):
-    def __init__(self, slot_file_name, slot_common_file_name, all_slot_file, join_ins_tag):
-        self.slot_file_name = slot_file_name
-        self.slot_common_file_name = slot_common_file_name
-        self.dict_dim = 10 # it's fake
-        self.emb_dim = 9 + 2
-        self.init_range = 0.2
-        self.all_slot_file = all_slot_file
-        self.ins_tag_v = join_ins_tag
-        self._train_program = fluid.Program()
-        self._startup_program = fluid.Program()
-        with fluid.program_guard(self._train_program, self._startup_program):
-            with fluid.unique_name.guard():
-                self.show = fluid.layers.data(name="show", shape=[-1, 1], dtype="int64", lod_level=0, append_batch_size=False)
-                self.label = fluid.layers.data(name="click", shape=[-1, 1], dtype="int64", lod_level=0, append_batch_size=False)
-                self.ins_weight = fluid.layers.data(
-                    name="12345",
-                    shape=[-1, 1],
-                    dtype="float32",
-                    lod_level=0,
-                    append_batch_size=False,
-                    stop_gradient=True)
-                self.ins_tag = fluid.layers.data(
-                    name="23456",
-                    shape=[-1, 1],
-                    dtype="int64",
-                    lod_level=0,
-                    append_batch_size=False,
-                    stop_gradient=True)
-                self.x3_ts = fluid.layers.create_global_var(shape=[1,1], value=self.ins_tag_v, dtype='int64', persistable=True, force_cpu=True, name='X3')
-                self.x3_ts.stop_gradient=True
-                self.label_after_filter, self.filter_loss = fluid.layers.filter_by_instag(self.label, self.ins_tag, self.x3_ts, True)
-                self.label_after_filter.stop_gradient=True
-                self.show_after_filter, _ = fluid.layers.filter_by_instag(self.show, self.ins_tag, self.x3_ts, True)
-                self.show_after_filter.stop_gradient=True
-                self.ins_weight_after_filter, _ = fluid.layers.filter_by_instag(self.ins_weight, self.ins_tag, self.x3_ts, True)
-                self.ins_weight_after_filter.stop_gradient=True
-
-                self.slots_name = []
-                for line in open(self.slot_file_name, 'r'):
-                    slot = line.strip()
-                    self.slots_name.append(slot)
-
-                self.all_slots_name = []
-                for line in open(self.all_slot_file, 'r'):
-                    self.all_slots_name.append(line.strip())
-
-                self.slots = []
-                self.embs = []
-                for i in self.all_slots_name:
-                    if i == self.ins_weight.name or i == self.ins_tag.name:
-                        pass
-                    elif i not in self.slots_name:
-                        pass
-                    else:
-                        l = fluid.layers.data(name=i, shape=[1], dtype="int64", lod_level=1)
-                        emb = fluid.layers.embedding(input=l, size=[self.dict_dim, self.emb_dim], is_sparse = True, is_distributed=True, param_attr=fluid.ParamAttr(name="embedding"))
-                        self.slots.append(l)
-                        self.embs.append(emb)
-
-                self.common_slot_name = []
-                for i in open(self.slot_common_file_name, 'r'):
-                    self.common_slot_name.append(i.strip())
-
-                cvms = []
-                cast_label = fluid.layers.cast(self.label, dtype='float32')
-                cast_label.stop_gradient = True
-                ones = fluid.layers.fill_constant_batch_size_like(input=self.label, shape=[-1, 1], dtype="float32", value=1)
-                show_clk = fluid.layers.cast(fluid.layers.concat([ones, cast_label], axis=1), dtype='float32')
-                show_clk.stop_gradient = True
-                for index in range(len(self.embs)):
-                    emb = self.embs[index]
-                    emb.stop_gradient=True
-                    bow = fluid.layers.sequence_pool(input=emb, pool_type='sum')
-                    bow.stop_gradient=True
-                    cvm = fluid.layers.continuous_value_model(bow, show_clk, True)
-                    cvm.stop_gradient=True
-                    cvms.append(cvm)
-                concat_join = fluid.layers.concat(cvms, axis=1)
-                concat_join.stop_gradient=True
-
-                cvms_common = []
-                for index in range(len(self.common_slot_name)):
-                    cvms_common.append(cvms[index])
-                concat_common = fluid.layers.concat(cvms_common, axis=1)
-                concat_common.stop_gradient=True
-
-                bn_common = fluid.layers.data_norm(input=concat_common, name="common", epsilon=1e-4, param_attr={"batch_size":1e4,"batch_sum_default":0.0,"batch_square":1e4})
-
-                concat_join, _ = fluid.layers.filter_by_instag(concat_join, self.ins_tag, self.x3_ts, False)
-                concat_join.stop_gradient=True
-                bn_join = fluid.layers.data_norm(input=concat_join, name="join", epsilon=1e-4, param_attr={"batch_size":1e4,"batch_sum_default":0.0,"batch_square":1e4})
-
-                join_fc = self.fcs(bn_join, "join")
-                join_similarity_norm = fluid.layers.sigmoid(fluid.layers.clip(join_fc, min=-15.0, max=15.0), name="join_similarity_norm")
-                join_cost = fluid.layers.log_loss(input=join_similarity_norm, label=fluid.layers.cast(x=self.label_after_filter, dtype='float32'))
-                join_cost = fluid.layers.elementwise_mul(join_cost, self.ins_weight_after_filter)
-                join_cost = fluid.layers.elementwise_mul(join_cost, self.filter_loss)
-                join_avg_cost = fluid.layers.mean(x=join_cost)
-
-                common_fc = self.fcs(bn_common, "common")
-                common_similarity_norm = fluid.layers.sigmoid(fluid.layers.clip(common_fc, min=-15.0, max=15.0), name="common_similarity_norm")
-                common_cost = fluid.layers.log_loss(input=common_similarity_norm, label=fluid.layers.cast(x=self.label, dtype='float32'))
-                common_cost = fluid.layers.elementwise_mul(common_cost, self.ins_weight)
-                common_avg_cost = fluid.layers.mean(x=common_cost)
-
-                self.joint_cost = join_avg_cost + common_avg_cost
-
-                join_binary_predict = fluid.layers.concat(
-                    input=[fluid.layers.elementwise_sub(fluid.layers.ceil(join_similarity_norm), join_similarity_norm), join_similarity_norm], axis=1)
-                self.join_auc, batch_auc, [self.join_batch_stat_pos, self.join_batch_stat_neg, self.join_stat_pos, self.join_stat_neg] = \
-                    fluid.layers.auc(input=join_binary_predict, label=self.label_after_filter, curve='ROC', num_thresholds=4096)
-                self.join_sqrerr, self.join_abserr, self.join_prob, self.join_q, self.join_pos, self.join_total = \
-                    fluid.contrib.layers.ctr_metric_bundle(join_similarity_norm, fluid.layers.cast(x=self.label_after_filter, dtype='float32'))
-
-                common_binary_predict = fluid.layers.concat(
-                    input=[fluid.layers.elementwise_sub(fluid.layers.ceil(common_similarity_norm), common_similarity_norm), common_similarity_norm], axis=1)
-                self.common_auc, batch_auc, [self.common_batch_stat_pos, self.common_batch_stat_neg, self.common_stat_pos, self.common_stat_neg] = \
-                    fluid.layers.auc(input=common_binary_predict, label=self.label, curve='ROC', num_thresholds=4096)
-                self.common_sqrerr, self.common_abserr, self.common_prob, self.common_q, self.common_pos, self.common_total = \
-                    fluid.contrib.layers.ctr_metric_bundle(common_similarity_norm, fluid.layers.cast(x=self.label, dtype='float32'))
-
-        self.tmp_train_program = fluid.Program()
-        self.tmp_startup_program = fluid.Program()
-        with fluid.program_guard(self.tmp_train_program, self.tmp_startup_program):
-            with fluid.unique_name.guard():
-                self._all_slots = [self.show, self.label]
-                self._merge_slots = []
-                for i in self.all_slots_name:
-                    if i == self.ins_weight.name:
-                        self._all_slots.append(self.ins_weight)
-                    elif i == self.ins_tag.name:
-                        self._all_slots.append(self.ins_tag)
-                    else:
-                        l = fluid.layers.data(name=i, shape=[1], dtype="int64", lod_level=1)
-                        self._all_slots.append(l)
-                        self._merge_slots.append(l)
-
-
-    def fcs(self, bn, prefix):
-        fc_layers_input = [bn]
-        fc_layers_size = [511, 255, 255, 127, 127, 127, 127, 1]
-        fc_layers_act = ["relu"] * (len(fc_layers_size) - 1) + [None]
-        scales_tmp = [bn.shape[1]] + fc_layers_size
-        scales = []
-        for i in range(len(scales_tmp)):
-            scales.append(self.init_range / (scales_tmp[i] ** 0.5))
-        for i in range(len(fc_layers_size)):
-            name = prefix+"_"+str(i)
-            fc = fluid.layers.fc(
-                input = fc_layers_input[-1],
-                size = fc_layers_size[i],
-                act = fc_layers_act[i],
-                param_attr = \
-                    fluid.ParamAttr(learning_rate=1.0, \
-                        initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=1.0 * scales[i])),
-                bias_attr = \
-                    fluid.ParamAttr(learning_rate=1.0, \
-                        initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=1.0 * scales[i])),
-                name=name)
-            fc_layers_input.append(fc)
-        return fc_layers_input[-1]
diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/my_data_generator_str.py b/feed/feed_deploy/news_jingpai/package/format_nets/my_data_generator_str.py
deleted file mode 100644
index d4766464..00000000
--- a/feed/feed_deploy/news_jingpai/package/format_nets/my_data_generator_str.py
+++ /dev/null
@@ -1,89 +0,0 @@
-import sys
-import os
-import paddle
-import re
-import collections
-import time
-#import paddle.fluid.incubate.data_generator as dg
-import data_generate_base as dg
-
-class MyDataset(dg.MultiSlotDataGenerator):
-    def load_resource(self, dictf):
-        self._all_slots_dict = collections.OrderedDict()
-        with open(dictf, 'r') as f:
-            slots = f.readlines()
-            for index, slot in enumerate(slots):
-                #self._all_slots_dict[slot.strip()] = [False, index + 3] #+3 #
-                self._all_slots_dict[slot.strip()] = [False, index + 2]
-
-    def generate_sample(self, line):
-        def data_iter_str():
-            s = line.split('\t')[0].split()#[1:]
-            lineid = s[0]
-            elements = s[1:] #line.split('\t')[0].split()[1:]
-            padding = "0"
-            # output = [("lineid", [lineid]), ("show", [elements[0]]), ("click", [elements[1]])]
-            output = [("show", [elements[0]]), ("click", [elements[1]])]
-            output.extend([(slot, []) for slot in self._all_slots_dict])
-            for elem in elements[2:]:
-                if elem.startswith("*"):
-                    feasign = elem[1:]
-                    slot = "12345"
-                elif elem.startswith("$"):
-                    feasign = elem[1:]
-                    if feasign == "D":
-                        feasign = "0"
-                    slot = "23456"
-                else:
-                    feasign, slot = elem.split(':')
-                #feasign, slot = elem.split(':')
-                if not self._all_slots_dict.has_key(slot):
-                    continue
-                self._all_slots_dict[slot][0] = True
-                index = self._all_slots_dict[slot][1]
-                output[index][1].append(feasign)
-            for slot in self._all_slots_dict:
-                visit, index = self._all_slots_dict[slot]
-                if visit:
-                    self._all_slots_dict[slot][0] = False
-                else:
-                    output[index][1].append(padding)
-            #print output
-            yield output
-
-        return data_iter_str
-
-        def data_iter():
-            elements = line.split('\t')[0].split()[1:]
-            padding = 0
-            output = [("show", [int(elements[0])]), ("click", [int(elements[1])])]
-            #output += [(slot, []) for slot in self._all_slots_dict]
-            output.extend([(slot, []) for slot in self._all_slots_dict])
-            for elem in elements[2:]:
-                feasign, slot = elem.split(':')
-                if slot == "12345":
-                    feasign = float(feasign)
-                else:
-                    feasign = int(feasign)
-                if not self._all_slots_dict.has_key(slot):
-                    continue
-                self._all_slots_dict[slot][0] = True
-                index = self._all_slots_dict[slot][1]
-                output[index][1].append(feasign)
-            for slot in self._all_slots_dict:
-                visit, index = self._all_slots_dict[slot]
-                if visit:
-                    self._all_slots_dict[slot][0] = False
-                else:
-                    output[index][1].append(padding)
-            yield output
-        return data_iter
-
-
-if __name__ == "__main__":
-    #start = time.clock()
-    d = MyDataset()
-    d.load_resource("all_slot.dict")
-    d.run_from_stdin()
-    #elapsed = (time.clock() - start)
-    #print("Time used:",elapsed)
diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/old_program/old_join_common_startup_program.bin b/feed/feed_deploy/news_jingpai/package/format_nets/old_program/old_join_common_startup_program.bin
deleted file mode 100644
index edb43bda80ce2044da2dcd586e90c207e9fe268c..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 120992
zcmeHQ+ix3JdKWFrmPfM9C{5#fv)Sncsbqs8#fxs+Fx!m*KLp4HMIHhK!NDQth#G4+
zin@Q`&|$-4;Rm{t9w4$K7r>sei+@RwQFr8WxL=7gR0!LnD(v>zO>_DL2q1lZIG(ULsRC-d@vo zME=6Xwzi^xjNd_IXZIl#8Aax;%AJne}`(+gcakf#jf2~!(h zr&@u5Dc+FNY9Mxa%7krP$aD1&c^>ZPv_U4f4kp)jjFS1wJdXk%Ejr#5c`ifKm>lK$gUOv*c4gY0Tn$VPXgc+13ML082PUV=G92BfB4jm=zTzCGoYy~+ z$yLGRfIU->Jz#QRa$s^`a$|i??pxKjhdn3vQ2WKeb8=NMIiTs(qbZmim>ihg88W#v zvGg>D0q1 zxE#0~xZH$sx$AN0*(dZFBy(kuIiSzfqYua&$Q;NV$lO@T+{K^g2lnMgk-0L+9N={7 z;S^*JWDaER49Q#?^u)>BY&w}+1(^f-Og;L5%z?~-%z?~}mCP+Z`rF@3iV?)g+-%Cu z+$zW%;B@NY6l4x$4rJ~O$y^%r#K_#^`TZxe+FxH=dM;9yQ>5ZTR$aO8L08U%5>Bhx ztZi1g=58xe9_*8qlfHI9R4zP9l5*iM2+D;z z;m1%1KjjC{W5p|T+P^O*K{T5P!A~J42M}eG3Zfuz;~;PVoY0ZKMUZyKZntgg7%sh_ zR#r(L@beyIkI+>uZ$)~t=Rrp<$=kB3$bq*-dYn)&1#b&Aviilc4+Tx32j26#r$&^@ zL$rJVqkz#d3xPiz;B8sO)3w|H^Moc5my=9X3T4a^NFI=Ltc<_6{l<~AwJEml*Kl)FjoElJ(7YQ2HF z0dyu8bU@ue-9X(y-Nr-R(v)_FQ@5-VZ=h~K(aA+oP&ZIFP`9(7ZfS@Ur*2Z)ElJ(7 zD!YNY0dyu8bU@ue-9X(y-Nr-R{_#)mjF5Co?z&|abOUt*icT(yg1Ujafx4XqbxT8> z7PV-@Hl>=b5Z&Tb|D?U%Rd>bRPI- z54fA3)BaCU7U>98o-yf+=5BuO%`X#0!1sfD2ET8VoCANyKAET5c6nRu+3s$vyiIv) zqbdvBFvk^+uykfSy;AI6eRq$$Ew9)T{))zZ-}R_0&O5dxZjm^|r{lLUT^gXgxfPix zJBTr9pXXDDwOp3oR^GlHj%ShiHj4&%WzX<;Sj_mSQ_1W<7=N!yB|5KU zS#fhWsvELbb?$R!*=}2Qr|us(qHb7D;1}iJWYHHFRP`=B@2YC2Rv~|Dy)y^H9_q}& zhQ-45PNfFr%~LUIRk3IPinO{;>%Pf-d1^roODNSNow0%{9M3Rqt6mnbk$#FCAQlcw z{dcsE4>3L#+SkjXR_GoG+DsSQJTQIj>#N1p-f`({F5jJcbV1IG|x>W@Z>P{Ndu#uSU>tJq`^tx}Xif2G9ns z>!v70(aCh$P$`2pfHqLA-icJ9{AFmvpDixE9AU$YQEmI+!34h2O zTKbbS2rWfwUVSR{6k3W_e8%8XbaFSm6uqKYEdosGRo$l^DIU-F2bg}axb$L#6#0In z$Pi)zL=4;e21J|_h{zmb0z_aBg0^5~Xla52O-YFO<&tf3}g1TbO}F(R=~oy3UmTwTgX7;#0k8$#C&t9A2QU>UUY-DWPgbh|y6zVw_j zuN%BY$xLRr7e4c?Z;+CL@=p8tlXC>K2;bg6dHw@sc< z!U#WQSkb5cDt(XsM+k2X32&`!bhB2%)G}mF4eTX6<;UbOK*E(on1#O7{v_T@#!}EnB z5B$;M(v=8=E__fOxe5OJK!70#kUe_!#rSG*;>LROeHJioPqC}S{>8WIyO}bTe&pG!~a61VF2%r&~0^*o54CCU} zd$+4!_xs}eV0X(f6eAOlEh-u*h=ITejEsT|G6)dRN#>ab2#^ds$}p1*0+`w7W1oBW zUF)o~Z@FueSN>_DX1#OI*`L4tKYOqBKcm0+zpfs>@%ZQd|L#3`IyzXrTwkoOk8al| ztDhWQuO`#+!RX;vo_u-q=EDb%?|t(P?V8Cl;+n>L2XDI{Jgn>DuDYjQH5XU?u5z#G!B^eA-Wq-7 z;r++=9{ezh9w<@-`qzGtJaug})!>x`B@?AS5FGN!--IixDS7l3a)ptg3A0OQe z^hVvfN)Kmq*~8jpGkMuJMi1)#4~E;5tu8;mJ6q*emoG=(I=(zVzr0x8oSvVa9bKQ^ ze!jZ6ygs)^S6!d7>Sw1{t8DnvYSdOwnO7H^qwrhqzIBo2&tA9z-etqRC%@VG>!QOW6QGZ$u_|2ncTaWrv$sp9n6b`{HV zZAv{+e!VG(Nr>%I7rT_YFrQoB%51IoON>>$lvmHq55DP0_ba2vA(b7NOU`azotalF z6R4QT3ABCFv3b+Y)%9?3sC$)1_ z)m-bLdOfYfOM7S=URqNt>aI0c%u9WPXn^vCi@=j_kcrAcf1{c=W_iL(d&W!MAivRc zLQy@_3otix?>#Qz``xBHW4IMw+c#(`sAXWQ5)hhKQ3 zxB|wo&EGeMY`%ah*TNh16|kn-GpV+-LSF#>+3!4lI;*XIWtu>P~(dGa?#-zew# zvlmCV#~-b3etdUyT}tlr;40USzcqTNd!4-CaP+nA1*d$Tb20PLsyC49z71P460FG@^tj| z@RG11)iu>BSiQ}7kgGDlts$)psIK;sQ@y(EFee(6m%s5`m%@)+5C87y*KXE7zFS`$ zuUA)>muIWV*lX2?ZvA|7^reRn++7_=D!pIhvIivfEY17wIM!c5AN3S^)?aAl5%ub@ z<0Gd1k5Jmh3*2tIC?)YtK)0+)tNF>bQ>*pNgr1Uk)^|%H>sKt7JM@LS zx_O1^-c{7kU$)Z>DVB@=l4u3r1vJG{1C3Jf`m!AhZqP$3xIqG?M7vM%VyR7W<<_q- zcZ;Plca=x@_tYH*da>MPBD<@VA*nU{E)%&}MonaY*^cAIn8-@O7sk90crO-hWnjLix=gm1P6X zE>uQ+6DZ#X!SWV)_OLLh~daWIerAdz5 zi6$C1C;hd2?YU!ptfdvf7_v%x4=!QaBX&}nlelSIXv>jovqD=dlrk0u_hmB_gS6fp zzpLkHS_yWVQ_Hw_wxXVTyN$}iptRD$ktKPRnd(9|dMT!qM4PEvOZUu?9yYq@w_OSm z2le7rCQ@NcBxR7k%DUmg_;#plu1!K&lIAjF6w1!bwTY`-rYSI`1cMV+87KKpGcwm! 
zIa%OlH4l$f84TA7WB zEE{_~xRz>*V}>eiqRlp~euf+`Bdyk_ zXRmg;IgZl}&1E|_g)!a7qj%TmFV-g~rx!0*d3$83PMk+o(vj?+<`l;$RPdBdK#TE| z-yXd=A9s$`T@nV3CAC`4CD3wdLYT#EQqXt{Q@SKkQ zMV|XtBI6fz-8Py`DY!C-7sk{GN}aXa(ncBNcxd)9jPdZ-N52}zE-p0vQq``b4VBux zKl=Jsa^B2PaM|`_n-@2uq(OTWi&}ba8axaUtcImhq@7lPX6MW(lW6g*iBU3s^K|qe zD<+c-zLdtS+PkwN>zB+jyGyoOEMgSPA(~rT!r=qv{>0K4qVfU7+!!WxTXhDK(h%~N z`E=8`Tf^dwCoVB?gW0yXj3PH>*evWy8MYe2uad)e-7L~Wcg?ta@+^YU7fv1=mtPUR#vo_Z2^JWYxr4~zJ5hc&_c49^jpmD@-*s5T})Gy~p zHa07`b<>D({OhAfM|a0fn(ii@`^(kxD%SAfT>&xsvfQGHzFRqFoj? zS__Nl$={yC#{MF2hTVpSTXV3sIVeSh%Z>RmQV2u{$g2)=is{e zmZ6v%^SXlkbR&wDT1io7okEAb*K}}9%Wep$lp(ll)<$aizSwHovcF_IXocR zcBK$v`sIE?_i{)OPL042leCx&-Z^NIqCJy0TBNDsrZ1E#26xS~ow?bbt{deL%{aVU zT^HRMF!nrM97-`{rwkumY!xH|4P~2UV{>f@ zZ`y827_|!)#3p6OoQ+kt6rG4+r9G+x$0}LXR6Dg%k5D^)t7!1WVZITIVo1tW);Txy zw8I-RFfkiGyKfr8glK(D=RjR)EX8CZ_tq|zDU{3Ty|?v3misf`o*K0ZtC;ODf7*q^ zxNA7VVm5ihUt|$`EOo1O_CSNZmu5XR27cieqT&`XVTZact>~>_UJjLD?E721C zqG;DTqd}S}ZML$Qq*?pw*;_r?g4w*c=-UNvLl_1MJ|6FKpwotE7~GMBxr2m7k|n{Ey4azuqQEMk^7CEa0lTkKklCQiL~ zIEk5%$DHYi*R|$_V&K4KJf3U{qV!78fJKB57DF(f`jg1WAsDkJxWSUS|FgzZI9S^F z7Gtmkzu2UD8@~{ZzUxan|I5h*;+KaSLR3^kG>HGHF#0hi{=Wn zWAkOD9;V~XgO#Gn4zlD_eaGqMHAT6j7(jj-G#nZAt(+CDpApKff;p5;BVJ;fyP8!N z3t`cu;~TRgD9J{J`?7D15-AkgFel>|a`d|s^tO4n9#M~e_qu8xsD#CCLYy{@vvtq1 z*m5oyMQ~$qI^S_uIcv8gdx!Q`sUy%?c6(9GOwfyQnJsI99F9vdI`ZMLO8J|%zhf)u zrI;~3%JpH{$z>j#Ffm&%LMXLLp||8pZM}Z){f*etSKS-x8g3Q{rH17tbGxIrhL5=; zS)ciB3|;drLs#d6SvlB>Iu~Pq-`+acwb{%zr8d>LopiQ+lxxE3v}xzYK-v0*@0{3Q z6fvMJD>tBhw~7%ljU@q#jvm^gRq0ddPEYKBva*RWYYp z1z?VqMJI?wDW|K7i$&egSh=JgPMt6m<(i;ZiZ+-bc5q~EfTr;aDJqtm1`{o+bii^W z3&iDBm)h^f0`-1#GDLmwKG8Z$81)L;E|1wRs0wSV&)}vmws6;1yO&qp+I zt)e>V<3r;pkJ%#)Dw@ZfxL2)b9~zdIQ9BahjO3V%^VZ2i{N+i+Q_p5%C^*n%4F zhHhmvN15o)R?hS`S2=d`*=9XZido~TGeOmBWYqAsFaK5>MoiJVFFWUO%9AygY*um6 zit_Sq0*bi2?8Wh92lB+kz{!4JZ1y`u5yEC`gN6)rlzlqyMD{7ICTCWg)6zRsS=vAbSYDeJ7M%1vdJGJoUVDwVf&+pO&_fO6ZqF*p6wdrABtDca1? zflgd@b|01JHq*mo%vMuJ?C}J@aEf?%=?Q<)Cdn9;3w}|wI{?NnMDOP9>$lmv^UfZR z(u4{QILeZXP8YLHvTukbX_OOv<3uc{Io}m_um_wo*d7~S{a#TZWvFSU758-%? z;dc0onDl_E7@cDQr5JKZc=gmfB*ahJV8q^^NX8ED!pWv?j)KFc7MEpe3Pyl3FZGQ9 z7dCdx-bwh2n4l`rkS#HL8*=L*5log-qUeZnF(t%)uj<)TN*5P}V5ysMdGeewTD%;r z4b?=yP+$u;!Lbhm>;TIoK_M$Ph zHTD9_!M(6^Wkg&10KIS~2JxRdld`c@2di&ge4bZ1C zsr;yIQ_JBo-i05nN4QZL^cz4Z!uF=mU@)y^HDndJ!X88ZpDx-eo{ zmO{$CL$}1DxD(E$sA&;MrNbxx34=i)P-0Q&uv@ zYu4WJ5tgM<^CnS)-j5mD)qJtvYTYH%`8b}q#Vf^B`cv7k(Zc$e!nB^M;SY9}ZLNwm z`nznhZp@;hO63f3=TZGLq?Ty*p?Ce%%`{V?Rlls>b|$FJp|n{#~= z&F5YUg>CN_wV{Tw_k7BRna?);p)^eJ*f!$Mc&u!tCJfp@N{p{gNa5I!iLrfvTQ zNd|57ztIBTg3p|D^$UDhsWy7lxVd=eX^!E-)^+uA2?ge9Pw?nGCgE51Y!l+AJ$u7h zWl#mrE@BQ6#f~7#i03y(`NP9 z8qj6oezW;B`gkiE)_rXH=l81*V!Ox~+x4Ry15JyB_SLPIB|%|bbkQ&<6t@}IRbO=m36@+sAB^6r*o`czSlbzFxife06$pb9?l%@oUc8Z`=n(`;tRxFw(O2h|`!B zVmudZgl}(M4qCqkqv)0gkaBTu?K*mUrXH!z&(IG%rE7x9#>8RPBsRA=gL}qAlLbGB+58B85EB|A zCr#%hR?GrdM;C`}t~S*rwm=RF8FMgO-qWoIR(Dmc?~LAz9<4!a7fn(j8%WA<=Pl+_ z2fnekN>S=8QI4XC84VX|ahcmtsTP)j%RITl z%YxaTxBj(8G3!3H^`ILX?qoWCV#YjIEC+26+W>Uzv!uE@w&zCJNNqHt(MZ8yd9|RV zxiMse0y+EFVPUcPf?gfXyV(~HMq(Q_G0W_kUGibvIrkgFd5oYH>xvIqHD3B8Ogph! 
zLDfow6|qUFek4cCu%dY{3#U%i1oknlES;6>5Dc0a7d!WZEuWW3pIYqw&evjtQqf$& zFe}p20fyy|l+u4Lkd5L@7ZW*uKMRdJ-94H{uz8LgEMvmHcGokl4HFRt+ ziK4U{%DuOh3twh^BrfP)_m!Dpiinw!H}Qhu%m@Z@^EJ)hVOB1EvJHn*nuF43v3rek zJM3Q;q}7bQtuBAl%EsQ`1cmoG&Sn#d;n1e?#6862#}D_fs7l_RCiFg~9LsJ`c(b4= zlP?$$ZPywc3A9s0!K*^J&uv-+C?Vk@nXOACuRCun0*~_QPGY0DI_UC^+ltW$^8VI` znkoj`_ES|0*s)tdPsPN$X`9IY_`#2va75Ja>23?nd`}lc(YqTNVjvhb5uuB+fvx?B zpaZiQlhppU$=VI}F9+Z9Q(4%?ZTj{M$Cmn=HpVIbH_o}YAw=(Qg3%FU;axAx=ALbN z$-+)F_B&E_h~(y_G-!uf;Z%3)IQ@r1uli_C*pD%rDnYelEK-%h>mkaOa1nI1HPnJ#IA$ zEt)2u`=azVjqERX=-4htqQ9irYR&*du~m)u&Y7$-;sY+4+q!kwtkB9$jAi&>S-wX1 zgqPJzv%QrhTc(>^JKQqvSfUyS;gkFE8+*Q4v>#E}X(+Vc59u8W0a4n^jfaRXDeB=C z*0>xC&djkLQ_5Hfo*c7t5X=pK#AI@*EXCLk+xji{dEm?Q^UI4>QHA9AxC7VK_a6VL zi#rI4*+!)r4J1cnvp*EJP;lRvHC*~fZcewvzzo|eChIRKPmD{^)*~X$tv25>G53k; zZ<&O*kFniRL{!fTT`od`o9+#DNOg$c)2SEKw-pPtr=1$l+%?A<8sUSwB z+ZI(wrSPxw*%L9$$_ci#-4`nA7Bh3#L_XEM?zQYtpE~Xc45fj}f(;}?1OvX^(l92r z9AEQ|o>U8&*#r!FF^}2Fud3Ba+0bk5K73`03|Wzm?S-g_*Suagsx4G+*NUxy;rJTa z^;ySGO%$eTfKD)t(f~of#;Dhz8(e(Xu#jt}s5z$V5x5y3N=fU=n0Kk4WY4gm!ZDjG z?zv8UVahl#-N%op)ZHl}iK>{0nqgT5(yS zIvRx2u$>6%N>#sDR_$rWWd$^fIg?Pjv~!v(25eAh=X*T7?1(Af26FiU9zMkJOfL17 z0kEt=*FihHxs%ZH^1L%d#pP9-+dE>_CH8TB&}@0z0z-Y>QL)N!4Ej3yo_55P9J@_O zQ#iz3m0f1qM!PGe6L<0fz2TjLO0*8JM#BV6=|1B=JOXQa*rCUbWZArWDK+{X{i7UV zeagWw;F%!SmCfgPdZ$D^#>}k=+2GCe6a{n%=;l#R-5B;#Ai&*3mWB@tdN;r%@>lOdhQ!S!`&DBB9Doh4t|lfTPz02Dv0jz zS{M^EUH0|T7CK=;+m~ItpzhUcv)8V$vEq~c#uq*}Ce$L8(r3|`bjs+LEz>#E6vJAqLw>FpD{;_SZFH^CnBvW-eP8QU z2o+{Keg;ZKgJQ&dK1B9}Iu_++Gapd!(DC)n(U%@RaPMy7GG{hiEw^Eh)sw^yM3=Tk>TN=i z$M(L77=^7o>TTqvCYwc0scl)4vHc41RL0-Bb8f$J;}&Z-oB~7@&4@GK2i$|*zJ%GQ za*%Wmu7Xx9&)n*oY?@iAtQaKT>nt4fL1nTS7y0_xWC#Z=Ook zE3NF-!fTJ-rj$yw=n!_edbzrqi<0E+;?`S|sw?bGhOkLuQn$ljv>ENj3R@RrBVU!N z+px^s_z51Iw~LVRU?E$YSkQ1xhxd)yRS15O#l(h}gL7=h2g*q?bdEjn!889a*%?vFOpFpAnYznRK5%YNSkZy1!%NT|&Z}j3UqNY_P9Sej)0m4{P=+ja8#uRz zM0nGf<5{I*7JI@6v{?eilnQ>~KEF2H*5%Z8--6qKIcQ$Zx~)x-jxI;cR+#|kW_PF; zph0cjSG0y}yU~UI4N0v;tqYajnrtu`w3C-WF21?l)KEAoW~dL54Ut#l)_!F#uNjYb zm2e$^i1LdbDB8^R-X0KArnNKe{fT&t53Dra8iS6`V=~SwT-qm6vU~GR7O`laa*deU zug~Q}2UN8{ruxmD;Pte%OIL?#Erfywa%q*rGeHzYRc*n2CvEV)QIe_+I{PWgEC_Cz zMY>)&V>I*aqUvyaI)u-Sv8ni0yQBH3+tI9dNWbiMNdL5o9~HCN#~FsRIe4tMfEs!` zb6$Xh@4%VuY9@=Q5@Gq2Y1-LB5Ysel z?HGrL;1_PmAD%m+JvQIl>TQqUDK2k)_$jUh-6!`3N5R<6Hmv%=wkuIuc)poyb_?f? zrI|UN!@9?uMk__?%Q}{6Co%Yw4{)28?HmwwmUBt{7JI<$c8(ad3|l59Yf+3n@62p0 zpR)9gRtjgDn2$|jW_VRlcQOSxh_R1+9t)9w@*n;|iBrKn3AomHBX0r88&Y<67 zbYxW}TVB+cY{rzb5SEuLc3HDxvN-DJZZ>Rso||_JP?SsL>uzV8QcREXnOP^AQn_I; zY_fKn#uzqoisqfqK8iAg4~W5%E49;}W)I21)Ee9L4yLLD?4!$C;7`p?5|7S?eFB0u z$YUD0evRm4AS+$hypEVFk$4dpes?I_X>9uw2yo@iSxq?Q#lu@Ckls!TX8h7jTB#Lw z(+H+cYN9Ls7Np;#ucIrK;cO)ti0w{soRxNCsiOJHcxuoK+;ORG!lFZT%YwD0%5eN)}F^Rl2kSX3cb53l+U_xNQrpMmSip+3KjQgVCXW^aX+YQGMn-n4$JsoFL(i@0^uhz=9H zZ%zzD;wj_C+TwF1b*|rL+=`siO)^FJ;QCF@HVX;dOd{S61*6*dhh9c)wX1)18S1P~ zD2_6of}0jG-dajGbYfEN>AU&bpHXVjG{ZY6q9fhjlCrdxL&ABax4X7Vd&oOA2bAHE zkn4X-gx>xWzwthWJqttW9bUiP*q`1DyRZ!nGg?@7q1%>iTSatXIp+uL3o0zw?kd$? 
z?D;4-&>XpiZ7;egSHn?)X2}*2bMn_mZ>`T?tWQo(FJ5-e?naLvxqJk%AK`f(-``oA zs@40OpwN@ZSVosKXl@KvEpG?wSN+$qfmLQkbr*ZzPstN;pY7bv6LB|knMH%IKABw{ zF$yx(4_98+*gA(}s;ZoAXfE%hu2Mk{`j0qm*2}HQRZ6C2j+1am0s+i+lrXHm96?mC_VqpyL{`Lcg|qz=l2|C zmG;q0)Wl^z_ABfkX_M7E5u0gP*lsyOnzU}xy)#6vY4ubLOB%G~W}Vn=4ezYKVw#kI z8wmaqE1W-;=&f5&>Pd|4p=b?tw?G`*YGXil?q&jr>er05t>H!eeLiw;{UtG5Q?K6N zh^Kfm_x_@sy3X97o&L(&Earo^O$|qxY%)Xj$$P3_eh~HX1n#Ym=w1IVYbHDu&0%I( zBiRGPHrEoa&d{iBT`_dz<%JdOBHOqpQnn03kr!{eSrkNcNTG%TiYCe0Ab{R35x35I zzCO8ieCCPYdt=ep+H098ao&dD$KSX%>>C&zPDwQSp!deIOE#NXqD$o1$U5H{6PL?F z($0!3t}dD70(Lh;Q`i46eCB4{*G?zwr_6xEXCYvNGCddWv?yip!)R9<;ocH!j8+p^om-SlG=1nT&; zunJ-fW^LYQ?}QWht~Wix8;iD4Qk9nfS`IQQCep&BPc}K27c7f4xO?`)TS@th*4Q7J15ZMfa$mK2=I* z2uX3zChQ|@6*=e!1)QsoOG`A{Q9fH^qv$yQ6_>DxK@3hz-j1kK8!f)W7;0YQ!=lJL z555q~N~-={;nLnl8yiczXyayUE-bNle9>s6@G^NbZM~2Kuwyo?OpmSa{^lx8WSb7^ zF+qvE)k3{JEPj)<8HK&S5sjCt^HMgR2G?!L7 zRd&JPJCP`Ap+;}WrqSN)w+6pAS~mv2c)T~&30q25%x}o{_PK1iP7b9Q<3Y)bnB0qi zW-&XevJQiUmUZZCbBQ|0K8n-ORtA+QbZdUx+wh`Gi^hj3>{2<8hmXlc4b>zzQGhJd zRxMezL7Aw$0$DyNzcTvL9~bRuW2qy`eQ?>So%{gNsj`%VRP7H_GQI+(l*)MNl~>OV z#a|LLw__7UpSeK8>Mc4PH1fF}>wVF7;iPU)@pr;e6EPSTqpVP2d&kM4s!405KE_M3 z91DX4k;RB(g5^Z;+tP)|?PNW2mR4UGJ#Y!JKYOT+@r}+D{Ui$6WJHFIE`c%ZyA+Oq z=Iv&q;I~Rrp4f`As2@8PWpRe|Lz_PGH*GveFH4p2Vazj8c6DD}-m?>xxz!?DXRuk! zx5(sFZUkD`owBgngcn18SnO@}x$hTa)+;SyypM_cxx@ekRo@!DeZD@rX#Ed;0#NPJ zCF|v5zIw-k7(Ye(4%Pb`*~nT^Wnl)nQX_pcrxocc%Ikw_hhm}%JC$A5Nn73J)7#kZ z-qupafic@etFtsf{T?J-ky~c4gN2l%)bXQA9O6 zNL;0!jmsADqN2BjWUbnyoZjEaZnc8rWYObZFgniD0!K;-c6l9Xy>ol3C|5KWjnHnW zi}D1km2)O*B(@>tWV^SihP{OfpVEFF(0Gb$dkr`O0REHc;WUw>%kP`@mRLN$$$|=R zES{e?Z83CNlSnyqnR-AaYBL-d?TwXmJc+uhc^smyMdRxiRzeQ&Mep07@-DcDioj^? zCVKm?;+G4b_CqX&U(Htn=a$k{h>mfmV9%U5C9m3{)MKPht>f=$sj#MoDJYkrr{ z5dG$jD#CiolIPYAg&Z?c@0M*!Y;XN!lMLz^S#;-e*7p2XlW5;7ZTFUYOQnozdFaHrm{Hp%6bAwlLV3JJxT!J=3C2r&F3_aHxy;iF)@q ztgCDhO)1Li4N5?HW9^(z+_<%&>K#j>DYDkRqW3p)h`V@${vj@hYv)j1ObAiarVe34 z%VM|Re(5c?=s`_D$+~sDjYMffEzxWY^-gz{vcm=6DEf)2=tX1rvtnb4$xgAp@fK9G zh!JM88@07GyzkP#`56m{Ecdc!sta#8w}iZkplrWU-8-;?Rj^G^H!2|;M_aFCRXg9c z5F=4uZCN)24WGw+r(CjS_m`s0esXVH%BIm~sgMyqC?KRnnaO*$K3`wletLSdUY*~a zt$Gp*YOmue)TroeG$}$&X=h{cm96H-_zo?gy=;UX*uKIWgH9>2@HU$npT zzf(>f1o~Z=c>}rL0jB(B_h=~hEjy(+N;7txdqum{z1@i_R_2>h+Ci-*8n2jYfYnNC z=ooo<-Z6L(m6Mleo!DP-xh%8#;;S^rtGtqQywyZWY?bMpgnM|pWSu0dQbTWUJZr53f#+F;e*SuK83G?7)ti-q{j#*x1FIid=M>c=E5^S2IbDqGxLx64#% zty6jn{Ql^jM60Fo0=V@CaC29Fom8Ezbyq3% z++A&CO`y`yuyo%o)yvY;;)4sl(&AeDht`G$=!JT1zcbrAKV&lw$C3Ve^Sy<<`hx5x zysR=42(VbR`6y~n_Iw^}W1rbu>{39CJ0}E+fCvhbwXUB=K}4tcuiJ;iLtN&=eZtFR z-{l<#JuzLxU7ev@oY3kRzlpnw?)AQg#=0O2URZ(1VLNA@envZoyDiB4_%~vs z{j^6I>$rGCHXM*FE31ANMBKUVA{b<<>}QrK76bWoF@Wu3ZT1Q&g#l*Dhg?l9O~FpJr2 z&JSS&Sf$yNQVbqp%BEHm9OPw9Sk~ZZ>zq%EW(!`Mi9-2PCUB1K$457}D}My5Za&l- zjiR;uH&VhoDzm2Z5eL!z;;wn~Wufq{^0H=2FTAW88n)T6Rz73OiLNY$b2>#~y{Bea zSaIvD{f^Ntz~~%Ci`Es7*n9x9_YvYZSvx6ve=a=G`fZBH&)-ga}jx4iN{Uz4sBN5?yo-TNN=OZkrG9Ey~}t z2jjg(kqxP?aZ00f4uHfGATM{`mAlN(;bak6a5WV~v`Nu^7ooRJWa+zEb)vHJ5yQz< zO6lWXG={@$MRELxr;IrrtdT{_0Ar1;hH%HBT^Yh!GwuZG_Kcyor<7*YPI80l`G~%Q5OcYu67dr6k(vR5>}uoxs?*$zet4%W`H`+B>{>ip~r4r$rhi{#++r1jf19-=gm_MRd~i6dwKObv?BE}BL$ zWCyfg8}6;ERwiTQ%14X`ho#aw^>0E-%gY+uxbQOVSa`N|EW91GmmgR`drA{n5zV3r z{85=PgT!z0HqK?2p0Prr#nzqb%_r;Xf{^=%puF5g+wOF^OGEA%v093-`f(;%2=`fu z{<|t|?bq?0+S<@6%217qwaTsC4pH!8IKy7Hjf*7SrES_Iw%)dp#cIWM?{8!SMQ(Y! 
zk4W^FURKs^&h}rf6rxAzxQ{w-&dg#KWB+9-W6B3hl@l_}X>-qHC_{l|_1t6nt0$6N zwbusO2C7rOyi5&jb-E?%Cdn#IrQAs?ml#xeW1(ZTX#Mof%vYW+Zo4^l*k!D0%agdZ z|A3)UVx>o|glOdvg90iTW<{GlZw#~W7wyS*^ex3AsbK2~8B-s$^=YNzZTT!|4uBxBDW*6&Q%}8((LS zps)@qg5o|Y>;m+aZ3(SKj9nsG;9=X2;5*_G2@LWX!y)qFKyN&>&1ecSEZrwTBHPDM z)k@h1fh4=2=)|Ff{ja5OWCe@Jy0*q5y#-Nz<3~jAZ$wYIFhQlK zf*~+zH`ff-DTR538QY}P@3c}{?{P}02K%?9b~naLp>`k-o3v7Mf+*#9g`M8q`T(2K zv)Ztf6C}4Hvnp7*vSwsZ@$51Otfdxq2KLQs=J#%d#Itb82;31u$uXaX$%ytn+)OW zX0o$*-K;RmOb$S|2^lxapv;KDRY@6z+WM&6dhlSftfTvuQae=-w(P2nq<2nBD+y2Em&nhMIQ4F{bmW6iRem5&%cPgPn^JIo2js6P-hmz#1> z?0V|w^J@$wXiZ{FKiMR&V(y!FUZ7(1+u)ehYO&L+$najd>{!+HRVm4CHU-Z2p`G}m zZInW2o6_}BZpWxEh9Ftfli>u`>SHWoN&&U79(kGiIX>;-8!U>}k%M#CxmA0_PVaZa zCLl_&{C@rp5RCi$+EUYpN&yCys_Y@vw6z{-oo}ftBTY7O*7?G9OfL?un1MWPzHVdE zRF)}U7$=!Z3l1`=eCHT*~BP%FKbW7Hl-x-exSKgTHR@sU!TVe-B7pfzlc2|S^ zweH=(FK#p~cN)_TJdDOu3cX_#tmQr+#$TaE3dusa>w5d+>HjzyJsyqzu--d$yQT;3 z<9nmGNB6&fadYtC`0Vuf6ZiM8xc{{(|Mtm4o z;BSmZ|MBI`&5zcnFF(5Ny)HZWtxI(-+OZ=FjHOZ%%L5zjx*CarAP1^Ze@Qx+}s<<;oW{om2;?e)zE zpL}|B{qpA7%k}MhPpf}%zxunE7whkQXY{?FeevZxJNn#hh<~&`yINm&ia05h0$#6g z@2)QnZ`NlYK07~hZ~9l)moHuYo*my^Upwp9)r+&s<4@lE&a<{&?|rwd%=?Gm{cc?? zSBL-dHPzwf`Nc)20@-*`EUG$e4~$M*|Ko3;-mK0pPrm*8hxLE_6Ia0hZ>xYOr#GKu z?ND&OR_CX;s}I-flgaAi(~Fl^N2jaLPLD3nPVdf6k1k%WK0LiRI&*h*1IrDCYD_#o z|9o|^zP)*VetEJ!bN~72>f_^1VT)N+(v>@vaQ9wSC9GQc?C6EN{^-N<(4wx#`*rg- zl{#~C=kSBWYN$P{y7K1Pc75O5+^PAoJNqbSYV*H7D^|Czi~ipA-P+au&)VAGUb`-K z&%L}T2W?%UPMPy@Rp#UCwOdP9RmG}uS6e`hWn1;Hu6#cztNnM}-q0UZR&MXzoL#ym zbQ8@Lq5RM0^7>!gd|Y)d$j7c!e-T@%Y+28&zvUi%ceei9dXe5gyt_JSi|q>7t;G(d z`_Aa;yA?z(+$6s_zPwsL{?d2vJ%9T4=*#tZdi>=-y7%;7Y506pw_;ftLRhw1k$5*8G3d({fVlHc87;6@FU1 zhrJpzH^1K|XDwOLnXg%A{_{tpQdOSZ9Jq6i$zt@*=-!VG-ddl(Sf8A@7nOr=*6+|? zZ~w)+XFPfP?DF!HyDRrn@ZxMO-;}>LddrR7o2#Sin{}mV-x@twU!5Oaxwq(F8NK8F z=2rRJ>(du^Zda|kKvl*61_=HlAoyQwG3E={)TrMy!x!#pR?Dz+s&`#ns*I2WO6Gr+>2k;2&16(kG|K zw{FCrH<)u5m;dV@xjXww;_g?zPFMfZnHnF||LEq?(fO*}+xeh=asQ+8AO0ivzuzn0 zBR_cBKK^OB>BAfOZg1pIMvpqf?D1P4d6)Q~-`n03{Kn{O*X#33w|{YUz5Z~0d;F23 z*Beg{MsK@qtrNE!b9HufTq3X9KCI}ts$TUS_}q@D-yu0YsdgCO9ewSidi}gvAK#u{ zUfeu>^fZ9-IFEp`eD}!<52q-u9EkGG417)BLl)rc=|5+!aFPqE-l>b_t(Ra`uW30RpKU9XwP#G#i z<=0H*Tm~vDR3@kI-veheB9)0$CQ`Y~w*EyVS_fJ=FJiPZNqtcHRXjZjRVGxKQ01sl z<$%hWjEYmVGFg3487gB+bg_f&&-;hJn2p-7EXbWcER2s0@{%avCa6r7@90 zWm5Z~GE|1jP&o~i^QbHma{Hh%REEk>ISrL3lZbfbeEjV8{Ax8Ow~yRDa{I{bBe(DM z=Jt)3QB)?k53P(=Mk}M0lW67fA|^5}C$|qOLuIH8m6K3;IgQC8$;s`5%1{|9L**n? 
[... GIT binary patch deltas for the deleted old_program/*.bin files omitted: base85-encoded binary data, not human-readable ...]

diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/old_program/old_update_startup_program.bin b/feed/feed_deploy/news_jingpai/package/format_nets/old_program/old_update_startup_program.bin
deleted file mode 100644
index 76b538aca52b9c46cfae8b79b8ffa772f4f5fc2d..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 48630
[... base85-encoded binary delta omitted, not human-readable ...]
zYE^ZTt#h43JU;+ldG|cLK@FVwjPduywKuex4`RMHVC=3hx0vhNP2Y<+@wl<7E&72S z`YsitJ3njuQPY;-Op~}S+|fM9&%XELoUT7+F4c|FzTcsxqxd-^-)5!YnD6_Z-EKB( zyO!MvSjlCLGsJqOgdS!2? zTGW}R+d)VJ>N&O_Xgd56&cmpFa7e;-WVR_U81f7}_rCAZM~}4kzB_(|%wwDpmwktZ zX^gvj3Zkqa+m`qu4i(l8iNmPJ>mP+N1xP;{Kcs<`wo}`ch?*e<87&%7Vp6sBbR$)a zjb{b8TGS)zwgBMg;|J)HV;b7~)D7TYgG4k{bJe{ai8G>HfMlcrT`q-AqD~#xQ@;W2 zFUF4+#sCkY!%EaqsYBtY34JaFx>8on-Q=O2TyMl4bYqG31#x3qFPr6}-txmHwVOe# znjZlFBLmoB(Ltp*bfUP?Vcc#}>eTG#AngLe>?1~em&Gn4UfXUlkGQt$!$5?gBgasw zb8LH*161FkF8n-9L#6vn{nMpxSRZ#O>VG5#0m=WtR7aE4cNBpn@WfCK` z_YjGA;ixX}bV-A{Mcv`Y0j+PeT%Yi&wIRmg$6Z0eLjprOw0%zuZB>jESW96|`m8LD ztkF1dL?@Of^t%2;l(P`IKGYP*pP~*z$xaQJ@65U#jP0PEk9I!V`F#sdF`oZQ$MdUb=cApEc0StqXy@M< zJ0E2}+WBbbqn(d-{+-#AWm#zFqn(d-KHB+c=ieDSA7wt;`Do{(osV|@t7Yf^ehA;r znic7b_8%?HGX(Q87r$mAP#8<2BO^D6uAaWoBOt+)Gr(| zzMm$p?=imr?rLGD8L2JB`A-6wCZWMdl+Kj^QD`_Ja6^>}zVLbkX$-oA?J`g;DELiD z9}Y#2Z1F%halvn|s2EYn^HOD~ZmVPuEfw*Hh`CLgoV_5N`5th;t1JR$e-R7fHUSn2 z!#Xx3s0evo1kqbA6qkB7oQ)uY{-;bCu6&AMSIu8Y9(bVWmy;N&4qlgGQ8RVMd>=UW ziTjPSEE5Ib`d7&WBH;X|^@>^6H+A_tddi+X68!qVyykN4sABcae266sCS+-WHj+QP zWlnac0a-t^j=Ul#KX5+dOU`HHH7$-HC&Gd2cpjt$kZAzUnEy5HUIbYrWCDRDZ6@Ll zWDD>b-+KqL3*75a;tfn5@cDAT{)JpgBk13MEu#KK{rfWcey&6RUX|I9Ra`YonhS~I z5^APaxGzzqcr}yN#f& z&|4ff8IRuL#o6huLtCM@cu{V^B!G$D;+b!V-Xi9Z8!d<2xHNDo)!d2E?=8yAx+Cac z^cGS7zFc(IaGEsmB}6eF7)Bb(VgtmDadIxbz+=PjOOyhWMEeFSZV-XdzN+pDe6 zTSRYBq`}6yMVwodc?WNo1|H%qR!y8+3|Ptia@+?Pt-aZ3EqaUSEqXexgK4S_U65r< zEV7LQhj3V$B934=(A6$Dy%ulLG8-eUgh6i+y+!mEsb0aAsJIf<@6gg!udv0{J(Ib* z=acG_VJlHjjGy(bM77Xc?4Y+e={rHtTSRXWy+zNSFmG{?u1f7^($y7CF5=`OPA=lcMVMb!OkFoOxp?oN`E%~#(%EK^)12}1K7X--{vs;iiC4hr zFQUJQ{^G>>i-VNaYD!tn9D%;-cNoj)FrvPiczuNqBRY)eFrveF3mnG$lW%<`%XE`= z7-upLV;LPrRKOFjfYD(@hY=mdiE|hSDXWyj_{GDc(;4HhOKW*Wjk*9Vd(?55*Ir+V zJ=OxozzeBSU(@CtM%uvZ=t{!3Pu>Ns*voUH9W5F_4%Z<^aPjcZ^Y0pI+Bn#~amFXN ztj+m>9r`ZqQp$Ch;E;o-eq+2Y`QJQX@r;^&K=ad^xsve%WQ7}&!E9@1ThfKuHn2Ln zN%p9SLmjt6;xOv*F7YO^veGm^=$Kepf1RPs90S+wAcC5$vZBiD4OvM6D#_szCe!mA zacNf83&`PqGOg>6VZ)bhs5ER>z07i35zF>%bWmKMa=o{yiZ19sm0(KJp21!KQJDHu z$@>W`tW7ScSd+baHry@_pGmIS2r43?uwCGzFa}auV4sCT@sKrQP8~TGiuzeyCCsTG z3hLmm0o7hX2%oyq17HYR)Q`KB#6!Y78rm0RMX)MzD5m!*$Y#pyV|bF??k)zdoD);i wW0JYAr~_-TC<1){vtA#&1i-bYWmrkn0&wI?alWc8NHr008i*5X)ALjR1Nye%TmS$7 diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/old_slot/slot b/feed/feed_deploy/news_jingpai/package/format_nets/old_slot/slot deleted file mode 100644 index 3e91b42e..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/old_slot/slot +++ /dev/null @@ -1,408 +0,0 @@ -6048 -6002 -6145 -6202 -6201 -6121 -6738 -6119 -6146 -6120 -6147 -6122 -6123 -6118 -6142 -6143 -6008 -6148 -6151 -6127 -6144 -6094 -6083 -6952 -6739 -6150 -6109 -6003 -6099 -6149 -6129 -6203 -6153 -6152 -6128 -6106 -6251 -7082 -7515 -6951 -6949 -7080 -6066 -7507 -6186 -6007 -7514 -6125 -7506 -10001 -6006 -7023 -6085 -10000 -6098 -6250 -6110 -6124 -6090 -6082 -6067 -6101 -6004 -6191 -7075 -6948 -6157 -6126 -6188 -7077 -6070 -6111 -6087 -6103 -6107 -6194 -6156 -6005 -6247 -6814 -6158 -7122 -6058 -6189 -7058 -6059 -6115 -7079 -7081 -6833 -7024 -6108 -13342 -13345 -13412 -13343 -13350 -13346 -13409 -6009 -6011 -6012 -6013 -6014 -6015 -6019 -6023 -6024 -6027 -6029 -6031 -6050 -6060 -6068 -6069 -6089 -6095 -6105 -6112 -6130 -6131 -6132 -6134 -6161 -6162 -6163 -6166 -6182 -6183 -6185 -6190 -6212 -6213 -6231 -6233 -6234 -6236 -6238 -6239 -6240 -6241 -6242 -6243 -6244 -6245 -6354 -7002 -7005 -7008 -7010 -7012 -7013 -7015 -7016 -7017 -7018 -7019 -7020 -7045 -7046 -7048 -7049 -7052 -7054 -7056 -7064 -7066 -7076 -7078 -7083 -7084 -7085 
-7086 -7087 -7088 -7089 -7090 -7099 -7100 -7101 -7102 -7103 -7104 -7105 -7109 -7124 -7126 -7136 -7142 -7143 -7144 -7145 -7146 -7147 -7148 -7150 -7151 -7152 -7153 -7154 -7155 -7156 -7157 -7047 -7050 -6253 -6254 -6255 -6256 -6257 -6259 -6260 -6261 -7170 -7185 -7186 -6751 -6755 -6757 -6759 -6760 -6763 -6764 -6765 -6766 -6767 -6768 -6769 -6770 -7502 -7503 -7504 -7505 -7510 -7511 -7512 -7513 -6806 -6807 -6808 -6809 -6810 -6811 -6812 -6813 -6815 -6816 -6817 -6819 -6823 -6828 -6831 -6840 -6845 -6875 -6879 -6881 -6888 -6889 -6947 -6950 -6956 -6957 -6959 -10006 -10008 -10009 -10010 -10011 -10016 -10017 -10018 -10019 -10020 -10021 -10022 -10023 -10024 -10029 -10030 -10031 -10032 -10033 -10034 -10035 -10036 -10037 -10038 -10039 -10040 -10041 -10042 -10044 -10045 -10046 -10051 -10052 -10053 -10054 -10055 -10056 -10057 -10060 -10066 -10069 -6820 -6821 -6822 -13333 -13334 -13335 -13336 -13337 -13338 -13339 -13340 -13341 -13351 -13352 -13353 -13359 -13361 -13362 -13363 -13366 -13367 -13368 -13369 -13370 -13371 -13375 -13376 -5700 -5702 -13400 -13401 -13402 -13403 -13404 -13406 -13407 -13408 -13410 -13417 -13418 -13419 -13420 -13422 -13425 -13427 -13428 -13429 -13430 -13431 -13433 -13434 -13436 -13437 -13326 -13330 -13331 -5717 -13442 -13451 -13452 -13455 -13456 -13457 -13458 -13459 -13460 -13461 -13462 -13463 -13464 -13465 -13466 -13467 -13468 -1104 -1106 -1107 -1108 -1109 -1110 -1111 -1112 -1113 -1114 -1115 -1116 -1117 -1119 -1120 -1121 -1122 -1123 -1124 -1125 -1126 -1127 -1128 -1129 -13812 -13813 -6740 -1490 -1491 diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/old_slot/slot_common b/feed/feed_deploy/news_jingpai/package/format_nets/old_slot/slot_common deleted file mode 100644 index 869fb695..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/old_slot/slot_common +++ /dev/null @@ -1,99 +0,0 @@ -6048 -6002 -6145 -6202 -6201 -6121 -6738 -6119 -6146 -6120 -6147 -6122 -6123 -6118 -6142 -6143 -6008 -6148 -6151 -6127 -6144 -6094 -6083 -6952 -6739 -6150 -6109 -6003 -6099 -6149 -6129 -6203 -6153 -6152 -6128 -6106 -6251 -7082 -7515 -6951 -6949 -7080 -6066 -7507 -6186 -6007 -7514 -6125 -7506 -10001 -6006 -7023 -6085 -10000 -6098 -6250 -6110 -6124 -6090 -6082 -6067 -6101 -6004 -6191 -7075 -6948 -6157 -6126 -6188 -7077 -6070 -6111 -6087 -6103 -6107 -6194 -6156 -6005 -6247 -6814 -6158 -7122 -6058 -6189 -7058 -6059 -6115 -7079 -7081 -6833 -7024 -6108 -13342 -13345 -13412 -13343 -13350 -13346 -13409 diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/old_slot/to.py b/feed/feed_deploy/news_jingpai/package/format_nets/old_slot/to.py deleted file mode 100644 index 638c5364..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/old_slot/to.py +++ /dev/null @@ -1,5 +0,0 @@ -with open("session_slot", "r") as fin: - res = [] - for i in fin: - res.append("\"" + i.strip() + "\"") - print ", ".join(res) diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/reqi_fleet_desc b/feed/feed_deploy/news_jingpai/package/format_nets/reqi_fleet_desc deleted file mode 100644 index c0d3ab82..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/reqi_fleet_desc +++ /dev/null @@ -1,1461 +0,0 @@ -server_param { - downpour_server_param { - downpour_table_param { - table_id: 0 - table_class: "DownpourSparseTable" - shard_num: 1950 - sparse_table_cache_rate: 0.00055 - accessor { - accessor_class: "DownpourCtrAccessor" - sparse_sgd_param { - learning_rate: 0.05 - initial_g2sum: 3.0 - initial_range: 0.0001 - weight_bounds: -10.0 - weight_bounds: 10.0 - } - fea_dim: 11 - 
embedx_dim: 8 - embedx_threshold: 10 - downpour_accessor_param { - nonclk_coeff: 0.1 - click_coeff: 1 - base_threshold: 1.5 - delta_threshold: 0.25 - delta_keep_days: 16 - delete_after_unseen_days: 30 - show_click_decay_rate: 0.98 - delete_threshold: 0.8 - } - table_accessor_save_param { - param: 1 - converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)" - deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)" - } - table_accessor_save_param { - param: 2 - converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)" - deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)" - } - } - type: PS_SPARSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 1 - table_class: "DownpourDenseTable" - accessor { - accessor_class: "DownpourDenseValueAccessor" - dense_sgd_param { - name: "adam" - adam { - learning_rate: 5e-06 - avg_decay_rate: 0.999993 - ada_decay_rate: 0.9999 - ada_epsilon: 1e-08 - mom_decay_rate: 0.99 - } - naive { - learning_rate: 0.0002 - } - } - } - type: PS_DENSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 2 - table_class: "DownpourDenseDoubleTable" - accessor { - accessor_class: "DownpourDenseValueDoubleAccessor" - dense_sgd_param { - name: "summarydouble" - summary { - summary_decay_rate: 0.999999 - } - } - } - type: PS_DENSE_TABLE - compress_in_save: true - } - downpour_table_param { - table_id: 3 - table_class: "DownpourDenseTable" - accessor { - accessor_class: "DownpourDenseValueAccessor" - dense_sgd_param { - name: "adam" - adam { - learning_rate: 5e-06 - avg_decay_rate: 0.999993 - ada_decay_rate: 0.9999 - ada_epsilon: 1e-08 - mom_decay_rate: 0.99 - } - naive { - learning_rate: 0.0002 - } - } - } - type: PS_DENSE_TABLE - compress_in_save: true - } - service_param { - server_class: "DownpourBrpcPsServer" - client_class: "DownpourBrpcPsClient" - service_class: "DownpourPsService" - start_server_port: 0 - server_thread_num: 12 - } - } -} -trainer_param { - dense_table { - table_id: 1 - - dense_variable_name: "join_0.w_0" - dense_variable_name: "join_0.b_0" - dense_variable_name: "join_1.w_0" - dense_variable_name: "join_1.b_0" - dense_variable_name: "join_2.w_0" - dense_variable_name: "join_2.b_0" - dense_variable_name: "join_3.w_0" - dense_variable_name: "join_3.b_0" - dense_variable_name: "join_4.w_0" - dense_variable_name: "join_4.b_0" - dense_variable_name: "join_5.w_0" - dense_variable_name: "join_5.b_0" - dense_variable_name: "join_6.w_0" - dense_variable_name: "join_6.b_0" - dense_variable_name: "join_7.w_0" - dense_variable_name: "join_7.b_0" - - dense_variable_name: "common_0.w_0" - dense_variable_name: "common_0.b_0" - dense_variable_name: "common_1.w_0" - dense_variable_name: "common_1.b_0" - dense_variable_name: "common_2.w_0" - dense_variable_name: "common_2.b_0" - dense_variable_name: "common_3.w_0" - dense_variable_name: "common_3.b_0" - dense_variable_name: "common_4.w_0" - dense_variable_name: "common_4.b_0" - dense_variable_name: "common_5.w_0" - dense_variable_name: "common_5.b_0" - dense_variable_name: "common_6.w_0" - dense_variable_name: "common_6.b_0" - dense_variable_name: "common_7.w_0" - dense_variable_name: "common_7.b_0" - - dense_gradient_variable_name: "join_0.w_0@GRAD" - dense_gradient_variable_name: "join_0.b_0@GRAD" - dense_gradient_variable_name: "join_1.w_0@GRAD" - dense_gradient_variable_name: "join_1.b_0@GRAD" - dense_gradient_variable_name: "join_2.w_0@GRAD" - dense_gradient_variable_name: "join_2.b_0@GRAD" - 
dense_gradient_variable_name: "join_3.w_0@GRAD" - dense_gradient_variable_name: "join_3.b_0@GRAD" - dense_gradient_variable_name: "join_4.w_0@GRAD" - dense_gradient_variable_name: "join_4.b_0@GRAD" - dense_gradient_variable_name: "join_5.w_0@GRAD" - dense_gradient_variable_name: "join_5.b_0@GRAD" - dense_gradient_variable_name: "join_6.w_0@GRAD" - dense_gradient_variable_name: "join_6.b_0@GRAD" - dense_gradient_variable_name: "join_7.w_0@GRAD" - dense_gradient_variable_name: "join_7.b_0@GRAD" - - dense_gradient_variable_name: "common_0.w_0@GRAD" - dense_gradient_variable_name: "common_0.b_0@GRAD" - dense_gradient_variable_name: "common_1.w_0@GRAD" - dense_gradient_variable_name: "common_1.b_0@GRAD" - dense_gradient_variable_name: "common_2.w_0@GRAD" - dense_gradient_variable_name: "common_2.b_0@GRAD" - dense_gradient_variable_name: "common_3.w_0@GRAD" - dense_gradient_variable_name: "common_3.b_0@GRAD" - dense_gradient_variable_name: "common_4.w_0@GRAD" - dense_gradient_variable_name: "common_4.b_0@GRAD" - dense_gradient_variable_name: "common_5.w_0@GRAD" - dense_gradient_variable_name: "common_5.b_0@GRAD" - dense_gradient_variable_name: "common_6.w_0@GRAD" - dense_gradient_variable_name: "common_6.b_0@GRAD" - dense_gradient_variable_name: "common_7.w_0@GRAD" - dense_gradient_variable_name: "common_7.b_0@GRAD" - } - dense_table { - table_id: 2 - dense_variable_name: "join.batch_size" - dense_variable_name: "join.batch_sum" - dense_variable_name: "join.batch_square_sum" - - dense_variable_name: "common.batch_size" - dense_variable_name: "common.batch_sum" - dense_variable_name: "common.batch_square_sum" - - dense_gradient_variable_name: "join.batch_size@GRAD" - dense_gradient_variable_name: "join.batch_sum@GRAD" - dense_gradient_variable_name: "join.batch_square_sum@GRAD" - - dense_gradient_variable_name: "common.batch_size@GRAD" - dense_gradient_variable_name: "common.batch_sum@GRAD" - dense_gradient_variable_name: "common.batch_square_sum@GRAD" - } - dense_table { - table_id: 3 - dense_variable_name: "fc_0.w_0" - dense_variable_name: "fc_0.b_0" - dense_variable_name: "fc_1.w_0" - dense_variable_name: "fc_1.b_0" - dense_variable_name: "fc_2.w_0" - dense_variable_name: "fc_2.b_0" - dense_variable_name: "fc_3.w_0" - dense_variable_name: "fc_3.b_0" - dense_variable_name: "fc_4.w_0" - dense_variable_name: "fc_4.b_0" - dense_variable_name: "fc_5.w_0" - dense_variable_name: "fc_5.b_0" - dense_gradient_variable_name: "fc_0.w_0@GRAD" - dense_gradient_variable_name: "fc_0.b_0@GRAD" - dense_gradient_variable_name: "fc_1.w_0@GRAD" - dense_gradient_variable_name: "fc_1.b_0@GRAD" - dense_gradient_variable_name: "fc_2.w_0@GRAD" - dense_gradient_variable_name: "fc_2.b_0@GRAD" - dense_gradient_variable_name: "fc_3.w_0@GRAD" - dense_gradient_variable_name: "fc_3.b_0@GRAD" - dense_gradient_variable_name: "fc_4.w_0@GRAD" - dense_gradient_variable_name: "fc_4.b_0@GRAD" - dense_gradient_variable_name: "fc_5.w_0@GRAD" - dense_gradient_variable_name: "fc_5.b_0@GRAD" - } - sparse_table { - table_id: 0 - slot_key: "6048" - slot_key: "6002" - slot_key: "6145" - slot_key: "6202" - slot_key: "6201" - slot_key: "6121" - slot_key: "6738" - slot_key: "6119" - slot_key: "6146" - slot_key: "6120" - slot_key: "6147" - slot_key: "6122" - slot_key: "6123" - slot_key: "6118" - slot_key: "6142" - slot_key: "6143" - slot_key: "6008" - slot_key: "6148" - slot_key: "6151" - slot_key: "6127" - slot_key: "6144" - slot_key: "6094" - slot_key: "6083" - slot_key: "6952" - slot_key: "6739" - slot_key: "6150" - slot_key: "6109" - 
slot_key: "6003" - slot_key: "6099" - slot_key: "6149" - slot_key: "6129" - slot_key: "6203" - slot_key: "6153" - slot_key: "6152" - slot_key: "6128" - slot_key: "6106" - slot_key: "6251" - slot_key: "7082" - slot_key: "7515" - slot_key: "6951" - slot_key: "6949" - slot_key: "7080" - slot_key: "6066" - slot_key: "7507" - slot_key: "6186" - slot_key: "6007" - slot_key: "7514" - slot_key: "6125" - slot_key: "7506" - slot_key: "10001" - slot_key: "6006" - slot_key: "7023" - slot_key: "6085" - slot_key: "10000" - slot_key: "6098" - slot_key: "6250" - slot_key: "6110" - slot_key: "6124" - slot_key: "6090" - slot_key: "6082" - slot_key: "6067" - slot_key: "6101" - slot_key: "6004" - slot_key: "6191" - slot_key: "7075" - slot_key: "6948" - slot_key: "6157" - slot_key: "6126" - slot_key: "6188" - slot_key: "7077" - slot_key: "6070" - slot_key: "6111" - slot_key: "6087" - slot_key: "6103" - slot_key: "6107" - slot_key: "6194" - slot_key: "6156" - slot_key: "6005" - slot_key: "6247" - slot_key: "6814" - slot_key: "6158" - slot_key: "7122" - slot_key: "6058" - slot_key: "6189" - slot_key: "7058" - slot_key: "6059" - slot_key: "6115" - slot_key: "7079" - slot_key: "7081" - slot_key: "6833" - slot_key: "7024" - slot_key: "6108" - slot_key: "13342" - slot_key: "13345" - slot_key: "13412" - slot_key: "13343" - slot_key: "13350" - slot_key: "13346" - slot_key: "13409" - slot_key: "6009" - slot_key: "6011" - slot_key: "6012" - slot_key: "6013" - slot_key: "6014" - slot_key: "6015" - slot_key: "6019" - slot_key: "6023" - slot_key: "6024" - slot_key: "6027" - slot_key: "6029" - slot_key: "6031" - slot_key: "6050" - slot_key: "6060" - slot_key: "6068" - slot_key: "6069" - slot_key: "6089" - slot_key: "6095" - slot_key: "6105" - slot_key: "6112" - slot_key: "6130" - slot_key: "6131" - slot_key: "6132" - slot_key: "6134" - slot_key: "6161" - slot_key: "6162" - slot_key: "6163" - slot_key: "6166" - slot_key: "6182" - slot_key: "6183" - slot_key: "6185" - slot_key: "6190" - slot_key: "6212" - slot_key: "6213" - slot_key: "6231" - slot_key: "6233" - slot_key: "6234" - slot_key: "6236" - slot_key: "6238" - slot_key: "6239" - slot_key: "6240" - slot_key: "6241" - slot_key: "6242" - slot_key: "6243" - slot_key: "6244" - slot_key: "6245" - slot_key: "6354" - slot_key: "7002" - slot_key: "7005" - slot_key: "7008" - slot_key: "7010" - slot_key: "7013" - slot_key: "7015" - slot_key: "7019" - slot_key: "7020" - slot_key: "7045" - slot_key: "7046" - slot_key: "7048" - slot_key: "7049" - slot_key: "7052" - slot_key: "7054" - slot_key: "7056" - slot_key: "7064" - slot_key: "7066" - slot_key: "7076" - slot_key: "7078" - slot_key: "7083" - slot_key: "7084" - slot_key: "7085" - slot_key: "7086" - slot_key: "7087" - slot_key: "7088" - slot_key: "7089" - slot_key: "7090" - slot_key: "7099" - slot_key: "7100" - slot_key: "7101" - slot_key: "7102" - slot_key: "7103" - slot_key: "7104" - slot_key: "7105" - slot_key: "7109" - slot_key: "7124" - slot_key: "7126" - slot_key: "7136" - slot_key: "7142" - slot_key: "7143" - slot_key: "7144" - slot_key: "7145" - slot_key: "7146" - slot_key: "7147" - slot_key: "7148" - slot_key: "7150" - slot_key: "7151" - slot_key: "7152" - slot_key: "7153" - slot_key: "7154" - slot_key: "7155" - slot_key: "7156" - slot_key: "7157" - slot_key: "7047" - slot_key: "7050" - slot_key: "6257" - slot_key: "6259" - slot_key: "6260" - slot_key: "6261" - slot_key: "7170" - slot_key: "7185" - slot_key: "7186" - slot_key: "6751" - slot_key: "6755" - slot_key: "6757" - slot_key: "6759" - slot_key: "6760" - slot_key: 
"6763" - slot_key: "6764" - slot_key: "6765" - slot_key: "6766" - slot_key: "6767" - slot_key: "6768" - slot_key: "6769" - slot_key: "6770" - slot_key: "7502" - slot_key: "7503" - slot_key: "7504" - slot_key: "7505" - slot_key: "7510" - slot_key: "7511" - slot_key: "7512" - slot_key: "7513" - slot_key: "6806" - slot_key: "6807" - slot_key: "6808" - slot_key: "6809" - slot_key: "6810" - slot_key: "6811" - slot_key: "6812" - slot_key: "6813" - slot_key: "6815" - slot_key: "6816" - slot_key: "6817" - slot_key: "6819" - slot_key: "6823" - slot_key: "6828" - slot_key: "6831" - slot_key: "6840" - slot_key: "6845" - slot_key: "6875" - slot_key: "6879" - slot_key: "6881" - slot_key: "6888" - slot_key: "6889" - slot_key: "6947" - slot_key: "6950" - slot_key: "6956" - slot_key: "6957" - slot_key: "6959" - slot_key: "10006" - slot_key: "10008" - slot_key: "10009" - slot_key: "10010" - slot_key: "10011" - slot_key: "10016" - slot_key: "10017" - slot_key: "10018" - slot_key: "10019" - slot_key: "10020" - slot_key: "10021" - slot_key: "10022" - slot_key: "10023" - slot_key: "10024" - slot_key: "10029" - slot_key: "10030" - slot_key: "10031" - slot_key: "10032" - slot_key: "10033" - slot_key: "10034" - slot_key: "10035" - slot_key: "10036" - slot_key: "10037" - slot_key: "10038" - slot_key: "10039" - slot_key: "10040" - slot_key: "10041" - slot_key: "10042" - slot_key: "10044" - slot_key: "10045" - slot_key: "10046" - slot_key: "10051" - slot_key: "10052" - slot_key: "10053" - slot_key: "10054" - slot_key: "10055" - slot_key: "10056" - slot_key: "10057" - slot_key: "10060" - slot_key: "10066" - slot_key: "10069" - slot_key: "6820" - slot_key: "6821" - slot_key: "6822" - slot_key: "13333" - slot_key: "13334" - slot_key: "13335" - slot_key: "13336" - slot_key: "13337" - slot_key: "13338" - slot_key: "13339" - slot_key: "13340" - slot_key: "13341" - slot_key: "13351" - slot_key: "13352" - slot_key: "13353" - slot_key: "13359" - slot_key: "13361" - slot_key: "13362" - slot_key: "13363" - slot_key: "13366" - slot_key: "13367" - slot_key: "13368" - slot_key: "13369" - slot_key: "13370" - slot_key: "13371" - slot_key: "13375" - slot_key: "13376" - slot_key: "5700" - slot_key: "5702" - slot_key: "13400" - slot_key: "13401" - slot_key: "13402" - slot_key: "13403" - slot_key: "13404" - slot_key: "13406" - slot_key: "13407" - slot_key: "13408" - slot_key: "13410" - slot_key: "13417" - slot_key: "13418" - slot_key: "13419" - slot_key: "13420" - slot_key: "13422" - slot_key: "13425" - slot_key: "13427" - slot_key: "13428" - slot_key: "13429" - slot_key: "13430" - slot_key: "13431" - slot_key: "13433" - slot_key: "13434" - slot_key: "13436" - slot_key: "13437" - slot_key: "13326" - slot_key: "13330" - slot_key: "13331" - slot_key: "5717" - slot_key: "13442" - slot_key: "13451" - slot_key: "13452" - slot_key: "13455" - slot_key: "13456" - slot_key: "13457" - slot_key: "13458" - slot_key: "13459" - slot_key: "13460" - slot_key: "13461" - slot_key: "13462" - slot_key: "13463" - slot_key: "13464" - slot_key: "13465" - slot_key: "13466" - slot_key: "13467" - slot_key: "13468" - slot_key: "1104" - slot_key: "1106" - slot_key: "1107" - slot_key: "1108" - slot_key: "1109" - slot_key: "1110" - slot_key: "1111" - slot_key: "1112" - slot_key: "1113" - slot_key: "1114" - slot_key: "1115" - slot_key: "1116" - slot_key: "1117" - slot_key: "1119" - slot_key: "1120" - slot_key: "1121" - slot_key: "1122" - slot_key: "1123" - slot_key: "1124" - slot_key: "1125" - slot_key: "1126" - slot_key: "1127" - slot_key: "1128" - slot_key: "1129" 
- slot_key: "13812" - slot_key: "13813" - slot_key: "6740" - slot_key: "1490" - slot_key: "32915" - slot_key: "32950" - slot_key: "32952" - slot_key: "32953" - slot_key: "32954" - slot_key: "33077" - slot_key: "33085" - slot_key: "33086" - slot_value: "embedding_0.tmp_0" - slot_value: "embedding_1.tmp_0" - slot_value: "embedding_2.tmp_0" - slot_value: "embedding_3.tmp_0" - slot_value: "embedding_4.tmp_0" - slot_value: "embedding_5.tmp_0" - slot_value: "embedding_6.tmp_0" - slot_value: "embedding_7.tmp_0" - slot_value: "embedding_8.tmp_0" - slot_value: "embedding_9.tmp_0" - slot_value: "embedding_10.tmp_0" - slot_value: "embedding_11.tmp_0" - slot_value: "embedding_12.tmp_0" - slot_value: "embedding_13.tmp_0" - slot_value: "embedding_14.tmp_0" - slot_value: "embedding_15.tmp_0" - slot_value: "embedding_16.tmp_0" - slot_value: "embedding_17.tmp_0" - slot_value: "embedding_18.tmp_0" - slot_value: "embedding_19.tmp_0" - slot_value: "embedding_20.tmp_0" - slot_value: "embedding_21.tmp_0" - slot_value: "embedding_22.tmp_0" - slot_value: "embedding_23.tmp_0" - slot_value: "embedding_24.tmp_0" - slot_value: "embedding_25.tmp_0" - slot_value: "embedding_26.tmp_0" - slot_value: "embedding_27.tmp_0" - slot_value: "embedding_28.tmp_0" - slot_value: "embedding_29.tmp_0" - slot_value: "embedding_30.tmp_0" - slot_value: "embedding_31.tmp_0" - slot_value: "embedding_32.tmp_0" - slot_value: "embedding_33.tmp_0" - slot_value: "embedding_34.tmp_0" - slot_value: "embedding_35.tmp_0" - slot_value: "embedding_36.tmp_0" - slot_value: "embedding_37.tmp_0" - slot_value: "embedding_38.tmp_0" - slot_value: "embedding_39.tmp_0" - slot_value: "embedding_40.tmp_0" - slot_value: "embedding_41.tmp_0" - slot_value: "embedding_42.tmp_0" - slot_value: "embedding_43.tmp_0" - slot_value: "embedding_44.tmp_0" - slot_value: "embedding_45.tmp_0" - slot_value: "embedding_46.tmp_0" - slot_value: "embedding_47.tmp_0" - slot_value: "embedding_48.tmp_0" - slot_value: "embedding_49.tmp_0" - slot_value: "embedding_50.tmp_0" - slot_value: "embedding_51.tmp_0" - slot_value: "embedding_52.tmp_0" - slot_value: "embedding_53.tmp_0" - slot_value: "embedding_54.tmp_0" - slot_value: "embedding_55.tmp_0" - slot_value: "embedding_56.tmp_0" - slot_value: "embedding_57.tmp_0" - slot_value: "embedding_58.tmp_0" - slot_value: "embedding_59.tmp_0" - slot_value: "embedding_60.tmp_0" - slot_value: "embedding_61.tmp_0" - slot_value: "embedding_62.tmp_0" - slot_value: "embedding_63.tmp_0" - slot_value: "embedding_64.tmp_0" - slot_value: "embedding_65.tmp_0" - slot_value: "embedding_66.tmp_0" - slot_value: "embedding_67.tmp_0" - slot_value: "embedding_68.tmp_0" - slot_value: "embedding_69.tmp_0" - slot_value: "embedding_70.tmp_0" - slot_value: "embedding_71.tmp_0" - slot_value: "embedding_72.tmp_0" - slot_value: "embedding_73.tmp_0" - slot_value: "embedding_74.tmp_0" - slot_value: "embedding_75.tmp_0" - slot_value: "embedding_76.tmp_0" - slot_value: "embedding_77.tmp_0" - slot_value: "embedding_78.tmp_0" - slot_value: "embedding_79.tmp_0" - slot_value: "embedding_80.tmp_0" - slot_value: "embedding_81.tmp_0" - slot_value: "embedding_82.tmp_0" - slot_value: "embedding_83.tmp_0" - slot_value: "embedding_84.tmp_0" - slot_value: "embedding_85.tmp_0" - slot_value: "embedding_86.tmp_0" - slot_value: "embedding_87.tmp_0" - slot_value: "embedding_88.tmp_0" - slot_value: "embedding_89.tmp_0" - slot_value: "embedding_90.tmp_0" - slot_value: "embedding_91.tmp_0" - slot_value: "embedding_92.tmp_0" - slot_value: "embedding_93.tmp_0" - slot_value: "embedding_94.tmp_0" - 
slot_value: "embedding_95.tmp_0" - slot_value: "embedding_96.tmp_0" - slot_value: "embedding_97.tmp_0" - slot_value: "embedding_98.tmp_0" - slot_value: "embedding_99.tmp_0" - slot_value: "embedding_100.tmp_0" - slot_value: "embedding_101.tmp_0" - slot_value: "embedding_102.tmp_0" - slot_value: "embedding_103.tmp_0" - slot_value: "embedding_104.tmp_0" - slot_value: "embedding_105.tmp_0" - slot_value: "embedding_106.tmp_0" - slot_value: "embedding_107.tmp_0" - slot_value: "embedding_108.tmp_0" - slot_value: "embedding_109.tmp_0" - slot_value: "embedding_110.tmp_0" - slot_value: "embedding_111.tmp_0" - slot_value: "embedding_112.tmp_0" - slot_value: "embedding_113.tmp_0" - slot_value: "embedding_114.tmp_0" - slot_value: "embedding_115.tmp_0" - slot_value: "embedding_116.tmp_0" - slot_value: "embedding_117.tmp_0" - slot_value: "embedding_118.tmp_0" - slot_value: "embedding_119.tmp_0" - slot_value: "embedding_120.tmp_0" - slot_value: "embedding_121.tmp_0" - slot_value: "embedding_122.tmp_0" - slot_value: "embedding_123.tmp_0" - slot_value: "embedding_124.tmp_0" - slot_value: "embedding_125.tmp_0" - slot_value: "embedding_126.tmp_0" - slot_value: "embedding_127.tmp_0" - slot_value: "embedding_128.tmp_0" - slot_value: "embedding_129.tmp_0" - slot_value: "embedding_130.tmp_0" - slot_value: "embedding_131.tmp_0" - slot_value: "embedding_132.tmp_0" - slot_value: "embedding_133.tmp_0" - slot_value: "embedding_134.tmp_0" - slot_value: "embedding_135.tmp_0" - slot_value: "embedding_136.tmp_0" - slot_value: "embedding_137.tmp_0" - slot_value: "embedding_138.tmp_0" - slot_value: "embedding_139.tmp_0" - slot_value: "embedding_140.tmp_0" - slot_value: "embedding_141.tmp_0" - slot_value: "embedding_142.tmp_0" - slot_value: "embedding_143.tmp_0" - slot_value: "embedding_144.tmp_0" - slot_value: "embedding_145.tmp_0" - slot_value: "embedding_146.tmp_0" - slot_value: "embedding_147.tmp_0" - slot_value: "embedding_148.tmp_0" - slot_value: "embedding_149.tmp_0" - slot_value: "embedding_150.tmp_0" - slot_value: "embedding_151.tmp_0" - slot_value: "embedding_152.tmp_0" - slot_value: "embedding_153.tmp_0" - slot_value: "embedding_154.tmp_0" - slot_value: "embedding_155.tmp_0" - slot_value: "embedding_156.tmp_0" - slot_value: "embedding_157.tmp_0" - slot_value: "embedding_158.tmp_0" - slot_value: "embedding_159.tmp_0" - slot_value: "embedding_160.tmp_0" - slot_value: "embedding_161.tmp_0" - slot_value: "embedding_162.tmp_0" - slot_value: "embedding_163.tmp_0" - slot_value: "embedding_164.tmp_0" - slot_value: "embedding_165.tmp_0" - slot_value: "embedding_166.tmp_0" - slot_value: "embedding_167.tmp_0" - slot_value: "embedding_168.tmp_0" - slot_value: "embedding_169.tmp_0" - slot_value: "embedding_170.tmp_0" - slot_value: "embedding_171.tmp_0" - slot_value: "embedding_172.tmp_0" - slot_value: "embedding_173.tmp_0" - slot_value: "embedding_174.tmp_0" - slot_value: "embedding_175.tmp_0" - slot_value: "embedding_176.tmp_0" - slot_value: "embedding_177.tmp_0" - slot_value: "embedding_178.tmp_0" - slot_value: "embedding_179.tmp_0" - slot_value: "embedding_180.tmp_0" - slot_value: "embedding_181.tmp_0" - slot_value: "embedding_182.tmp_0" - slot_value: "embedding_183.tmp_0" - slot_value: "embedding_184.tmp_0" - slot_value: "embedding_185.tmp_0" - slot_value: "embedding_186.tmp_0" - slot_value: "embedding_187.tmp_0" - slot_value: "embedding_188.tmp_0" - slot_value: "embedding_189.tmp_0" - slot_value: "embedding_190.tmp_0" - slot_value: "embedding_191.tmp_0" - slot_value: "embedding_192.tmp_0" - slot_value: 
"embedding_193.tmp_0" - slot_value: "embedding_194.tmp_0" - slot_value: "embedding_195.tmp_0" - slot_value: "embedding_196.tmp_0" - slot_value: "embedding_197.tmp_0" - slot_value: "embedding_198.tmp_0" - slot_value: "embedding_199.tmp_0" - slot_value: "embedding_200.tmp_0" - slot_value: "embedding_201.tmp_0" - slot_value: "embedding_202.tmp_0" - slot_value: "embedding_203.tmp_0" - slot_value: "embedding_204.tmp_0" - slot_value: "embedding_205.tmp_0" - slot_value: "embedding_206.tmp_0" - slot_value: "embedding_207.tmp_0" - slot_value: "embedding_208.tmp_0" - slot_value: "embedding_209.tmp_0" - slot_value: "embedding_210.tmp_0" - slot_value: "embedding_211.tmp_0" - slot_value: "embedding_212.tmp_0" - slot_value: "embedding_213.tmp_0" - slot_value: "embedding_214.tmp_0" - slot_value: "embedding_215.tmp_0" - slot_value: "embedding_216.tmp_0" - slot_value: "embedding_217.tmp_0" - slot_value: "embedding_218.tmp_0" - slot_value: "embedding_219.tmp_0" - slot_value: "embedding_220.tmp_0" - slot_value: "embedding_221.tmp_0" - slot_value: "embedding_222.tmp_0" - slot_value: "embedding_223.tmp_0" - slot_value: "embedding_224.tmp_0" - slot_value: "embedding_225.tmp_0" - slot_value: "embedding_226.tmp_0" - slot_value: "embedding_227.tmp_0" - slot_value: "embedding_228.tmp_0" - slot_value: "embedding_229.tmp_0" - slot_value: "embedding_230.tmp_0" - slot_value: "embedding_231.tmp_0" - slot_value: "embedding_232.tmp_0" - slot_value: "embedding_233.tmp_0" - slot_value: "embedding_234.tmp_0" - slot_value: "embedding_235.tmp_0" - slot_value: "embedding_236.tmp_0" - slot_value: "embedding_237.tmp_0" - slot_value: "embedding_238.tmp_0" - slot_value: "embedding_239.tmp_0" - slot_value: "embedding_240.tmp_0" - slot_value: "embedding_241.tmp_0" - slot_value: "embedding_242.tmp_0" - slot_value: "embedding_243.tmp_0" - slot_value: "embedding_244.tmp_0" - slot_value: "embedding_245.tmp_0" - slot_value: "embedding_246.tmp_0" - slot_value: "embedding_247.tmp_0" - slot_value: "embedding_248.tmp_0" - slot_value: "embedding_249.tmp_0" - slot_value: "embedding_250.tmp_0" - slot_value: "embedding_251.tmp_0" - slot_value: "embedding_252.tmp_0" - slot_value: "embedding_253.tmp_0" - slot_value: "embedding_254.tmp_0" - slot_value: "embedding_255.tmp_0" - slot_value: "embedding_256.tmp_0" - slot_value: "embedding_257.tmp_0" - slot_value: "embedding_258.tmp_0" - slot_value: "embedding_259.tmp_0" - slot_value: "embedding_260.tmp_0" - slot_value: "embedding_261.tmp_0" - slot_value: "embedding_262.tmp_0" - slot_value: "embedding_263.tmp_0" - slot_value: "embedding_264.tmp_0" - slot_value: "embedding_265.tmp_0" - slot_value: "embedding_266.tmp_0" - slot_value: "embedding_267.tmp_0" - slot_value: "embedding_268.tmp_0" - slot_value: "embedding_269.tmp_0" - slot_value: "embedding_270.tmp_0" - slot_value: "embedding_271.tmp_0" - slot_value: "embedding_272.tmp_0" - slot_value: "embedding_273.tmp_0" - slot_value: "embedding_274.tmp_0" - slot_value: "embedding_275.tmp_0" - slot_value: "embedding_276.tmp_0" - slot_value: "embedding_277.tmp_0" - slot_value: "embedding_278.tmp_0" - slot_value: "embedding_279.tmp_0" - slot_value: "embedding_280.tmp_0" - slot_value: "embedding_281.tmp_0" - slot_value: "embedding_282.tmp_0" - slot_value: "embedding_283.tmp_0" - slot_value: "embedding_284.tmp_0" - slot_value: "embedding_285.tmp_0" - slot_value: "embedding_286.tmp_0" - slot_value: "embedding_287.tmp_0" - slot_value: "embedding_288.tmp_0" - slot_value: "embedding_289.tmp_0" - slot_value: "embedding_290.tmp_0" - slot_value: "embedding_291.tmp_0" - 
slot_value: "embedding_292.tmp_0" - slot_value: "embedding_293.tmp_0" - slot_value: "embedding_294.tmp_0" - slot_value: "embedding_295.tmp_0" - slot_value: "embedding_296.tmp_0" - slot_value: "embedding_297.tmp_0" - slot_value: "embedding_298.tmp_0" - slot_value: "embedding_299.tmp_0" - slot_value: "embedding_300.tmp_0" - slot_value: "embedding_301.tmp_0" - slot_value: "embedding_302.tmp_0" - slot_value: "embedding_303.tmp_0" - slot_value: "embedding_304.tmp_0" - slot_value: "embedding_305.tmp_0" - slot_value: "embedding_306.tmp_0" - slot_value: "embedding_307.tmp_0" - slot_value: "embedding_308.tmp_0" - slot_value: "embedding_309.tmp_0" - slot_value: "embedding_310.tmp_0" - slot_value: "embedding_311.tmp_0" - slot_value: "embedding_312.tmp_0" - slot_value: "embedding_313.tmp_0" - slot_value: "embedding_314.tmp_0" - slot_value: "embedding_315.tmp_0" - slot_value: "embedding_316.tmp_0" - slot_value: "embedding_317.tmp_0" - slot_value: "embedding_318.tmp_0" - slot_value: "embedding_319.tmp_0" - slot_value: "embedding_320.tmp_0" - slot_value: "embedding_321.tmp_0" - slot_value: "embedding_322.tmp_0" - slot_value: "embedding_323.tmp_0" - slot_value: "embedding_324.tmp_0" - slot_value: "embedding_325.tmp_0" - slot_value: "embedding_326.tmp_0" - slot_value: "embedding_327.tmp_0" - slot_value: "embedding_328.tmp_0" - slot_value: "embedding_329.tmp_0" - slot_value: "embedding_330.tmp_0" - slot_value: "embedding_331.tmp_0" - slot_value: "embedding_332.tmp_0" - slot_value: "embedding_333.tmp_0" - slot_value: "embedding_334.tmp_0" - slot_value: "embedding_335.tmp_0" - slot_value: "embedding_336.tmp_0" - slot_value: "embedding_337.tmp_0" - slot_value: "embedding_338.tmp_0" - slot_value: "embedding_339.tmp_0" - slot_value: "embedding_340.tmp_0" - slot_value: "embedding_341.tmp_0" - slot_value: "embedding_342.tmp_0" - slot_value: "embedding_343.tmp_0" - slot_value: "embedding_344.tmp_0" - slot_value: "embedding_345.tmp_0" - slot_value: "embedding_346.tmp_0" - slot_value: "embedding_347.tmp_0" - slot_value: "embedding_348.tmp_0" - slot_value: "embedding_349.tmp_0" - slot_value: "embedding_350.tmp_0" - slot_value: "embedding_351.tmp_0" - slot_value: "embedding_352.tmp_0" - slot_value: "embedding_353.tmp_0" - slot_value: "embedding_354.tmp_0" - slot_value: "embedding_355.tmp_0" - slot_value: "embedding_356.tmp_0" - slot_value: "embedding_357.tmp_0" - slot_value: "embedding_358.tmp_0" - slot_value: "embedding_359.tmp_0" - slot_value: "embedding_360.tmp_0" - slot_value: "embedding_361.tmp_0" - slot_value: "embedding_362.tmp_0" - slot_value: "embedding_363.tmp_0" - slot_value: "embedding_364.tmp_0" - slot_value: "embedding_365.tmp_0" - slot_value: "embedding_366.tmp_0" - slot_value: "embedding_367.tmp_0" - slot_value: "embedding_368.tmp_0" - slot_value: "embedding_369.tmp_0" - slot_value: "embedding_370.tmp_0" - slot_value: "embedding_371.tmp_0" - slot_value: "embedding_372.tmp_0" - slot_value: "embedding_373.tmp_0" - slot_value: "embedding_374.tmp_0" - slot_value: "embedding_375.tmp_0" - slot_value: "embedding_376.tmp_0" - slot_value: "embedding_377.tmp_0" - slot_value: "embedding_378.tmp_0" - slot_value: "embedding_379.tmp_0" - slot_value: "embedding_380.tmp_0" - slot_value: "embedding_381.tmp_0" - slot_value: "embedding_382.tmp_0" - slot_value: "embedding_383.tmp_0" - slot_value: "embedding_384.tmp_0" - slot_value: "embedding_385.tmp_0" - slot_value: "embedding_386.tmp_0" - slot_value: "embedding_387.tmp_0" - slot_value: "embedding_388.tmp_0" - slot_value: "embedding_389.tmp_0" - slot_value: 
"embedding_390.tmp_0" - slot_value: "embedding_391.tmp_0" - slot_value: "embedding_392.tmp_0" - slot_value: "embedding_393.tmp_0" - slot_value: "embedding_394.tmp_0" - slot_value: "embedding_395.tmp_0" - slot_value: "embedding_396.tmp_0" - slot_value: "embedding_397.tmp_0" - slot_value: "embedding_398.tmp_0" - slot_value: "embedding_399.tmp_0" - slot_value: "embedding_400.tmp_0" - slot_value: "embedding_401.tmp_0" - slot_value: "embedding_402.tmp_0" - slot_value: "embedding_403.tmp_0" - slot_value: "embedding_404.tmp_0" - slot_value: "embedding_405.tmp_0" - slot_value: "embedding_406.tmp_0" - slot_gradient: "embedding_0.tmp_0@GRAD" - slot_gradient: "embedding_1.tmp_0@GRAD" - slot_gradient: "embedding_2.tmp_0@GRAD" - slot_gradient: "embedding_3.tmp_0@GRAD" - slot_gradient: "embedding_4.tmp_0@GRAD" - slot_gradient: "embedding_5.tmp_0@GRAD" - slot_gradient: "embedding_6.tmp_0@GRAD" - slot_gradient: "embedding_7.tmp_0@GRAD" - slot_gradient: "embedding_8.tmp_0@GRAD" - slot_gradient: "embedding_9.tmp_0@GRAD" - slot_gradient: "embedding_10.tmp_0@GRAD" - slot_gradient: "embedding_11.tmp_0@GRAD" - slot_gradient: "embedding_12.tmp_0@GRAD" - slot_gradient: "embedding_13.tmp_0@GRAD" - slot_gradient: "embedding_14.tmp_0@GRAD" - slot_gradient: "embedding_15.tmp_0@GRAD" - slot_gradient: "embedding_16.tmp_0@GRAD" - slot_gradient: "embedding_17.tmp_0@GRAD" - slot_gradient: "embedding_18.tmp_0@GRAD" - slot_gradient: "embedding_19.tmp_0@GRAD" - slot_gradient: "embedding_20.tmp_0@GRAD" - slot_gradient: "embedding_21.tmp_0@GRAD" - slot_gradient: "embedding_22.tmp_0@GRAD" - slot_gradient: "embedding_23.tmp_0@GRAD" - slot_gradient: "embedding_24.tmp_0@GRAD" - slot_gradient: "embedding_25.tmp_0@GRAD" - slot_gradient: "embedding_26.tmp_0@GRAD" - slot_gradient: "embedding_27.tmp_0@GRAD" - slot_gradient: "embedding_28.tmp_0@GRAD" - slot_gradient: "embedding_29.tmp_0@GRAD" - slot_gradient: "embedding_30.tmp_0@GRAD" - slot_gradient: "embedding_31.tmp_0@GRAD" - slot_gradient: "embedding_32.tmp_0@GRAD" - slot_gradient: "embedding_33.tmp_0@GRAD" - slot_gradient: "embedding_34.tmp_0@GRAD" - slot_gradient: "embedding_35.tmp_0@GRAD" - slot_gradient: "embedding_36.tmp_0@GRAD" - slot_gradient: "embedding_37.tmp_0@GRAD" - slot_gradient: "embedding_38.tmp_0@GRAD" - slot_gradient: "embedding_39.tmp_0@GRAD" - slot_gradient: "embedding_40.tmp_0@GRAD" - slot_gradient: "embedding_41.tmp_0@GRAD" - slot_gradient: "embedding_42.tmp_0@GRAD" - slot_gradient: "embedding_43.tmp_0@GRAD" - slot_gradient: "embedding_44.tmp_0@GRAD" - slot_gradient: "embedding_45.tmp_0@GRAD" - slot_gradient: "embedding_46.tmp_0@GRAD" - slot_gradient: "embedding_47.tmp_0@GRAD" - slot_gradient: "embedding_48.tmp_0@GRAD" - slot_gradient: "embedding_49.tmp_0@GRAD" - slot_gradient: "embedding_50.tmp_0@GRAD" - slot_gradient: "embedding_51.tmp_0@GRAD" - slot_gradient: "embedding_52.tmp_0@GRAD" - slot_gradient: "embedding_53.tmp_0@GRAD" - slot_gradient: "embedding_54.tmp_0@GRAD" - slot_gradient: "embedding_55.tmp_0@GRAD" - slot_gradient: "embedding_56.tmp_0@GRAD" - slot_gradient: "embedding_57.tmp_0@GRAD" - slot_gradient: "embedding_58.tmp_0@GRAD" - slot_gradient: "embedding_59.tmp_0@GRAD" - slot_gradient: "embedding_60.tmp_0@GRAD" - slot_gradient: "embedding_61.tmp_0@GRAD" - slot_gradient: "embedding_62.tmp_0@GRAD" - slot_gradient: "embedding_63.tmp_0@GRAD" - slot_gradient: "embedding_64.tmp_0@GRAD" - slot_gradient: "embedding_65.tmp_0@GRAD" - slot_gradient: "embedding_66.tmp_0@GRAD" - slot_gradient: "embedding_67.tmp_0@GRAD" - slot_gradient: "embedding_68.tmp_0@GRAD" 
- slot_gradient: "embedding_69.tmp_0@GRAD" - slot_gradient: "embedding_70.tmp_0@GRAD" - slot_gradient: "embedding_71.tmp_0@GRAD" - slot_gradient: "embedding_72.tmp_0@GRAD" - slot_gradient: "embedding_73.tmp_0@GRAD" - slot_gradient: "embedding_74.tmp_0@GRAD" - slot_gradient: "embedding_75.tmp_0@GRAD" - slot_gradient: "embedding_76.tmp_0@GRAD" - slot_gradient: "embedding_77.tmp_0@GRAD" - slot_gradient: "embedding_78.tmp_0@GRAD" - slot_gradient: "embedding_79.tmp_0@GRAD" - slot_gradient: "embedding_80.tmp_0@GRAD" - slot_gradient: "embedding_81.tmp_0@GRAD" - slot_gradient: "embedding_82.tmp_0@GRAD" - slot_gradient: "embedding_83.tmp_0@GRAD" - slot_gradient: "embedding_84.tmp_0@GRAD" - slot_gradient: "embedding_85.tmp_0@GRAD" - slot_gradient: "embedding_86.tmp_0@GRAD" - slot_gradient: "embedding_87.tmp_0@GRAD" - slot_gradient: "embedding_88.tmp_0@GRAD" - slot_gradient: "embedding_89.tmp_0@GRAD" - slot_gradient: "embedding_90.tmp_0@GRAD" - slot_gradient: "embedding_91.tmp_0@GRAD" - slot_gradient: "embedding_92.tmp_0@GRAD" - slot_gradient: "embedding_93.tmp_0@GRAD" - slot_gradient: "embedding_94.tmp_0@GRAD" - slot_gradient: "embedding_95.tmp_0@GRAD" - slot_gradient: "embedding_96.tmp_0@GRAD" - slot_gradient: "embedding_97.tmp_0@GRAD" - slot_gradient: "embedding_98.tmp_0@GRAD" - slot_gradient: "embedding_99.tmp_0@GRAD" - slot_gradient: "embedding_100.tmp_0@GRAD" - slot_gradient: "embedding_101.tmp_0@GRAD" - slot_gradient: "embedding_102.tmp_0@GRAD" - slot_gradient: "embedding_103.tmp_0@GRAD" - slot_gradient: "embedding_104.tmp_0@GRAD" - slot_gradient: "embedding_105.tmp_0@GRAD" - slot_gradient: "embedding_106.tmp_0@GRAD" - slot_gradient: "embedding_107.tmp_0@GRAD" - slot_gradient: "embedding_108.tmp_0@GRAD" - slot_gradient: "embedding_109.tmp_0@GRAD" - slot_gradient: "embedding_110.tmp_0@GRAD" - slot_gradient: "embedding_111.tmp_0@GRAD" - slot_gradient: "embedding_112.tmp_0@GRAD" - slot_gradient: "embedding_113.tmp_0@GRAD" - slot_gradient: "embedding_114.tmp_0@GRAD" - slot_gradient: "embedding_115.tmp_0@GRAD" - slot_gradient: "embedding_116.tmp_0@GRAD" - slot_gradient: "embedding_117.tmp_0@GRAD" - slot_gradient: "embedding_118.tmp_0@GRAD" - slot_gradient: "embedding_119.tmp_0@GRAD" - slot_gradient: "embedding_120.tmp_0@GRAD" - slot_gradient: "embedding_121.tmp_0@GRAD" - slot_gradient: "embedding_122.tmp_0@GRAD" - slot_gradient: "embedding_123.tmp_0@GRAD" - slot_gradient: "embedding_124.tmp_0@GRAD" - slot_gradient: "embedding_125.tmp_0@GRAD" - slot_gradient: "embedding_126.tmp_0@GRAD" - slot_gradient: "embedding_127.tmp_0@GRAD" - slot_gradient: "embedding_128.tmp_0@GRAD" - slot_gradient: "embedding_129.tmp_0@GRAD" - slot_gradient: "embedding_130.tmp_0@GRAD" - slot_gradient: "embedding_131.tmp_0@GRAD" - slot_gradient: "embedding_132.tmp_0@GRAD" - slot_gradient: "embedding_133.tmp_0@GRAD" - slot_gradient: "embedding_134.tmp_0@GRAD" - slot_gradient: "embedding_135.tmp_0@GRAD" - slot_gradient: "embedding_136.tmp_0@GRAD" - slot_gradient: "embedding_137.tmp_0@GRAD" - slot_gradient: "embedding_138.tmp_0@GRAD" - slot_gradient: "embedding_139.tmp_0@GRAD" - slot_gradient: "embedding_140.tmp_0@GRAD" - slot_gradient: "embedding_141.tmp_0@GRAD" - slot_gradient: "embedding_142.tmp_0@GRAD" - slot_gradient: "embedding_143.tmp_0@GRAD" - slot_gradient: "embedding_144.tmp_0@GRAD" - slot_gradient: "embedding_145.tmp_0@GRAD" - slot_gradient: "embedding_146.tmp_0@GRAD" - slot_gradient: "embedding_147.tmp_0@GRAD" - slot_gradient: "embedding_148.tmp_0@GRAD" - slot_gradient: "embedding_149.tmp_0@GRAD" - slot_gradient: 
"embedding_150.tmp_0@GRAD" - slot_gradient: "embedding_151.tmp_0@GRAD" - slot_gradient: "embedding_152.tmp_0@GRAD" - slot_gradient: "embedding_153.tmp_0@GRAD" - slot_gradient: "embedding_154.tmp_0@GRAD" - slot_gradient: "embedding_155.tmp_0@GRAD" - slot_gradient: "embedding_156.tmp_0@GRAD" - slot_gradient: "embedding_157.tmp_0@GRAD" - slot_gradient: "embedding_158.tmp_0@GRAD" - slot_gradient: "embedding_159.tmp_0@GRAD" - slot_gradient: "embedding_160.tmp_0@GRAD" - slot_gradient: "embedding_161.tmp_0@GRAD" - slot_gradient: "embedding_162.tmp_0@GRAD" - slot_gradient: "embedding_163.tmp_0@GRAD" - slot_gradient: "embedding_164.tmp_0@GRAD" - slot_gradient: "embedding_165.tmp_0@GRAD" - slot_gradient: "embedding_166.tmp_0@GRAD" - slot_gradient: "embedding_167.tmp_0@GRAD" - slot_gradient: "embedding_168.tmp_0@GRAD" - slot_gradient: "embedding_169.tmp_0@GRAD" - slot_gradient: "embedding_170.tmp_0@GRAD" - slot_gradient: "embedding_171.tmp_0@GRAD" - slot_gradient: "embedding_172.tmp_0@GRAD" - slot_gradient: "embedding_173.tmp_0@GRAD" - slot_gradient: "embedding_174.tmp_0@GRAD" - slot_gradient: "embedding_175.tmp_0@GRAD" - slot_gradient: "embedding_176.tmp_0@GRAD" - slot_gradient: "embedding_177.tmp_0@GRAD" - slot_gradient: "embedding_178.tmp_0@GRAD" - slot_gradient: "embedding_179.tmp_0@GRAD" - slot_gradient: "embedding_180.tmp_0@GRAD" - slot_gradient: "embedding_181.tmp_0@GRAD" - slot_gradient: "embedding_182.tmp_0@GRAD" - slot_gradient: "embedding_183.tmp_0@GRAD" - slot_gradient: "embedding_184.tmp_0@GRAD" - slot_gradient: "embedding_185.tmp_0@GRAD" - slot_gradient: "embedding_186.tmp_0@GRAD" - slot_gradient: "embedding_187.tmp_0@GRAD" - slot_gradient: "embedding_188.tmp_0@GRAD" - slot_gradient: "embedding_189.tmp_0@GRAD" - slot_gradient: "embedding_190.tmp_0@GRAD" - slot_gradient: "embedding_191.tmp_0@GRAD" - slot_gradient: "embedding_192.tmp_0@GRAD" - slot_gradient: "embedding_193.tmp_0@GRAD" - slot_gradient: "embedding_194.tmp_0@GRAD" - slot_gradient: "embedding_195.tmp_0@GRAD" - slot_gradient: "embedding_196.tmp_0@GRAD" - slot_gradient: "embedding_197.tmp_0@GRAD" - slot_gradient: "embedding_198.tmp_0@GRAD" - slot_gradient: "embedding_199.tmp_0@GRAD" - slot_gradient: "embedding_200.tmp_0@GRAD" - slot_gradient: "embedding_201.tmp_0@GRAD" - slot_gradient: "embedding_202.tmp_0@GRAD" - slot_gradient: "embedding_203.tmp_0@GRAD" - slot_gradient: "embedding_204.tmp_0@GRAD" - slot_gradient: "embedding_205.tmp_0@GRAD" - slot_gradient: "embedding_206.tmp_0@GRAD" - slot_gradient: "embedding_207.tmp_0@GRAD" - slot_gradient: "embedding_208.tmp_0@GRAD" - slot_gradient: "embedding_209.tmp_0@GRAD" - slot_gradient: "embedding_210.tmp_0@GRAD" - slot_gradient: "embedding_211.tmp_0@GRAD" - slot_gradient: "embedding_212.tmp_0@GRAD" - slot_gradient: "embedding_213.tmp_0@GRAD" - slot_gradient: "embedding_214.tmp_0@GRAD" - slot_gradient: "embedding_215.tmp_0@GRAD" - slot_gradient: "embedding_216.tmp_0@GRAD" - slot_gradient: "embedding_217.tmp_0@GRAD" - slot_gradient: "embedding_218.tmp_0@GRAD" - slot_gradient: "embedding_219.tmp_0@GRAD" - slot_gradient: "embedding_220.tmp_0@GRAD" - slot_gradient: "embedding_221.tmp_0@GRAD" - slot_gradient: "embedding_222.tmp_0@GRAD" - slot_gradient: "embedding_223.tmp_0@GRAD" - slot_gradient: "embedding_224.tmp_0@GRAD" - slot_gradient: "embedding_225.tmp_0@GRAD" - slot_gradient: "embedding_226.tmp_0@GRAD" - slot_gradient: "embedding_227.tmp_0@GRAD" - slot_gradient: "embedding_228.tmp_0@GRAD" - slot_gradient: "embedding_229.tmp_0@GRAD" - slot_gradient: "embedding_230.tmp_0@GRAD" - 
slot_gradient: "embedding_231.tmp_0@GRAD" - slot_gradient: "embedding_232.tmp_0@GRAD" - slot_gradient: "embedding_233.tmp_0@GRAD" - slot_gradient: "embedding_234.tmp_0@GRAD" - slot_gradient: "embedding_235.tmp_0@GRAD" - slot_gradient: "embedding_236.tmp_0@GRAD" - slot_gradient: "embedding_237.tmp_0@GRAD" - slot_gradient: "embedding_238.tmp_0@GRAD" - slot_gradient: "embedding_239.tmp_0@GRAD" - slot_gradient: "embedding_240.tmp_0@GRAD" - slot_gradient: "embedding_241.tmp_0@GRAD" - slot_gradient: "embedding_242.tmp_0@GRAD" - slot_gradient: "embedding_243.tmp_0@GRAD" - slot_gradient: "embedding_244.tmp_0@GRAD" - slot_gradient: "embedding_245.tmp_0@GRAD" - slot_gradient: "embedding_246.tmp_0@GRAD" - slot_gradient: "embedding_247.tmp_0@GRAD" - slot_gradient: "embedding_248.tmp_0@GRAD" - slot_gradient: "embedding_249.tmp_0@GRAD" - slot_gradient: "embedding_250.tmp_0@GRAD" - slot_gradient: "embedding_251.tmp_0@GRAD" - slot_gradient: "embedding_252.tmp_0@GRAD" - slot_gradient: "embedding_253.tmp_0@GRAD" - slot_gradient: "embedding_254.tmp_0@GRAD" - slot_gradient: "embedding_255.tmp_0@GRAD" - slot_gradient: "embedding_256.tmp_0@GRAD" - slot_gradient: "embedding_257.tmp_0@GRAD" - slot_gradient: "embedding_258.tmp_0@GRAD" - slot_gradient: "embedding_259.tmp_0@GRAD" - slot_gradient: "embedding_260.tmp_0@GRAD" - slot_gradient: "embedding_261.tmp_0@GRAD" - slot_gradient: "embedding_262.tmp_0@GRAD" - slot_gradient: "embedding_263.tmp_0@GRAD" - slot_gradient: "embedding_264.tmp_0@GRAD" - slot_gradient: "embedding_265.tmp_0@GRAD" - slot_gradient: "embedding_266.tmp_0@GRAD" - slot_gradient: "embedding_267.tmp_0@GRAD" - slot_gradient: "embedding_268.tmp_0@GRAD" - slot_gradient: "embedding_269.tmp_0@GRAD" - slot_gradient: "embedding_270.tmp_0@GRAD" - slot_gradient: "embedding_271.tmp_0@GRAD" - slot_gradient: "embedding_272.tmp_0@GRAD" - slot_gradient: "embedding_273.tmp_0@GRAD" - slot_gradient: "embedding_274.tmp_0@GRAD" - slot_gradient: "embedding_275.tmp_0@GRAD" - slot_gradient: "embedding_276.tmp_0@GRAD" - slot_gradient: "embedding_277.tmp_0@GRAD" - slot_gradient: "embedding_278.tmp_0@GRAD" - slot_gradient: "embedding_279.tmp_0@GRAD" - slot_gradient: "embedding_280.tmp_0@GRAD" - slot_gradient: "embedding_281.tmp_0@GRAD" - slot_gradient: "embedding_282.tmp_0@GRAD" - slot_gradient: "embedding_283.tmp_0@GRAD" - slot_gradient: "embedding_284.tmp_0@GRAD" - slot_gradient: "embedding_285.tmp_0@GRAD" - slot_gradient: "embedding_286.tmp_0@GRAD" - slot_gradient: "embedding_287.tmp_0@GRAD" - slot_gradient: "embedding_288.tmp_0@GRAD" - slot_gradient: "embedding_289.tmp_0@GRAD" - slot_gradient: "embedding_290.tmp_0@GRAD" - slot_gradient: "embedding_291.tmp_0@GRAD" - slot_gradient: "embedding_292.tmp_0@GRAD" - slot_gradient: "embedding_293.tmp_0@GRAD" - slot_gradient: "embedding_294.tmp_0@GRAD" - slot_gradient: "embedding_295.tmp_0@GRAD" - slot_gradient: "embedding_296.tmp_0@GRAD" - slot_gradient: "embedding_297.tmp_0@GRAD" - slot_gradient: "embedding_298.tmp_0@GRAD" - slot_gradient: "embedding_299.tmp_0@GRAD" - slot_gradient: "embedding_300.tmp_0@GRAD" - slot_gradient: "embedding_301.tmp_0@GRAD" - slot_gradient: "embedding_302.tmp_0@GRAD" - slot_gradient: "embedding_303.tmp_0@GRAD" - slot_gradient: "embedding_304.tmp_0@GRAD" - slot_gradient: "embedding_305.tmp_0@GRAD" - slot_gradient: "embedding_306.tmp_0@GRAD" - slot_gradient: "embedding_307.tmp_0@GRAD" - slot_gradient: "embedding_308.tmp_0@GRAD" - slot_gradient: "embedding_309.tmp_0@GRAD" - slot_gradient: "embedding_310.tmp_0@GRAD" - slot_gradient: 
"embedding_311.tmp_0@GRAD" - slot_gradient: "embedding_312.tmp_0@GRAD" - slot_gradient: "embedding_313.tmp_0@GRAD" - slot_gradient: "embedding_314.tmp_0@GRAD" - slot_gradient: "embedding_315.tmp_0@GRAD" - slot_gradient: "embedding_316.tmp_0@GRAD" - slot_gradient: "embedding_317.tmp_0@GRAD" - slot_gradient: "embedding_318.tmp_0@GRAD" - slot_gradient: "embedding_319.tmp_0@GRAD" - slot_gradient: "embedding_320.tmp_0@GRAD" - slot_gradient: "embedding_321.tmp_0@GRAD" - slot_gradient: "embedding_322.tmp_0@GRAD" - slot_gradient: "embedding_323.tmp_0@GRAD" - slot_gradient: "embedding_324.tmp_0@GRAD" - slot_gradient: "embedding_325.tmp_0@GRAD" - slot_gradient: "embedding_326.tmp_0@GRAD" - slot_gradient: "embedding_327.tmp_0@GRAD" - slot_gradient: "embedding_328.tmp_0@GRAD" - slot_gradient: "embedding_329.tmp_0@GRAD" - slot_gradient: "embedding_330.tmp_0@GRAD" - slot_gradient: "embedding_331.tmp_0@GRAD" - slot_gradient: "embedding_332.tmp_0@GRAD" - slot_gradient: "embedding_333.tmp_0@GRAD" - slot_gradient: "embedding_334.tmp_0@GRAD" - slot_gradient: "embedding_335.tmp_0@GRAD" - slot_gradient: "embedding_336.tmp_0@GRAD" - slot_gradient: "embedding_337.tmp_0@GRAD" - slot_gradient: "embedding_338.tmp_0@GRAD" - slot_gradient: "embedding_339.tmp_0@GRAD" - slot_gradient: "embedding_340.tmp_0@GRAD" - slot_gradient: "embedding_341.tmp_0@GRAD" - slot_gradient: "embedding_342.tmp_0@GRAD" - slot_gradient: "embedding_343.tmp_0@GRAD" - slot_gradient: "embedding_344.tmp_0@GRAD" - slot_gradient: "embedding_345.tmp_0@GRAD" - slot_gradient: "embedding_346.tmp_0@GRAD" - slot_gradient: "embedding_347.tmp_0@GRAD" - slot_gradient: "embedding_348.tmp_0@GRAD" - slot_gradient: "embedding_349.tmp_0@GRAD" - slot_gradient: "embedding_350.tmp_0@GRAD" - slot_gradient: "embedding_351.tmp_0@GRAD" - slot_gradient: "embedding_352.tmp_0@GRAD" - slot_gradient: "embedding_353.tmp_0@GRAD" - slot_gradient: "embedding_354.tmp_0@GRAD" - slot_gradient: "embedding_355.tmp_0@GRAD" - slot_gradient: "embedding_356.tmp_0@GRAD" - slot_gradient: "embedding_357.tmp_0@GRAD" - slot_gradient: "embedding_358.tmp_0@GRAD" - slot_gradient: "embedding_359.tmp_0@GRAD" - slot_gradient: "embedding_360.tmp_0@GRAD" - slot_gradient: "embedding_361.tmp_0@GRAD" - slot_gradient: "embedding_362.tmp_0@GRAD" - slot_gradient: "embedding_363.tmp_0@GRAD" - slot_gradient: "embedding_364.tmp_0@GRAD" - slot_gradient: "embedding_365.tmp_0@GRAD" - slot_gradient: "embedding_366.tmp_0@GRAD" - slot_gradient: "embedding_367.tmp_0@GRAD" - slot_gradient: "embedding_368.tmp_0@GRAD" - slot_gradient: "embedding_369.tmp_0@GRAD" - slot_gradient: "embedding_370.tmp_0@GRAD" - slot_gradient: "embedding_371.tmp_0@GRAD" - slot_gradient: "embedding_372.tmp_0@GRAD" - slot_gradient: "embedding_373.tmp_0@GRAD" - slot_gradient: "embedding_374.tmp_0@GRAD" - slot_gradient: "embedding_375.tmp_0@GRAD" - slot_gradient: "embedding_376.tmp_0@GRAD" - slot_gradient: "embedding_377.tmp_0@GRAD" - slot_gradient: "embedding_378.tmp_0@GRAD" - slot_gradient: "embedding_379.tmp_0@GRAD" - slot_gradient: "embedding_380.tmp_0@GRAD" - slot_gradient: "embedding_381.tmp_0@GRAD" - slot_gradient: "embedding_382.tmp_0@GRAD" - slot_gradient: "embedding_383.tmp_0@GRAD" - slot_gradient: "embedding_384.tmp_0@GRAD" - slot_gradient: "embedding_385.tmp_0@GRAD" - slot_gradient: "embedding_386.tmp_0@GRAD" - slot_gradient: "embedding_387.tmp_0@GRAD" - slot_gradient: "embedding_388.tmp_0@GRAD" - slot_gradient: "embedding_389.tmp_0@GRAD" - slot_gradient: "embedding_390.tmp_0@GRAD" - slot_gradient: "embedding_391.tmp_0@GRAD" - 
slot_gradient: "embedding_392.tmp_0@GRAD" - slot_gradient: "embedding_393.tmp_0@GRAD" - slot_gradient: "embedding_394.tmp_0@GRAD" - slot_gradient: "embedding_395.tmp_0@GRAD" - slot_gradient: "embedding_396.tmp_0@GRAD" - slot_gradient: "embedding_397.tmp_0@GRAD" - slot_gradient: "embedding_398.tmp_0@GRAD" - slot_gradient: "embedding_399.tmp_0@GRAD" - slot_gradient: "embedding_400.tmp_0@GRAD" - slot_gradient: "embedding_401.tmp_0@GRAD" - slot_gradient: "embedding_402.tmp_0@GRAD" - slot_gradient: "embedding_403.tmp_0@GRAD" - slot_gradient: "embedding_404.tmp_0@GRAD" - slot_gradient: "embedding_405.tmp_0@GRAD" - slot_gradient: "embedding_406.tmp_0@GRAD" - } - skip_op: "lookup_table" - skip_op: "lookup_table_grad" -} -fs_client_param { - uri: "afs://xingtian.afs.baidu.com:9902" - user: "mlarch_pro" - passwd: "proisvip" - hadoop_bin: "$HADOOP_HOME/bin/hadoop" -} diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/scripts/xbox_compressor_mf.py b/feed/feed_deploy/news_jingpai/package/format_nets/scripts/xbox_compressor_mf.py deleted file mode 100755 index b306ddfe..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/scripts/xbox_compressor_mf.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/python -""" -xbox model compressor -""" - -import sys -import math -import time -import re - -#WISE -#SHOW_COMPRESS_RATIO : 8192 -#CLICK_COMPRESS_RATIO : 8192 -#LR_COMPRESS_RATIO : 1048576 -#MIO_COMPRESS_RATIO:8192 - -#PC -#MIO_COMPRESS_RATIO : 1024 -#SHOW_COMPRESS_RATIO : 128 -#CLICK_COMPRESS_RATIO : 1024 -#LR_COMPRESS_RATIO : 8192 - -#STAMP_COL = 2 -SHOW_COL = 3 -CLICK_COL = 4 -LR_W_COL = 5 -LR_G2SUM_COL = 6 -FM_COL = 9 - -#DAY_SPAN = 300 - -#show clk lr = float -SHOW_RATIO = 1 -#SHOW_RATIO = 1024 -CLK_RATIO = 8 -#CLK_RATIO = 1024 -LR_RATIO = 1024 -MF_RATIO = 1024 - -base_update_threshold=0.965 -base_xbox_clk_cof=1 -base_xbox_nonclk_cof=0.2 - -def as_num(x): - y='{:.5f}'.format(x) - return(y) - -def compress_show(xx): - """ - compress show - """ - preci = SHOW_RATIO - - x = float(xx) - return str(int(math.floor(x * preci + 0.5))) - - -def compress_clk(xx): - """ - compress clk - """ - preci = CLK_RATIO - - x = float(xx) - clk = int(math.floor(x * preci + 0.5)) - if clk == 0: - return "" - return str(clk) - - -def compress_lr(xx): - """ - compress lr - """ - preci = LR_RATIO - - x = float(xx) - lr = int(math.floor(x * preci + 0.5)) - if lr == 0: - return "" - return str(lr) - -def compress_mf(xx): - """ - compress mf - """ - preci = MF_RATIO - - x = float(xx) - return int(math.floor(x * preci + 0.5)) - - -def show_clk_score(show, clk): - """ - calculate show_clk score - """ - return (show - clk) * 0.2 + clk - - -for l in sys.stdin: - cols = re.split(r'\s+', l.strip()) - key = cols[0].strip() - - #day = int(cols[STAMP_COL].strip()) - #cur_day = int(time.time()/3600/24) - #if (day + DAY_SPAN) <= cur_day: - # continue - - # cvm features - show = cols[SHOW_COL] - click = cols[CLICK_COL] - pred = "" - - f_show = float(show) - f_clk = float(click) - """ - if f_show != 0: - show_log = math.log(f_show) - else: - show_log = 0 - - if f_clk != 0: - click_log = math.log(f_clk) - show_log - else: - click_log = 0 - """ - show_log = f_show - click_log = f_clk - #print f_show, f_clk - #if show_clk_score(f_show, f_clk) < base_update_threshold: - # continue - - #show = compress_show(show) - show = compress_show(show_log) - #clk = compress_clk(click) - clk = compress_clk(click_log) - - # personal lr weight - lr_w = cols[LR_W_COL].strip() - lr_wei = compress_lr(lr_w) - - # fm weight - fm_wei = [] - fm_sum = 
0 - if len(cols) > 7: - #fm_dim = int(cols[FM_COL].strip()) - #if fm_dim != 0: - for v in xrange(FM_COL, len(cols), 1): - mf_v = compress_mf(cols[v]) - #print mf_v - fm_wei.append(str(mf_v)) - fm_sum += (mf_v * mf_v) - - sys.stdout.write("%s\t%s\t%s\t%s" % (key, show, clk, pred)) - sys.stdout.write("\t") - sys.stdout.write("%s" % lr_wei) - if len(fm_wei) > 0 and fm_sum > 0: - sys.stdout.write("\t%s" % "\t".join(fm_wei)) - else: - sys.stdout.write("\t[\t]") - sys.stdout.write("\n") - diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/scripts/xbox_decompressor_mf.awk b/feed/feed_deploy/news_jingpai/package/format_nets/scripts/xbox_decompressor_mf.awk deleted file mode 100755 index 080e8441..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/scripts/xbox_decompressor_mf.awk +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/awk -f -{ - OFS="\t"; - SHOW_RATIO = 1; - CLK_RATIO = 8; - LR_RATIO = 1024; - MF_RATIO = 1024; -} - -function decompress_show(x) { - x = x * 1.0 / SHOW_RATIO; - return x; -} - -function decompress_clk(x) { - if (x == "") { - x = 0; - } - x = x * 1.0 / CLK_RATIO; - return x; -} - -function decompress_lr(x) { - return x * 1.0 / LR_RATIO; -} - -function decompress_mf(x) { - return x * 1.0 / MF_RATIO; -} - -function show_clk_sore(show, clk, nonclk_coeff, clk_coeff) { - return (show - clk) * nonclk_coeff + clk * clk_coeff; -} - -#key, show, clk, pred, lr_w, mf_w or [\t] -{ - l=split($0, a, "\t"); - - show = decompress_show(a[2]); - click = decompress_clk(a[3]); - lr = decompress_lr(a[5]); - printf("%s\t0\t0\t%s\t%s\t%s\t0\t", a[1], show, click, lr); - if (l == 7) { - printf("\n"); - } else { - printf("%d", l-5) - for(i = 6; i <= l; i++) { - printf("\t%s", decompress_mf(a[i])); - } - printf("\n"); - } -} diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/slot/slot b/feed/feed_deploy/news_jingpai/package/format_nets/slot/slot deleted file mode 100644 index dd6723ff..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/slot/slot +++ /dev/null @@ -1,407 +0,0 @@ -6048 -6002 -6145 -6202 -6201 -6121 -6738 -6119 -6146 -6120 -6147 -6122 -6123 -6118 -6142 -6143 -6008 -6148 -6151 -6127 -6144 -6094 -6083 -6952 -6739 -6150 -6109 -6003 -6099 -6149 -6129 -6203 -6153 -6152 -6128 -6106 -6251 -7082 -7515 -6951 -6949 -7080 -6066 -7507 -6186 -6007 -7514 -6125 -7506 -10001 -6006 -7023 -6085 -10000 -6098 -6250 -6110 -6124 -6090 -6082 -6067 -6101 -6004 -6191 -7075 -6948 -6157 -6126 -6188 -7077 -6070 -6111 -6087 -6103 -6107 -6194 -6156 -6005 -6247 -6814 -6158 -7122 -6058 -6189 -7058 -6059 -6115 -7079 -7081 -6833 -7024 -6108 -13342 -13345 -13412 -13343 -13350 -13346 -13409 -6009 -6011 -6012 -6013 -6014 -6015 -6019 -6023 -6024 -6027 -6029 -6031 -6050 -6060 -6068 -6069 -6089 -6095 -6105 -6112 -6130 -6131 -6132 -6134 -6161 -6162 -6163 -6166 -6182 -6183 -6185 -6190 -6212 -6213 -6231 -6233 -6234 -6236 -6238 -6239 -6240 -6241 -6242 -6243 -6244 -6245 -6354 -7002 -7005 -7008 -7010 -7013 -7015 -7019 -7020 -7045 -7046 -7048 -7049 -7052 -7054 -7056 -7064 -7066 -7076 -7078 -7083 -7084 -7085 -7086 -7087 -7088 -7089 -7090 -7099 -7100 -7101 -7102 -7103 -7104 -7105 -7109 -7124 -7126 -7136 -7142 -7143 -7144 -7145 -7146 -7147 -7148 -7150 -7151 -7152 -7153 -7154 -7155 -7156 -7157 -7047 -7050 -6257 -6259 -6260 -6261 -7170 -7185 -7186 -6751 -6755 -6757 -6759 -6760 -6763 -6764 -6765 -6766 -6767 -6768 -6769 -6770 -7502 -7503 -7504 -7505 -7510 -7511 -7512 -7513 -6806 -6807 -6808 -6809 -6810 -6811 -6812 -6813 -6815 -6816 -6817 -6819 -6823 -6828 -6831 -6840 -6845 -6875 -6879 -6881 -6888 
-6889 -6947 -6950 -6956 -6957 -6959 -10006 -10008 -10009 -10010 -10011 -10016 -10017 -10018 -10019 -10020 -10021 -10022 -10023 -10024 -10029 -10030 -10031 -10032 -10033 -10034 -10035 -10036 -10037 -10038 -10039 -10040 -10041 -10042 -10044 -10045 -10046 -10051 -10052 -10053 -10054 -10055 -10056 -10057 -10060 -10066 -10069 -6820 -6821 -6822 -13333 -13334 -13335 -13336 -13337 -13338 -13339 -13340 -13341 -13351 -13352 -13353 -13359 -13361 -13362 -13363 -13366 -13367 -13368 -13369 -13370 -13371 -13375 -13376 -5700 -5702 -13400 -13401 -13402 -13403 -13404 -13406 -13407 -13408 -13410 -13417 -13418 -13419 -13420 -13422 -13425 -13427 -13428 -13429 -13430 -13431 -13433 -13434 -13436 -13437 -13326 -13330 -13331 -5717 -13442 -13451 -13452 -13455 -13456 -13457 -13458 -13459 -13460 -13461 -13462 -13463 -13464 -13465 -13466 -13467 -13468 -1104 -1106 -1107 -1108 -1109 -1110 -1111 -1112 -1113 -1114 -1115 -1116 -1117 -1119 -1120 -1121 -1122 -1123 -1124 -1125 -1126 -1127 -1128 -1129 -13812 -13813 -6740 -1490 -32915 -32950 -32952 -32953 -32954 -33077 -33085 -33086 diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/slot/slot_common b/feed/feed_deploy/news_jingpai/package/format_nets/slot/slot_common deleted file mode 100644 index 869fb695..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/slot/slot_common +++ /dev/null @@ -1,99 +0,0 @@ -6048 -6002 -6145 -6202 -6201 -6121 -6738 -6119 -6146 -6120 -6147 -6122 -6123 -6118 -6142 -6143 -6008 -6148 -6151 -6127 -6144 -6094 -6083 -6952 -6739 -6150 -6109 -6003 -6099 -6149 -6129 -6203 -6153 -6152 -6128 -6106 -6251 -7082 -7515 -6951 -6949 -7080 -6066 -7507 -6186 -6007 -7514 -6125 -7506 -10001 -6006 -7023 -6085 -10000 -6098 -6250 -6110 -6124 -6090 -6082 -6067 -6101 -6004 -6191 -7075 -6948 -6157 -6126 -6188 -7077 -6070 -6111 -6087 -6103 -6107 -6194 -6156 -6005 -6247 -6814 -6158 -7122 -6058 -6189 -7058 -6059 -6115 -7079 -7081 -6833 -7024 -6108 -13342 -13345 -13412 -13343 -13350 -13346 -13409 diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/slot b/feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/slot deleted file mode 100644 index 3e91b42e..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/slot +++ /dev/null @@ -1,408 +0,0 @@ -6048 -6002 -6145 -6202 -6201 -6121 -6738 -6119 -6146 -6120 -6147 -6122 -6123 -6118 -6142 -6143 -6008 -6148 -6151 -6127 -6144 -6094 -6083 -6952 -6739 -6150 -6109 -6003 -6099 -6149 -6129 -6203 -6153 -6152 -6128 -6106 -6251 -7082 -7515 -6951 -6949 -7080 -6066 -7507 -6186 -6007 -7514 -6125 -7506 -10001 -6006 -7023 -6085 -10000 -6098 -6250 -6110 -6124 -6090 -6082 -6067 -6101 -6004 -6191 -7075 -6948 -6157 -6126 -6188 -7077 -6070 -6111 -6087 -6103 -6107 -6194 -6156 -6005 -6247 -6814 -6158 -7122 -6058 -6189 -7058 -6059 -6115 -7079 -7081 -6833 -7024 -6108 -13342 -13345 -13412 -13343 -13350 -13346 -13409 -6009 -6011 -6012 -6013 -6014 -6015 -6019 -6023 -6024 -6027 -6029 -6031 -6050 -6060 -6068 -6069 -6089 -6095 -6105 -6112 -6130 -6131 -6132 -6134 -6161 -6162 -6163 -6166 -6182 -6183 -6185 -6190 -6212 -6213 -6231 -6233 -6234 -6236 -6238 -6239 -6240 -6241 -6242 -6243 -6244 -6245 -6354 -7002 -7005 -7008 -7010 -7012 -7013 -7015 -7016 -7017 -7018 -7019 -7020 -7045 -7046 -7048 -7049 -7052 -7054 -7056 -7064 -7066 -7076 -7078 -7083 -7084 -7085 -7086 -7087 -7088 -7089 -7090 -7099 -7100 -7101 -7102 -7103 -7104 -7105 -7109 -7124 -7126 -7136 -7142 -7143 -7144 -7145 -7146 -7147 -7148 -7150 -7151 -7152 -7153 -7154 -7155 -7156 -7157 -7047 -7050 -6253 -6254 -6255 -6256 -6257 -6259 -6260 -6261 -7170 
-7185 -7186 -6751 -6755 -6757 -6759 -6760 -6763 -6764 -6765 -6766 -6767 -6768 -6769 -6770 -7502 -7503 -7504 -7505 -7510 -7511 -7512 -7513 -6806 -6807 -6808 -6809 -6810 -6811 -6812 -6813 -6815 -6816 -6817 -6819 -6823 -6828 -6831 -6840 -6845 -6875 -6879 -6881 -6888 -6889 -6947 -6950 -6956 -6957 -6959 -10006 -10008 -10009 -10010 -10011 -10016 -10017 -10018 -10019 -10020 -10021 -10022 -10023 -10024 -10029 -10030 -10031 -10032 -10033 -10034 -10035 -10036 -10037 -10038 -10039 -10040 -10041 -10042 -10044 -10045 -10046 -10051 -10052 -10053 -10054 -10055 -10056 -10057 -10060 -10066 -10069 -6820 -6821 -6822 -13333 -13334 -13335 -13336 -13337 -13338 -13339 -13340 -13341 -13351 -13352 -13353 -13359 -13361 -13362 -13363 -13366 -13367 -13368 -13369 -13370 -13371 -13375 -13376 -5700 -5702 -13400 -13401 -13402 -13403 -13404 -13406 -13407 -13408 -13410 -13417 -13418 -13419 -13420 -13422 -13425 -13427 -13428 -13429 -13430 -13431 -13433 -13434 -13436 -13437 -13326 -13330 -13331 -5717 -13442 -13451 -13452 -13455 -13456 -13457 -13458 -13459 -13460 -13461 -13462 -13463 -13464 -13465 -13466 -13467 -13468 -1104 -1106 -1107 -1108 -1109 -1110 -1111 -1112 -1113 -1114 -1115 -1116 -1117 -1119 -1120 -1121 -1122 -1123 -1124 -1125 -1126 -1127 -1128 -1129 -13812 -13813 -6740 -1490 -1491 diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/slot_common b/feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/slot_common deleted file mode 100644 index 869fb695..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/slot_common +++ /dev/null @@ -1,99 +0,0 @@ -6048 -6002 -6145 -6202 -6201 -6121 -6738 -6119 -6146 -6120 -6147 -6122 -6123 -6118 -6142 -6143 -6008 -6148 -6151 -6127 -6144 -6094 -6083 -6952 -6739 -6150 -6109 -6003 -6099 -6149 -6129 -6203 -6153 -6152 -6128 -6106 -6251 -7082 -7515 -6951 -6949 -7080 -6066 -7507 -6186 -6007 -7514 -6125 -7506 -10001 -6006 -7023 -6085 -10000 -6098 -6250 -6110 -6124 -6090 -6082 -6067 -6101 -6004 -6191 -7075 -6948 -6157 -6126 -6188 -7077 -6070 -6111 -6087 -6103 -6107 -6194 -6156 -6005 -6247 -6814 -6158 -7122 -6058 -6189 -7058 -6059 -6115 -7079 -7081 -6833 -7024 -6108 -13342 -13345 -13412 -13343 -13350 -13346 -13409 diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/to.py b/feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/to.py deleted file mode 100644 index 638c5364..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/tmp/slot/to.py +++ /dev/null @@ -1,5 +0,0 @@ -with open("session_slot", "r") as fin: - res = [] - for i in fin: - res.append("\"" + i.strip() + "\"") - print ", ".join(res) diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/trainer_online.py b/feed/feed_deploy/news_jingpai/package/format_nets/trainer_online.py deleted file mode 100644 index 8f29b42c..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/trainer_online.py +++ /dev/null @@ -1,593 +0,0 @@ -import numpy as np -import os -import sys -import paddle -import paddle.fluid as fluid -import threading -import time -import config -from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet -from paddle.fluid.incubate.fleet.utils.fleet_util import FleetUtil -from paddle.fluid.incubate.fleet.utils.hdfs import HDFSClient -from model_new import Model -from model_new_jc import ModelJoinCommon -import util -from util import * - -fleet_util = FleetUtil() - -def time_prefix_str(): - return "\n" + time.strftime("%Y-%m-%d %H:%M:%S",time.localtime()) + "[0]:" - -auc_record = {} -def check_auc_ok(auc_label, auc_log, 
auc_alarm): - auc_datas = auc_log.split(' AUC=') - if len(auc_datas) < 2: - return True - if auc_label not in auc_record: - auc_record[auc_label] = 0.0 - auc = float(auc_datas[1].split(' ')[0]) - if auc < auc_record[auc_label] and auc < auc_alarm: - fleet_util.rank0_print("label:%s, auc:%s, check bad" % (auc_label, auc)) - return False - auc_record[auc_label] = auc - fleet_util.rank0_print("label:%s, auc:%s, check ok" % (auc_label, auc)) - return True - -def create_model(slot_file, slot_common_file, all_slot_file): - join_common_model = ModelJoinCommon(slot_file, slot_common_file, all_slot_file, 20) - update_model = Model(slot_file, all_slot_file, False, 0, True) - with open("join_common_main_program.pbtxt", "w") as fout: - print >> fout, join_common_model._train_program - with open("join_common_startup_program.pbtxt", "w") as fout: - print >> fout, join_common_model._startup_program - with open("update_main_program.pbtxt", "w") as fout: - print >> fout, update_model._train_program - with open("update_startup_program.pbtxt", "w") as fout: - print >> fout, update_model._startup_program - return [join_common_model, update_model] - -def create_dataset(use_var_list, my_filelist): - dataset = fluid.DatasetFactory().create_dataset(config.dataset_type) - dataset.set_batch_size(config.batch_size) - dataset.set_thread(config.thread_num) - dataset.set_hdfs_config(config.fs_name, config.fs_ugi) - dataset.set_pipe_command(config.pipe_command) - dataset.set_filelist(my_filelist) - dataset.set_use_var(use_var_list) - #dataset.set_fleet_send_sleep_seconds(2) - #dataset.set_fleet_send_batch_size(80000) - return dataset - -def hdfs_ls(path): - configs = { - "fs.default.name": config.fs_name, - "hadoop.job.ugi": config.fs_ugi - } - hdfs_client = HDFSClient("$HADOOP_HOME", configs) - filelist = [] - for i in path: - cur_path = hdfs_client.ls(i) - if config.fs_name.startswith("hdfs:"): - cur_path = ["hdfs:" + j for j in cur_path] - elif config.fs_name.startswith("afs:"): - cur_path = ["afs:" + j for j in cur_path] - filelist += cur_path - return filelist - -def get_avg_cost_mins(value): - t1 = time.time() - local_cost = np.array([value]) - global_cost = np.copy(local_cost) * 0 - t2 = time.time() - fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost) - t3 = time.time() - avg_cost = float(global_cost[0]) / fleet.worker_num() - avg_cost /= 60.0 - t4 = time.time() - tc = (t2 - t1 + t4 - t3) / 60.0 - tb = (t3 - t2) / 60.0 - fleet_util.rank0_print("get_avg_cost_mins calc time %s barrier time %s" % (tc, tb)) - return avg_cost - -def get_max_cost_mins(value): - from mpi4py import MPI - local_cost = np.array([value]) - global_cost = np.copy(local_cost) * 0 - fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MAX) - fleet_util.rank0_print("max train time %s mins" % (float(global_cost[0]) / 60.0)) - -def get_min_cost_mins(value): - from mpi4py import MPI - local_cost = np.array([value]) - global_cost = np.copy(local_cost) * 0 - fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MIN) - fleet_util.rank0_print("min train time %s mins" % (float(global_cost[0]) / 60.0)) - -def get_data_max(value): - from mpi4py import MPI - local_cost = np.array([value]) - global_cost = np.copy(local_cost) * 0 - fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MAX) - fleet_util.rank0_print("data size max %s" % global_cost[0]) - -def get_data_min(value): - from mpi4py import MPI - local_cost = np.array([value]) - global_cost = np.copy(local_cost) * 0 
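get_avg_cost_mins, get_max_cost_mins, get_min_cost_mins and the get_data_* helpers above all follow the same pattern: put a local scalar into a one-element numpy array, Allreduce it over the worker communicator, and report the sum (divided by the worker count for the average), the max, or the min. A stand-alone sketch of that pattern with plain mpi4py, assuming it is launched with mpirun rather than inside fleet; comm here stands in for fleet._role_maker._node_type_comm:

import numpy as np
from mpi4py import MPI

comm = MPI.COMM_WORLD

def aggregate_seconds(local_seconds):
    local = np.array([local_seconds], dtype="float64")
    total = np.zeros_like(local)
    hi = np.zeros_like(local)
    lo = np.zeros_like(local)
    comm.Allreduce(local, total)              # default op is SUM
    comm.Allreduce(local, hi, op=MPI.MAX)
    comm.Allreduce(local, lo, op=MPI.MIN)
    avg_mins = float(total[0]) / comm.Get_size() / 60.0
    return avg_mins, float(hi[0]) / 60.0, float(lo[0]) / 60.0

if __name__ == "__main__":
    # pretend each worker spent a different amount of time on the pass
    avg_mins, max_mins, min_mins = aggregate_seconds(100.0 + 10.0 * comm.Get_rank())
    if comm.Get_rank() == 0:
        print("avg %s max %s min %s mins" % (avg_mins, max_mins, min_mins))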
- fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MIN) - fleet_util.rank0_print("data size min %s" % global_cost[0]) - -def clear_metrics(fleet_util, model, scope): - fleet_util.set_zero(model.stat_pos.name, scope) - fleet_util.set_zero(model.stat_neg.name, scope) - fleet_util.set_zero(model.batch_stat_pos.name, scope) - fleet_util.set_zero(model.batch_stat_neg.name, scope) - fleet_util.set_zero(model.abserr.name, scope, param_type="float32") - fleet_util.set_zero(model.sqrerr.name, scope, param_type="float32") - fleet_util.set_zero(model.prob.name, scope, param_type="float32") - fleet_util.set_zero(model.q.name, scope, param_type="float32") - fleet_util.set_zero(model.pos.name, scope, param_type="float32") - fleet_util.set_zero(model.total.name, scope, param_type="float32") - -def clear_metrics_2(fleet_util, model, scope): - fleet_util.set_zero(model.join_stat_pos.name, scope) - fleet_util.set_zero(model.join_stat_neg.name, scope) - fleet_util.set_zero(model.join_batch_stat_pos.name, scope) - fleet_util.set_zero(model.join_batch_stat_neg.name, scope) - fleet_util.set_zero(model.join_abserr.name, scope, param_type="float32") - fleet_util.set_zero(model.join_sqrerr.name, scope, param_type="float32") - fleet_util.set_zero(model.join_prob.name, scope, param_type="float32") - fleet_util.set_zero(model.join_q.name, scope, param_type="float32") - fleet_util.set_zero(model.join_pos.name, scope, param_type="float32") - fleet_util.set_zero(model.join_total.name, scope, param_type="float32") - - fleet_util.set_zero(model.common_stat_pos.name, scope) - fleet_util.set_zero(model.common_stat_neg.name, scope) - fleet_util.set_zero(model.common_batch_stat_pos.name, scope) - fleet_util.set_zero(model.common_batch_stat_neg.name, scope) - fleet_util.set_zero(model.common_abserr.name, scope, param_type="float32") - fleet_util.set_zero(model.common_sqrerr.name, scope, param_type="float32") - fleet_util.set_zero(model.common_prob.name, scope, param_type="float32") - fleet_util.set_zero(model.common_q.name, scope, param_type="float32") - fleet_util.set_zero(model.common_pos.name, scope, param_type="float32") - fleet_util.set_zero(model.common_total.name, scope, param_type="float32") - -def save_delta(day, pass_index, xbox_base_key, cur_path, exe, scope_join, scope_common, scope_update, join_model, - join_common_model, update_model, join_save_params, common_save_params, update_save_params, monitor_data): - stdout_str = "" - fleet_util.rank0_print("begin save delta model") - begin = time.time() - if pass_index == -1: - fleet_util.save_xbox_base_model(config.output_path, day) - else: - fleet_util.save_delta_model(config.output_path, day, pass_index) - end = time.time() - fleet_util.save_paddle_params(exe, scope_join, join_model._train_program, "paddle_dense.model.0", - config.output_path, day, pass_index, config.fs_name, config.fs_ugi, - var_names=join_save_params) - fleet_util.save_paddle_params(exe, scope_common, join_common_model._train_program, "paddle_dense.model.1", - config.output_path, day, pass_index, config.fs_name, config.fs_ugi, - var_names=common_save_params) - fleet_util.save_paddle_params(exe, scope_update, update_model._train_program, "paddle_dense.model.2", - config.output_path, day, pass_index, config.fs_name, config.fs_ugi, - var_names=update_save_params) - log_str = "end save delta cost %s min" % ((end - begin) / 60.0) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - fleet_util.rank0_print("begin save cache") - begin = time.time() - if 
pass_index == -1: - key_num = fleet_util.save_cache_base_model(config.output_path, day) - else: - key_num = fleet_util.save_cache_model(config.output_path, day, pass_index) - fleet_util.write_cache_donefile(config.output_path, day, pass_index, key_num, config.fs_name, config.fs_ugi) - end = time.time() - log_str = "end save cache cost %s min, key_num=%s" % ((end - begin) / 60.0, key_num) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - write_xbox_donefile(day, pass_index, xbox_base_key, ",".join(cur_path), monitor_data=monitor_data) - return stdout_str - -if __name__ == "__main__": - - place = fluid.CPUPlace() - exe = fluid.Executor(place) - fleet.init(exe) - - slot_file = "slot/slot" - slot_common_file = "slot/slot_common" - all_slot_file = "all_slot.dict" - - join_common_model, update_model = create_model(slot_file, slot_common_file, all_slot_file) - - scope2 = fluid.Scope() - scope3 = fluid.Scope() - - adjust_ins_weight = { "need_adjust" : True, "nid_slot" : "6002", "nid_adjw_threshold" : 1000, "nid_adjw_ratio": 20, - "ins_weight_slot": update_model.ins_weight.name } - - thread_stat_var_names = [] - thread_stat_var_names.append(join_common_model.join_stat_pos.name) - thread_stat_var_names.append(join_common_model.join_stat_neg.name) - thread_stat_var_names.append(join_common_model.join_sqrerr.name) - thread_stat_var_names.append(join_common_model.join_abserr.name) - thread_stat_var_names.append(join_common_model.join_prob.name) - thread_stat_var_names.append(join_common_model.join_q.name) - thread_stat_var_names.append(join_common_model.join_pos.name) - thread_stat_var_names.append(join_common_model.join_total.name) - - thread_stat_var_names.append(join_common_model.common_stat_pos.name) - thread_stat_var_names.append(join_common_model.common_stat_neg.name) - thread_stat_var_names.append(join_common_model.common_sqrerr.name) - thread_stat_var_names.append(join_common_model.common_abserr.name) - thread_stat_var_names.append(join_common_model.common_prob.name) - thread_stat_var_names.append(join_common_model.common_q.name) - thread_stat_var_names.append(join_common_model.common_pos.name) - thread_stat_var_names.append(join_common_model.common_total.name) - - thread_stat_var_names.append(update_model.stat_pos.name) - thread_stat_var_names.append(update_model.stat_neg.name) - thread_stat_var_names.append(update_model.sqrerr.name) - thread_stat_var_names.append(update_model.abserr.name) - thread_stat_var_names.append(update_model.prob.name) - thread_stat_var_names.append(update_model.q.name) - thread_stat_var_names.append(update_model.pos.name) - thread_stat_var_names.append(update_model.total.name) - - thread_stat_var_names = list(set(thread_stat_var_names)) - - - adam = fluid.optimizer.Adam(learning_rate=0.000005) - adam = fleet.distributed_optimizer(adam, strategy={"use_cvm" : True, "adjust_ins_weight" : adjust_ins_weight, "scale_datanorm" : 1e-4, "dump_slot": True, "stat_var_names": thread_stat_var_names, "fleet_desc_file": "reqi_fleet_desc"}) - adam.minimize([join_common_model.joint_cost, update_model.avg_cost], [scope2, scope3]) - - join_common_model._train_program._fleet_opt["program_configs"][str(id(join_common_model.joint_cost.block.program))]["push_sparse"] = [] - - join_save_params = ["join.batch_size", "join.batch_sum", "join.batch_square_sum", - "join_0.w_0", "join_0.b_0", "join_1.w_0", "join_1.b_0", "join_2.w_0", "join_2.b_0", - "join_3.w_0", "join_3.b_0", "join_4.w_0", "join_4.b_0", "join_5.w_0", "join_5.b_0", - "join_6.w_0", "join_6.b_0", 
"join_7.w_0", "join_7.b_0"] - common_save_params = ["common.batch_size", "common.batch_sum", "common.batch_square_sum", - "common_0.w_0", "common_0.b_0", "common_1.w_0", "common_1.b_0", "common_2.w_0", "common_2.b_0", - "common_3.w_0", "common_3.b_0", "common_4.w_0", "common_4.b_0", "common_5.w_0", "common_5.b_0", - "common_6.w_0", "common_6.b_0", "common_7.w_0", "common_7.b_0"] - update_save_params = ["fc_0.w_0", "fc_0.b_0", "fc_1.w_0", "fc_1.b_0", - "fc_2.w_0", "fc_2.b_0", "fc_3.w_0", "fc_3.b_0", - "fc_4.w_0", "fc_4.b_0", "fc_5.w_0", "fc_5.b_0"] - - if fleet.is_server(): - fleet.run_server() - elif fleet.is_worker(): - with fluid.scope_guard(scope3): - exe.run(update_model._startup_program) - with fluid.scope_guard(scope2): - exe.run(join_common_model._startup_program) - - configs = { - "fs.default.name": config.fs_name, - "hadoop.job.ugi": config.fs_ugi - } - hdfs_client = HDFSClient("$HADOOP_HOME", configs) - - save_first_base = config.save_first_base - path = config.train_data_path - online_pass_interval = fleet_util.get_online_pass_interval(config.days, config.hours, config.split_interval, config.split_per_pass, False) - pass_per_day = len(online_pass_interval) - last_day, last_pass, last_path, xbox_base_key = fleet_util.get_last_save_model(config.output_path, config.fs_name, config.fs_ugi) - reqi = True if last_day != -1 else False - - if config.need_reqi_changeslot and config.reqi_dnn_plugin_day >= last_day and config.reqi_dnn_plugin_pass >= last_pass: - util.reqi_changeslot(config.hdfs_dnn_plugin_path, join_save_params, common_save_params, update_save_params, scope2, scope3) - fleet.init_worker() - - dataset = None - next_dataset = None - cur_path = None - next_path = None - start_train = False - days = os.popen("echo -n " + config.days).read().split(" ") - hours = os.popen("echo -n " + config.hours).read().split(" ") - stdout_str = "" - begin_days = {} - for day_index in range(len(days)): - day = days[day_index] - if last_day != -1 and int(day) < last_day: - continue - for pass_index in range(1, pass_per_day + 1): - dataset = next_dataset - next_dataset = None - cur_path = next_path - next_path = None - if (last_day != -1 and int(day) == last_day) and (last_pass != -1 and int(pass_index) < last_pass): - continue - if reqi: - begin = time.time() - log_str = "going to load model %s" % last_path - fleet_util.rank0_print(log_str) - if config.need_reqi_changeslot and config.reqi_dnn_plugin_day >= last_day and config.reqi_dnn_plugin_pass >= last_pass: - fleet.load_one_table(0, last_path) - else: - fleet_util.load_fleet_model(last_path) - - end = time.time() - log_str = "load model cost %s min" % ((end - begin) / 60.0) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - reqi = False - if (last_day != -1 and int(day) == last_day) and (last_pass != -1 and int(pass_index) == last_pass): - continue - - #log_str = "===========going to train day/pass %s/%s===========" % (day, pass_index) - - if begin_days.get(day) is None: - log_str = "======== BEGIN DAY:%s ========" % day - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - begin_days[day] = True - - log_str = " ==== begin delta:%s ========" % pass_index - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - - if save_first_base: - log_str = "save_first_base=True" - fleet_util.rank0_print(log_str) - save_first_base = False - last_base_day, last_base_path, tmp_xbox_base_key = \ - fleet_util.get_last_save_xbox_base(config.output_path, config.fs_name, config.fs_ugi) 
- if int(day) > last_base_day: - log_str = "going to save xbox base model" - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - xbox_base_key = int(time.time()) - cur = [] - for interval in online_pass_interval[pass_index - 1]: - for p in path: - cur.append(p + "/" + day + "/" + interval) - stdout_str += save_delta(day, -1, xbox_base_key, cur, exe, scope2, scope2, scope3, - join_common_model, join_common_model, update_model, - join_save_params, common_save_params, update_save_params, "") - elif int(day) == last_base_day: - xbox_base_key = tmp_xbox_base_key - log_str = "xbox base model exists" - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - else: - log_str = "xbox base model exists" - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - - start_train = True - train_begin = time.time() - - if dataset is not None: - begin = time.time() - dataset.wait_preload_done() - end = time.time() - log_str = "wait data preload done cost %s min" % ((end - begin) / 60.0) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - - if dataset is None: - cur_pass = online_pass_interval[pass_index - 1] - cur_path = [] - for interval in cur_pass: - for p in path: - cur_path.append(p + "/" + day + "/" + interval) - log_str = "data path: " + ",".join(cur_path) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - for i in cur_path: - while not hdfs_client.is_exist(i + "/to.hadoop.done"): - fleet_util.rank0_print("wait for data ready: %s" % i) - time.sleep(config.check_exist_seconds) - my_filelist = fleet.split_files(hdfs_ls(cur_path)) - - dataset = create_dataset(join_common_model._all_slots, my_filelist) - fleet_util.rank0_print("going to load into memory") - begin = time.time() - dataset.load_into_memory() - end = time.time() - log_str = "load into memory done, cost %s min" % ((end - begin) / 60.0) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - - fleet_util.rank0_print("going to global shuffle") - begin = time.time() - dataset.global_shuffle(fleet, config.shuffle_thread) - end = time.time() - log_str = "global shuffle done, cost %s min, data size %s" % ((end - begin) / 60.0, dataset.get_shuffle_data_size(fleet)) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - get_data_max(dataset.get_shuffle_data_size()) - get_data_min(dataset.get_shuffle_data_size()) - - if config.prefetch and (pass_index < pass_per_day or pass_index == pass_per_day and day_index < len(days) - 1): - if pass_index < pass_per_day: - next_pass = online_pass_interval[pass_index] - next_day = day - else: - next_pass = online_pass_interval[0] - next_day = days[day_index + 1] - next_path = [] - for interval in next_pass: - for p in path: - next_path.append(p + "/" + next_day + "/" + interval) - next_data_ready = True - for i in next_path: - if not hdfs_client.is_exist(i + "/to.hadoop.done"): - next_data_ready = False - fleet_util.rank0_print("next data not ready: %s" % i) - if not next_data_ready: - next_dataset = None - else: - my_filelist = fleet.split_files(hdfs_ls(next_path)) - next_dataset = create_dataset(join_common_model._all_slots, my_filelist) - log_str = "next pass data preload %s " % ",".join(next_path) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - next_dataset.preload_into_memory(config.preload_thread) - - - join_cost = 0 - common_cost = 0 - update_cost = 0 - monitor_data = "" - - with 
fluid.scope_guard(scope2): - fleet_util.rank0_print("Begin join + common pass") - begin = time.time() - exe.train_from_dataset(join_common_model._train_program, - dataset, - scope2, - thread=config.join_common_thread, - debug=False) - end = time.time() - avg_cost = get_avg_cost_mins(end - begin) - - fleet_util.rank0_print("avg train time %s mins" % avg_cost) - - get_max_cost_mins(end - begin) - get_min_cost_mins(end - begin) - - common_cost = avg_cost - - monitor_data = "" - log_str = print_global_metrics(scope2, join_common_model.join_stat_pos.name, join_common_model.join_stat_neg.name, - join_common_model.join_sqrerr.name, join_common_model.join_abserr.name, - join_common_model.join_prob.name, - join_common_model.join_q.name, join_common_model.join_pos.name, - join_common_model.join_total.name, "joining pass:")#"join pass:") - check_auc_ok("joining pass:", log_str, 0.79) - monitor_data += log_str - stdout_str += time_prefix_str() + "joining pass:" - stdout_str += time_prefix_str() + log_str - - log_str = print_global_metrics(scope2, join_common_model.common_stat_pos.name, join_common_model.common_stat_neg.name, - join_common_model.common_sqrerr.name, join_common_model.common_abserr.name, - join_common_model.common_prob.name, - join_common_model.common_q.name, join_common_model.common_pos.name, - join_common_model.common_total.name, "common pass:") - check_auc_ok("common pass:", log_str, 0.70) - monitor_data += " " + log_str - stdout_str += time_prefix_str() + "common pass:" - stdout_str += time_prefix_str() + log_str - fleet_util.rank0_print("End join+common pass") - clear_metrics_2(fleet_util, join_common_model, scope2) - - if config.save_xbox_before_update and pass_index % config.save_delta_frequency == 0: - fleet_util.rank0_print("going to save delta model") - last_xbox_day, last_xbox_pass, last_xbox_path, _ = fleet_util.get_last_save_xbox(config.output_path, config.fs_name, config.fs_ugi) - if int(day) < last_xbox_day or int(day) == last_xbox_day and int(pass_index) <= last_xbox_pass: - log_str = "delta model exists" - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - else: - stdout_str += save_delta(day, pass_index, xbox_base_key, cur_path, exe, scope2, scope2, scope3, - join_common_model, join_common_model, update_model, - join_save_params, common_save_params, update_save_params, monitor_data) - - with fluid.scope_guard(scope3): - fleet_util.rank0_print("Begin update pass") - begin = time.time() - exe.train_from_dataset(update_model._train_program, - dataset, - scope3, - thread=config.update_thread, - debug=False) - end = time.time() - avg_cost = get_avg_cost_mins(end - begin) - - get_max_cost_mins(end - begin) - get_min_cost_mins(end - begin) - - update_cost = avg_cost - - log_str = print_global_metrics(scope3, update_model.stat_pos.name, update_model.stat_neg.name, - update_model.sqrerr.name, update_model.abserr.name, update_model.prob.name, - update_model.q.name, update_model.pos.name, update_model.total.name, - "updating pass:")#"update pass:") - check_auc_ok("updating pass:", log_str, 0.79) - stdout_str += time_prefix_str() + "updating pass:" - stdout_str += time_prefix_str() + log_str - fleet_util.rank0_print("End update pass") - clear_metrics(fleet_util, update_model, scope3) - - begin = time.time() - dataset.release_memory() - end = time.time() - fleet_util.rank0_print("release_memory cost %s min" % ((end - begin) / 60.0)) - - if (pass_index % config.checkpoint_per_pass) == 0 and pass_index != pass_per_day: - begin = time.time() - 
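The two training blocks above differ only in which program, scope, and thread count they hand to train_from_dataset; the shuffled in-memory dataset is shared between them. A stripped-down sketch of that join/common-then-update pattern (paddle.fluid 1.x style, matching the calls in this file; the executor, dataset, models, scopes and config values are assumed to exist as above):

import paddle.fluid as fluid

def run_passes(exe, dataset, stages):
    # stages: list of (train_program, scope, thread_num); each stage trains its
    # own program in its own scope, but all of them consume the same dataset,
    # just like the join+common pass followed by the update pass above.
    for program, scope, thread_num in stages:
        with fluid.scope_guard(scope):
            exe.train_from_dataset(program, dataset, scope,
                                   thread=thread_num, debug=False)

# usage mirroring the trainer above:
# run_passes(exe, dataset,
#            [(join_common_model._train_program, scope2, config.join_common_thread),
#             (update_model._train_program, scope3, config.update_thread)])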
fleet_util.save_model(config.output_path, day, pass_index) - fleet_util.write_model_donefile(config.output_path, day, pass_index, xbox_base_key, config.fs_name, config.fs_ugi) - end = time.time() - log_str = "save model cost %s min" % ((end - begin) / 60.0) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - if not config.save_xbox_before_update and pass_index % config.save_delta_frequency == 0: - fleet_util.rank0_print("going to save delta model") - last_xbox_day, last_xbox_pass, last_xbox_path, _ = fleet_util.get_last_save_xbox(config.output_path, config.fs_name, config.fs_ugi) - if int(day) < last_xbox_day or int(day) == last_xbox_day and int(pass_index) <= last_xbox_pass: - log_str = "delta model exists" - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - else: - stdout_str += save_delta(day, pass_index, xbox_base_key, cur_path, exe, scope2, scope2, scope3, - join_common_model, join_common_model, update_model, - join_save_params, common_save_params, update_save_params, monitor_data) - - train_end = time.time() - train_cost = (train_end - train_begin) / 60.0 - other_cost = train_cost - join_cost - common_cost - update_cost - log_str = "finished train day %s pass %s time cost:%s min job time cost" \ - ":[join:%s min][join_common:%s min][update:%s min][other:%s min]" \ - % (day, pass_index, train_cost, join_cost, common_cost, update_cost, other_cost) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - - if pass_index % config.write_stdout_frequency == 0: - write_stdout(stdout_str) - stdout_str = "" - - xbox_base_key = int(time.time()) - if not start_train: - write_stdout(stdout_str) - stdout_str = "" - continue - - - fleet_util.rank0_print("going to save batch model/base xbox model") - last_base_day, last_base_path, _ = fleet_util.get_last_save_xbox_base(config.output_path, config.fs_name, config.fs_ugi) - nextday = int(days[day_index + 1]) - if nextday <= last_base_day: - log_str = "batch model/base xbox model exists" - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - else: - stdout_str += save_delta(nextday, -1, xbox_base_key, cur_path, exe, scope2, scope2, scope3, - join_common_model, join_common_model, update_model, - join_save_params, common_save_params, update_save_params, monitor_data) - - fleet_util.rank0_print("shrink table") - begin = time.time() - fleet.shrink_sparse_table() - fleet.shrink_dense_table(0.98, scope=scope2, table_id=1) - fleet.shrink_dense_table(0.98, scope=scope2, table_id=2) - fleet.shrink_dense_table(0.98, scope=scope3, table_id=3) - end = time.time() - log_str = "shrink table done, cost %s min" % ((end - begin) / 60.0) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - - begin = time.time() - fleet_util.save_batch_model(config.output_path, nextday) - fleet_util.write_model_donefile(config.output_path, nextday, -1, xbox_base_key, config.fs_name, config.fs_ugi) - end = time.time() - log_str = "save batch model cost %s min" % ((end - begin) / 60.0) - fleet_util.rank0_print(log_str) - stdout_str += time_prefix_str() + log_str - write_stdout(stdout_str) - stdout_str = "" diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/trainer_online_local.py b/feed/feed_deploy/news_jingpai/package/format_nets/trainer_online_local.py deleted file mode 100644 index c7e1811e..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/trainer_online_local.py +++ /dev/null @@ -1,500 +0,0 @@ -import numpy as np -import os 
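Taken together, the save logic in trainer_online.py above follows a fixed cadence per pass: a full checkpoint plus model donefile every checkpoint_per_pass passes (except on the last pass of the day), an xbox delta model plus patch donefile every save_delta_frequency passes (placed before or after the update pass depending on save_xbox_before_update), and a base xbox model plus batch model at the day rollover. A compact restatement of the per-pass part, with illustrative example values; it leaves out the ordering flag and the "already exists" checks:

def artifacts_to_save(pass_index, pass_per_day, checkpoint_per_pass, save_delta_frequency):
    # mirrors the per-pass save conditions in the training loop above
    out = []
    if pass_index % checkpoint_per_pass == 0 and pass_index != pass_per_day:
        out.append("checkpoint + model donefile")
    if pass_index % save_delta_frequency == 0:
        out.append("xbox delta model + patch donefile")
    return out

# example values (not from config.py): checkpoint every 6 passes, delta every 3
print(artifacts_to_save(3, 24, 6, 3))   # ['xbox delta model + patch donefile']
print(artifacts_to_save(6, 24, 6, 3))   # both artifacts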
-import sys -import paddle -import paddle.fluid as fluid -import threading -import time -import config -from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet -from paddle.fluid.incubate.fleet.utils.fleet_util import FleetUtil -from paddle.fluid.incubate.fleet.utils.hdfs import HDFSClient -from model_new import Model -from model_new_jc import ModelJoinCommon - -fleet_util = FleetUtil() - -def create_model(slot_file, slot_common_file, all_slot_file): - join_common_model = ModelJoinCommon(slot_file, slot_common_file, all_slot_file, 20) - update_model = Model(slot_file, all_slot_file, False, 0, True) - with open("join_common_main_program.pbtxt", "w") as fout: - print >> fout, join_common_model._train_program - with open("join_common_startup_program.pbtxt", "w") as fout: - print >> fout, join_common_model._startup_program - with open("update_main_program.pbtxt", "w") as fout: - print >> fout, update_model._train_program - with open("update_startup_program.pbtxt", "w") as fout: - print >> fout, update_model._startup_program - return [join_common_model, update_model] - -def create_dataset(use_var_list, my_filelist): - dataset = fluid.DatasetFactory().create_dataset(config.dataset_type) - dataset.set_batch_size(config.batch_size) - dataset.set_thread(config.thread_num) - dataset.set_hdfs_config(config.fs_name, config.fs_ugi) - dataset.set_pipe_command(config.pipe_command) - dataset.set_filelist(my_filelist) - dataset.set_use_var(use_var_list) - return dataset - -def hdfs_ls(path): - configs = { - "fs.default.name": config.fs_name, - "hadoop.job.ugi": config.fs_ugi - } - hdfs_client = HDFSClient("$HADOOP_HOME", configs) - filelist = [] - for i in path: - cur_path = hdfs_client.ls(i) - if config.fs_name.startswith("hdfs:"): - cur_path = ["hdfs:" + j for j in cur_path] - elif config.fs_name.startswith("afs:"): - cur_path = ["afs:" + j for j in cur_path] - filelist += cur_path - return filelist - -def get_avg_cost_mins(value): - t1 = time.time() - local_cost = np.array([value]) - global_cost = np.copy(local_cost) * 0 - t2 = time.time() - fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost) - t3 = time.time() - avg_cost = float(global_cost[0]) / fleet.worker_num() - avg_cost /= 60.0 - t4 = time.time() - tc = (t2 - t1 + t4 - t3) / 60.0 - tb = (t3 - t2) / 60.0 - fleet_util.rank0_print("get_avg_cost_mins calc time %s barrier time %s" % (tc, tb)) - return avg_cost - -def get_max_cost_mins(value): - from mpi4py import MPI - local_cost = np.array([value]) - global_cost = np.copy(local_cost) * 0 - fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MAX) - fleet_util.rank0_print("max train time %s mins" % (float(global_cost[0]) / 60.0)) - -def get_min_cost_mins(value): - from mpi4py import MPI - local_cost = np.array([value]) - global_cost = np.copy(local_cost) * 0 - fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MIN) - fleet_util.rank0_print("min train time %s mins" % (float(global_cost[0]) / 60.0)) - -def get_data_max(value): - from mpi4py import MPI - local_cost = np.array([value]) - global_cost = np.copy(local_cost) * 0 - fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MAX) - fleet_util.rank0_print("data size max %s" % global_cost[0]) - -def get_data_min(value): - from mpi4py import MPI - local_cost = np.array([value]) - global_cost = np.copy(local_cost) * 0 - fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MIN) - fleet_util.rank0_print("data size min %s" % 
global_cost[0]) - -def clear_metrics(fleet_util, model, scope): - fleet_util.set_zero(model.stat_pos.name, scope) - fleet_util.set_zero(model.stat_neg.name, scope) - fleet_util.set_zero(model.batch_stat_pos.name, scope) - fleet_util.set_zero(model.batch_stat_neg.name, scope) - fleet_util.set_zero(model.abserr.name, scope, param_type="float32") - fleet_util.set_zero(model.sqrerr.name, scope, param_type="float32") - fleet_util.set_zero(model.prob.name, scope, param_type="float32") - fleet_util.set_zero(model.q.name, scope, param_type="float32") - fleet_util.set_zero(model.pos.name, scope, param_type="float32") - fleet_util.set_zero(model.total.name, scope, param_type="float32") - -def clear_metrics_2(fleet_util, model, scope): - fleet_util.set_zero(model.join_stat_pos.name, scope) - fleet_util.set_zero(model.join_stat_neg.name, scope) - fleet_util.set_zero(model.join_batch_stat_pos.name, scope) - fleet_util.set_zero(model.join_batch_stat_neg.name, scope) - fleet_util.set_zero(model.join_abserr.name, scope, param_type="float32") - fleet_util.set_zero(model.join_sqrerr.name, scope, param_type="float32") - fleet_util.set_zero(model.join_prob.name, scope, param_type="float32") - fleet_util.set_zero(model.join_q.name, scope, param_type="float32") - fleet_util.set_zero(model.join_pos.name, scope, param_type="float32") - fleet_util.set_zero(model.join_total.name, scope, param_type="float32") - - fleet_util.set_zero(model.common_stat_pos.name, scope) - fleet_util.set_zero(model.common_stat_neg.name, scope) - fleet_util.set_zero(model.common_batch_stat_pos.name, scope) - fleet_util.set_zero(model.common_batch_stat_neg.name, scope) - fleet_util.set_zero(model.common_abserr.name, scope, param_type="float32") - fleet_util.set_zero(model.common_sqrerr.name, scope, param_type="float32") - fleet_util.set_zero(model.common_prob.name, scope, param_type="float32") - fleet_util.set_zero(model.common_q.name, scope, param_type="float32") - fleet_util.set_zero(model.common_pos.name, scope, param_type="float32") - fleet_util.set_zero(model.common_total.name, scope, param_type="float32") - -def save_delta(day, pass_index, xbox_base_key, cur_path, exe, scope_join, scope_common, scope_update, join_model, - join_common_model, update_model, join_save_params, common_save_params, update_save_params): - fleet_util.rank0_print("begin save delta model") - begin = time.time() - if pass_index == -1: - fleet_util.save_xbox_base_model(config.output_path, day) - else: - fleet_util.save_delta_model(config.output_path, day, pass_index) - end = time.time() - fleet_util.save_paddle_params(exe, scope_join, join_model._train_program, "paddle_dense.model.0", - config.output_path, day, pass_index, config.fs_name, config.fs_ugi, - var_names=join_save_params) - fleet_util.save_paddle_params(exe, scope_common, join_common_model._train_program, "paddle_dense.model.1", - config.output_path, day, pass_index, config.fs_name, config.fs_ugi, - var_names=common_save_params) - fleet_util.save_paddle_params(exe, scope_update, update_model._train_program, "paddle_dense.model.2", - config.output_path, day, pass_index, config.fs_name, config.fs_ugi, - var_names=update_save_params) - fleet_util.rank0_print("end save delta cost %s min" % ((end - begin) / 60.0)) - fleet_util.rank0_print("begin save cache") - begin = time.time() - if pass_index == -1: - key_num = fleet_util.save_cache_base_model(config.output_path, day) - else: - key_num = fleet_util.save_cache_model(config.output_path, day, pass_index) - 
fleet_util.write_cache_donefile(config.output_path, day, pass_index, key_num, config.fs_name, config.fs_ugi) - end = time.time() - fleet_util.rank0_print("end save cache cost %s min, key_num=%s" % ((end - begin) / 60.0, key_num)) - fleet_util.write_xbox_donefile(config.output_path, day, pass_index, xbox_base_key, ",".join(cur_path), - config.fs_name, config.fs_ugi) - -if __name__ == "__main__": - - place = fluid.CPUPlace() - exe = fluid.Executor(place) - fleet.init(exe) - - slot_file = "slot/slot" - slot_common_file = "slot/slot_common" - all_slot_file = "all_slot.dict" - - join_common_model, update_model = create_model(slot_file, slot_common_file, all_slot_file) - - scope2 = fluid.Scope() - scope3 = fluid.Scope() - - adjust_ins_weight = { "need_adjust" : True, "nid_slot" : "6002", "nid_adjw_threshold" : 1000, "nid_adjw_ratio": 20, - "ins_weight_slot": update_model.ins_weight.name } - - thread_stat_var_names = [] - thread_stat_var_names.append(join_common_model.join_stat_pos.name) - thread_stat_var_names.append(join_common_model.join_stat_neg.name) - thread_stat_var_names.append(join_common_model.join_sqrerr.name) - thread_stat_var_names.append(join_common_model.join_abserr.name) - thread_stat_var_names.append(join_common_model.join_prob.name) - thread_stat_var_names.append(join_common_model.join_q.name) - thread_stat_var_names.append(join_common_model.join_pos.name) - thread_stat_var_names.append(join_common_model.join_total.name) - - thread_stat_var_names.append(join_common_model.common_stat_pos.name) - thread_stat_var_names.append(join_common_model.common_stat_neg.name) - thread_stat_var_names.append(join_common_model.common_sqrerr.name) - thread_stat_var_names.append(join_common_model.common_abserr.name) - thread_stat_var_names.append(join_common_model.common_prob.name) - thread_stat_var_names.append(join_common_model.common_q.name) - thread_stat_var_names.append(join_common_model.common_pos.name) - thread_stat_var_names.append(join_common_model.common_total.name) - - thread_stat_var_names.append(update_model.stat_pos.name) - thread_stat_var_names.append(update_model.stat_neg.name) - thread_stat_var_names.append(update_model.sqrerr.name) - thread_stat_var_names.append(update_model.abserr.name) - thread_stat_var_names.append(update_model.prob.name) - thread_stat_var_names.append(update_model.q.name) - thread_stat_var_names.append(update_model.pos.name) - thread_stat_var_names.append(update_model.total.name) - - thread_stat_var_names = list(set(thread_stat_var_names)) - - - adam = fluid.optimizer.Adam(learning_rate=0.000005) - adam = fleet.distributed_optimizer(adam, strategy={"use_cvm" : True, "adjust_ins_weight" : adjust_ins_weight, "scale_datanorm" : 1e-4, "dump_slot": True, "stat_var_names": thread_stat_var_names, "fleet_desc_file": "fleet_desc_combinejoincommon.prototxt"}) - adam.minimize([join_common_model.joint_cost, update_model.avg_cost], [scope2, scope3]) - - join_common_model._train_program._fleet_opt["program_configs"][str(id(join_common_model.joint_cost.block.program))]["push_sparse"] = [] - - join_save_params = ["join.batch_size", "join.batch_sum", "join.batch_square_sum", - "join_0.w_0", "join_0.b_0", "join_1.w_0", "join_1.b_0", "join_2.w_0", "join_2.b_0", - "join_3.w_0", "join_3.b_0", "join_4.w_0", "join_4.b_0", "join_5.w_0", "join_5.b_0", - "join_6.w_0", "join_6.b_0", "join_7.w_0", "join_7.b_0"] - common_save_params = ["common.batch_size", "common.batch_sum", "common.batch_square_sum", - "common_0.w_0", "common_0.b_0", "common_1.w_0", "common_1.b_0", "common_2.w_0", 
"common_2.b_0", - "common_3.w_0", "common_3.b_0", "common_4.w_0", "common_4.b_0", "common_5.w_0", "common_5.b_0", - "common_6.w_0", "common_6.b_0", "common_7.w_0", "common_7.b_0"] - update_save_params = ["fc_0.w_0", "fc_0.b_0", "fc_1.w_0", "fc_1.b_0", - "fc_2.w_0", "fc_2.b_0", "fc_3.w_0", "fc_3.b_0", - "fc_4.w_0", "fc_4.b_0", "fc_5.w_0", "fc_5.b_0"] - - if fleet.is_server(): - fleet.run_server() - elif fleet.is_worker(): - with fluid.scope_guard(scope3): - exe.run(update_model._startup_program) - with fluid.scope_guard(scope2): - exe.run(join_common_model._startup_program) - fleet.init_worker() - - configs = { - "fs.default.name": config.fs_name, - "hadoop.job.ugi": config.fs_ugi - } - hdfs_client = HDFSClient("$HADOOP_HOME", configs) - - save_first_base = config.save_first_base - path = config.train_data_path - online_pass_interval = fleet_util.get_online_pass_interval(config.days, config.hours, config.split_interval, config.split_per_pass, False) - pass_per_day = len(online_pass_interval) - last_day, last_pass, last_path, xbox_base_key = fleet_util.get_last_save_model(config.output_path, config.fs_name, config.fs_ugi) - reqi = True if last_day != -1 else False - - dataset = None - next_dataset = None - cur_path = None - next_path = None - start_train = False - days = os.popen("echo -n " + config.days).read().split(" ") - hours = os.popen("echo -n " + config.hours).read().split(" ") - for day_index in range(len(days)): - day = days[day_index] - if last_day != -1 and int(day) < last_day: - continue - for pass_index in range(1, pass_per_day + 1): - dataset = next_dataset - next_dataset = None - cur_path = next_path - next_path = None - if (last_day != -1 and int(day) == last_day) and (last_pass != -1 and int(pass_index) < last_pass): - continue - if reqi: - begin = time.time() - fleet_util.rank0_print("going to load model %s" % last_path) - # fleet_util.load_fleet_model(last_path) - # fleet.load_one_table(0, last_path) - # tmppath = "afs:/user/feed/mlarch/sequence_generator/wuzhihua02/xujiaqi/test_combinejoincommon_0921_72/new_model" - #"afs:/user/feed/mlarch/sequence_generator/wuzhihua02/xujiaqi/test_combinejoincommon_0920_108/new_model" - #"afs:/user/feed/mlarch/sequence_generator/wuzhihua02/xujiaqi/test_combinejoincommon_0915/new_model" - # fleet.load_one_table(1,tmppath) - # fleet.load_one_table(2,tmppath) - # fleet.load_one_table(3,tmppath) - - end = time.time() - fleet_util.rank0_print("load model cost %s min" % ((end - begin) / 60.0)) - reqi = False - if (last_day != -1 and int(day) == last_day) and (last_pass != -1 and int(pass_index) == last_pass): - continue - - fleet_util.rank0_print("===========going to train day/pass %s/%s===========" % (day, pass_index)) - - if save_first_base: - fleet_util.rank0_print("save_first_base=True") - save_first_base = False - last_base_day, last_base_path, tmp_xbox_base_key = \ - fleet_util.get_last_save_xbox_base(config.output_path, config.fs_name, config.fs_ugi) - if int(day) > last_base_day: - fleet_util.rank0_print("going to save xbox base model") - xbox_base_key = int(time.time()) - cur = [] - for interval in online_pass_interval[pass_index - 1]: - for p in path: - cur.append(p + "/" + day + "/" + interval) - save_delta(day, -1, xbox_base_key, cur, exe, scope2, scope2, scope3, - join_common_model, join_common_model, update_model, - join_save_params, common_save_params, update_save_params) - elif int(day) == last_base_day: - xbox_base_key = tmp_xbox_base_key - fleet_util.rank0_print("xbox base model exists") - else: - 
fleet_util.rank0_print("xbox base model exists") - - start_train = True - train_begin = time.time() - - if dataset is not None: - begin = time.time() - dataset.wait_preload_done() - end = time.time() - fleet_util.rank0_print("wait data preload done cost %s min" % ((end - begin) / 60.0)) - - if dataset is None: - cur_pass = online_pass_interval[pass_index - 1] - cur_path = [] - for interval in cur_pass: - for p in path: - cur_path.append(p + "/" + day + "/" + interval) - fleet_util.rank0_print("data path: " + ",".join(cur_path)) - #for i in cur_path: - # while not hdfs_client.is_exist(i + "/to.hadoop.done"): - # fleet_util.rank0_print("wait for data ready: %s" % i) - # time.sleep(config.check_exist_seconds) - my_filelist = ["part-00000_1"]#fleet.split_files(hdfs_ls(cur_path)) - - dataset = create_dataset(join_common_model._all_slots, my_filelist) - fleet_util.rank0_print("going to load into memory") - begin = time.time() - dataset.load_into_memory() - end = time.time() - fleet_util.rank0_print("load into memory done, cost %s min" % ((end - begin) / 60.0)) - - if config.prefetch and (pass_index < pass_per_day or pass_index == pass_per_day and day_index < len(days) - 1): - if pass_index < pass_per_day: - next_pass = online_pass_interval[pass_index] - next_day = day - else: - next_pass = online_pass_interval[0] - next_day = days[day_index + 1] - next_path = [] - for interval in next_pass: - for p in path: - next_path.append(p + "/" + next_day + "/" + interval) - next_data_ready = True - #for i in next_path: - # if not hdfs_client.is_exist(i + "/to.hadoop.done"): - # next_data_ready = False - # fleet_util.rank0_print("next data not ready: %s" % i) - if not next_data_ready: - next_dataset = None - else: - my_filelist = ["part-00000_1"]#fleet.split_files(hdfs_ls(next_path)) - next_dataset = create_dataset(join_common_model._all_slots, my_filelist) - fleet_util.rank0_print("next pass data preload %s " % ",".join(next_path)) - next_dataset.preload_into_memory(config.preload_thread) - - fleet_util.rank0_print("going to global shuffle") - begin = time.time() - dataset.global_shuffle(fleet, config.shuffle_thread) - end = time.time() - fleet_util.rank0_print("global shuffle done, cost %s min, data size %s" % ((end - begin) / 60.0, dataset.get_shuffle_data_size(fleet))) - - get_data_max(dataset.get_shuffle_data_size()) - get_data_min(dataset.get_shuffle_data_size()) - - join_cost = 0 - common_cost = 0 - update_cost = 0 - - with fluid.scope_guard(scope2): - fleet_util.rank0_print("Begin join + common pass") - begin = time.time() - exe.train_from_dataset(join_common_model._train_program, - dataset, - scope2, - thread=config.join_common_thread, - debug=False) - end = time.time() - avg_cost = get_avg_cost_mins(end - begin) - - fleet_util.rank0_print("avg train time %s mins" % avg_cost) - - get_max_cost_mins(end - begin) - get_min_cost_mins(end - begin) - - common_cost = avg_cost - - fleet_util.print_global_metrics(scope2, join_common_model.join_stat_pos.name, join_common_model.join_stat_neg.name, - join_common_model.join_sqrerr.name, join_common_model.join_abserr.name, - join_common_model.join_prob.name, - join_common_model.join_q.name, join_common_model.join_pos.name, - join_common_model.join_total.name, - "join pass:") - - fleet_util.print_global_metrics(scope2, join_common_model.common_stat_pos.name, join_common_model.common_stat_neg.name, - join_common_model.common_sqrerr.name, join_common_model.common_abserr.name, - join_common_model.common_prob.name, - join_common_model.common_q.name, 
join_common_model.common_pos.name, - join_common_model.common_total.name, - "common pass:") - fleet_util.rank0_print("End join+common pass") - clear_metrics_2(fleet_util, join_common_model, scope2) - - if config.save_xbox_before_update and pass_index % config.save_delta_frequency == 0: - fleet_util.rank0_print("going to save delta model") - last_xbox_day, last_xbox_pass, last_xbox_path, _ = fleet_util.get_last_save_xbox(config.output_path, config.fs_name, config.fs_ugi) - if int(day) < last_xbox_day or int(day) == last_xbox_day and int(pass_index) <= last_xbox_pass: - fleet_util.rank0_print("delta model exists") - else: - save_delta(day, pass_index, xbox_base_key, cur_path, exe, scope2, scope2, scope3, - join_common_model, join_common_model, update_model, - join_save_params, common_save_params, update_save_params) - - with fluid.scope_guard(scope3): - fleet_util.rank0_print("Begin update pass") - begin = time.time() - exe.train_from_dataset(update_model._train_program, - dataset, - scope3, - thread=config.update_thread, - debug=False) - end = time.time() - avg_cost = get_avg_cost_mins(end - begin) - update_cost = avg_cost - - fleet_util.print_global_metrics(scope3, update_model.stat_pos.name, update_model.stat_neg.name, - update_model.sqrerr.name, update_model.abserr.name, update_model.prob.name, - update_model.q.name, update_model.pos.name, update_model.total.name, - "update pass:") - fleet_util.rank0_print("End update pass") - clear_metrics(fleet_util, update_model, scope3) - - begin = time.time() - dataset.release_memory() - end = time.time() - - print pass_index - print config.checkpoint_per_pass - - if (pass_index % config.checkpoint_per_pass) == 0 and pass_index != pass_per_day: - print "save" - begin = time.time() - fleet_util.save_model(config.output_path, day, pass_index) - fleet_util.write_model_donefile(config.output_path, day, pass_index, xbox_base_key, config.fs_name, config.fs_ugi) - end = time.time() - fleet_util.rank0_print("save model cost %s min" % ((end - begin) / 60.0)) - if not config.save_xbox_before_update and pass_index % config.save_delta_frequency == 0: - fleet_util.rank0_print("going to save delta model") - last_xbox_day, last_xbox_pass, last_xbox_path, _ = fleet_util.get_last_save_xbox(config.output_path, config.fs_name, config.fs_ugi) - if int(day) < last_xbox_day or int(day) == last_xbox_day and int(pass_index) <= last_xbox_pass: - fleet_util.rank0_print("delta model exists") - else: - save_delta(day, pass_index, xbox_base_key, cur_path, exe, scope2, scope2, scope3, - join_common_model, join_common_model, update_model, - join_save_params, common_save_params, update_save_params) - - train_end = time.time() - train_cost = (train_end - train_begin) / 60.0 - other_cost = train_cost - join_cost - common_cost - update_cost - fleet_util.rank0_print(\ - "finished train day %s pass %s time cost:%s min job time cost" - ":[join:%s min][join_common:%s min][update:%s min][other:%s min]" \ - % (day, pass_index, train_cost, join_cost, common_cost, update_cost, other_cost)) - - xbox_base_key = int(time.time()) - if not start_train: - continue - - fleet_util.rank0_print("shrink table") - begin = time.time() - fleet.shrink_sparse_table() - fleet.shrink_dense_table(0.98, scope=scope2, table_id=1) - fleet.shrink_dense_table(0.98, scope=scope2, table_id=2) - fleet.shrink_dense_table(0.98, scope=scope3, table_id=3) - end = time.time() - fleet_util.rank0_print("shrink table done, cost %s min" % ((end - begin) / 60.0)) - - fleet_util.rank0_print("going to save batch model/base xbox 
model") - last_base_day, last_base_path, _ = fleet_util.get_last_save_xbox_base(config.output_path, config.fs_name, config.fs_ugi) - nextday = int(days[day_index + 1]) - if nextday <= last_base_day: - fleet_util.rank0_print("batch model/base xbox model exists") - else: - save_delta(nextday, -1, xbox_base_key, cur_path, exe, scope2, scope2, scope3, - join_common_model, join_common_model, update_model, - join_save_params, common_save_params, update_save_params) - begin = time.time() - fleet_util.save_batch_model(config.output_path, nextday) - fleet_util.write_model_donefile(config.output_path, nextday, -1, xbox_base_key, config.fs_name, config.fs_ugi) - end = time.time() - fleet_util.rank0_print("save batch model cost %s min" % ((end - begin) / 60.0)) diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/util.bak.py b/feed/feed_deploy/news_jingpai/package/format_nets/util.bak.py deleted file mode 100644 index 15e96c9e..00000000 --- a/feed/feed_deploy/news_jingpai/package/format_nets/util.bak.py +++ /dev/null @@ -1,135 +0,0 @@ -import paddle -import paddle.fluid as fluid -from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet -import os -import numpy as np -import config - -def jingpai_load_paddle_model(old_startup_program_bin, - old_train_program_bin, - old_model_path, - old_slot_list, - new_slot_list, - model_all_vars, - new_scope, - modify_layer_names): - place = fluid.CPUPlace() - exe = fluid.Executor(place) - - old_scope = fluid.Scope() - old_program = fluid.Program() - old_program = old_program.parse_from_string(open(old_train_program_bin, "rb").read()) - old_startup_program = fluid.Program() - old_startup_program = old_startup_program.parse_from_string(open(old_startup_program_bin, "rb").read()) - with fluid.scope_guard(old_scope): - exe.run(old_startup_program) - variables = [old_program.global_block().var(i) for i in model_all_vars] - if os.path.isfile(old_model_path): - path = os.path.dirname(old_model_path) - path = "./" if path == "" else path - filename = os.path.basename(old_model_path) - fluid.io.load_vars(exe, path, old_program, vars=variables, filename=filename) - else: - fluid.io.load_vars(exe, old_model_path, old_program, vars=variables) - - old_pos = {} - idx = 0 - for i in old_slot_list: - old_pos[i] = idx - idx += 1 - - for i in modify_layer_names: - if old_scope.find_var(i) is None: - print("%s not found in old scope, skip" % i) - continue - elif new_scope.find_var(i) is None: - print("%s not found in new scope, skip" % i) - continue - old_param = old_scope.var(i).get_tensor() - old_param_array = np.array(old_param).astype("float32") - old_shape = old_param_array.shape - #print i," old_shape ", old_shape - - new_param = new_scope.var(i).get_tensor() - new_param_array = np.array(new_param).astype("float32") - new_shape = new_param_array.shape - #print i," new_shape ", new_shape - - per_dim = len(new_param_array) / len(new_slot_list) - #print "len(new_param_array) ",len(new_param_array),\ - # "len(new_slot_list) ", len(new_slot_list)," per_dim ", per_dim - - idx = -per_dim - for s in new_slot_list: - idx += per_dim - if old_pos.get(s) is None: - continue - for j in range(0, per_dim): - #print i," row/value ", idx + j, " copy from ", old_pos[s] * per_dim + j - # a row or a value - new_param_array[idx + j] = old_param_array[old_pos[s] * per_dim + j] - - new_param.set(new_param_array, place) - - for i in model_all_vars: - if i in modify_layer_names: - continue - old_param = old_scope.find_var(i).get_tensor() - old_param_array = 
-        new_param = new_scope.find_var(i).get_tensor()
-        new_param.set(old_param_array, place)
-
-
-def reqi_changeslot(hdfs_dnn_plugin_path, join_save_params, common_save_params, update_save_params, scope2, scope3):
-    if fleet.worker_index() != 0:
-        return
-
-    print("load paddle model %s" % hdfs_dnn_plugin_path)
-
-    os.system("rm -rf dnn_plugin/ ; hadoop fs -D hadoop.job.ugi=%s -D fs.default.name=%s -get %s ." % (config.fs_ugi, config.fs_name, hdfs_dnn_plugin_path))
-
-    new_join_slot = []
-    for line in open("slot/slot", 'r'):
-        slot = line.strip()
-        new_join_slot.append(slot)
-    old_join_slot = []
-    for line in open("old_slot/slot", 'r'):
-        slot = line.strip()
-        old_join_slot.append(slot)
-
-    new_common_slot = []
-    for line in open("slot/slot_common", 'r'):
-        slot = line.strip()
-        new_common_slot.append(slot)
-    old_common_slot = []
-    for line in open("old_slot/slot_common", 'r'):
-        slot = line.strip()
-        old_common_slot.append(slot)
-
-
-    jingpai_load_paddle_model("old_program/old_join_common_startup_program.bin",
-                              "old_program/old_join_common_train_program.bin",
-                              "dnn_plugin/paddle_dense.model.0",
-                              old_join_slot,
-                              new_join_slot,
-                              join_save_params,
-                              scope2,
-                              ["join.batch_size","join.batch_sum","join.batch_square_sum","join_0.w_0"])
-
-    jingpai_load_paddle_model("old_program/old_join_common_startup_program.bin",
-                              "old_program/old_join_common_train_program.bin",
-                              "dnn_plugin/paddle_dense.model.1",
-                              old_common_slot,
-                              new_common_slot,
-                              common_save_params,
-                              scope2,
-                              ["common.batch_size","common.batch_sum","common.batch_square_sum","common_0.w_0"])
-
-    jingpai_load_paddle_model("old_program/old_update_startup_program.bin",
-                              "old_program/old_update_main_program.bin",
-                              "dnn_plugin/paddle_dense.model.2",
-                              old_join_slot,
-                              new_join_slot,
-                              update_save_params,
-                              scope3,
-                              ["fc_0.w_0"])
diff --git a/feed/feed_deploy/news_jingpai/package/format_nets/util.py b/feed/feed_deploy/news_jingpai/package/format_nets/util.py
deleted file mode 100644
index 46de454f..00000000
--- a/feed/feed_deploy/news_jingpai/package/format_nets/util.py
+++ /dev/null
@@ -1,286 +0,0 @@
-import paddle
-import paddle.fluid as fluid
-from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet
-import os
-import numpy as np
-import config
-from paddle.fluid.incubate.fleet.utils.fleet_util import FleetUtil
-from paddle.fluid.incubate.fleet.utils.hdfs import HDFSClient
-import collections
-import json
-import time
-
-fleet_util = FleetUtil()
-
-def print_global_metrics(scope, stat_pos_name, stat_neg_name, sqrerr_name,
-                         abserr_name, prob_name, q_name, pos_ins_num_name,
-                         total_ins_num_name, print_prefix):
-    auc, bucket_error, mae, rmse, actual_ctr, predicted_ctr, copc,\
-        mean_predict_qvalue, total_ins_num = fleet_util.get_global_metrics(\
-        scope, stat_pos_name, stat_neg_name, sqrerr_name, abserr_name,\
-        prob_name, q_name, pos_ins_num_name, total_ins_num_name)
-    log_str = "AUC=%.6f BUCKET_ERROR=%.6f MAE=%.6f " \
-              "RMSE=%.6f Actural_CTR=%.6f Predicted_CTR=%.6f " \
-              "COPC=%.6f MEAN Q_VALUE=%.6f Ins number=%s" % \
-              (auc, bucket_error, mae, rmse, \
-               actual_ctr, predicted_ctr, copc, mean_predict_qvalue, \
-               total_ins_num)
-    fleet_util.rank0_print(print_prefix + " " + log_str)
-    return print_prefix + " " + log_str #print_prefix + "\n " + log_str
-
-def write_stdout(stdout_str):
-    if fleet.worker_index() != 0:
-        fleet._role_maker._barrier_worker()
-        return
-    hadoop_home="$HADOOP_HOME"
-    configs = {"fs.default.name": config.fs_name, "hadoop.job.ugi": config.fs_ugi}
-    client = HDFSClient(hadoop_home, configs)
-    out_dir = config.output_path + "/stdout/"
-    if not client.is_exist(out_dir):
-        client.makedirs(out_dir)
-    job_id_with_host = os.popen("echo -n ${JOB_ID}").read().strip()
-    instance_id = os.popen("echo -n ${INSTANCE_ID}").read().strip()
-    start_pos = instance_id.find(job_id_with_host)
-    end_pos = instance_id.find("--")
-    if start_pos != -1 and end_pos != -1:
-        job_id_with_host = instance_id[start_pos:end_pos]
-    file_path = out_dir + job_id_with_host
-    if client.is_file(file_path):
-        pre_content = client.cat(file_path)
-        with open(job_id_with_host, "w") as f:
-            f.write(pre_content + "\n")
-            f.write(stdout_str + "\n")
-        client.delete(file_path)
-        client.upload(out_dir, job_id_with_host, multi_processes=1, overwrite=False)
-    else:
-        with open(job_id_with_host, "w") as f:
-            f.write(stdout_str + "\n")
-        client.upload(out_dir, job_id_with_host, multi_processes=1, overwrite=False)
-    fleet_util.rank0_info("write %s succeed" % file_path)
-    fleet._role_maker._barrier_worker()
-
-def _get_xbox_str(day, model_path, xbox_base_key, data_path, monitor_data, mode="patch"):
-    xbox_dict = collections.OrderedDict()
-    if mode == "base":
-        xbox_dict["id"] = str(xbox_base_key)
-    elif mode == "patch":
-        xbox_dict["id"] = str(int(time.time()))
-    else:
-        print("warning: unknown mode %s, set it to patch" % mode)
-        mode = "patch"
-        xbox_dict["id"] = str(int(time.time()))
-    xbox_dict["key"] = str(xbox_base_key)
-    if model_path.startswith("hdfs:") or model_path.startswith("afs:"):
-        model_path = model_path[model_path.find(":") + 1:]
-    xbox_dict["input"] = config.fs_name + model_path.rstrip("/") + "/000"
-    xbox_dict["record_count"] = "111111"
-    xbox_dict["partition_type"] = "2"
-    xbox_dict["job_name"] = "default_job_name"
-    xbox_dict["ins_tag"] = "feasign"
-    xbox_dict["ins_path"] = data_path
-    job_id_with_host = os.popen("echo -n ${JOB_ID}").read().strip()
-    instance_id = os.popen("echo -n ${INSTANCE_ID}").read().strip()
-    start_pos = instance_id.find(job_id_with_host)
-    end_pos = instance_id.find("--")
-    if start_pos != -1 and end_pos != -1:
-        job_id_with_host = instance_id[start_pos:end_pos]
-    xbox_dict["job_id"] = job_id_with_host
-    xbox_dict["monitor_data"] = monitor_data
-    xbox_dict["monitor_path"] = config.output_path.rstrip("/") + "/monitor/" \
-        + day + ".txt"
-    xbox_dict["mpi_size"] = str(fleet.worker_num())
-    return json.dumps(xbox_dict)
-
-def write_xbox_donefile(day, pass_id, xbox_base_key, data_path, donefile_name=None, monitor_data=""):
-    if fleet.worker_index() != 0:
-        fleet._role_maker._barrier_worker()
-        return
-    day = str(day)
-    pass_id = str(pass_id)
-    xbox_base_key = int(xbox_base_key)
-    mode = None
-    if pass_id != "-1":
-        mode = "patch"
-        suffix_name = "/%s/delta-%s/" % (day, pass_id)
-        model_path = config.output_path.rstrip("/") + suffix_name
-        if donefile_name is None:
-            donefile_name = "xbox_patch_done.txt"
-    else:
-        mode = "base"
-        suffix_name = "/%s/base/" % day
-        model_path = config.output_path.rstrip("/") + suffix_name
-        if donefile_name is None:
-            donefile_name = "xbox_base_done.txt"
-    if isinstance(data_path, list):
-        data_path = ",".join(data_path)
-
-    if fleet.worker_index() == 0:
-        donefile_path = config.output_path + "/" + donefile_name
-        xbox_str = _get_xbox_str(day, model_path, xbox_base_key, data_path, monitor_data, mode)
-        configs = {"fs.default.name": config.fs_name, "hadoop.job.ugi": config.fs_ugi}
-        client = HDFSClient("$HADOOP_HOME", configs)
-        if client.is_file(donefile_path):
-            pre_content = client.cat(donefile_path)
-            last_dict = json.loads(pre_content.split("\n")[-1])
-            last_day = last_dict["input"].split("/")[-3]
-            last_pass = last_dict["input"].split("/")[-2].split("-")[-1]
-            exist = False
-            if int(day) < int(last_day) or \
-                    int(day) == int(last_day) and \
-                    int(pass_id) <= int(last_pass):
-                exist = True
-            if not exist:
-                with open(donefile_name, "w") as f:
-                    f.write(pre_content + "\n")
-                    f.write(xbox_str + "\n")
-                client.delete(donefile_path)
-                client.upload(
-                    config.output_path,
-                    donefile_name,
-                    multi_processes=1,
-                    overwrite=False)
-                fleet_util.rank0_info("write %s/%s %s succeed" % \
-                                      (day, pass_id, donefile_name))
-            else:
-                fleet_util.rank0_error("not write %s because %s/%s already "
-                                       "exists" % (donefile_name, day, pass_id))
-        else:
-            with open(donefile_name, "w") as f:
-                f.write(xbox_str + "\n")
-            client.upload(
-                config.output_path,
-                donefile_name,
-                multi_processes=1,
-                overwrite=False)
-            fleet_util.rank0_error("write %s/%s %s succeed" % \
-                                   (day, pass_id, donefile_name))
-    fleet._role_maker._barrier_worker()
-
-def jingpai_load_paddle_model(old_startup_program_bin,
-                              old_train_program_bin,
-                              old_model_path,
-                              old_slot_list,
-                              new_slot_list,
-                              model_all_vars,
-                              new_scope,
-                              modify_layer_names):
-    place = fluid.CPUPlace()
-    exe = fluid.Executor(place)
-
-    old_scope = fluid.Scope()
-    old_program = fluid.Program()
-    old_program = old_program.parse_from_string(open(old_train_program_bin, "rb").read())
-    old_startup_program = fluid.Program()
-    old_startup_program = old_startup_program.parse_from_string(open(old_startup_program_bin, "rb").read())
-    with fluid.scope_guard(old_scope):
-        exe.run(old_startup_program)
-        variables = [old_program.global_block().var(i) for i in model_all_vars]
-        if os.path.isfile(old_model_path):
-            path = os.path.dirname(old_model_path)
-            path = "./" if path == "" else path
-            filename = os.path.basename(old_model_path)
-            fluid.io.load_vars(exe, path, old_program, vars=variables, filename=filename)
-        else:
-            fluid.io.load_vars(exe, old_model_path, old_program, vars=variables)
-
-    old_pos = {}
-    idx = 0
-    for i in old_slot_list:
-        old_pos[i] = idx
-        idx += 1
-
-    for i in modify_layer_names:
-        if old_scope.find_var(i) is None:
-            print("%s not found in old scope, skip" % i)
-            continue
-        elif new_scope.find_var(i) is None:
-            print("%s not found in new scope, skip" % i)
-            continue
-        old_param = old_scope.var(i).get_tensor()
-        old_param_array = np.array(old_param).astype("float32")
-        old_shape = old_param_array.shape
-        #print i," old_shape ", old_shape
-
-        new_param = new_scope.var(i).get_tensor()
-        new_param_array = np.array(new_param).astype("float32")
-        new_shape = new_param_array.shape
-        #print i," new_shape ", new_shape
-
-        per_dim = len(new_param_array) / len(new_slot_list)
-        #print "len(new_param_array) ",len(new_param_array),\
-        # "len(new_slot_list) ", len(new_slot_list)," per_dim ", per_dim
-
-        idx = -per_dim
-        for s in new_slot_list:
-            idx += per_dim
-            if old_pos.get(s) is None:
-                continue
-            for j in range(0, per_dim):
-                #print i," row/value ", idx + j, " copy from ", old_pos[s] * per_dim + j
-                # a row or a value
-                new_param_array[idx + j] = old_param_array[old_pos[s] * per_dim + j]
-
-        new_param.set(new_param_array, place)
-
-    for i in model_all_vars:
-        if i in modify_layer_names:
-            continue
-        old_param = old_scope.find_var(i).get_tensor()
-        old_param_array = np.array(old_param).astype("float32")
-        new_param = new_scope.find_var(i).get_tensor()
-        new_param.set(old_param_array, place)
-
-
-def reqi_changeslot(hdfs_dnn_plugin_path, join_save_params, common_save_params, update_save_params, scope2, scope3):
-    if fleet.worker_index() != 0:
-        return
-
-    print("load paddle model %s" % hdfs_dnn_plugin_path)
-
-    os.system("rm -rf dnn_plugin/ ; hadoop fs -D hadoop.job.ugi=%s -D fs.default.name=%s -get %s ." % (config.fs_ugi, config.fs_name, hdfs_dnn_plugin_path))
-
-    new_join_slot = []
-    for line in open("slot/slot", 'r'):
-        slot = line.strip()
-        new_join_slot.append(slot)
-    old_join_slot = []
-    for line in open("old_slot/slot", 'r'):
-        slot = line.strip()
-        old_join_slot.append(slot)
-
-    new_common_slot = []
-    for line in open("slot/slot_common", 'r'):
-        slot = line.strip()
-        new_common_slot.append(slot)
-    old_common_slot = []
-    for line in open("old_slot/slot_common", 'r'):
-        slot = line.strip()
-        old_common_slot.append(slot)
-
-
-    jingpai_load_paddle_model("old_program/old_join_common_startup_program.bin",
-                              "old_program/old_join_common_train_program.bin",
-                              "dnn_plugin/paddle_dense.model.0",
-                              old_join_slot,
-                              new_join_slot,
-                              join_save_params,
-                              scope2,
-                              ["join.batch_size","join.batch_sum","join.batch_square_sum","join_0.w_0"])
-
-    jingpai_load_paddle_model("old_program/old_join_common_startup_program.bin",
-                              "old_program/old_join_common_train_program.bin",
-                              "dnn_plugin/paddle_dense.model.1",
-                              old_common_slot,
-                              new_common_slot,
-                              common_save_params,
-                              scope2,
-                              ["common.batch_size","common.batch_sum","common.batch_square_sum","common_0.w_0"])
-
-    jingpai_load_paddle_model("old_program/old_update_startup_program.bin",
-                              "old_program/old_update_main_program.bin",
-                              "dnn_plugin/paddle_dense.model.2",
-                              old_join_slot,
-                              new_join_slot,
-                              update_save_params,
-                              scope3,
-                              ["fc_0.w_0"])
--
GitLab
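
Note on the deleted util.py/util.bak.py above: jingpai_load_paddle_model treats each dense parameter named in modify_layer_names (e.g. join.batch_size, join.batch_sum, join.batch_square_sum, join_0.w_0) as a flat sequence in which every slot owns per_dim consecutive rows (or scalar entries), and copies each slot's block from its position in the old slot file to its position in the new slot file; slots with no counterpart in the old list keep their freshly initialized values. The following is a minimal NumPy-only sketch of that remapping, not the deleted code itself; the slot lists and per_dim value are hypothetical placeholders chosen for illustration.

    import numpy as np

    # Hypothetical slot lists; in the deleted scripts these are read from
    # "old_slot/slot" and "slot/slot".
    old_slots = ["101", "102", "103"]
    new_slots = ["102", "104", "101"]

    # Assumed block size per slot, mirroring per_dim = len(param) / len(slot_list)
    # in the deleted code.
    per_dim = 2
    old_param = np.arange(len(old_slots) * per_dim, dtype="float32")
    new_param = np.zeros(len(new_slots) * per_dim, dtype="float32")

    # Map each old slot to its block index, then copy block-by-block into the
    # position the slot occupies in the new ordering.
    old_pos = {s: i for i, s in enumerate(old_slots)}
    for new_idx, s in enumerate(new_slots):
        old_idx = old_pos.get(s)
        if old_idx is None:
            continue  # a new slot with no history keeps its initialized values
        new_param[new_idx * per_dim:(new_idx + 1) * per_dim] = \
            old_param[old_idx * per_dim:(old_idx + 1) * per_dim]

    print(new_param)

In the deleted code the same copy is applied to tensors fetched from Paddle scopes via scope.var(name).get_tensor() and written back with tensor.set(array, place); the sketch only isolates the slot-index arithmetic.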