BaiXuePrincess / PaddleRec  (forked from PaddlePaddle / PaddleRec)

Commit 73334d88
Authored Nov 19, 2019 by xiexionghang

    submit news_jingpai online config

Parent: 1fb811f8
Showing 36 changed files with 9,319 additions and 0 deletions
Changed files:

  feed_deploy/news_jingpai/job.sh  +24  -0
  feed_deploy/news_jingpai/package/my_nets/all_slot.dict  +409  -0
  feed_deploy/news_jingpai/package/my_nets/config.py  +40  -0
  feed_deploy/news_jingpai/package/my_nets/data_generate_base.py  +358  -0
  feed_deploy/news_jingpai/package/my_nets/fleet_desc_combinejoincommon.prototxt  +1466  -0
  feed_deploy/news_jingpai/package/my_nets/format_newcate_hotnews.awk  +21  -0
  feed_deploy/news_jingpai/package/my_nets/ins_weight.py  +122  -0
  feed_deploy/news_jingpai/package/my_nets/jingpai_fleet_desc_new.prototxt  +1504  -0
  feed_deploy/news_jingpai/package/my_nets/model_new.py  +188  -0
  feed_deploy/news_jingpai/package/my_nets/model_new_jc.py  +166  -0
  feed_deploy/news_jingpai/package/my_nets/my_data_generator_str.py  +89  -0
  feed_deploy/news_jingpai/package/my_nets/old_program/old_join_common_startup_program.bin  +0  -0
  feed_deploy/news_jingpai/package/my_nets/old_program/old_join_common_train_program.bin  +0  -0
  feed_deploy/news_jingpai/package/my_nets/old_program/old_update_main_program.bin  +0  -0
  feed_deploy/news_jingpai/package/my_nets/old_program/old_update_startup_program.bin  +0  -0
  feed_deploy/news_jingpai/package/my_nets/old_slot/slot  +408  -0
  feed_deploy/news_jingpai/package/my_nets/old_slot/slot_common  +99  -0
  feed_deploy/news_jingpai/package/my_nets/old_slot/to.py  +5  -0
  feed_deploy/news_jingpai/package/my_nets/reqi_fleet_desc  +1461  -0
  feed_deploy/news_jingpai/package/my_nets/scripts/xbox_compressor_mf.py  +162  -0
  feed_deploy/news_jingpai/package/my_nets/scripts/xbox_decompressor_mf.awk  +52  -0
  feed_deploy/news_jingpai/package/my_nets/slot/slot  +407  -0
  feed_deploy/news_jingpai/package/my_nets/slot/slot_common  +99  -0
  feed_deploy/news_jingpai/package/my_nets/tmp/slot/slot  +408  -0
  feed_deploy/news_jingpai/package/my_nets/tmp/slot/slot_common  +99  -0
  feed_deploy/news_jingpai/package/my_nets/tmp/slot/to.py  +5  -0
  feed_deploy/news_jingpai/package/my_nets/trainer_online.py  +573  -0
  feed_deploy/news_jingpai/package/my_nets/trainer_online_local.py  +500  -0
  feed_deploy/news_jingpai/package/my_nets/util.bak.py  +135  -0
  feed_deploy/news_jingpai/package/my_nets/util.py  +286  -0
  feed_deploy/news_jingpai/qsub_f.conf  +4  -0
  feed_deploy/news_jingpai/run.sh  +62  -0
  feed_deploy/news_jingpai/submit.sh  +54  -0
  paddle/fluid/feed/CMakeLists.txt  +1  -0
  paddle/fluid/feed/tool/CMakeLists.txt  +1  -0
  paddle/fluid/feed/tool/parse_feasign.cpp  +111  -0
feed_deploy/news_jingpai/job.sh  0 → 100755
#!/bin/bash
WORKDIR=`pwd`
echo "current:" $WORKDIR

# stage the package contents into the working directory on every node
mpirun -npernode 1 mv package/* ./
export LIBRARY_PATH=$WORKDIR/python/lib:$LIBRARY_PATH

# allow core dumps for debugging
ulimit -c unlimited
#export FLAGS_check_nan_inf=True
#export check_nan_inf=True
#FLAGS_check_nan_inf=True check_nan_inf=True
#mpirun -npernode 2 -timestamp-output -tag-output -machinefile ${PBS_NODEFILE} python/bin/python -u trainer_online.py

# launch two trainer processes per node using the bundled Python
mpirun -npernode 2 -timestamp-output -tag-output python/bin/python -u trainer_online.py
if [[ $? -ne 0 ]]; then
    echo "Failed to run mpi!" 1>&2
    exit 1
fi
feed_deploy/news_jingpai/package/my_nets/all_slot.dict  0 → 100644
6048
6002
6145
6202
6201
6121
6738
6119
6146
6120
6147
6122
6123
6118
6142
6143
6008
6148
6151
6127
6144
6094
6083
6952
6739
6150
6109
6003
6099
6149
6129
6203
6153
6152
6128
6106
6251
7082
7515
6951
6949
7080
6066
7507
6186
6007
7514
6125
7506
10001
6006
7023
6085
10000
6098
6250
6110
6124
6090
6082
6067
6101
6004
6191
7075
6948
6157
6126
6188
7077
6070
6111
6087
6103
6107
6194
6156
6005
6247
6814
6158
7122
6058
6189
7058
6059
6115
7079
7081
6833
7024
6108
13342
13345
13412
13343
13350
13346
13409
6009
6011
6012
6013
6014
6015
6019
6023
6024
6027
6029
6031
6050
6060
6068
6069
6089
6095
6105
6112
6130
6131
6132
6134
6161
6162
6163
6166
6182
6183
6185
6190
6212
6213
6231
6233
6234
6236
6238
6239
6240
6241
6242
6243
6244
6245
6354
7002
7005
7008
7010
7013
7015
7019
7020
7045
7046
7048
7049
7052
7054
7056
7064
7066
7076
7078
7083
7084
7085
7086
7087
7088
7089
7090
7099
7100
7101
7102
7103
7104
7105
7109
7124
7126
7136
7142
7143
7144
7145
7146
7147
7148
7150
7151
7152
7153
7154
7155
7156
7157
7047
7050
6257
6259
6260
6261
7170
7185
7186
6751
6755
6757
6759
6760
6763
6764
6765
6766
6767
6768
6769
6770
7502
7503
7504
7505
7510
7511
7512
7513
6806
6807
6808
6809
6810
6811
6812
6813
6815
6816
6817
6819
6823
6828
6831
6840
6845
6875
6879
6881
6888
6889
6947
6950
6956
6957
6959
10006
10008
10009
10010
10011
10016
10017
10018
10019
10020
10021
10022
10023
10024
10029
10030
10031
10032
10033
10034
10035
10036
10037
10038
10039
10040
10041
10042
10044
10045
10046
10051
10052
10053
10054
10055
10056
10057
10060
10066
10069
6820
6821
6822
13333
13334
13335
13336
13337
13338
13339
13340
13341
13351
13352
13353
13359
13361
13362
13363
13366
13367
13368
13369
13370
13371
13375
13376
5700
5702
13400
13401
13402
13403
13404
13406
13407
13408
13410
13417
13418
13419
13420
13422
13425
13427
13428
13429
13430
13431
13433
13434
13436
13437
13326
13330
13331
5717
13442
13451
13452
13455
13456
13457
13458
13459
13460
13461
13462
13463
13464
13465
13466
13467
13468
1104
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
13812
13813
6740
1490
32915
32950
32952
32953
32954
33077
33085
33086
12345
23456
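The ids above are consumed by the C++ tool added in this commit (paddle/fluid/feed/tool/parse_feasign.cpp) via "./parse_feasign all_slot.dict" in the training pipe command (see config.py below). A minimal sketch of reading the format, one slot id per line; load_slot_dict is a hypothetical helper, not part of the commit:

def load_slot_dict(path="all_slot.dict"):
    # one slot id per line; returns the set of distinct ids
    with open(path) as f:
        return {line.strip() for line in f if line.strip()}

slots = load_slot_dict()
print(len(slots))  # number of distinct slot ids (the file has 409 lines here)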
feed_deploy/news_jingpai/package/my_nets/config.py  0 → 100644
dataset_type = "InMemoryDataset"
sparse_table_storage = "ssd"
batch_size = 32
thread_num = 12
shuffle_thread = 12
preload_thread = 12
join_common_thread = 16
update_thread = 12
fs_name = "afs://xingtian.afs.baidu.com:9902"
fs_ugi = "mlarch_pro,proisvip"
train_data_path = ["afs:/user/feed/mlarch/samplejoin/mondr_shoubai_dnn_master/feasign"]
init_model_path = ""
days = "{20190915..20190930} {20191001..20191031} {20191101..20191130} {20191201..20191231} {20200101..20200131}"
hours = "{0..23}"
split_interval = 5
split_per_pass = 2
is_data_hourly_placed = False
save_first_base = False
output_path = "afs:/user/feed/mlarch/model/feed_muye_ln_paddle"
pipe_command = "./read_feasign | python/bin/python ins_weight.py | awk -f format_newcate_hotnews.awk | ./parse_feasign all_slot.dict"
save_xbox_before_update = True
check_exist_seconds = 30
checkpoint_per_pass = 36
save_delta_frequency = 6
prefetch = True
write_stdout_frequency = 10
need_reqi_changeslot = True
hdfs_dnn_plugin_path = "afs:/user/feed/mlarch/sequence_generator/wuzhihua02/xujiaqi/test_combinejoincommon_0918_amd/20191006/base/dnn_plugin"
reqi_dnn_plugin_day = 20191006
reqi_dnn_plugin_pass = 0
task_name = "feed_production_shoubai_video_ctr_fsort_session_cut"
nodes = 119
node_memory = 100000
mpi_server = "yq01-hpc-lvliang01-smart-master.dmop.baidu.com"
mpi_queue = "feed5"
mpi_priority = "very_high"
smart_client_home = "/home/work/online_model/news_fsort/submit_jingpai_xiaoliuliang_paddlef50e701_pslibf7995_compile02255_reqi/smart_client/"
local_hadoop_home = "/home/work/online_model/news_fsort/submit_jingpai_xiaoliuliang_paddlef50e701_pslibf7995_compile02255_reqi/hadoop-client/hadoop"
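The days and hours values are shell-style {A..B} brace ranges; the ranges are month-aligned, so plain numeric expansion yields only valid datestamps. A minimal sketch of expanding them in Python, where expand_braces is a hypothetical helper and not part of this commit:

import re

def expand_braces(spec):
    """Expand shell-style {A..B} integer ranges, e.g. "{0..23}"."""
    out = []
    for token in spec.split():
        m = re.fullmatch(r"\{(\d+)\.\.(\d+)\}", token)
        if m:
            out.extend(str(i) for i in range(int(m.group(1)), int(m.group(2)) + 1))
        else:
            out.append(token)
    return out

hours = expand_braces("{0..23}")              # ['0', '1', ..., '23']
days = expand_braces("{20190915..20190930}")  # ['20190915', ..., '20190930']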
feed_deploy/news_jingpai/package/my_nets/data_generate_base.py  0 → 100644
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys

__all__ = ['MultiSlotDataGenerator']


class DataGenerator(object):
    """
    DataGenerator is a general base class for users to inherit from.
    A user who wants to define his/her own Python processing logic
    with paddle.fluid.dataset should inherit this class.
    """

    def __init__(self):
        self._proto_info = None
        self.batch_size_ = 32

    def _set_line_limit(self, line_limit):
        if not isinstance(line_limit, int):
            raise ValueError("line_limit%s must be in int type" %
                             type(line_limit))
        if line_limit < 1:
            raise ValueError("line_limit can not be less than 1")
        self._line_limit = line_limit

    def set_batch(self, batch_size):
        '''
        Set the batch size of the current DataGenerator.
        This is necessary only if a user wants to define generate_batch.
        Example:
            .. code-block:: python
                import paddle.fluid.incubate.data_generator as dg
                class MyData(dg.DataGenerator):
                    def generate_sample(self, line):
                        def local_iter():
                            int_words = [int(x) for x in line.split()]
                            yield ("words", int_words)
                        return local_iter
                    def generate_batch(self, samples):
                        def local_iter():
                            for s in samples:
                                yield ("words", s[1].extend([s[1][0]]))
                mydata = MyData()
                mydata.set_batch(128)
        '''
        self.batch_size_ = batch_size

    def run_from_memory(self):
        '''
        This function generates data from memory; it is usually used for
        debugging and benchmarking.
        Example:
            .. code-block:: python
                import paddle.fluid.incubate.data_generator as dg
                class MyData(dg.DataGenerator):
                    def generate_sample(self, line):
                        def local_iter():
                            yield ("words", [1, 2, 3, 4])
                        return local_iter
                mydata = MyData()
                mydata.run_from_memory()
        '''
        batch_samples = []
        line_iter = self.generate_sample(None)
        for user_parsed_line in line_iter():
            if user_parsed_line is None:
                continue
            batch_samples.append(user_parsed_line)
            if len(batch_samples) == self.batch_size_:
                batch_iter = self.generate_batch(batch_samples)
                for sample in batch_iter():
                    sys.stdout.write(self._gen_str(sample))
                batch_samples = []
        if len(batch_samples) > 0:
            batch_iter = self.generate_batch(batch_samples)
            for sample in batch_iter():
                sys.stdout.write(self._gen_str(sample))

    def run_from_stdin(self):
        '''
        This function reads data rows from stdin, parses each with the
        process function, and further parses the return value of the
        process function with the _gen_str function. The parsed data is
        written to stdout and the corresponding protofile is generated.
        Example:
            .. code-block:: python
                import paddle.fluid.incubate.data_generator as dg
                class MyData(dg.DataGenerator):
                    def generate_sample(self, line):
                        def local_iter():
                            int_words = [int(x) for x in line.split()]
                            yield ("words", [int_words])
                        return local_iter
                mydata = MyData()
                mydata.run_from_stdin()
        '''
        batch_samples = []
        for line in sys.stdin:
            line_iter = self.generate_sample(line)
            for user_parsed_line in line_iter():
                if user_parsed_line is None:
                    continue
                batch_samples.append(user_parsed_line)
                if len(batch_samples) == self.batch_size_:
                    batch_iter = self.generate_batch(batch_samples)
                    for sample in batch_iter():
                        sys.stdout.write(self._gen_str(sample))
                    batch_samples = []
        if len(batch_samples) > 0:
            batch_iter = self.generate_batch(batch_samples)
            for sample in batch_iter():
                sys.stdout.write(self._gen_str(sample))

    def _gen_str(self, line):
        '''
        Further process the output of the user-rewritten process()
        function, producing data that can be read directly by the
        DataFeed, and updating the proto_info information.
        Args:
            line(str): the output of the process() function rewritten by the user.
        Returns:
            A string that can be read directly by the DataFeed.
        '''
        raise NotImplementedError(
            "pls use MultiSlotDataGenerator or PairWiseDataGenerator")

    def generate_sample(self, line):
        '''
        This function needs to be overridden by the user to process an
        original data row into a list or tuple.
        Args:
            line(str): the original data row
        Returns:
            The data processed by the user, as a list or tuple:
            [(name, [feasign, ...]), ...]
            or ((name, [feasign, ...]), ...)
            For example:
            [("words", [1926, 08, 17]), ("label", [1])]
            or (("words", [1926, 08, 17]), ("label", [1]))
        Note:
            The type of each feasign must be int or float. Once a float
            element appears in a feasign, the type of that slot will be
            processed as float.
        Example:
            .. code-block:: python
                import paddle.fluid.incubate.data_generator as dg
                class MyData(dg.DataGenerator):
                    def generate_sample(self, line):
                        def local_iter():
                            int_words = [int(x) for x in line.split()]
                            yield ("words", [int_words])
                        return local_iter
        '''
        raise NotImplementedError(
            "Please rewrite this function to return a list or tuple: " +
            "[(name, [feasign, ...]), ...] or ((name, [feasign, ...]), ...)")

    def generate_batch(self, samples):
        '''
        This function needs to be overridden by the user to process the
        samples generated by the generate_sample(self, str) function.
        It is usually used for batch processing when a user wants to do
        preprocessing on a batch of samples, e.g. padding according to
        the max length of a sample in the batch.
        Args:
            samples(list/tuple): samples generated by generate_sample
        Returns:
            a Python generator, in the same format as the return value
            of generate_sample
        Example:
            .. code-block:: python
                import paddle.fluid.incubate.data_generator as dg
                class MyData(dg.DataGenerator):
                    def generate_sample(self, line):
                        def local_iter():
                            int_words = [int(x) for x in line.split()]
                            yield ("words", int_words)
                        return local_iter
                    def generate_batch(self, samples):
                        def local_iter():
                            for s in samples:
                                yield ("words", s[1].extend([s[1][0]]))
                mydata = MyData()
                mydata.set_batch(128)
        '''

        def local_iter():
            for sample in samples:
                yield sample

        return local_iter


class MultiSlotDataGenerator(DataGenerator):
    def _gen_str(self, line):
        '''
        Further process the output of the user-rewritten process()
        function, producing data that can be read directly by the
        MultiSlotDataFeed, and updating the proto_info information.
        The input line will be in this format:
            >>> [(name, [feasign, ...]), ...]
            >>> or ((name, [feasign, ...]), ...)
        The output will be in this format:
            >>> [ids_num id1 id2 ...] ...
        The proto_info will be in this format:
            >>> [(name, type), ...]
        For example, if the input is like this:
            >>> [("words", [1926, 08, 17]), ("label", [1])]
            >>> or (("words", [1926, 08, 17]), ("label", [1]))
        the output will be:
            >>> 3 1234 2345 3456 1 1
        the proto_info will be:
            >>> [("words", "uint64"), ("label", "uint64")]
        Args:
            line(str): the output of the process() function rewritten by the user.
        Returns:
            A string that can be read directly by the MultiSlotDataFeed.
        '''
        if not isinstance(line, list) and not isinstance(line, tuple):
            raise ValueError(
                "the output of process() must be in list or tuple type")
        output = ""

        for index, item in enumerate(line):
            name, elements = item
            if output:
                output += " "
            out_str = []
            out_str.append(str(len(elements)))
            # elements are joined as-is, so they must already be strings
            out_str.extend(elements)
            output += " ".join(out_str)
        return output + "\n"

        # NOTE: the early return above makes everything below unreachable;
        # it is kept as in the original file (an older _proto_info-tracking
        # implementation, partially commented out).
        if self._proto_info is None:
            self._proto_info = []
            for index, item in enumerate(line):
                name, elements = item
                '''
                if not isinstance(name, str):
                    raise ValueError("name%s must be in str type" % type(name))
                if not isinstance(elements, list):
                    raise ValueError("elements%s must be in list type" %
                                     type(elements))
                if not elements:
                    raise ValueError(
                        "the elements of each field can not be empty, you need padding it in process()."
                    )
                self._proto_info.append((name, "uint64"))
                if output:
                    output += " "
                output += str(len(elements))
                for elem in elements:
                    if isinstance(elem, float):
                        self._proto_info[-1] = (name, "float")
                    elif not isinstance(elem, int) and not isinstance(elem,
                                                                      long):
                        raise ValueError(
                            "the type of element%s must be in int or float" %
                            type(elem))
                    output += " " + str(elem)
                '''
                if output:
                    output += " "
                out_str = []
                out_str.append(str(len(elements)))
                out_str.extend(elements)
                output += " ".join(out_str)
        else:
            if len(line) != len(self._proto_info):
                raise ValueError(
                    "the complete field set of two given line are inconsistent."
                )
            for index, item in enumerate(line):
                name, elements = item
                '''
                if not isinstance(name, str):
                    raise ValueError("name%s must be in str type" % type(name))
                if not isinstance(elements, list):
                    raise ValueError("elements%s must be in list type" %
                                     type(elements))
                if not elements:
                    raise ValueError(
                        "the elements of each field can not be empty, you need padding it in process()."
                    )
                if name != self._proto_info[index][0]:
                    raise ValueError(
                        "the field name of two given line are not match: require<%s>, get<%s>."
                        % (self._proto_info[index][0], name))
                '''
                if output:
                    output += " "
                out_str = []
                out_str.append(str(len(elements)))
                #out_str.extend([str(x) for x in elements])
                out_str.extend(elements)
                output += " ".join(out_str)
                '''
                for elem in elements:
                    if self._proto_info[index][1] != "float":
                        if isinstance(elem, float):
                            self._proto_info[index] = (name, "float")
                        elif not isinstance(elem, int) and not isinstance(elem,
                                                                          long):
                            raise ValueError(
                                "the type of element%s must be in int or float"
                                % type(elem))
                    output += " " + str(elem)
                '''
        return output + "\n"
feed_deploy/news_jingpai/package/my_nets/fleet_desc_combinejoincommon.prototxt  0 → 100644
server_param {
downpour_server_param {
downpour_table_param {
table_id: 0
table_class: "DownpourSparseTable"
shard_num: 1950
accessor {
accessor_class: "DownpourCtrAccessor"
sparse_sgd_param {
learning_rate: 0.05
initial_g2sum: 3.0
initial_range: 0.0001
weight_bounds: -10.0
weight_bounds: 10.0
}
fea_dim: 11
embedx_dim: 8
embedx_threshold: 10
downpour_accessor_param {
nonclk_coeff: 0.1
click_coeff: 1
base_threshold: 1.5
delta_threshold: 0.25
delta_keep_days: 16
delete_after_unseen_days: 30
show_click_decay_rate: 0.98
delete_threshold: 0.8
}
table_accessor_save_param {
param: 1
converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)"
deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)"
}
table_accessor_save_param {
param: 2
converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)"
deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)"
}
}
type: PS_SPARSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 1
table_class: "DownpourDenseTable"
accessor {
accessor_class: "DownpourDenseValueAccessor"
dense_sgd_param {
name: "adam"
adam {
learning_rate: 5e-06
avg_decay_rate: 0.999993
ada_decay_rate: 0.9999
ada_epsilon: 1e-08
mom_decay_rate: 0.99
}
naive {
learning_rate: 0.0002
}
}
fea_dim: 3405365
}
type: PS_DENSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 2
table_class: "DownpourDenseDoubleTable"
accessor {
accessor_class: "DownpourDenseValueDoubleAccessor"
dense_sgd_param {
name: "summarydouble"
summary {
summary_decay_rate: 0.999999
}
}
fea_dim: 16731
}
type: PS_DENSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 3
table_class: "DownpourDenseTable"
accessor {
accessor_class: "DownpourDenseValueAccessor"
dense_sgd_param {
name: "adam"
adam {
learning_rate: 5e-06
avg_decay_rate: 0.999993
ada_decay_rate: 0.9999
ada_epsilon: 1e-08
mom_decay_rate: 0.99
}
naive {
learning_rate: 0.0002
}
}
fea_dim: 2072615
}
type: PS_DENSE_TABLE
compress_in_save: true
}
service_param {
server_class: "DownpourBrpcPsServer"
client_class: "DownpourBrpcPsClient"
service_class: "DownpourPsService"
start_server_port: 0
server_thread_num: 12
}
}
}
trainer_param {
dense_table {
table_id: 1
dense_variable_name: "join_0.w_0"
dense_variable_name: "join_0.b_0"
dense_variable_name: "join_1.w_0"
dense_variable_name: "join_1.b_0"
dense_variable_name: "join_2.w_0"
dense_variable_name: "join_2.b_0"
dense_variable_name: "join_3.w_0"
dense_variable_name: "join_3.b_0"
dense_variable_name: "join_4.w_0"
dense_variable_name: "join_4.b_0"
dense_variable_name: "join_5.w_0"
dense_variable_name: "join_5.b_0"
dense_variable_name: "join_6.w_0"
dense_variable_name: "join_6.b_0"
dense_variable_name: "join_7.w_0"
dense_variable_name: "join_7.b_0"
dense_variable_name: "common_0.w_0"
dense_variable_name: "common_0.b_0"
dense_variable_name: "common_1.w_0"
dense_variable_name: "common_1.b_0"
dense_variable_name: "common_2.w_0"
dense_variable_name: "common_2.b_0"
dense_variable_name: "common_3.w_0"
dense_variable_name: "common_3.b_0"
dense_variable_name: "common_4.w_0"
dense_variable_name: "common_4.b_0"
dense_variable_name: "common_5.w_0"
dense_variable_name: "common_5.b_0"
dense_variable_name: "common_6.w_0"
dense_variable_name: "common_6.b_0"
dense_variable_name: "common_7.w_0"
dense_variable_name: "common_7.b_0"
dense_gradient_variable_name: "join_0.w_0@GRAD"
dense_gradient_variable_name: "join_0.b_0@GRAD"
dense_gradient_variable_name: "join_1.w_0@GRAD"
dense_gradient_variable_name: "join_1.b_0@GRAD"
dense_gradient_variable_name: "join_2.w_0@GRAD"
dense_gradient_variable_name: "join_2.b_0@GRAD"
dense_gradient_variable_name: "join_3.w_0@GRAD"
dense_gradient_variable_name: "join_3.b_0@GRAD"
dense_gradient_variable_name: "join_4.w_0@GRAD"
dense_gradient_variable_name: "join_4.b_0@GRAD"
dense_gradient_variable_name: "join_5.w_0@GRAD"
dense_gradient_variable_name: "join_5.b_0@GRAD"
dense_gradient_variable_name: "join_6.w_0@GRAD"
dense_gradient_variable_name: "join_6.b_0@GRAD"
dense_gradient_variable_name: "join_7.w_0@GRAD"
dense_gradient_variable_name: "join_7.b_0@GRAD"
dense_gradient_variable_name: "common_0.w_0@GRAD"
dense_gradient_variable_name: "common_0.b_0@GRAD"
dense_gradient_variable_name: "common_1.w_0@GRAD"
dense_gradient_variable_name: "common_1.b_0@GRAD"
dense_gradient_variable_name: "common_2.w_0@GRAD"
dense_gradient_variable_name: "common_2.b_0@GRAD"
dense_gradient_variable_name: "common_3.w_0@GRAD"
dense_gradient_variable_name: "common_3.b_0@GRAD"
dense_gradient_variable_name: "common_4.w_0@GRAD"
dense_gradient_variable_name: "common_4.b_0@GRAD"
dense_gradient_variable_name: "common_5.w_0@GRAD"
dense_gradient_variable_name: "common_5.b_0@GRAD"
dense_gradient_variable_name: "common_6.w_0@GRAD"
dense_gradient_variable_name: "common_6.b_0@GRAD"
dense_gradient_variable_name: "common_7.w_0@GRAD"
dense_gradient_variable_name: "common_7.b_0@GRAD"
}
dense_table {
table_id: 2
dense_variable_name: "join.batch_size"
dense_variable_name: "join.batch_sum"
dense_variable_name: "join.batch_square_sum"
dense_variable_name: "common.batch_size"
dense_variable_name: "common.batch_sum"
dense_variable_name: "common.batch_square_sum"
dense_gradient_variable_name: "join.batch_size@GRAD"
dense_gradient_variable_name: "join.batch_sum@GRAD"
dense_gradient_variable_name: "join.batch_square_sum@GRAD"
dense_gradient_variable_name: "common.batch_size@GRAD"
dense_gradient_variable_name: "common.batch_sum@GRAD"
dense_gradient_variable_name: "common.batch_square_sum@GRAD"
}
dense_table {
table_id: 3
dense_variable_name: "fc_0.w_0"
dense_variable_name: "fc_0.b_0"
dense_variable_name: "fc_1.w_0"
dense_variable_name: "fc_1.b_0"
dense_variable_name: "fc_2.w_0"
dense_variable_name: "fc_2.b_0"
dense_variable_name: "fc_3.w_0"
dense_variable_name: "fc_3.b_0"
dense_variable_name: "fc_4.w_0"
dense_variable_name: "fc_4.b_0"
dense_variable_name: "fc_5.w_0"
dense_variable_name: "fc_5.b_0"
dense_gradient_variable_name: "fc_0.w_0@GRAD"
dense_gradient_variable_name: "fc_0.b_0@GRAD"
dense_gradient_variable_name: "fc_1.w_0@GRAD"
dense_gradient_variable_name: "fc_1.b_0@GRAD"
dense_gradient_variable_name: "fc_2.w_0@GRAD"
dense_gradient_variable_name: "fc_2.b_0@GRAD"
dense_gradient_variable_name: "fc_3.w_0@GRAD"
dense_gradient_variable_name: "fc_3.b_0@GRAD"
dense_gradient_variable_name: "fc_4.w_0@GRAD"
dense_gradient_variable_name: "fc_4.b_0@GRAD"
dense_gradient_variable_name: "fc_5.w_0@GRAD"
dense_gradient_variable_name: "fc_5.b_0@GRAD"
}
sparse_table {
table_id: 0
slot_key: "6048"
slot_key: "6002"
slot_key: "6145"
slot_key: "6202"
slot_key: "6201"
slot_key: "6121"
slot_key: "6738"
slot_key: "6119"
slot_key: "6146"
slot_key: "6120"
slot_key: "6147"
slot_key: "6122"
slot_key: "6123"
slot_key: "6118"
slot_key: "6142"
slot_key: "6143"
slot_key: "6008"
slot_key: "6148"
slot_key: "6151"
slot_key: "6127"
slot_key: "6144"
slot_key: "6094"
slot_key: "6083"
slot_key: "6952"
slot_key: "6739"
slot_key: "6150"
slot_key: "6109"
slot_key: "6003"
slot_key: "6099"
slot_key: "6149"
slot_key: "6129"
slot_key: "6203"
slot_key: "6153"
slot_key: "6152"
slot_key: "6128"
slot_key: "6106"
slot_key: "6251"
slot_key: "7082"
slot_key: "7515"
slot_key: "6951"
slot_key: "6949"
slot_key: "7080"
slot_key: "6066"
slot_key: "7507"
slot_key: "6186"
slot_key: "6007"
slot_key: "7514"
slot_key: "6125"
slot_key: "7506"
slot_key: "10001"
slot_key: "6006"
slot_key: "7023"
slot_key: "6085"
slot_key: "10000"
slot_key: "6098"
slot_key: "6250"
slot_key: "6110"
slot_key: "6124"
slot_key: "6090"
slot_key: "6082"
slot_key: "6067"
slot_key: "6101"
slot_key: "6004"
slot_key: "6191"
slot_key: "7075"
slot_key: "6948"
slot_key: "6157"
slot_key: "6126"
slot_key: "6188"
slot_key: "7077"
slot_key: "6070"
slot_key: "6111"
slot_key: "6087"
slot_key: "6103"
slot_key: "6107"
slot_key: "6194"
slot_key: "6156"
slot_key: "6005"
slot_key: "6247"
slot_key: "6814"
slot_key: "6158"
slot_key: "7122"
slot_key: "6058"
slot_key: "6189"
slot_key: "7058"
slot_key: "6059"
slot_key: "6115"
slot_key: "7079"
slot_key: "7081"
slot_key: "6833"
slot_key: "7024"
slot_key: "6108"
slot_key: "13342"
slot_key: "13345"
slot_key: "13412"
slot_key: "13343"
slot_key: "13350"
slot_key: "13346"
slot_key: "13409"
slot_key: "6009"
slot_key: "6011"
slot_key: "6012"
slot_key: "6013"
slot_key: "6014"
slot_key: "6015"
slot_key: "6019"
slot_key: "6023"
slot_key: "6024"
slot_key: "6027"
slot_key: "6029"
slot_key: "6031"
slot_key: "6050"
slot_key: "6060"
slot_key: "6068"
slot_key: "6069"
slot_key: "6089"
slot_key: "6095"
slot_key: "6105"
slot_key: "6112"
slot_key: "6130"
slot_key: "6131"
slot_key: "6132"
slot_key: "6134"
slot_key: "6161"
slot_key: "6162"
slot_key: "6163"
slot_key: "6166"
slot_key: "6182"
slot_key: "6183"
slot_key: "6185"
slot_key: "6190"
slot_key: "6212"
slot_key: "6213"
slot_key: "6231"
slot_key: "6233"
slot_key: "6234"
slot_key: "6236"
slot_key: "6238"
slot_key: "6239"
slot_key: "6240"
slot_key: "6241"
slot_key: "6242"
slot_key: "6243"
slot_key: "6244"
slot_key: "6245"
slot_key: "6354"
slot_key: "7002"
slot_key: "7005"
slot_key: "7008"
slot_key: "7010"
slot_key: "7012"
slot_key: "7013"
slot_key: "7015"
slot_key: "7016"
slot_key: "7017"
slot_key: "7018"
slot_key: "7019"
slot_key: "7020"
slot_key: "7045"
slot_key: "7046"
slot_key: "7048"
slot_key: "7049"
slot_key: "7052"
slot_key: "7054"
slot_key: "7056"
slot_key: "7064"
slot_key: "7066"
slot_key: "7076"
slot_key: "7078"
slot_key: "7083"
slot_key: "7084"
slot_key: "7085"
slot_key: "7086"
slot_key: "7087"
slot_key: "7088"
slot_key: "7089"
slot_key: "7090"
slot_key: "7099"
slot_key: "7100"
slot_key: "7101"
slot_key: "7102"
slot_key: "7103"
slot_key: "7104"
slot_key: "7105"
slot_key: "7109"
slot_key: "7124"
slot_key: "7126"
slot_key: "7136"
slot_key: "7142"
slot_key: "7143"
slot_key: "7144"
slot_key: "7145"
slot_key: "7146"
slot_key: "7147"
slot_key: "7148"
slot_key: "7150"
slot_key: "7151"
slot_key: "7152"
slot_key: "7153"
slot_key: "7154"
slot_key: "7155"
slot_key: "7156"
slot_key: "7157"
slot_key: "7047"
slot_key: "7050"
slot_key: "6253"
slot_key: "6254"
slot_key: "6255"
slot_key: "6256"
slot_key: "6257"
slot_key: "6259"
slot_key: "6260"
slot_key: "6261"
slot_key: "7170"
slot_key: "7185"
slot_key: "7186"
slot_key: "6751"
slot_key: "6755"
slot_key: "6757"
slot_key: "6759"
slot_key: "6760"
slot_key: "6763"
slot_key: "6764"
slot_key: "6765"
slot_key: "6766"
slot_key: "6767"
slot_key: "6768"
slot_key: "6769"
slot_key: "6770"
slot_key: "7502"
slot_key: "7503"
slot_key: "7504"
slot_key: "7505"
slot_key: "7510"
slot_key: "7511"
slot_key: "7512"
slot_key: "7513"
slot_key: "6806"
slot_key: "6807"
slot_key: "6808"
slot_key: "6809"
slot_key: "6810"
slot_key: "6811"
slot_key: "6812"
slot_key: "6813"
slot_key: "6815"
slot_key: "6816"
slot_key: "6817"
slot_key: "6819"
slot_key: "6823"
slot_key: "6828"
slot_key: "6831"
slot_key: "6840"
slot_key: "6845"
slot_key: "6875"
slot_key: "6879"
slot_key: "6881"
slot_key: "6888"
slot_key: "6889"
slot_key: "6947"
slot_key: "6950"
slot_key: "6956"
slot_key: "6957"
slot_key: "6959"
slot_key: "10006"
slot_key: "10008"
slot_key: "10009"
slot_key: "10010"
slot_key: "10011"
slot_key: "10016"
slot_key: "10017"
slot_key: "10018"
slot_key: "10019"
slot_key: "10020"
slot_key: "10021"
slot_key: "10022"
slot_key: "10023"
slot_key: "10024"
slot_key: "10029"
slot_key: "10030"
slot_key: "10031"
slot_key: "10032"
slot_key: "10033"
slot_key: "10034"
slot_key: "10035"
slot_key: "10036"
slot_key: "10037"
slot_key: "10038"
slot_key: "10039"
slot_key: "10040"
slot_key: "10041"
slot_key: "10042"
slot_key: "10044"
slot_key: "10045"
slot_key: "10046"
slot_key: "10051"
slot_key: "10052"
slot_key: "10053"
slot_key: "10054"
slot_key: "10055"
slot_key: "10056"
slot_key: "10057"
slot_key: "10060"
slot_key: "10066"
slot_key: "10069"
slot_key: "6820"
slot_key: "6821"
slot_key: "6822"
slot_key: "13333"
slot_key: "13334"
slot_key: "13335"
slot_key: "13336"
slot_key: "13337"
slot_key: "13338"
slot_key: "13339"
slot_key: "13340"
slot_key: "13341"
slot_key: "13351"
slot_key: "13352"
slot_key: "13353"
slot_key: "13359"
slot_key: "13361"
slot_key: "13362"
slot_key: "13363"
slot_key: "13366"
slot_key: "13367"
slot_key: "13368"
slot_key: "13369"
slot_key: "13370"
slot_key: "13371"
slot_key: "13375"
slot_key: "13376"
slot_key: "5700"
slot_key: "5702"
slot_key: "13400"
slot_key: "13401"
slot_key: "13402"
slot_key: "13403"
slot_key: "13404"
slot_key: "13406"
slot_key: "13407"
slot_key: "13408"
slot_key: "13410"
slot_key: "13417"
slot_key: "13418"
slot_key: "13419"
slot_key: "13420"
slot_key: "13422"
slot_key: "13425"
slot_key: "13427"
slot_key: "13428"
slot_key: "13429"
slot_key: "13430"
slot_key: "13431"
slot_key: "13433"
slot_key: "13434"
slot_key: "13436"
slot_key: "13437"
slot_key: "13326"
slot_key: "13330"
slot_key: "13331"
slot_key: "5717"
slot_key: "13442"
slot_key: "13451"
slot_key: "13452"
slot_key: "13455"
slot_key: "13456"
slot_key: "13457"
slot_key: "13458"
slot_key: "13459"
slot_key: "13460"
slot_key: "13461"
slot_key: "13462"
slot_key: "13463"
slot_key: "13464"
slot_key: "13465"
slot_key: "13466"
slot_key: "13467"
slot_key: "13468"
slot_key: "1104"
slot_key: "1106"
slot_key: "1107"
slot_key: "1108"
slot_key: "1109"
slot_key: "1110"
slot_key: "1111"
slot_key: "1112"
slot_key: "1113"
slot_key: "1114"
slot_key: "1115"
slot_key: "1116"
slot_key: "1117"
slot_key: "1119"
slot_key: "1120"
slot_key: "1121"
slot_key: "1122"
slot_key: "1123"
slot_key: "1124"
slot_key: "1125"
slot_key: "1126"
slot_key: "1127"
slot_key: "1128"
slot_key: "1129"
slot_key: "13812"
slot_key: "13813"
slot_key: "6740"
slot_key: "1490"
slot_key: "1491"
slot_value: "embedding_0.tmp_0"
slot_value: "embedding_1.tmp_0"
slot_value: "embedding_2.tmp_0"
slot_value: "embedding_3.tmp_0"
slot_value: "embedding_4.tmp_0"
slot_value: "embedding_5.tmp_0"
slot_value: "embedding_6.tmp_0"
slot_value: "embedding_7.tmp_0"
slot_value: "embedding_8.tmp_0"
slot_value: "embedding_9.tmp_0"
slot_value: "embedding_10.tmp_0"
slot_value: "embedding_11.tmp_0"
slot_value: "embedding_12.tmp_0"
slot_value: "embedding_13.tmp_0"
slot_value: "embedding_14.tmp_0"
slot_value: "embedding_15.tmp_0"
slot_value: "embedding_16.tmp_0"
slot_value: "embedding_17.tmp_0"
slot_value: "embedding_18.tmp_0"
slot_value: "embedding_19.tmp_0"
slot_value: "embedding_20.tmp_0"
slot_value: "embedding_21.tmp_0"
slot_value: "embedding_22.tmp_0"
slot_value: "embedding_23.tmp_0"
slot_value: "embedding_24.tmp_0"
slot_value: "embedding_25.tmp_0"
slot_value: "embedding_26.tmp_0"
slot_value: "embedding_27.tmp_0"
slot_value: "embedding_28.tmp_0"
slot_value: "embedding_29.tmp_0"
slot_value: "embedding_30.tmp_0"
slot_value: "embedding_31.tmp_0"
slot_value: "embedding_32.tmp_0"
slot_value: "embedding_33.tmp_0"
slot_value: "embedding_34.tmp_0"
slot_value: "embedding_35.tmp_0"
slot_value: "embedding_36.tmp_0"
slot_value: "embedding_37.tmp_0"
slot_value: "embedding_38.tmp_0"
slot_value: "embedding_39.tmp_0"
slot_value: "embedding_40.tmp_0"
slot_value: "embedding_41.tmp_0"
slot_value: "embedding_42.tmp_0"
slot_value: "embedding_43.tmp_0"
slot_value: "embedding_44.tmp_0"
slot_value: "embedding_45.tmp_0"
slot_value: "embedding_46.tmp_0"
slot_value: "embedding_47.tmp_0"
slot_value: "embedding_48.tmp_0"
slot_value: "embedding_49.tmp_0"
slot_value: "embedding_50.tmp_0"
slot_value: "embedding_51.tmp_0"
slot_value: "embedding_52.tmp_0"
slot_value: "embedding_53.tmp_0"
slot_value: "embedding_54.tmp_0"
slot_value: "embedding_55.tmp_0"
slot_value: "embedding_56.tmp_0"
slot_value: "embedding_57.tmp_0"
slot_value: "embedding_58.tmp_0"
slot_value: "embedding_59.tmp_0"
slot_value: "embedding_60.tmp_0"
slot_value: "embedding_61.tmp_0"
slot_value: "embedding_62.tmp_0"
slot_value: "embedding_63.tmp_0"
slot_value: "embedding_64.tmp_0"
slot_value: "embedding_65.tmp_0"
slot_value: "embedding_66.tmp_0"
slot_value: "embedding_67.tmp_0"
slot_value: "embedding_68.tmp_0"
slot_value: "embedding_69.tmp_0"
slot_value: "embedding_70.tmp_0"
slot_value: "embedding_71.tmp_0"
slot_value: "embedding_72.tmp_0"
slot_value: "embedding_73.tmp_0"
slot_value: "embedding_74.tmp_0"
slot_value: "embedding_75.tmp_0"
slot_value: "embedding_76.tmp_0"
slot_value: "embedding_77.tmp_0"
slot_value: "embedding_78.tmp_0"
slot_value: "embedding_79.tmp_0"
slot_value: "embedding_80.tmp_0"
slot_value: "embedding_81.tmp_0"
slot_value: "embedding_82.tmp_0"
slot_value: "embedding_83.tmp_0"
slot_value: "embedding_84.tmp_0"
slot_value: "embedding_85.tmp_0"
slot_value: "embedding_86.tmp_0"
slot_value: "embedding_87.tmp_0"
slot_value: "embedding_88.tmp_0"
slot_value: "embedding_89.tmp_0"
slot_value: "embedding_90.tmp_0"
slot_value: "embedding_91.tmp_0"
slot_value: "embedding_92.tmp_0"
slot_value: "embedding_93.tmp_0"
slot_value: "embedding_94.tmp_0"
slot_value: "embedding_95.tmp_0"
slot_value: "embedding_96.tmp_0"
slot_value: "embedding_97.tmp_0"
slot_value: "embedding_98.tmp_0"
slot_value: "embedding_99.tmp_0"
slot_value: "embedding_100.tmp_0"
slot_value: "embedding_101.tmp_0"
slot_value: "embedding_102.tmp_0"
slot_value: "embedding_103.tmp_0"
slot_value: "embedding_104.tmp_0"
slot_value: "embedding_105.tmp_0"
slot_value: "embedding_106.tmp_0"
slot_value: "embedding_107.tmp_0"
slot_value: "embedding_108.tmp_0"
slot_value: "embedding_109.tmp_0"
slot_value: "embedding_110.tmp_0"
slot_value: "embedding_111.tmp_0"
slot_value: "embedding_112.tmp_0"
slot_value: "embedding_113.tmp_0"
slot_value: "embedding_114.tmp_0"
slot_value: "embedding_115.tmp_0"
slot_value: "embedding_116.tmp_0"
slot_value: "embedding_117.tmp_0"
slot_value: "embedding_118.tmp_0"
slot_value: "embedding_119.tmp_0"
slot_value: "embedding_120.tmp_0"
slot_value: "embedding_121.tmp_0"
slot_value: "embedding_122.tmp_0"
slot_value: "embedding_123.tmp_0"
slot_value: "embedding_124.tmp_0"
slot_value: "embedding_125.tmp_0"
slot_value: "embedding_126.tmp_0"
slot_value: "embedding_127.tmp_0"
slot_value: "embedding_128.tmp_0"
slot_value: "embedding_129.tmp_0"
slot_value: "embedding_130.tmp_0"
slot_value: "embedding_131.tmp_0"
slot_value: "embedding_132.tmp_0"
slot_value: "embedding_133.tmp_0"
slot_value: "embedding_134.tmp_0"
slot_value: "embedding_135.tmp_0"
slot_value: "embedding_136.tmp_0"
slot_value: "embedding_137.tmp_0"
slot_value: "embedding_138.tmp_0"
slot_value: "embedding_139.tmp_0"
slot_value: "embedding_140.tmp_0"
slot_value: "embedding_141.tmp_0"
slot_value: "embedding_142.tmp_0"
slot_value: "embedding_143.tmp_0"
slot_value: "embedding_144.tmp_0"
slot_value: "embedding_145.tmp_0"
slot_value: "embedding_146.tmp_0"
slot_value: "embedding_147.tmp_0"
slot_value: "embedding_148.tmp_0"
slot_value: "embedding_149.tmp_0"
slot_value: "embedding_150.tmp_0"
slot_value: "embedding_151.tmp_0"
slot_value: "embedding_152.tmp_0"
slot_value: "embedding_153.tmp_0"
slot_value: "embedding_154.tmp_0"
slot_value: "embedding_155.tmp_0"
slot_value: "embedding_156.tmp_0"
slot_value: "embedding_157.tmp_0"
slot_value: "embedding_158.tmp_0"
slot_value: "embedding_159.tmp_0"
slot_value: "embedding_160.tmp_0"
slot_value: "embedding_161.tmp_0"
slot_value: "embedding_162.tmp_0"
slot_value: "embedding_163.tmp_0"
slot_value: "embedding_164.tmp_0"
slot_value: "embedding_165.tmp_0"
slot_value: "embedding_166.tmp_0"
slot_value: "embedding_167.tmp_0"
slot_value: "embedding_168.tmp_0"
slot_value: "embedding_169.tmp_0"
slot_value: "embedding_170.tmp_0"
slot_value: "embedding_171.tmp_0"
slot_value: "embedding_172.tmp_0"
slot_value: "embedding_173.tmp_0"
slot_value: "embedding_174.tmp_0"
slot_value: "embedding_175.tmp_0"
slot_value: "embedding_176.tmp_0"
slot_value: "embedding_177.tmp_0"
slot_value: "embedding_178.tmp_0"
slot_value: "embedding_179.tmp_0"
slot_value: "embedding_180.tmp_0"
slot_value: "embedding_181.tmp_0"
slot_value: "embedding_182.tmp_0"
slot_value: "embedding_183.tmp_0"
slot_value: "embedding_184.tmp_0"
slot_value: "embedding_185.tmp_0"
slot_value: "embedding_186.tmp_0"
slot_value: "embedding_187.tmp_0"
slot_value: "embedding_188.tmp_0"
slot_value: "embedding_189.tmp_0"
slot_value: "embedding_190.tmp_0"
slot_value: "embedding_191.tmp_0"
slot_value: "embedding_192.tmp_0"
slot_value: "embedding_193.tmp_0"
slot_value: "embedding_194.tmp_0"
slot_value: "embedding_195.tmp_0"
slot_value: "embedding_196.tmp_0"
slot_value: "embedding_197.tmp_0"
slot_value: "embedding_198.tmp_0"
slot_value: "embedding_199.tmp_0"
slot_value: "embedding_200.tmp_0"
slot_value: "embedding_201.tmp_0"
slot_value: "embedding_202.tmp_0"
slot_value: "embedding_203.tmp_0"
slot_value: "embedding_204.tmp_0"
slot_value: "embedding_205.tmp_0"
slot_value: "embedding_206.tmp_0"
slot_value: "embedding_207.tmp_0"
slot_value: "embedding_208.tmp_0"
slot_value: "embedding_209.tmp_0"
slot_value: "embedding_210.tmp_0"
slot_value: "embedding_211.tmp_0"
slot_value: "embedding_212.tmp_0"
slot_value: "embedding_213.tmp_0"
slot_value: "embedding_214.tmp_0"
slot_value: "embedding_215.tmp_0"
slot_value: "embedding_216.tmp_0"
slot_value: "embedding_217.tmp_0"
slot_value: "embedding_218.tmp_0"
slot_value: "embedding_219.tmp_0"
slot_value: "embedding_220.tmp_0"
slot_value: "embedding_221.tmp_0"
slot_value: "embedding_222.tmp_0"
slot_value: "embedding_223.tmp_0"
slot_value: "embedding_224.tmp_0"
slot_value: "embedding_225.tmp_0"
slot_value: "embedding_226.tmp_0"
slot_value: "embedding_227.tmp_0"
slot_value: "embedding_228.tmp_0"
slot_value: "embedding_229.tmp_0"
slot_value: "embedding_230.tmp_0"
slot_value: "embedding_231.tmp_0"
slot_value: "embedding_232.tmp_0"
slot_value: "embedding_233.tmp_0"
slot_value: "embedding_234.tmp_0"
slot_value: "embedding_235.tmp_0"
slot_value: "embedding_236.tmp_0"
slot_value: "embedding_237.tmp_0"
slot_value: "embedding_238.tmp_0"
slot_value: "embedding_239.tmp_0"
slot_value: "embedding_240.tmp_0"
slot_value: "embedding_241.tmp_0"
slot_value: "embedding_242.tmp_0"
slot_value: "embedding_243.tmp_0"
slot_value: "embedding_244.tmp_0"
slot_value: "embedding_245.tmp_0"
slot_value: "embedding_246.tmp_0"
slot_value: "embedding_247.tmp_0"
slot_value: "embedding_248.tmp_0"
slot_value: "embedding_249.tmp_0"
slot_value: "embedding_250.tmp_0"
slot_value: "embedding_251.tmp_0"
slot_value: "embedding_252.tmp_0"
slot_value: "embedding_253.tmp_0"
slot_value: "embedding_254.tmp_0"
slot_value: "embedding_255.tmp_0"
slot_value: "embedding_256.tmp_0"
slot_value: "embedding_257.tmp_0"
slot_value: "embedding_258.tmp_0"
slot_value: "embedding_259.tmp_0"
slot_value: "embedding_260.tmp_0"
slot_value: "embedding_261.tmp_0"
slot_value: "embedding_262.tmp_0"
slot_value: "embedding_263.tmp_0"
slot_value: "embedding_264.tmp_0"
slot_value: "embedding_265.tmp_0"
slot_value: "embedding_266.tmp_0"
slot_value: "embedding_267.tmp_0"
slot_value: "embedding_268.tmp_0"
slot_value: "embedding_269.tmp_0"
slot_value: "embedding_270.tmp_0"
slot_value: "embedding_271.tmp_0"
slot_value: "embedding_272.tmp_0"
slot_value: "embedding_273.tmp_0"
slot_value: "embedding_274.tmp_0"
slot_value: "embedding_275.tmp_0"
slot_value: "embedding_276.tmp_0"
slot_value: "embedding_277.tmp_0"
slot_value: "embedding_278.tmp_0"
slot_value: "embedding_279.tmp_0"
slot_value: "embedding_280.tmp_0"
slot_value: "embedding_281.tmp_0"
slot_value: "embedding_282.tmp_0"
slot_value: "embedding_283.tmp_0"
slot_value: "embedding_284.tmp_0"
slot_value: "embedding_285.tmp_0"
slot_value: "embedding_286.tmp_0"
slot_value: "embedding_287.tmp_0"
slot_value: "embedding_288.tmp_0"
slot_value: "embedding_289.tmp_0"
slot_value: "embedding_290.tmp_0"
slot_value: "embedding_291.tmp_0"
slot_value: "embedding_292.tmp_0"
slot_value: "embedding_293.tmp_0"
slot_value: "embedding_294.tmp_0"
slot_value: "embedding_295.tmp_0"
slot_value: "embedding_296.tmp_0"
slot_value: "embedding_297.tmp_0"
slot_value: "embedding_298.tmp_0"
slot_value: "embedding_299.tmp_0"
slot_value: "embedding_300.tmp_0"
slot_value: "embedding_301.tmp_0"
slot_value: "embedding_302.tmp_0"
slot_value: "embedding_303.tmp_0"
slot_value: "embedding_304.tmp_0"
slot_value: "embedding_305.tmp_0"
slot_value: "embedding_306.tmp_0"
slot_value: "embedding_307.tmp_0"
slot_value: "embedding_308.tmp_0"
slot_value: "embedding_309.tmp_0"
slot_value: "embedding_310.tmp_0"
slot_value: "embedding_311.tmp_0"
slot_value: "embedding_312.tmp_0"
slot_value: "embedding_313.tmp_0"
slot_value: "embedding_314.tmp_0"
slot_value: "embedding_315.tmp_0"
slot_value: "embedding_316.tmp_0"
slot_value: "embedding_317.tmp_0"
slot_value: "embedding_318.tmp_0"
slot_value: "embedding_319.tmp_0"
slot_value: "embedding_320.tmp_0"
slot_value: "embedding_321.tmp_0"
slot_value: "embedding_322.tmp_0"
slot_value: "embedding_323.tmp_0"
slot_value: "embedding_324.tmp_0"
slot_value: "embedding_325.tmp_0"
slot_value: "embedding_326.tmp_0"
slot_value: "embedding_327.tmp_0"
slot_value: "embedding_328.tmp_0"
slot_value: "embedding_329.tmp_0"
slot_value: "embedding_330.tmp_0"
slot_value: "embedding_331.tmp_0"
slot_value: "embedding_332.tmp_0"
slot_value: "embedding_333.tmp_0"
slot_value: "embedding_334.tmp_0"
slot_value: "embedding_335.tmp_0"
slot_value: "embedding_336.tmp_0"
slot_value: "embedding_337.tmp_0"
slot_value: "embedding_338.tmp_0"
slot_value: "embedding_339.tmp_0"
slot_value: "embedding_340.tmp_0"
slot_value: "embedding_341.tmp_0"
slot_value: "embedding_342.tmp_0"
slot_value: "embedding_343.tmp_0"
slot_value: "embedding_344.tmp_0"
slot_value: "embedding_345.tmp_0"
slot_value: "embedding_346.tmp_0"
slot_value: "embedding_347.tmp_0"
slot_value: "embedding_348.tmp_0"
slot_value: "embedding_349.tmp_0"
slot_value: "embedding_350.tmp_0"
slot_value: "embedding_351.tmp_0"
slot_value: "embedding_352.tmp_0"
slot_value: "embedding_353.tmp_0"
slot_value: "embedding_354.tmp_0"
slot_value: "embedding_355.tmp_0"
slot_value: "embedding_356.tmp_0"
slot_value: "embedding_357.tmp_0"
slot_value: "embedding_358.tmp_0"
slot_value: "embedding_359.tmp_0"
slot_value: "embedding_360.tmp_0"
slot_value: "embedding_361.tmp_0"
slot_value: "embedding_362.tmp_0"
slot_value: "embedding_363.tmp_0"
slot_value: "embedding_364.tmp_0"
slot_value: "embedding_365.tmp_0"
slot_value: "embedding_366.tmp_0"
slot_value: "embedding_367.tmp_0"
slot_value: "embedding_368.tmp_0"
slot_value: "embedding_369.tmp_0"
slot_value: "embedding_370.tmp_0"
slot_value: "embedding_371.tmp_0"
slot_value: "embedding_372.tmp_0"
slot_value: "embedding_373.tmp_0"
slot_value: "embedding_374.tmp_0"
slot_value: "embedding_375.tmp_0"
slot_value: "embedding_376.tmp_0"
slot_value: "embedding_377.tmp_0"
slot_value: "embedding_378.tmp_0"
slot_value: "embedding_379.tmp_0"
slot_value: "embedding_380.tmp_0"
slot_value: "embedding_381.tmp_0"
slot_value: "embedding_382.tmp_0"
slot_value: "embedding_383.tmp_0"
slot_value: "embedding_384.tmp_0"
slot_value: "embedding_385.tmp_0"
slot_value: "embedding_386.tmp_0"
slot_value: "embedding_387.tmp_0"
slot_value: "embedding_388.tmp_0"
slot_value: "embedding_389.tmp_0"
slot_value: "embedding_390.tmp_0"
slot_value: "embedding_391.tmp_0"
slot_value: "embedding_392.tmp_0"
slot_value: "embedding_393.tmp_0"
slot_value: "embedding_394.tmp_0"
slot_value: "embedding_395.tmp_0"
slot_value: "embedding_396.tmp_0"
slot_value: "embedding_397.tmp_0"
slot_value: "embedding_398.tmp_0"
slot_value: "embedding_399.tmp_0"
slot_value: "embedding_400.tmp_0"
slot_value: "embedding_401.tmp_0"
slot_value: "embedding_402.tmp_0"
slot_value: "embedding_403.tmp_0"
slot_value: "embedding_404.tmp_0"
slot_value: "embedding_405.tmp_0"
slot_value: "embedding_406.tmp_0"
slot_value: "embedding_407.tmp_0"
slot_gradient: "embedding_0.tmp_0@GRAD"
slot_gradient: "embedding_1.tmp_0@GRAD"
slot_gradient: "embedding_2.tmp_0@GRAD"
slot_gradient: "embedding_3.tmp_0@GRAD"
slot_gradient: "embedding_4.tmp_0@GRAD"
slot_gradient: "embedding_5.tmp_0@GRAD"
slot_gradient: "embedding_6.tmp_0@GRAD"
slot_gradient: "embedding_7.tmp_0@GRAD"
slot_gradient: "embedding_8.tmp_0@GRAD"
slot_gradient: "embedding_9.tmp_0@GRAD"
slot_gradient: "embedding_10.tmp_0@GRAD"
slot_gradient: "embedding_11.tmp_0@GRAD"
slot_gradient: "embedding_12.tmp_0@GRAD"
slot_gradient: "embedding_13.tmp_0@GRAD"
slot_gradient: "embedding_14.tmp_0@GRAD"
slot_gradient: "embedding_15.tmp_0@GRAD"
slot_gradient: "embedding_16.tmp_0@GRAD"
slot_gradient: "embedding_17.tmp_0@GRAD"
slot_gradient: "embedding_18.tmp_0@GRAD"
slot_gradient: "embedding_19.tmp_0@GRAD"
slot_gradient: "embedding_20.tmp_0@GRAD"
slot_gradient: "embedding_21.tmp_0@GRAD"
slot_gradient: "embedding_22.tmp_0@GRAD"
slot_gradient: "embedding_23.tmp_0@GRAD"
slot_gradient: "embedding_24.tmp_0@GRAD"
slot_gradient: "embedding_25.tmp_0@GRAD"
slot_gradient: "embedding_26.tmp_0@GRAD"
slot_gradient: "embedding_27.tmp_0@GRAD"
slot_gradient: "embedding_28.tmp_0@GRAD"
slot_gradient: "embedding_29.tmp_0@GRAD"
slot_gradient: "embedding_30.tmp_0@GRAD"
slot_gradient: "embedding_31.tmp_0@GRAD"
slot_gradient: "embedding_32.tmp_0@GRAD"
slot_gradient: "embedding_33.tmp_0@GRAD"
slot_gradient: "embedding_34.tmp_0@GRAD"
slot_gradient: "embedding_35.tmp_0@GRAD"
slot_gradient: "embedding_36.tmp_0@GRAD"
slot_gradient: "embedding_37.tmp_0@GRAD"
slot_gradient: "embedding_38.tmp_0@GRAD"
slot_gradient: "embedding_39.tmp_0@GRAD"
slot_gradient: "embedding_40.tmp_0@GRAD"
slot_gradient: "embedding_41.tmp_0@GRAD"
slot_gradient: "embedding_42.tmp_0@GRAD"
slot_gradient: "embedding_43.tmp_0@GRAD"
slot_gradient: "embedding_44.tmp_0@GRAD"
slot_gradient: "embedding_45.tmp_0@GRAD"
slot_gradient: "embedding_46.tmp_0@GRAD"
slot_gradient: "embedding_47.tmp_0@GRAD"
slot_gradient: "embedding_48.tmp_0@GRAD"
slot_gradient: "embedding_49.tmp_0@GRAD"
slot_gradient: "embedding_50.tmp_0@GRAD"
slot_gradient: "embedding_51.tmp_0@GRAD"
slot_gradient: "embedding_52.tmp_0@GRAD"
slot_gradient: "embedding_53.tmp_0@GRAD"
slot_gradient: "embedding_54.tmp_0@GRAD"
slot_gradient: "embedding_55.tmp_0@GRAD"
slot_gradient: "embedding_56.tmp_0@GRAD"
slot_gradient: "embedding_57.tmp_0@GRAD"
slot_gradient: "embedding_58.tmp_0@GRAD"
slot_gradient: "embedding_59.tmp_0@GRAD"
slot_gradient: "embedding_60.tmp_0@GRAD"
slot_gradient: "embedding_61.tmp_0@GRAD"
slot_gradient: "embedding_62.tmp_0@GRAD"
slot_gradient: "embedding_63.tmp_0@GRAD"
slot_gradient: "embedding_64.tmp_0@GRAD"
slot_gradient: "embedding_65.tmp_0@GRAD"
slot_gradient: "embedding_66.tmp_0@GRAD"
slot_gradient: "embedding_67.tmp_0@GRAD"
slot_gradient: "embedding_68.tmp_0@GRAD"
slot_gradient: "embedding_69.tmp_0@GRAD"
slot_gradient: "embedding_70.tmp_0@GRAD"
slot_gradient: "embedding_71.tmp_0@GRAD"
slot_gradient: "embedding_72.tmp_0@GRAD"
slot_gradient: "embedding_73.tmp_0@GRAD"
slot_gradient: "embedding_74.tmp_0@GRAD"
slot_gradient: "embedding_75.tmp_0@GRAD"
slot_gradient: "embedding_76.tmp_0@GRAD"
slot_gradient: "embedding_77.tmp_0@GRAD"
slot_gradient: "embedding_78.tmp_0@GRAD"
slot_gradient: "embedding_79.tmp_0@GRAD"
slot_gradient: "embedding_80.tmp_0@GRAD"
slot_gradient: "embedding_81.tmp_0@GRAD"
slot_gradient: "embedding_82.tmp_0@GRAD"
slot_gradient: "embedding_83.tmp_0@GRAD"
slot_gradient: "embedding_84.tmp_0@GRAD"
slot_gradient: "embedding_85.tmp_0@GRAD"
slot_gradient: "embedding_86.tmp_0@GRAD"
slot_gradient: "embedding_87.tmp_0@GRAD"
slot_gradient: "embedding_88.tmp_0@GRAD"
slot_gradient: "embedding_89.tmp_0@GRAD"
slot_gradient: "embedding_90.tmp_0@GRAD"
slot_gradient: "embedding_91.tmp_0@GRAD"
slot_gradient: "embedding_92.tmp_0@GRAD"
slot_gradient: "embedding_93.tmp_0@GRAD"
slot_gradient: "embedding_94.tmp_0@GRAD"
slot_gradient: "embedding_95.tmp_0@GRAD"
slot_gradient: "embedding_96.tmp_0@GRAD"
slot_gradient: "embedding_97.tmp_0@GRAD"
slot_gradient: "embedding_98.tmp_0@GRAD"
slot_gradient: "embedding_99.tmp_0@GRAD"
slot_gradient: "embedding_100.tmp_0@GRAD"
slot_gradient: "embedding_101.tmp_0@GRAD"
slot_gradient: "embedding_102.tmp_0@GRAD"
slot_gradient: "embedding_103.tmp_0@GRAD"
slot_gradient: "embedding_104.tmp_0@GRAD"
slot_gradient: "embedding_105.tmp_0@GRAD"
slot_gradient: "embedding_106.tmp_0@GRAD"
slot_gradient: "embedding_107.tmp_0@GRAD"
slot_gradient: "embedding_108.tmp_0@GRAD"
slot_gradient: "embedding_109.tmp_0@GRAD"
slot_gradient: "embedding_110.tmp_0@GRAD"
slot_gradient: "embedding_111.tmp_0@GRAD"
slot_gradient: "embedding_112.tmp_0@GRAD"
slot_gradient: "embedding_113.tmp_0@GRAD"
slot_gradient: "embedding_114.tmp_0@GRAD"
slot_gradient: "embedding_115.tmp_0@GRAD"
slot_gradient: "embedding_116.tmp_0@GRAD"
slot_gradient: "embedding_117.tmp_0@GRAD"
slot_gradient: "embedding_118.tmp_0@GRAD"
slot_gradient: "embedding_119.tmp_0@GRAD"
slot_gradient: "embedding_120.tmp_0@GRAD"
slot_gradient: "embedding_121.tmp_0@GRAD"
slot_gradient: "embedding_122.tmp_0@GRAD"
slot_gradient: "embedding_123.tmp_0@GRAD"
slot_gradient: "embedding_124.tmp_0@GRAD"
slot_gradient: "embedding_125.tmp_0@GRAD"
slot_gradient: "embedding_126.tmp_0@GRAD"
slot_gradient: "embedding_127.tmp_0@GRAD"
slot_gradient: "embedding_128.tmp_0@GRAD"
slot_gradient: "embedding_129.tmp_0@GRAD"
slot_gradient: "embedding_130.tmp_0@GRAD"
slot_gradient: "embedding_131.tmp_0@GRAD"
slot_gradient: "embedding_132.tmp_0@GRAD"
slot_gradient: "embedding_133.tmp_0@GRAD"
slot_gradient: "embedding_134.tmp_0@GRAD"
slot_gradient: "embedding_135.tmp_0@GRAD"
slot_gradient: "embedding_136.tmp_0@GRAD"
slot_gradient: "embedding_137.tmp_0@GRAD"
slot_gradient: "embedding_138.tmp_0@GRAD"
slot_gradient: "embedding_139.tmp_0@GRAD"
slot_gradient: "embedding_140.tmp_0@GRAD"
slot_gradient: "embedding_141.tmp_0@GRAD"
slot_gradient: "embedding_142.tmp_0@GRAD"
slot_gradient: "embedding_143.tmp_0@GRAD"
slot_gradient: "embedding_144.tmp_0@GRAD"
slot_gradient: "embedding_145.tmp_0@GRAD"
slot_gradient: "embedding_146.tmp_0@GRAD"
slot_gradient: "embedding_147.tmp_0@GRAD"
slot_gradient: "embedding_148.tmp_0@GRAD"
slot_gradient: "embedding_149.tmp_0@GRAD"
slot_gradient: "embedding_150.tmp_0@GRAD"
slot_gradient: "embedding_151.tmp_0@GRAD"
slot_gradient: "embedding_152.tmp_0@GRAD"
slot_gradient: "embedding_153.tmp_0@GRAD"
slot_gradient: "embedding_154.tmp_0@GRAD"
slot_gradient: "embedding_155.tmp_0@GRAD"
slot_gradient: "embedding_156.tmp_0@GRAD"
slot_gradient: "embedding_157.tmp_0@GRAD"
slot_gradient: "embedding_158.tmp_0@GRAD"
slot_gradient: "embedding_159.tmp_0@GRAD"
slot_gradient: "embedding_160.tmp_0@GRAD"
slot_gradient: "embedding_161.tmp_0@GRAD"
slot_gradient: "embedding_162.tmp_0@GRAD"
slot_gradient: "embedding_163.tmp_0@GRAD"
slot_gradient: "embedding_164.tmp_0@GRAD"
slot_gradient: "embedding_165.tmp_0@GRAD"
slot_gradient: "embedding_166.tmp_0@GRAD"
slot_gradient: "embedding_167.tmp_0@GRAD"
slot_gradient: "embedding_168.tmp_0@GRAD"
slot_gradient: "embedding_169.tmp_0@GRAD"
slot_gradient: "embedding_170.tmp_0@GRAD"
slot_gradient: "embedding_171.tmp_0@GRAD"
slot_gradient: "embedding_172.tmp_0@GRAD"
slot_gradient: "embedding_173.tmp_0@GRAD"
slot_gradient: "embedding_174.tmp_0@GRAD"
slot_gradient: "embedding_175.tmp_0@GRAD"
slot_gradient: "embedding_176.tmp_0@GRAD"
slot_gradient: "embedding_177.tmp_0@GRAD"
slot_gradient: "embedding_178.tmp_0@GRAD"
slot_gradient: "embedding_179.tmp_0@GRAD"
slot_gradient: "embedding_180.tmp_0@GRAD"
slot_gradient: "embedding_181.tmp_0@GRAD"
slot_gradient: "embedding_182.tmp_0@GRAD"
slot_gradient: "embedding_183.tmp_0@GRAD"
slot_gradient: "embedding_184.tmp_0@GRAD"
slot_gradient: "embedding_185.tmp_0@GRAD"
slot_gradient: "embedding_186.tmp_0@GRAD"
slot_gradient: "embedding_187.tmp_0@GRAD"
slot_gradient: "embedding_188.tmp_0@GRAD"
slot_gradient: "embedding_189.tmp_0@GRAD"
slot_gradient: "embedding_190.tmp_0@GRAD"
slot_gradient: "embedding_191.tmp_0@GRAD"
slot_gradient: "embedding_192.tmp_0@GRAD"
slot_gradient: "embedding_193.tmp_0@GRAD"
slot_gradient: "embedding_194.tmp_0@GRAD"
slot_gradient: "embedding_195.tmp_0@GRAD"
slot_gradient: "embedding_196.tmp_0@GRAD"
slot_gradient: "embedding_197.tmp_0@GRAD"
slot_gradient: "embedding_198.tmp_0@GRAD"
slot_gradient: "embedding_199.tmp_0@GRAD"
slot_gradient: "embedding_200.tmp_0@GRAD"
slot_gradient: "embedding_201.tmp_0@GRAD"
slot_gradient: "embedding_202.tmp_0@GRAD"
slot_gradient: "embedding_203.tmp_0@GRAD"
slot_gradient: "embedding_204.tmp_0@GRAD"
slot_gradient: "embedding_205.tmp_0@GRAD"
slot_gradient: "embedding_206.tmp_0@GRAD"
slot_gradient: "embedding_207.tmp_0@GRAD"
slot_gradient: "embedding_208.tmp_0@GRAD"
slot_gradient: "embedding_209.tmp_0@GRAD"
slot_gradient: "embedding_210.tmp_0@GRAD"
slot_gradient: "embedding_211.tmp_0@GRAD"
slot_gradient: "embedding_212.tmp_0@GRAD"
slot_gradient: "embedding_213.tmp_0@GRAD"
slot_gradient: "embedding_214.tmp_0@GRAD"
slot_gradient: "embedding_215.tmp_0@GRAD"
slot_gradient: "embedding_216.tmp_0@GRAD"
slot_gradient: "embedding_217.tmp_0@GRAD"
slot_gradient: "embedding_218.tmp_0@GRAD"
slot_gradient: "embedding_219.tmp_0@GRAD"
slot_gradient: "embedding_220.tmp_0@GRAD"
slot_gradient: "embedding_221.tmp_0@GRAD"
slot_gradient: "embedding_222.tmp_0@GRAD"
slot_gradient: "embedding_223.tmp_0@GRAD"
slot_gradient: "embedding_224.tmp_0@GRAD"
slot_gradient: "embedding_225.tmp_0@GRAD"
slot_gradient: "embedding_226.tmp_0@GRAD"
slot_gradient: "embedding_227.tmp_0@GRAD"
slot_gradient: "embedding_228.tmp_0@GRAD"
slot_gradient: "embedding_229.tmp_0@GRAD"
slot_gradient: "embedding_230.tmp_0@GRAD"
slot_gradient: "embedding_231.tmp_0@GRAD"
slot_gradient: "embedding_232.tmp_0@GRAD"
slot_gradient: "embedding_233.tmp_0@GRAD"
slot_gradient: "embedding_234.tmp_0@GRAD"
slot_gradient: "embedding_235.tmp_0@GRAD"
slot_gradient: "embedding_236.tmp_0@GRAD"
slot_gradient: "embedding_237.tmp_0@GRAD"
slot_gradient: "embedding_238.tmp_0@GRAD"
slot_gradient: "embedding_239.tmp_0@GRAD"
slot_gradient: "embedding_240.tmp_0@GRAD"
slot_gradient: "embedding_241.tmp_0@GRAD"
slot_gradient: "embedding_242.tmp_0@GRAD"
slot_gradient: "embedding_243.tmp_0@GRAD"
slot_gradient: "embedding_244.tmp_0@GRAD"
slot_gradient: "embedding_245.tmp_0@GRAD"
slot_gradient: "embedding_246.tmp_0@GRAD"
slot_gradient: "embedding_247.tmp_0@GRAD"
slot_gradient: "embedding_248.tmp_0@GRAD"
slot_gradient: "embedding_249.tmp_0@GRAD"
slot_gradient: "embedding_250.tmp_0@GRAD"
slot_gradient: "embedding_251.tmp_0@GRAD"
slot_gradient: "embedding_252.tmp_0@GRAD"
slot_gradient: "embedding_253.tmp_0@GRAD"
slot_gradient: "embedding_254.tmp_0@GRAD"
slot_gradient: "embedding_255.tmp_0@GRAD"
slot_gradient: "embedding_256.tmp_0@GRAD"
slot_gradient: "embedding_257.tmp_0@GRAD"
slot_gradient: "embedding_258.tmp_0@GRAD"
slot_gradient: "embedding_259.tmp_0@GRAD"
slot_gradient: "embedding_260.tmp_0@GRAD"
slot_gradient: "embedding_261.tmp_0@GRAD"
slot_gradient: "embedding_262.tmp_0@GRAD"
slot_gradient: "embedding_263.tmp_0@GRAD"
slot_gradient: "embedding_264.tmp_0@GRAD"
slot_gradient: "embedding_265.tmp_0@GRAD"
slot_gradient: "embedding_266.tmp_0@GRAD"
slot_gradient: "embedding_267.tmp_0@GRAD"
slot_gradient: "embedding_268.tmp_0@GRAD"
slot_gradient: "embedding_269.tmp_0@GRAD"
slot_gradient: "embedding_270.tmp_0@GRAD"
slot_gradient: "embedding_271.tmp_0@GRAD"
slot_gradient: "embedding_272.tmp_0@GRAD"
slot_gradient: "embedding_273.tmp_0@GRAD"
slot_gradient: "embedding_274.tmp_0@GRAD"
slot_gradient: "embedding_275.tmp_0@GRAD"
slot_gradient: "embedding_276.tmp_0@GRAD"
slot_gradient: "embedding_277.tmp_0@GRAD"
slot_gradient: "embedding_278.tmp_0@GRAD"
slot_gradient: "embedding_279.tmp_0@GRAD"
slot_gradient: "embedding_280.tmp_0@GRAD"
slot_gradient: "embedding_281.tmp_0@GRAD"
slot_gradient: "embedding_282.tmp_0@GRAD"
slot_gradient: "embedding_283.tmp_0@GRAD"
slot_gradient: "embedding_284.tmp_0@GRAD"
slot_gradient: "embedding_285.tmp_0@GRAD"
slot_gradient: "embedding_286.tmp_0@GRAD"
slot_gradient: "embedding_287.tmp_0@GRAD"
slot_gradient: "embedding_288.tmp_0@GRAD"
slot_gradient: "embedding_289.tmp_0@GRAD"
slot_gradient: "embedding_290.tmp_0@GRAD"
slot_gradient: "embedding_291.tmp_0@GRAD"
slot_gradient: "embedding_292.tmp_0@GRAD"
slot_gradient: "embedding_293.tmp_0@GRAD"
slot_gradient: "embedding_294.tmp_0@GRAD"
slot_gradient: "embedding_295.tmp_0@GRAD"
slot_gradient: "embedding_296.tmp_0@GRAD"
slot_gradient: "embedding_297.tmp_0@GRAD"
slot_gradient: "embedding_298.tmp_0@GRAD"
slot_gradient: "embedding_299.tmp_0@GRAD"
slot_gradient: "embedding_300.tmp_0@GRAD"
slot_gradient: "embedding_301.tmp_0@GRAD"
slot_gradient: "embedding_302.tmp_0@GRAD"
slot_gradient: "embedding_303.tmp_0@GRAD"
slot_gradient: "embedding_304.tmp_0@GRAD"
slot_gradient: "embedding_305.tmp_0@GRAD"
slot_gradient: "embedding_306.tmp_0@GRAD"
slot_gradient: "embedding_307.tmp_0@GRAD"
slot_gradient: "embedding_308.tmp_0@GRAD"
slot_gradient: "embedding_309.tmp_0@GRAD"
slot_gradient: "embedding_310.tmp_0@GRAD"
slot_gradient: "embedding_311.tmp_0@GRAD"
slot_gradient: "embedding_312.tmp_0@GRAD"
slot_gradient: "embedding_313.tmp_0@GRAD"
slot_gradient: "embedding_314.tmp_0@GRAD"
slot_gradient: "embedding_315.tmp_0@GRAD"
slot_gradient: "embedding_316.tmp_0@GRAD"
slot_gradient: "embedding_317.tmp_0@GRAD"
slot_gradient: "embedding_318.tmp_0@GRAD"
slot_gradient: "embedding_319.tmp_0@GRAD"
slot_gradient: "embedding_320.tmp_0@GRAD"
slot_gradient: "embedding_321.tmp_0@GRAD"
slot_gradient: "embedding_322.tmp_0@GRAD"
slot_gradient: "embedding_323.tmp_0@GRAD"
slot_gradient: "embedding_324.tmp_0@GRAD"
slot_gradient: "embedding_325.tmp_0@GRAD"
slot_gradient: "embedding_326.tmp_0@GRAD"
slot_gradient: "embedding_327.tmp_0@GRAD"
slot_gradient: "embedding_328.tmp_0@GRAD"
slot_gradient: "embedding_329.tmp_0@GRAD"
slot_gradient: "embedding_330.tmp_0@GRAD"
slot_gradient: "embedding_331.tmp_0@GRAD"
slot_gradient: "embedding_332.tmp_0@GRAD"
slot_gradient: "embedding_333.tmp_0@GRAD"
slot_gradient: "embedding_334.tmp_0@GRAD"
slot_gradient: "embedding_335.tmp_0@GRAD"
slot_gradient: "embedding_336.tmp_0@GRAD"
slot_gradient: "embedding_337.tmp_0@GRAD"
slot_gradient: "embedding_338.tmp_0@GRAD"
slot_gradient: "embedding_339.tmp_0@GRAD"
slot_gradient: "embedding_340.tmp_0@GRAD"
slot_gradient: "embedding_341.tmp_0@GRAD"
slot_gradient: "embedding_342.tmp_0@GRAD"
slot_gradient: "embedding_343.tmp_0@GRAD"
slot_gradient: "embedding_344.tmp_0@GRAD"
slot_gradient: "embedding_345.tmp_0@GRAD"
slot_gradient: "embedding_346.tmp_0@GRAD"
slot_gradient: "embedding_347.tmp_0@GRAD"
slot_gradient: "embedding_348.tmp_0@GRAD"
slot_gradient: "embedding_349.tmp_0@GRAD"
slot_gradient: "embedding_350.tmp_0@GRAD"
slot_gradient: "embedding_351.tmp_0@GRAD"
slot_gradient: "embedding_352.tmp_0@GRAD"
slot_gradient: "embedding_353.tmp_0@GRAD"
slot_gradient: "embedding_354.tmp_0@GRAD"
slot_gradient: "embedding_355.tmp_0@GRAD"
slot_gradient: "embedding_356.tmp_0@GRAD"
slot_gradient: "embedding_357.tmp_0@GRAD"
slot_gradient: "embedding_358.tmp_0@GRAD"
slot_gradient: "embedding_359.tmp_0@GRAD"
slot_gradient: "embedding_360.tmp_0@GRAD"
slot_gradient: "embedding_361.tmp_0@GRAD"
slot_gradient: "embedding_362.tmp_0@GRAD"
slot_gradient: "embedding_363.tmp_0@GRAD"
slot_gradient: "embedding_364.tmp_0@GRAD"
slot_gradient: "embedding_365.tmp_0@GRAD"
slot_gradient: "embedding_366.tmp_0@GRAD"
slot_gradient: "embedding_367.tmp_0@GRAD"
slot_gradient: "embedding_368.tmp_0@GRAD"
slot_gradient: "embedding_369.tmp_0@GRAD"
slot_gradient: "embedding_370.tmp_0@GRAD"
slot_gradient: "embedding_371.tmp_0@GRAD"
slot_gradient: "embedding_372.tmp_0@GRAD"
slot_gradient: "embedding_373.tmp_0@GRAD"
slot_gradient: "embedding_374.tmp_0@GRAD"
slot_gradient: "embedding_375.tmp_0@GRAD"
slot_gradient: "embedding_376.tmp_0@GRAD"
slot_gradient: "embedding_377.tmp_0@GRAD"
slot_gradient: "embedding_378.tmp_0@GRAD"
slot_gradient: "embedding_379.tmp_0@GRAD"
slot_gradient: "embedding_380.tmp_0@GRAD"
slot_gradient: "embedding_381.tmp_0@GRAD"
slot_gradient: "embedding_382.tmp_0@GRAD"
slot_gradient: "embedding_383.tmp_0@GRAD"
slot_gradient: "embedding_384.tmp_0@GRAD"
slot_gradient: "embedding_385.tmp_0@GRAD"
slot_gradient: "embedding_386.tmp_0@GRAD"
slot_gradient: "embedding_387.tmp_0@GRAD"
slot_gradient: "embedding_388.tmp_0@GRAD"
slot_gradient: "embedding_389.tmp_0@GRAD"
slot_gradient: "embedding_390.tmp_0@GRAD"
slot_gradient: "embedding_391.tmp_0@GRAD"
slot_gradient: "embedding_392.tmp_0@GRAD"
slot_gradient: "embedding_393.tmp_0@GRAD"
slot_gradient: "embedding_394.tmp_0@GRAD"
slot_gradient: "embedding_395.tmp_0@GRAD"
slot_gradient: "embedding_396.tmp_0@GRAD"
slot_gradient: "embedding_397.tmp_0@GRAD"
slot_gradient: "embedding_398.tmp_0@GRAD"
slot_gradient: "embedding_399.tmp_0@GRAD"
slot_gradient: "embedding_400.tmp_0@GRAD"
slot_gradient: "embedding_401.tmp_0@GRAD"
slot_gradient: "embedding_402.tmp_0@GRAD"
slot_gradient: "embedding_403.tmp_0@GRAD"
slot_gradient: "embedding_404.tmp_0@GRAD"
slot_gradient: "embedding_405.tmp_0@GRAD"
slot_gradient: "embedding_406.tmp_0@GRAD"
slot_gradient: "embedding_407.tmp_0@GRAD"
}
skip_op: "lookup_table"
skip_op: "lookup_table_grad"
}
fs_client_param {
uri: "afs://xingtian.afs.baidu.com:9902"
user: "mlarch"
passwd: "Fv1M87"
hadoop_bin: "$HADOOP_HOME/bin/hadoop"
}
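
The fs_client_param block above is what the trainer's file-system client is built from. Below is a minimal sketch (not part of the repo) of how those fields could drive the stock hadoop CLI; fs.default.name and hadoop.job.ugi are the legacy Hadoop config keys, and treating them as the right ones for this AFS client is an assumption.

# Illustrative only -- shows how the fs_client_param fields above could
# be passed to the hadoop CLI; the -D keys are assumed legacy names.
import os

hadoop_bin = os.path.expandvars("$HADOOP_HOME/bin/hadoop")  # from hadoop_bin
cmd = " ".join([
    hadoop_bin, "fs",
    "-Dfs.default.name=afs://xingtian.afs.baidu.com:9902",  # from uri
    "-Dhadoop.job.ugi=mlarch,Fv1M87",                       # from user/passwd
    "-ls", "/",
])
print(cmd)  # the command a shell-based client would run to list the root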
feed_deploy/news_jingpai/package/my_nets/format_newcate_hotnews.awk
#!/bin/awk -f
{
    if ($1 !~ /^([0-9a-zA-Z])+$/ || $2 !~ /^([0-9])+$/ || $3 !~ /^([0-9])+$/) {
        next;
    }
    show = $2;
    clk = $3;
    if (clk > show) {
        clk = show;
    }
    for (i = 0; i < clk; i++) {
        $2 = "1";
        $3 = "1";
        print $0;
    }
    for (i = 0; i < show - clk; i++) {
        $2 = "1";
        $3 = "0";
        print $0;
    }
}
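
The script above unrolls aggregated impression logs: field 2 holds a show count and field 3 a click count, and each record is expanded into one instance per show, labeled 1/1 for clicks and 1/0 for the rest (clicks are clamped to shows first). A Python rendering of the same expansion, for illustration only; the input record is made up and the awk script's field validation is omitted.

# Python equivalent of the awk expansion above, for illustration only
# (the repo uses the awk script; this record is a made-up example).
def expand(record):
    fields = record.split()
    show, clk = int(fields[1]), int(fields[2])
    clk = min(clk, show)  # clamp clicks to shows, as the awk script does
    out = []
    for _ in range(clk):          # one positive instance per click
        out.append(" ".join([fields[0], "1", "1"] + fields[3:]))
    for _ in range(show - clk):   # one negative instance per unclicked show
        out.append(" ".join([fields[0], "1", "0"] + fields[3:]))
    return out

print(expand("doc123 3 1"))  # ['doc123 1 1', 'doc123 1 0', 'doc123 1 0']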
feed_deploy/news_jingpai/package/my_nets/ins_weight.py
#!/usr/bin/python
import sys
import re
import math

del_text_slot = True
g_ratio = 1
w_ratio = 0.01
slots_str = "6048 6145 6202 6201 6121 6119 6146 6120 6147 6122 6123 6118 6142 6143 6008 6148 6151 6127 6144 6150 6109 6003 6096 6149 6129 6203 6153 6152 6128 6106 6251 7082 7515 7080 6066 7507 6186 6007 7514 6054 6125 7506 10001 6006 6080 7023 6085 10000 6250 6110 6124 6090 6082 6067 7516 6101 6004 6191 6188 6070 6194 6247 6814 7512 10007 6058 6189 6059 7517 10005 7510 7024 7502 7503 6183 7511 6060 6806 7504 6185 6810 6248 10004 6815 6182 10068 6069 6073 6196 6816 7513 6071 6809 6072 6817 6190 7505 6813 6192 6807 6808 6195 6826 6184 6197 6068 6812 7107 6811 6823 6824 6819 6818 6821 6822 6820 6094 6083 6952 6099 6951 6949 6098 7075 6948 6157 6126 7077 6111 6087 6103 6107 6156 6005 6158 7122 6155 7058 6115 7079 7081 6833 6108 6840 6837 7147 7129 6097 6231 6957 7145 6956 7143 6130 7149 7142 6212 6827 7144 6089 6161 7055 6233 6105 7057 6237 6828 6850 6163 7124 6354 6162 7146 6830 7123 6160 6235 7056 6081 6841 6132 6954 6131 6236 6831 6845 6832 6953 6839 6950 7125 7054 6138 6166 6076 6851 6353 7076 7148 6858 6842 6860 7126 6829 6835 7078 6866 6869 6871 7052 6134 6855 6947 6862 6215 6852 7128 6092 6112 6213 6232 6863 6113 6165 6214 6216 6873 6865 6870 6077 6234 6861 6164 6217 7127 6218 6962 7053 7051 6961 6002 6738 6739 10105 7064 6751 6770 7100 6014 6765 6755 10021 10022 6010 10056 6011 6756 10055 6768 10024 6023 10003 6769 10002 6767 6759 10018 6024 6064 6012 6050 10042 6168 6253 10010 10020 6015 6018 10033 10041 10039 10031 10016 6764 7083 7152 7066 6171 7150 7085 6255 10044 10008 7102 6167 6240 6238 6095 10017 10046 6019 6031 6763 6256 6169 6254 10034 7108 7186 6257 10019 6757 10040 6025 7019 7086 10029 10011 7104 6261 6013 6766 10106 7105 7153 7089 6057 7134 7151 7045 7005 7008 7101 6035 7137 10023 6036 6172 7099 7087 6239 7185 6170 10006 6243 6350 7103 7090 7157 6259 7171 6875 7084 7154 6242 6260 7155 7017 7048 7156 6959 7047 10053 7135 6244 7136 10030 7063 6760 7016 7065 7179 6881 7018 6876 10081 10052 10054 10038 6886 10069 7004 10051 7007 7109 10057 6029 6888 10009 6889 7021 10047 6245 6878 10067 6879 6884 7180 7182 10071 7002 6880 6890 6887 10061 6027 6877 6892 10060 6893 7050 10036 7049 10012 10025 7012 7183 10058 7181 10086 6891 6258 6894 6883 7046 6037 7106 10043 10048 10045 10087 6885 10013 10028 7187 10037 10035 10050 6895 7011 7170 7172 10026 10063 10095 10082 10084 6960 10092 10075 6038 7010 7015 10015 10027 10064 7184 10014 10059 7013 7020 10072 10066 10080 6896 10083 10090 6039 10049 7164 7165 10091 10099 6963 7166 10079 10103 7006 7009 7169 6034 7028 7029 7030 7034 7035 7036 7040 7041 7042 10032 6009 6241 7003 7014 7088 13326 13330 13331 13352 13353 6198"
slot_whitelist = slots_str.split(" ")


def calc_ins_weight(params, label):
    """calc ins weight"""
    global g_ratio
    global w_ratio
    slots = []
    s_clk_num = 0
    s_show_num = 0
    active = 0
    attclk_num = 0
    attshow_num = 0
    attclk_avg = 0
    for items in params:
        if len(items) != 2:
            continue
        slot_name = items[0]
        slot_val = items[1]
        if slot_name not in slots:
            slots.append(slot_name)
        if slot_name == "session_click_num":
            s_clk_num = int(slot_val)
        if slot_name == "session_show_num":
            s_show_num = int(slot_val)
        if slot_name == "activity":
            active = float(slot_val) / 10000.0
    w = 1
    # for inactive user
    if active >= 0 and active < 0.4 and s_show_num >= 0 and s_show_num < 20:
        w = math.log(w_ratio * (420 - (active * 50 + 1) * (s_show_num + 1)) + math.e)
    if label == "0":
        w = 1 + (w - 1) * g_ratio
    return w


def filter_whitelist_slot(tmp_line):
    terms = tmp_line.split()
    line = "%s %s %s" % (terms[0], terms[1], terms[2])
    for item in terms[3:]:
        feasign = item.split(':')
        if len(feasign) == 2 and \
                feasign[1] in slot_whitelist:
            line = "%s %s" % (line, item)
    return line


def get_sample_type(line):
    # vertical_type = 20
    # if line.find("13038012583501790:6738") > 0:
    #     return 30
    # vertical_type = 0/5/1/2/9/11/13/16/29/-1
    if (line.find("7408512894065610:6738") > 0) or \
       (line.find("8815887816424655:6738") > 0) or \
       (line.find("7689987878537419:6738") > 0) or \
       (line.find("7971462863009228:6738") > 0) or \
       (line.find("9941787754311891:6738") > 0) or \
       (line.find("10504737723255509:6738") > 0) or \
       (line.find("11067687692199127:6738") > 0) or \
       (line.find("11912112645614554:6738") > 0) or \
       (line.find("15571287443748071:6738") > 0) or \
       (line.find("7127025017546227:6738") > 0):
        return 20
    return -1


def main():
    """ins adjust"""
    global del_text_slot
    for l in sys.stdin:
        l = l.rstrip("\n")
        items = l.split(" ")
        if len(items) < 3:
            continue
        label = items[2]
        lines = l.split("\t")
        line = lines[0]
        # streaming ins include all ins, sample_type only handle NEWS ins
        sample_type = -1
        if 'NEWS' in l:
            sample_type = get_sample_type(line)
        #line = filter_whitelist_slot(tmp_line)
        if len(lines) >= 4:
            if 'VIDEO' in lines[3]:
                continue
            params = lines[2]
            params = params.split(" ")
            m = [tuple(i.split(":")) for i in params]
            if m is None or len(m) == 0:
                if sample_type > 0:
                    print "%s $%s *1" % (line, sample_type)
                else:
                    print "%s *1" % line
                sys.stdout.flush()
                continue
            weight = calc_ins_weight(m, label)
            if sample_type > 0:
                print "%s $%s *%s" % (line, sample_type, weight)
            else:
                print "%s *%s" % (line, weight)
            sys.stdout.flush()
        else:
            if sample_type > 0:
                print "%s $%s *1" % (line, sample_type)
            else:
                print "%s *1" % line
            sys.stdout.flush()


if __name__ == "__main__":
    if len(sys.argv) > 1:
        if sys.argv[1] == "0":
            del_text_slot = False
    if len(sys.argv) > 2:
        g_ratio = float(sys.argv[2])
    if len(sys.argv) > 3:
        w_ratio = float(sys.argv[3])
    main()
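
To make calc_ins_weight concrete: the up-weighting only fires for low-activity users with few session shows. For active = 0.2 and s_show_num = 5 (values assumed for illustration), (active*50 + 1)*(s_show_num + 1) = 66, so w = log(0.01*(420 - 66) + e) ≈ 1.83; users outside that range keep w = 1.

# Worked example of the weight formula above; the activity and
# session-show values here are assumed for illustration.
import math

w_ratio = 0.01
active, s_show_num = 0.2, 5                    # inactive user, few shows
inner = (active * 50 + 1) * (s_show_num + 1)   # = 66
w = math.log(w_ratio * (420 - inner) + math.e)
print(round(inner, 1), round(w, 2))            # 66.0 1.83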
feed_deploy/news_jingpai/package/my_nets/jingpai_fleet_desc_new.prototxt
server_param {
downpour_server_param {
downpour_table_param {
table_id: 0
table_class: "DownpourSparseTable"
shard_num: 1950
accessor {
accessor_class: "DownpourCtrAccessor"
sparse_sgd_param {
learning_rate: 0.05
initial_g2sum: 3.0
initial_range: 0.0001
weight_bounds: -10.0
weight_bounds: 10.0
}
fea_dim: 11
embedx_dim: 8
embedx_threshold: 10
downpour_accessor_param {
nonclk_coeff: 0.1
click_coeff: 1
base_threshold: 1.5
delta_threshold: 0.25
delta_keep_days: 16
delete_after_unseen_days: 30
show_click_decay_rate: 0.98
delete_threshold: 0.8
}
table_accessor_save_param {
param: 1
converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)"
deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)"
}
table_accessor_save_param {
param: 2
converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)"
deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)"
}
}
type: PS_SPARSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 1
table_class: "DownpourDenseTable"
accessor {
accessor_class: "DownpourDenseValueAccessor"
dense_sgd_param {
name: "adam"
adam {
learning_rate: 5e-06
avg_decay_rate: 0.999993
ada_decay_rate: 0.9999
ada_epsilon: 1e-08
mom_decay_rate: 0.99
}
naive {
learning_rate: 0.0002
}
}
fea_dim: 2571127
}
type: PS_DENSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 2
table_class: "DownpourDenseDoubleTable"
accessor {
accessor_class: "DownpourDenseValueDoubleAccessor"
dense_sgd_param {
name: "summarydouble"
summary {
summary_decay_rate: 0.999999
}
}
fea_dim: 13464
}
type: PS_DENSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 3
table_class: "DownpourDenseTable"
accessor {
accessor_class: "DownpourDenseValueAccessor"
dense_sgd_param {
name: "adam"
adam {
learning_rate: 5e-06
avg_decay_rate: 0.999993
ada_decay_rate: 0.9999
ada_epsilon: 1e-08
mom_decay_rate: 0.99
}
naive {
learning_rate: 0.0002
}
}
fea_dim: 834238
}
type: PS_DENSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 4
table_class: "DownpourDenseDoubleTable"
accessor {
accessor_class: "DownpourDenseValueDoubleAccessor"
dense_sgd_param {
name: "summarydouble"
summary {
summary_decay_rate: 0.999999
}
}
fea_dim: 3267
}
type: PS_DENSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 5
table_class: "DownpourDenseTable"
accessor {
accessor_class: "DownpourDenseValueAccessor"
dense_sgd_param {
name: "adam"
adam {
learning_rate: 5e-06
avg_decay_rate: 0.999993
ada_decay_rate: 0.9999
ada_epsilon: 1e-08
mom_decay_rate: 0.99
}
naive {
learning_rate: 0.0002
}
}
fea_dim: 2072615
}
type: PS_DENSE_TABLE
compress_in_save: true
}
service_param {
server_class: "DownpourBrpcPsServer"
client_class: "DownpourBrpcPsClient"
service_class: "DownpourPsService"
start_server_port: 0
server_thread_num: 12
}
}
}
trainer_param {
dense_table {
table_id: 1
dense_variable_name: "fc_0.w_0"
dense_variable_name: "fc_0.b_0"
dense_variable_name: "fc_1.w_0"
dense_variable_name: "fc_1.b_0"
dense_variable_name: "fc_2.w_0"
dense_variable_name: "fc_2.b_0"
dense_variable_name: "fc_3.w_0"
dense_variable_name: "fc_3.b_0"
dense_variable_name: "fc_4.w_0"
dense_variable_name: "fc_4.b_0"
dense_variable_name: "fc_5.w_0"
dense_variable_name: "fc_5.b_0"
dense_variable_name: "fc_6.w_0"
dense_variable_name: "fc_6.b_0"
dense_variable_name: "fc_7.w_0"
dense_variable_name: "fc_7.b_0"
dense_gradient_variable_name: "fc_0.w_0@GRAD"
dense_gradient_variable_name: "fc_0.b_0@GRAD"
dense_gradient_variable_name: "fc_1.w_0@GRAD"
dense_gradient_variable_name: "fc_1.b_0@GRAD"
dense_gradient_variable_name: "fc_2.w_0@GRAD"
dense_gradient_variable_name: "fc_2.b_0@GRAD"
dense_gradient_variable_name: "fc_3.w_0@GRAD"
dense_gradient_variable_name: "fc_3.b_0@GRAD"
dense_gradient_variable_name: "fc_4.w_0@GRAD"
dense_gradient_variable_name: "fc_4.b_0@GRAD"
dense_gradient_variable_name: "fc_5.w_0@GRAD"
dense_gradient_variable_name: "fc_5.b_0@GRAD"
dense_gradient_variable_name: "fc_6.w_0@GRAD"
dense_gradient_variable_name: "fc_6.b_0@GRAD"
dense_gradient_variable_name: "fc_7.w_0@GRAD"
dense_gradient_variable_name: "fc_7.b_0@GRAD"
}
dense_table {
table_id: 2
dense_variable_name: "bn6048.batch_size"
dense_variable_name: "bn6048.batch_sum"
dense_variable_name: "bn6048.batch_square_sum"
dense_gradient_variable_name: "bn6048.batch_size@GRAD"
dense_gradient_variable_name: "bn6048.batch_sum@GRAD"
dense_gradient_variable_name: "bn6048.batch_square_sum@GRAD"
}
dense_table {
table_id: 3
dense_variable_name: "fc_0.w_0"
dense_variable_name: "fc_0.b_0"
dense_variable_name: "fc_1.w_0"
dense_variable_name: "fc_1.b_0"
dense_variable_name: "fc_2.w_0"
dense_variable_name: "fc_2.b_0"
dense_variable_name: "fc_3.w_0"
dense_variable_name: "fc_3.b_0"
dense_variable_name: "fc_4.w_0"
dense_variable_name: "fc_4.b_0"
dense_variable_name: "fc_5.w_0"
dense_variable_name: "fc_5.b_0"
dense_variable_name: "fc_6.w_0"
dense_variable_name: "fc_6.b_0"
dense_variable_name: "fc_7.w_0"
dense_variable_name: "fc_7.b_0"
dense_gradient_variable_name: "fc_0.w_0@GRAD"
dense_gradient_variable_name: "fc_0.b_0@GRAD"
dense_gradient_variable_name: "fc_1.w_0@GRAD"
dense_gradient_variable_name: "fc_1.b_0@GRAD"
dense_gradient_variable_name: "fc_2.w_0@GRAD"
dense_gradient_variable_name: "fc_2.b_0@GRAD"
dense_gradient_variable_name: "fc_3.w_0@GRAD"
dense_gradient_variable_name: "fc_3.b_0@GRAD"
dense_gradient_variable_name: "fc_4.w_0@GRAD"
dense_gradient_variable_name: "fc_4.b_0@GRAD"
dense_gradient_variable_name: "fc_5.w_0@GRAD"
dense_gradient_variable_name: "fc_5.b_0@GRAD"
dense_gradient_variable_name: "fc_6.w_0@GRAD"
dense_gradient_variable_name: "fc_6.b_0@GRAD"
dense_gradient_variable_name: "fc_7.w_0@GRAD"
dense_gradient_variable_name: "fc_7.b_0@GRAD"
}
dense_table {
table_id: 4
dense_variable_name: "bn6048.batch_size"
dense_variable_name: "bn6048.batch_sum"
dense_variable_name: "bn6048.batch_square_sum"
dense_gradient_variable_name: "bn6048.batch_size@GRAD"
dense_gradient_variable_name: "bn6048.batch_sum@GRAD"
dense_gradient_variable_name: "bn6048.batch_square_sum@GRAD"
}
dense_table {
table_id: 5
dense_variable_name: "fc_0.w_0"
dense_variable_name: "fc_0.b_0"
dense_variable_name: "fc_1.w_0"
dense_variable_name: "fc_1.b_0"
dense_variable_name: "fc_2.w_0"
dense_variable_name: "fc_2.b_0"
dense_variable_name: "fc_3.w_0"
dense_variable_name: "fc_3.b_0"
dense_variable_name: "fc_4.w_0"
dense_variable_name: "fc_4.b_0"
dense_variable_name: "fc_5.w_0"
dense_variable_name: "fc_5.b_0"
dense_gradient_variable_name: "fc_0.w_0@GRAD"
dense_gradient_variable_name: "fc_0.b_0@GRAD"
dense_gradient_variable_name: "fc_1.w_0@GRAD"
dense_gradient_variable_name: "fc_1.b_0@GRAD"
dense_gradient_variable_name: "fc_2.w_0@GRAD"
dense_gradient_variable_name: "fc_2.b_0@GRAD"
dense_gradient_variable_name: "fc_3.w_0@GRAD"
dense_gradient_variable_name: "fc_3.b_0@GRAD"
dense_gradient_variable_name: "fc_4.w_0@GRAD"
dense_gradient_variable_name: "fc_4.b_0@GRAD"
dense_gradient_variable_name: "fc_5.w_0@GRAD"
dense_gradient_variable_name: "fc_5.b_0@GRAD"
}
sparse_table {
table_id: 0
slot_key: "6048"
slot_key: "6002"
slot_key: "6145"
slot_key: "6202"
slot_key: "6201"
slot_key: "6121"
slot_key: "6738"
slot_key: "6119"
slot_key: "6146"
slot_key: "6120"
slot_key: "6147"
slot_key: "6122"
slot_key: "6123"
slot_key: "6118"
slot_key: "6142"
slot_key: "6143"
slot_key: "6008"
slot_key: "6148"
slot_key: "6151"
slot_key: "6127"
slot_key: "6144"
slot_key: "6094"
slot_key: "6083"
slot_key: "6952"
slot_key: "6739"
slot_key: "6150"
slot_key: "6109"
slot_key: "6003"
slot_key: "6099"
slot_key: "6149"
slot_key: "6129"
slot_key: "6203"
slot_key: "6153"
slot_key: "6152"
slot_key: "6128"
slot_key: "6106"
slot_key: "6251"
slot_key: "7082"
slot_key: "7515"
slot_key: "6951"
slot_key: "6949"
slot_key: "7080"
slot_key: "6066"
slot_key: "7507"
slot_key: "6186"
slot_key: "6007"
slot_key: "7514"
slot_key: "6125"
slot_key: "7506"
slot_key: "10001"
slot_key: "6006"
slot_key: "7023"
slot_key: "6085"
slot_key: "10000"
slot_key: "6098"
slot_key: "6250"
slot_key: "6110"
slot_key: "6124"
slot_key: "6090"
slot_key: "6082"
slot_key: "6067"
slot_key: "6101"
slot_key: "6004"
slot_key: "6191"
slot_key: "7075"
slot_key: "6948"
slot_key: "6157"
slot_key: "6126"
slot_key: "6188"
slot_key: "7077"
slot_key: "6070"
slot_key: "6111"
slot_key: "6087"
slot_key: "6103"
slot_key: "6107"
slot_key: "6194"
slot_key: "6156"
slot_key: "6005"
slot_key: "6247"
slot_key: "6814"
slot_key: "6158"
slot_key: "7122"
slot_key: "6058"
slot_key: "6189"
slot_key: "7058"
slot_key: "6059"
slot_key: "6115"
slot_key: "7079"
slot_key: "7081"
slot_key: "6833"
slot_key: "7024"
slot_key: "6108"
slot_key: "13342"
slot_key: "13345"
slot_key: "13412"
slot_key: "13343"
slot_key: "13350"
slot_key: "13346"
slot_key: "13409"
slot_key: "6009"
slot_key: "6011"
slot_key: "6012"
slot_key: "6013"
slot_key: "6014"
slot_key: "6015"
slot_key: "6019"
slot_key: "6023"
slot_key: "6024"
slot_key: "6027"
slot_key: "6029"
slot_key: "6031"
slot_key: "6050"
slot_key: "6060"
slot_key: "6068"
slot_key: "6069"
slot_key: "6089"
slot_key: "6095"
slot_key: "6105"
slot_key: "6112"
slot_key: "6130"
slot_key: "6131"
slot_key: "6132"
slot_key: "6134"
slot_key: "6161"
slot_key: "6162"
slot_key: "6163"
slot_key: "6166"
slot_key: "6182"
slot_key: "6183"
slot_key: "6185"
slot_key: "6190"
slot_key: "6212"
slot_key: "6213"
slot_key: "6231"
slot_key: "6233"
slot_key: "6234"
slot_key: "6236"
slot_key: "6238"
slot_key: "6239"
slot_key: "6240"
slot_key: "6241"
slot_key: "6242"
slot_key: "6243"
slot_key: "6244"
slot_key: "6245"
slot_key: "6354"
slot_key: "7002"
slot_key: "7005"
slot_key: "7008"
slot_key: "7010"
slot_key: "7012"
slot_key: "7013"
slot_key: "7015"
slot_key: "7016"
slot_key: "7017"
slot_key: "7018"
slot_key: "7019"
slot_key: "7020"
slot_key: "7045"
slot_key: "7046"
slot_key: "7048"
slot_key: "7049"
slot_key: "7052"
slot_key: "7054"
slot_key: "7056"
slot_key: "7064"
slot_key: "7066"
slot_key: "7076"
slot_key: "7078"
slot_key: "7083"
slot_key: "7084"
slot_key: "7085"
slot_key: "7086"
slot_key: "7087"
slot_key: "7088"
slot_key: "7089"
slot_key: "7090"
slot_key: "7099"
slot_key: "7100"
slot_key: "7101"
slot_key: "7102"
slot_key: "7103"
slot_key: "7104"
slot_key: "7105"
slot_key: "7109"
slot_key: "7124"
slot_key: "7126"
slot_key: "7136"
slot_key: "7142"
slot_key: "7143"
slot_key: "7144"
slot_key: "7145"
slot_key: "7146"
slot_key: "7147"
slot_key: "7148"
slot_key: "7150"
slot_key: "7151"
slot_key: "7152"
slot_key: "7153"
slot_key: "7154"
slot_key: "7155"
slot_key: "7156"
slot_key: "7157"
slot_key: "7047"
slot_key: "7050"
slot_key: "6253"
slot_key: "6254"
slot_key: "6255"
slot_key: "6256"
slot_key: "6257"
slot_key: "6259"
slot_key: "6260"
slot_key: "6261"
slot_key: "7170"
slot_key: "7185"
slot_key: "7186"
slot_key: "6751"
slot_key: "6755"
slot_key: "6757"
slot_key: "6759"
slot_key: "6760"
slot_key: "6763"
slot_key: "6764"
slot_key: "6765"
slot_key: "6766"
slot_key: "6767"
slot_key: "6768"
slot_key: "6769"
slot_key: "6770"
slot_key: "7502"
slot_key: "7503"
slot_key: "7504"
slot_key: "7505"
slot_key: "7510"
slot_key: "7511"
slot_key: "7512"
slot_key: "7513"
slot_key: "6806"
slot_key: "6807"
slot_key: "6808"
slot_key: "6809"
slot_key: "6810"
slot_key: "6811"
slot_key: "6812"
slot_key: "6813"
slot_key: "6815"
slot_key: "6816"
slot_key: "6817"
slot_key: "6819"
slot_key: "6823"
slot_key: "6828"
slot_key: "6831"
slot_key: "6840"
slot_key: "6845"
slot_key: "6875"
slot_key: "6879"
slot_key: "6881"
slot_key: "6888"
slot_key: "6889"
slot_key: "6947"
slot_key: "6950"
slot_key: "6956"
slot_key: "6957"
slot_key: "6959"
slot_key: "10006"
slot_key: "10008"
slot_key: "10009"
slot_key: "10010"
slot_key: "10011"
slot_key: "10016"
slot_key: "10017"
slot_key: "10018"
slot_key: "10019"
slot_key: "10020"
slot_key: "10021"
slot_key: "10022"
slot_key: "10023"
slot_key: "10024"
slot_key: "10029"
slot_key: "10030"
slot_key: "10031"
slot_key: "10032"
slot_key: "10033"
slot_key: "10034"
slot_key: "10035"
slot_key: "10036"
slot_key: "10037"
slot_key: "10038"
slot_key: "10039"
slot_key: "10040"
slot_key: "10041"
slot_key: "10042"
slot_key: "10044"
slot_key: "10045"
slot_key: "10046"
slot_key: "10051"
slot_key: "10052"
slot_key: "10053"
slot_key: "10054"
slot_key: "10055"
slot_key: "10056"
slot_key: "10057"
slot_key: "10060"
slot_key: "10066"
slot_key: "10069"
slot_key: "6820"
slot_key: "6821"
slot_key: "6822"
slot_key: "13333"
slot_key: "13334"
slot_key: "13335"
slot_key: "13336"
slot_key: "13337"
slot_key: "13338"
slot_key: "13339"
slot_key: "13340"
slot_key: "13341"
slot_key: "13351"
slot_key: "13352"
slot_key: "13353"
slot_key: "13359"
slot_key: "13361"
slot_key: "13362"
slot_key: "13363"
slot_key: "13366"
slot_key: "13367"
slot_key: "13368"
slot_key: "13369"
slot_key: "13370"
slot_key: "13371"
slot_key: "13375"
slot_key: "13376"
slot_key: "5700"
slot_key: "5702"
slot_key: "13400"
slot_key: "13401"
slot_key: "13402"
slot_key: "13403"
slot_key: "13404"
slot_key: "13406"
slot_key: "13407"
slot_key: "13408"
slot_key: "13410"
slot_key: "13417"
slot_key: "13418"
slot_key: "13419"
slot_key: "13420"
slot_key: "13422"
slot_key: "13425"
slot_key: "13427"
slot_key: "13428"
slot_key: "13429"
slot_key: "13430"
slot_key: "13431"
slot_key: "13433"
slot_key: "13434"
slot_key: "13436"
slot_key: "13437"
slot_key: "13326"
slot_key: "13330"
slot_key: "13331"
slot_key: "5717"
slot_key: "13442"
slot_key: "13451"
slot_key: "13452"
slot_key: "13455"
slot_key: "13456"
slot_key: "13457"
slot_key: "13458"
slot_key: "13459"
slot_key: "13460"
slot_key: "13461"
slot_key: "13462"
slot_key: "13463"
slot_key: "13464"
slot_key: "13465"
slot_key: "13466"
slot_key: "13467"
slot_key: "13468"
slot_key: "1104"
slot_key: "1106"
slot_key: "1107"
slot_key: "1108"
slot_key: "1109"
slot_key: "1110"
slot_key: "1111"
slot_key: "1112"
slot_key: "1113"
slot_key: "1114"
slot_key: "1115"
slot_key: "1116"
slot_key: "1117"
slot_key: "1119"
slot_key: "1120"
slot_key: "1121"
slot_key: "1122"
slot_key: "1123"
slot_key: "1124"
slot_key: "1125"
slot_key: "1126"
slot_key: "1127"
slot_key: "1128"
slot_key: "1129"
slot_key: "13812"
slot_key: "13813"
slot_key: "6740"
slot_key: "1490"
slot_key: "1491"
slot_value: "embedding_0.tmp_0"
slot_value: "embedding_1.tmp_0"
slot_value: "embedding_2.tmp_0"
slot_value: "embedding_3.tmp_0"
slot_value: "embedding_4.tmp_0"
slot_value: "embedding_5.tmp_0"
slot_value: "embedding_6.tmp_0"
slot_value: "embedding_7.tmp_0"
slot_value: "embedding_8.tmp_0"
slot_value: "embedding_9.tmp_0"
slot_value: "embedding_10.tmp_0"
slot_value: "embedding_11.tmp_0"
slot_value: "embedding_12.tmp_0"
slot_value: "embedding_13.tmp_0"
slot_value: "embedding_14.tmp_0"
slot_value: "embedding_15.tmp_0"
slot_value: "embedding_16.tmp_0"
slot_value: "embedding_17.tmp_0"
slot_value: "embedding_18.tmp_0"
slot_value: "embedding_19.tmp_0"
slot_value: "embedding_20.tmp_0"
slot_value: "embedding_21.tmp_0"
slot_value: "embedding_22.tmp_0"
slot_value: "embedding_23.tmp_0"
slot_value: "embedding_24.tmp_0"
slot_value: "embedding_25.tmp_0"
slot_value: "embedding_26.tmp_0"
slot_value: "embedding_27.tmp_0"
slot_value: "embedding_28.tmp_0"
slot_value: "embedding_29.tmp_0"
slot_value: "embedding_30.tmp_0"
slot_value: "embedding_31.tmp_0"
slot_value: "embedding_32.tmp_0"
slot_value: "embedding_33.tmp_0"
slot_value: "embedding_34.tmp_0"
slot_value: "embedding_35.tmp_0"
slot_value: "embedding_36.tmp_0"
slot_value: "embedding_37.tmp_0"
slot_value: "embedding_38.tmp_0"
slot_value: "embedding_39.tmp_0"
slot_value: "embedding_40.tmp_0"
slot_value: "embedding_41.tmp_0"
slot_value: "embedding_42.tmp_0"
slot_value: "embedding_43.tmp_0"
slot_value: "embedding_44.tmp_0"
slot_value: "embedding_45.tmp_0"
slot_value: "embedding_46.tmp_0"
slot_value: "embedding_47.tmp_0"
slot_value: "embedding_48.tmp_0"
slot_value: "embedding_49.tmp_0"
slot_value: "embedding_50.tmp_0"
slot_value: "embedding_51.tmp_0"
slot_value: "embedding_52.tmp_0"
slot_value: "embedding_53.tmp_0"
slot_value: "embedding_54.tmp_0"
slot_value: "embedding_55.tmp_0"
slot_value: "embedding_56.tmp_0"
slot_value: "embedding_57.tmp_0"
slot_value: "embedding_58.tmp_0"
slot_value: "embedding_59.tmp_0"
slot_value: "embedding_60.tmp_0"
slot_value: "embedding_61.tmp_0"
slot_value: "embedding_62.tmp_0"
slot_value: "embedding_63.tmp_0"
slot_value: "embedding_64.tmp_0"
slot_value: "embedding_65.tmp_0"
slot_value: "embedding_66.tmp_0"
slot_value: "embedding_67.tmp_0"
slot_value: "embedding_68.tmp_0"
slot_value: "embedding_69.tmp_0"
slot_value: "embedding_70.tmp_0"
slot_value: "embedding_71.tmp_0"
slot_value: "embedding_72.tmp_0"
slot_value: "embedding_73.tmp_0"
slot_value: "embedding_74.tmp_0"
slot_value: "embedding_75.tmp_0"
slot_value: "embedding_76.tmp_0"
slot_value: "embedding_77.tmp_0"
slot_value: "embedding_78.tmp_0"
slot_value: "embedding_79.tmp_0"
slot_value: "embedding_80.tmp_0"
slot_value: "embedding_81.tmp_0"
slot_value: "embedding_82.tmp_0"
slot_value: "embedding_83.tmp_0"
slot_value: "embedding_84.tmp_0"
slot_value: "embedding_85.tmp_0"
slot_value: "embedding_86.tmp_0"
slot_value: "embedding_87.tmp_0"
slot_value: "embedding_88.tmp_0"
slot_value: "embedding_89.tmp_0"
slot_value: "embedding_90.tmp_0"
slot_value: "embedding_91.tmp_0"
slot_value: "embedding_92.tmp_0"
slot_value: "embedding_93.tmp_0"
slot_value: "embedding_94.tmp_0"
slot_value: "embedding_95.tmp_0"
slot_value: "embedding_96.tmp_0"
slot_value: "embedding_97.tmp_0"
slot_value: "embedding_98.tmp_0"
slot_value: "embedding_99.tmp_0"
slot_value: "embedding_100.tmp_0"
slot_value: "embedding_101.tmp_0"
slot_value: "embedding_102.tmp_0"
slot_value: "embedding_103.tmp_0"
slot_value: "embedding_104.tmp_0"
slot_value: "embedding_105.tmp_0"
slot_value: "embedding_106.tmp_0"
slot_value: "embedding_107.tmp_0"
slot_value: "embedding_108.tmp_0"
slot_value: "embedding_109.tmp_0"
slot_value: "embedding_110.tmp_0"
slot_value: "embedding_111.tmp_0"
slot_value: "embedding_112.tmp_0"
slot_value: "embedding_113.tmp_0"
slot_value: "embedding_114.tmp_0"
slot_value: "embedding_115.tmp_0"
slot_value: "embedding_116.tmp_0"
slot_value: "embedding_117.tmp_0"
slot_value: "embedding_118.tmp_0"
slot_value: "embedding_119.tmp_0"
slot_value: "embedding_120.tmp_0"
slot_value: "embedding_121.tmp_0"
slot_value: "embedding_122.tmp_0"
slot_value: "embedding_123.tmp_0"
slot_value: "embedding_124.tmp_0"
slot_value: "embedding_125.tmp_0"
slot_value: "embedding_126.tmp_0"
slot_value: "embedding_127.tmp_0"
slot_value: "embedding_128.tmp_0"
slot_value: "embedding_129.tmp_0"
slot_value: "embedding_130.tmp_0"
slot_value: "embedding_131.tmp_0"
slot_value: "embedding_132.tmp_0"
slot_value: "embedding_133.tmp_0"
slot_value: "embedding_134.tmp_0"
slot_value: "embedding_135.tmp_0"
slot_value: "embedding_136.tmp_0"
slot_value: "embedding_137.tmp_0"
slot_value: "embedding_138.tmp_0"
slot_value: "embedding_139.tmp_0"
slot_value: "embedding_140.tmp_0"
slot_value: "embedding_141.tmp_0"
slot_value: "embedding_142.tmp_0"
slot_value: "embedding_143.tmp_0"
slot_value: "embedding_144.tmp_0"
slot_value: "embedding_145.tmp_0"
slot_value: "embedding_146.tmp_0"
slot_value: "embedding_147.tmp_0"
slot_value: "embedding_148.tmp_0"
slot_value: "embedding_149.tmp_0"
slot_value: "embedding_150.tmp_0"
slot_value: "embedding_151.tmp_0"
slot_value: "embedding_152.tmp_0"
slot_value: "embedding_153.tmp_0"
slot_value: "embedding_154.tmp_0"
slot_value: "embedding_155.tmp_0"
slot_value: "embedding_156.tmp_0"
slot_value: "embedding_157.tmp_0"
slot_value: "embedding_158.tmp_0"
slot_value: "embedding_159.tmp_0"
slot_value: "embedding_160.tmp_0"
slot_value: "embedding_161.tmp_0"
slot_value: "embedding_162.tmp_0"
slot_value: "embedding_163.tmp_0"
slot_value: "embedding_164.tmp_0"
slot_value: "embedding_165.tmp_0"
slot_value: "embedding_166.tmp_0"
slot_value: "embedding_167.tmp_0"
slot_value: "embedding_168.tmp_0"
slot_value: "embedding_169.tmp_0"
slot_value: "embedding_170.tmp_0"
slot_value: "embedding_171.tmp_0"
slot_value: "embedding_172.tmp_0"
slot_value: "embedding_173.tmp_0"
slot_value: "embedding_174.tmp_0"
slot_value: "embedding_175.tmp_0"
slot_value: "embedding_176.tmp_0"
slot_value: "embedding_177.tmp_0"
slot_value: "embedding_178.tmp_0"
slot_value: "embedding_179.tmp_0"
slot_value: "embedding_180.tmp_0"
slot_value: "embedding_181.tmp_0"
slot_value: "embedding_182.tmp_0"
slot_value: "embedding_183.tmp_0"
slot_value: "embedding_184.tmp_0"
slot_value: "embedding_185.tmp_0"
slot_value: "embedding_186.tmp_0"
slot_value: "embedding_187.tmp_0"
slot_value: "embedding_188.tmp_0"
slot_value: "embedding_189.tmp_0"
slot_value: "embedding_190.tmp_0"
slot_value: "embedding_191.tmp_0"
slot_value: "embedding_192.tmp_0"
slot_value: "embedding_193.tmp_0"
slot_value: "embedding_194.tmp_0"
slot_value: "embedding_195.tmp_0"
slot_value: "embedding_196.tmp_0"
slot_value: "embedding_197.tmp_0"
slot_value: "embedding_198.tmp_0"
slot_value: "embedding_199.tmp_0"
slot_value: "embedding_200.tmp_0"
slot_value: "embedding_201.tmp_0"
slot_value: "embedding_202.tmp_0"
slot_value: "embedding_203.tmp_0"
slot_value: "embedding_204.tmp_0"
slot_value: "embedding_205.tmp_0"
slot_value: "embedding_206.tmp_0"
slot_value: "embedding_207.tmp_0"
slot_value: "embedding_208.tmp_0"
slot_value: "embedding_209.tmp_0"
slot_value: "embedding_210.tmp_0"
slot_value: "embedding_211.tmp_0"
slot_value: "embedding_212.tmp_0"
slot_value: "embedding_213.tmp_0"
slot_value: "embedding_214.tmp_0"
slot_value: "embedding_215.tmp_0"
slot_value: "embedding_216.tmp_0"
slot_value: "embedding_217.tmp_0"
slot_value: "embedding_218.tmp_0"
slot_value: "embedding_219.tmp_0"
slot_value: "embedding_220.tmp_0"
slot_value: "embedding_221.tmp_0"
slot_value: "embedding_222.tmp_0"
slot_value: "embedding_223.tmp_0"
slot_value: "embedding_224.tmp_0"
slot_value: "embedding_225.tmp_0"
slot_value: "embedding_226.tmp_0"
slot_value: "embedding_227.tmp_0"
slot_value: "embedding_228.tmp_0"
slot_value: "embedding_229.tmp_0"
slot_value: "embedding_230.tmp_0"
slot_value: "embedding_231.tmp_0"
slot_value: "embedding_232.tmp_0"
slot_value: "embedding_233.tmp_0"
slot_value: "embedding_234.tmp_0"
slot_value: "embedding_235.tmp_0"
slot_value: "embedding_236.tmp_0"
slot_value: "embedding_237.tmp_0"
slot_value: "embedding_238.tmp_0"
slot_value: "embedding_239.tmp_0"
slot_value: "embedding_240.tmp_0"
slot_value: "embedding_241.tmp_0"
slot_value: "embedding_242.tmp_0"
slot_value: "embedding_243.tmp_0"
slot_value: "embedding_244.tmp_0"
slot_value: "embedding_245.tmp_0"
slot_value: "embedding_246.tmp_0"
slot_value: "embedding_247.tmp_0"
slot_value: "embedding_248.tmp_0"
slot_value: "embedding_249.tmp_0"
slot_value: "embedding_250.tmp_0"
slot_value: "embedding_251.tmp_0"
slot_value: "embedding_252.tmp_0"
slot_value: "embedding_253.tmp_0"
slot_value: "embedding_254.tmp_0"
slot_value: "embedding_255.tmp_0"
slot_value: "embedding_256.tmp_0"
slot_value: "embedding_257.tmp_0"
slot_value: "embedding_258.tmp_0"
slot_value: "embedding_259.tmp_0"
slot_value: "embedding_260.tmp_0"
slot_value: "embedding_261.tmp_0"
slot_value: "embedding_262.tmp_0"
slot_value: "embedding_263.tmp_0"
slot_value: "embedding_264.tmp_0"
slot_value: "embedding_265.tmp_0"
slot_value: "embedding_266.tmp_0"
slot_value: "embedding_267.tmp_0"
slot_value: "embedding_268.tmp_0"
slot_value: "embedding_269.tmp_0"
slot_value: "embedding_270.tmp_0"
slot_value: "embedding_271.tmp_0"
slot_value: "embedding_272.tmp_0"
slot_value: "embedding_273.tmp_0"
slot_value: "embedding_274.tmp_0"
slot_value: "embedding_275.tmp_0"
slot_value: "embedding_276.tmp_0"
slot_value: "embedding_277.tmp_0"
slot_value: "embedding_278.tmp_0"
slot_value: "embedding_279.tmp_0"
slot_value: "embedding_280.tmp_0"
slot_value: "embedding_281.tmp_0"
slot_value: "embedding_282.tmp_0"
slot_value: "embedding_283.tmp_0"
slot_value: "embedding_284.tmp_0"
slot_value: "embedding_285.tmp_0"
slot_value: "embedding_286.tmp_0"
slot_value: "embedding_287.tmp_0"
slot_value: "embedding_288.tmp_0"
slot_value: "embedding_289.tmp_0"
slot_value: "embedding_290.tmp_0"
slot_value: "embedding_291.tmp_0"
slot_value: "embedding_292.tmp_0"
slot_value: "embedding_293.tmp_0"
slot_value: "embedding_294.tmp_0"
slot_value: "embedding_295.tmp_0"
slot_value: "embedding_296.tmp_0"
slot_value: "embedding_297.tmp_0"
slot_value: "embedding_298.tmp_0"
slot_value: "embedding_299.tmp_0"
slot_value: "embedding_300.tmp_0"
slot_value: "embedding_301.tmp_0"
slot_value: "embedding_302.tmp_0"
slot_value: "embedding_303.tmp_0"
slot_value: "embedding_304.tmp_0"
slot_value: "embedding_305.tmp_0"
slot_value: "embedding_306.tmp_0"
slot_value: "embedding_307.tmp_0"
slot_value: "embedding_308.tmp_0"
slot_value: "embedding_309.tmp_0"
slot_value: "embedding_310.tmp_0"
slot_value: "embedding_311.tmp_0"
slot_value: "embedding_312.tmp_0"
slot_value: "embedding_313.tmp_0"
slot_value: "embedding_314.tmp_0"
slot_value: "embedding_315.tmp_0"
slot_value: "embedding_316.tmp_0"
slot_value: "embedding_317.tmp_0"
slot_value: "embedding_318.tmp_0"
slot_value: "embedding_319.tmp_0"
slot_value: "embedding_320.tmp_0"
slot_value: "embedding_321.tmp_0"
slot_value: "embedding_322.tmp_0"
slot_value: "embedding_323.tmp_0"
slot_value: "embedding_324.tmp_0"
slot_value: "embedding_325.tmp_0"
slot_value: "embedding_326.tmp_0"
slot_value: "embedding_327.tmp_0"
slot_value: "embedding_328.tmp_0"
slot_value: "embedding_329.tmp_0"
slot_value: "embedding_330.tmp_0"
slot_value: "embedding_331.tmp_0"
slot_value: "embedding_332.tmp_0"
slot_value: "embedding_333.tmp_0"
slot_value: "embedding_334.tmp_0"
slot_value: "embedding_335.tmp_0"
slot_value: "embedding_336.tmp_0"
slot_value: "embedding_337.tmp_0"
slot_value: "embedding_338.tmp_0"
slot_value: "embedding_339.tmp_0"
slot_value: "embedding_340.tmp_0"
slot_value: "embedding_341.tmp_0"
slot_value: "embedding_342.tmp_0"
slot_value: "embedding_343.tmp_0"
slot_value: "embedding_344.tmp_0"
slot_value: "embedding_345.tmp_0"
slot_value: "embedding_346.tmp_0"
slot_value: "embedding_347.tmp_0"
slot_value: "embedding_348.tmp_0"
slot_value: "embedding_349.tmp_0"
slot_value: "embedding_350.tmp_0"
slot_value: "embedding_351.tmp_0"
slot_value: "embedding_352.tmp_0"
slot_value: "embedding_353.tmp_0"
slot_value: "embedding_354.tmp_0"
slot_value: "embedding_355.tmp_0"
slot_value: "embedding_356.tmp_0"
slot_value: "embedding_357.tmp_0"
slot_value: "embedding_358.tmp_0"
slot_value: "embedding_359.tmp_0"
slot_value: "embedding_360.tmp_0"
slot_value: "embedding_361.tmp_0"
slot_value: "embedding_362.tmp_0"
slot_value: "embedding_363.tmp_0"
slot_value: "embedding_364.tmp_0"
slot_value: "embedding_365.tmp_0"
slot_value: "embedding_366.tmp_0"
slot_value: "embedding_367.tmp_0"
slot_value: "embedding_368.tmp_0"
slot_value: "embedding_369.tmp_0"
slot_value: "embedding_370.tmp_0"
slot_value: "embedding_371.tmp_0"
slot_value: "embedding_372.tmp_0"
slot_value: "embedding_373.tmp_0"
slot_value: "embedding_374.tmp_0"
slot_value: "embedding_375.tmp_0"
slot_value: "embedding_376.tmp_0"
slot_value: "embedding_377.tmp_0"
slot_value: "embedding_378.tmp_0"
slot_value: "embedding_379.tmp_0"
slot_value: "embedding_380.tmp_0"
slot_value: "embedding_381.tmp_0"
slot_value: "embedding_382.tmp_0"
slot_value: "embedding_383.tmp_0"
slot_value: "embedding_384.tmp_0"
slot_value: "embedding_385.tmp_0"
slot_value: "embedding_386.tmp_0"
slot_value: "embedding_387.tmp_0"
slot_value: "embedding_388.tmp_0"
slot_value: "embedding_389.tmp_0"
slot_value: "embedding_390.tmp_0"
slot_value: "embedding_391.tmp_0"
slot_value: "embedding_392.tmp_0"
slot_value: "embedding_393.tmp_0"
slot_value: "embedding_394.tmp_0"
slot_value: "embedding_395.tmp_0"
slot_value: "embedding_396.tmp_0"
slot_value: "embedding_397.tmp_0"
slot_value: "embedding_398.tmp_0"
slot_value: "embedding_399.tmp_0"
slot_value: "embedding_400.tmp_0"
slot_value: "embedding_401.tmp_0"
slot_value: "embedding_402.tmp_0"
slot_value: "embedding_403.tmp_0"
slot_value: "embedding_404.tmp_0"
slot_value: "embedding_405.tmp_0"
slot_value: "embedding_406.tmp_0"
slot_value: "embedding_407.tmp_0"
slot_gradient: "embedding_0.tmp_0@GRAD"
slot_gradient: "embedding_1.tmp_0@GRAD"
slot_gradient: "embedding_2.tmp_0@GRAD"
slot_gradient: "embedding_3.tmp_0@GRAD"
slot_gradient: "embedding_4.tmp_0@GRAD"
slot_gradient: "embedding_5.tmp_0@GRAD"
slot_gradient: "embedding_6.tmp_0@GRAD"
slot_gradient: "embedding_7.tmp_0@GRAD"
slot_gradient: "embedding_8.tmp_0@GRAD"
slot_gradient: "embedding_9.tmp_0@GRAD"
slot_gradient: "embedding_10.tmp_0@GRAD"
slot_gradient: "embedding_11.tmp_0@GRAD"
slot_gradient: "embedding_12.tmp_0@GRAD"
slot_gradient: "embedding_13.tmp_0@GRAD"
slot_gradient: "embedding_14.tmp_0@GRAD"
slot_gradient: "embedding_15.tmp_0@GRAD"
slot_gradient: "embedding_16.tmp_0@GRAD"
slot_gradient: "embedding_17.tmp_0@GRAD"
slot_gradient: "embedding_18.tmp_0@GRAD"
slot_gradient: "embedding_19.tmp_0@GRAD"
slot_gradient: "embedding_20.tmp_0@GRAD"
slot_gradient: "embedding_21.tmp_0@GRAD"
slot_gradient: "embedding_22.tmp_0@GRAD"
slot_gradient: "embedding_23.tmp_0@GRAD"
slot_gradient: "embedding_24.tmp_0@GRAD"
slot_gradient: "embedding_25.tmp_0@GRAD"
slot_gradient: "embedding_26.tmp_0@GRAD"
slot_gradient: "embedding_27.tmp_0@GRAD"
slot_gradient: "embedding_28.tmp_0@GRAD"
slot_gradient: "embedding_29.tmp_0@GRAD"
slot_gradient: "embedding_30.tmp_0@GRAD"
slot_gradient: "embedding_31.tmp_0@GRAD"
slot_gradient: "embedding_32.tmp_0@GRAD"
slot_gradient: "embedding_33.tmp_0@GRAD"
slot_gradient: "embedding_34.tmp_0@GRAD"
slot_gradient: "embedding_35.tmp_0@GRAD"
slot_gradient: "embedding_36.tmp_0@GRAD"
slot_gradient: "embedding_37.tmp_0@GRAD"
slot_gradient: "embedding_38.tmp_0@GRAD"
slot_gradient: "embedding_39.tmp_0@GRAD"
slot_gradient: "embedding_40.tmp_0@GRAD"
slot_gradient: "embedding_41.tmp_0@GRAD"
slot_gradient: "embedding_42.tmp_0@GRAD"
slot_gradient: "embedding_43.tmp_0@GRAD"
slot_gradient: "embedding_44.tmp_0@GRAD"
slot_gradient: "embedding_45.tmp_0@GRAD"
slot_gradient: "embedding_46.tmp_0@GRAD"
slot_gradient: "embedding_47.tmp_0@GRAD"
slot_gradient: "embedding_48.tmp_0@GRAD"
slot_gradient: "embedding_49.tmp_0@GRAD"
slot_gradient: "embedding_50.tmp_0@GRAD"
slot_gradient: "embedding_51.tmp_0@GRAD"
slot_gradient: "embedding_52.tmp_0@GRAD"
slot_gradient: "embedding_53.tmp_0@GRAD"
slot_gradient: "embedding_54.tmp_0@GRAD"
slot_gradient: "embedding_55.tmp_0@GRAD"
slot_gradient: "embedding_56.tmp_0@GRAD"
slot_gradient: "embedding_57.tmp_0@GRAD"
slot_gradient: "embedding_58.tmp_0@GRAD"
slot_gradient: "embedding_59.tmp_0@GRAD"
slot_gradient: "embedding_60.tmp_0@GRAD"
slot_gradient: "embedding_61.tmp_0@GRAD"
slot_gradient: "embedding_62.tmp_0@GRAD"
slot_gradient: "embedding_63.tmp_0@GRAD"
slot_gradient: "embedding_64.tmp_0@GRAD"
slot_gradient: "embedding_65.tmp_0@GRAD"
slot_gradient: "embedding_66.tmp_0@GRAD"
slot_gradient: "embedding_67.tmp_0@GRAD"
slot_gradient: "embedding_68.tmp_0@GRAD"
slot_gradient: "embedding_69.tmp_0@GRAD"
slot_gradient: "embedding_70.tmp_0@GRAD"
slot_gradient: "embedding_71.tmp_0@GRAD"
slot_gradient: "embedding_72.tmp_0@GRAD"
slot_gradient: "embedding_73.tmp_0@GRAD"
slot_gradient: "embedding_74.tmp_0@GRAD"
slot_gradient: "embedding_75.tmp_0@GRAD"
slot_gradient: "embedding_76.tmp_0@GRAD"
slot_gradient: "embedding_77.tmp_0@GRAD"
slot_gradient: "embedding_78.tmp_0@GRAD"
slot_gradient: "embedding_79.tmp_0@GRAD"
slot_gradient: "embedding_80.tmp_0@GRAD"
slot_gradient: "embedding_81.tmp_0@GRAD"
slot_gradient: "embedding_82.tmp_0@GRAD"
slot_gradient: "embedding_83.tmp_0@GRAD"
slot_gradient: "embedding_84.tmp_0@GRAD"
slot_gradient: "embedding_85.tmp_0@GRAD"
slot_gradient: "embedding_86.tmp_0@GRAD"
slot_gradient: "embedding_87.tmp_0@GRAD"
slot_gradient: "embedding_88.tmp_0@GRAD"
slot_gradient: "embedding_89.tmp_0@GRAD"
slot_gradient: "embedding_90.tmp_0@GRAD"
slot_gradient: "embedding_91.tmp_0@GRAD"
slot_gradient: "embedding_92.tmp_0@GRAD"
slot_gradient: "embedding_93.tmp_0@GRAD"
slot_gradient: "embedding_94.tmp_0@GRAD"
slot_gradient: "embedding_95.tmp_0@GRAD"
slot_gradient: "embedding_96.tmp_0@GRAD"
slot_gradient: "embedding_97.tmp_0@GRAD"
slot_gradient: "embedding_98.tmp_0@GRAD"
slot_gradient: "embedding_99.tmp_0@GRAD"
slot_gradient: "embedding_100.tmp_0@GRAD"
slot_gradient: "embedding_101.tmp_0@GRAD"
slot_gradient: "embedding_102.tmp_0@GRAD"
slot_gradient: "embedding_103.tmp_0@GRAD"
slot_gradient: "embedding_104.tmp_0@GRAD"
slot_gradient: "embedding_105.tmp_0@GRAD"
slot_gradient: "embedding_106.tmp_0@GRAD"
slot_gradient: "embedding_107.tmp_0@GRAD"
slot_gradient: "embedding_108.tmp_0@GRAD"
slot_gradient: "embedding_109.tmp_0@GRAD"
slot_gradient: "embedding_110.tmp_0@GRAD"
slot_gradient: "embedding_111.tmp_0@GRAD"
slot_gradient: "embedding_112.tmp_0@GRAD"
slot_gradient: "embedding_113.tmp_0@GRAD"
slot_gradient: "embedding_114.tmp_0@GRAD"
slot_gradient: "embedding_115.tmp_0@GRAD"
slot_gradient: "embedding_116.tmp_0@GRAD"
slot_gradient: "embedding_117.tmp_0@GRAD"
slot_gradient: "embedding_118.tmp_0@GRAD"
slot_gradient: "embedding_119.tmp_0@GRAD"
slot_gradient: "embedding_120.tmp_0@GRAD"
slot_gradient: "embedding_121.tmp_0@GRAD"
slot_gradient: "embedding_122.tmp_0@GRAD"
slot_gradient: "embedding_123.tmp_0@GRAD"
slot_gradient: "embedding_124.tmp_0@GRAD"
slot_gradient: "embedding_125.tmp_0@GRAD"
slot_gradient: "embedding_126.tmp_0@GRAD"
slot_gradient: "embedding_127.tmp_0@GRAD"
slot_gradient: "embedding_128.tmp_0@GRAD"
slot_gradient: "embedding_129.tmp_0@GRAD"
slot_gradient: "embedding_130.tmp_0@GRAD"
slot_gradient: "embedding_131.tmp_0@GRAD"
slot_gradient: "embedding_132.tmp_0@GRAD"
slot_gradient: "embedding_133.tmp_0@GRAD"
slot_gradient: "embedding_134.tmp_0@GRAD"
slot_gradient: "embedding_135.tmp_0@GRAD"
slot_gradient: "embedding_136.tmp_0@GRAD"
slot_gradient: "embedding_137.tmp_0@GRAD"
slot_gradient: "embedding_138.tmp_0@GRAD"
slot_gradient: "embedding_139.tmp_0@GRAD"
slot_gradient: "embedding_140.tmp_0@GRAD"
slot_gradient: "embedding_141.tmp_0@GRAD"
slot_gradient: "embedding_142.tmp_0@GRAD"
slot_gradient: "embedding_143.tmp_0@GRAD"
slot_gradient: "embedding_144.tmp_0@GRAD"
slot_gradient: "embedding_145.tmp_0@GRAD"
slot_gradient: "embedding_146.tmp_0@GRAD"
slot_gradient: "embedding_147.tmp_0@GRAD"
slot_gradient: "embedding_148.tmp_0@GRAD"
slot_gradient: "embedding_149.tmp_0@GRAD"
slot_gradient: "embedding_150.tmp_0@GRAD"
slot_gradient: "embedding_151.tmp_0@GRAD"
slot_gradient: "embedding_152.tmp_0@GRAD"
slot_gradient: "embedding_153.tmp_0@GRAD"
slot_gradient: "embedding_154.tmp_0@GRAD"
slot_gradient: "embedding_155.tmp_0@GRAD"
slot_gradient: "embedding_156.tmp_0@GRAD"
slot_gradient: "embedding_157.tmp_0@GRAD"
slot_gradient: "embedding_158.tmp_0@GRAD"
slot_gradient: "embedding_159.tmp_0@GRAD"
slot_gradient: "embedding_160.tmp_0@GRAD"
slot_gradient: "embedding_161.tmp_0@GRAD"
slot_gradient: "embedding_162.tmp_0@GRAD"
slot_gradient: "embedding_163.tmp_0@GRAD"
slot_gradient: "embedding_164.tmp_0@GRAD"
slot_gradient: "embedding_165.tmp_0@GRAD"
slot_gradient: "embedding_166.tmp_0@GRAD"
slot_gradient: "embedding_167.tmp_0@GRAD"
slot_gradient: "embedding_168.tmp_0@GRAD"
slot_gradient: "embedding_169.tmp_0@GRAD"
slot_gradient: "embedding_170.tmp_0@GRAD"
slot_gradient: "embedding_171.tmp_0@GRAD"
slot_gradient: "embedding_172.tmp_0@GRAD"
slot_gradient: "embedding_173.tmp_0@GRAD"
slot_gradient: "embedding_174.tmp_0@GRAD"
slot_gradient: "embedding_175.tmp_0@GRAD"
slot_gradient: "embedding_176.tmp_0@GRAD"
slot_gradient: "embedding_177.tmp_0@GRAD"
slot_gradient: "embedding_178.tmp_0@GRAD"
slot_gradient: "embedding_179.tmp_0@GRAD"
slot_gradient: "embedding_180.tmp_0@GRAD"
slot_gradient: "embedding_181.tmp_0@GRAD"
slot_gradient: "embedding_182.tmp_0@GRAD"
slot_gradient: "embedding_183.tmp_0@GRAD"
slot_gradient: "embedding_184.tmp_0@GRAD"
slot_gradient: "embedding_185.tmp_0@GRAD"
slot_gradient: "embedding_186.tmp_0@GRAD"
slot_gradient: "embedding_187.tmp_0@GRAD"
slot_gradient: "embedding_188.tmp_0@GRAD"
slot_gradient: "embedding_189.tmp_0@GRAD"
slot_gradient: "embedding_190.tmp_0@GRAD"
slot_gradient: "embedding_191.tmp_0@GRAD"
slot_gradient: "embedding_192.tmp_0@GRAD"
slot_gradient: "embedding_193.tmp_0@GRAD"
slot_gradient: "embedding_194.tmp_0@GRAD"
slot_gradient: "embedding_195.tmp_0@GRAD"
slot_gradient: "embedding_196.tmp_0@GRAD"
slot_gradient: "embedding_197.tmp_0@GRAD"
slot_gradient: "embedding_198.tmp_0@GRAD"
slot_gradient: "embedding_199.tmp_0@GRAD"
slot_gradient: "embedding_200.tmp_0@GRAD"
slot_gradient: "embedding_201.tmp_0@GRAD"
slot_gradient: "embedding_202.tmp_0@GRAD"
slot_gradient: "embedding_203.tmp_0@GRAD"
slot_gradient: "embedding_204.tmp_0@GRAD"
slot_gradient: "embedding_205.tmp_0@GRAD"
slot_gradient: "embedding_206.tmp_0@GRAD"
slot_gradient: "embedding_207.tmp_0@GRAD"
slot_gradient: "embedding_208.tmp_0@GRAD"
slot_gradient: "embedding_209.tmp_0@GRAD"
slot_gradient: "embedding_210.tmp_0@GRAD"
slot_gradient: "embedding_211.tmp_0@GRAD"
slot_gradient: "embedding_212.tmp_0@GRAD"
slot_gradient: "embedding_213.tmp_0@GRAD"
slot_gradient: "embedding_214.tmp_0@GRAD"
slot_gradient: "embedding_215.tmp_0@GRAD"
slot_gradient: "embedding_216.tmp_0@GRAD"
slot_gradient: "embedding_217.tmp_0@GRAD"
slot_gradient: "embedding_218.tmp_0@GRAD"
slot_gradient: "embedding_219.tmp_0@GRAD"
slot_gradient: "embedding_220.tmp_0@GRAD"
slot_gradient: "embedding_221.tmp_0@GRAD"
slot_gradient: "embedding_222.tmp_0@GRAD"
slot_gradient: "embedding_223.tmp_0@GRAD"
slot_gradient: "embedding_224.tmp_0@GRAD"
slot_gradient: "embedding_225.tmp_0@GRAD"
slot_gradient: "embedding_226.tmp_0@GRAD"
slot_gradient: "embedding_227.tmp_0@GRAD"
slot_gradient: "embedding_228.tmp_0@GRAD"
slot_gradient: "embedding_229.tmp_0@GRAD"
slot_gradient: "embedding_230.tmp_0@GRAD"
slot_gradient: "embedding_231.tmp_0@GRAD"
slot_gradient: "embedding_232.tmp_0@GRAD"
slot_gradient: "embedding_233.tmp_0@GRAD"
slot_gradient: "embedding_234.tmp_0@GRAD"
slot_gradient: "embedding_235.tmp_0@GRAD"
slot_gradient: "embedding_236.tmp_0@GRAD"
slot_gradient: "embedding_237.tmp_0@GRAD"
slot_gradient: "embedding_238.tmp_0@GRAD"
slot_gradient: "embedding_239.tmp_0@GRAD"
slot_gradient: "embedding_240.tmp_0@GRAD"
slot_gradient: "embedding_241.tmp_0@GRAD"
slot_gradient: "embedding_242.tmp_0@GRAD"
slot_gradient: "embedding_243.tmp_0@GRAD"
slot_gradient: "embedding_244.tmp_0@GRAD"
slot_gradient: "embedding_245.tmp_0@GRAD"
slot_gradient: "embedding_246.tmp_0@GRAD"
slot_gradient: "embedding_247.tmp_0@GRAD"
slot_gradient: "embedding_248.tmp_0@GRAD"
slot_gradient: "embedding_249.tmp_0@GRAD"
slot_gradient: "embedding_250.tmp_0@GRAD"
slot_gradient: "embedding_251.tmp_0@GRAD"
slot_gradient: "embedding_252.tmp_0@GRAD"
slot_gradient: "embedding_253.tmp_0@GRAD"
slot_gradient: "embedding_254.tmp_0@GRAD"
slot_gradient: "embedding_255.tmp_0@GRAD"
slot_gradient: "embedding_256.tmp_0@GRAD"
slot_gradient: "embedding_257.tmp_0@GRAD"
slot_gradient: "embedding_258.tmp_0@GRAD"
slot_gradient: "embedding_259.tmp_0@GRAD"
slot_gradient: "embedding_260.tmp_0@GRAD"
slot_gradient: "embedding_261.tmp_0@GRAD"
slot_gradient: "embedding_262.tmp_0@GRAD"
slot_gradient: "embedding_263.tmp_0@GRAD"
slot_gradient: "embedding_264.tmp_0@GRAD"
slot_gradient: "embedding_265.tmp_0@GRAD"
slot_gradient: "embedding_266.tmp_0@GRAD"
slot_gradient: "embedding_267.tmp_0@GRAD"
slot_gradient: "embedding_268.tmp_0@GRAD"
slot_gradient: "embedding_269.tmp_0@GRAD"
slot_gradient: "embedding_270.tmp_0@GRAD"
slot_gradient: "embedding_271.tmp_0@GRAD"
slot_gradient: "embedding_272.tmp_0@GRAD"
slot_gradient: "embedding_273.tmp_0@GRAD"
slot_gradient: "embedding_274.tmp_0@GRAD"
slot_gradient: "embedding_275.tmp_0@GRAD"
slot_gradient: "embedding_276.tmp_0@GRAD"
slot_gradient: "embedding_277.tmp_0@GRAD"
slot_gradient: "embedding_278.tmp_0@GRAD"
slot_gradient: "embedding_279.tmp_0@GRAD"
slot_gradient: "embedding_280.tmp_0@GRAD"
slot_gradient: "embedding_281.tmp_0@GRAD"
slot_gradient: "embedding_282.tmp_0@GRAD"
slot_gradient: "embedding_283.tmp_0@GRAD"
slot_gradient: "embedding_284.tmp_0@GRAD"
slot_gradient: "embedding_285.tmp_0@GRAD"
slot_gradient: "embedding_286.tmp_0@GRAD"
slot_gradient: "embedding_287.tmp_0@GRAD"
slot_gradient: "embedding_288.tmp_0@GRAD"
slot_gradient: "embedding_289.tmp_0@GRAD"
slot_gradient: "embedding_290.tmp_0@GRAD"
slot_gradient: "embedding_291.tmp_0@GRAD"
slot_gradient: "embedding_292.tmp_0@GRAD"
slot_gradient: "embedding_293.tmp_0@GRAD"
slot_gradient: "embedding_294.tmp_0@GRAD"
slot_gradient: "embedding_295.tmp_0@GRAD"
slot_gradient: "embedding_296.tmp_0@GRAD"
slot_gradient: "embedding_297.tmp_0@GRAD"
slot_gradient: "embedding_298.tmp_0@GRAD"
slot_gradient: "embedding_299.tmp_0@GRAD"
slot_gradient: "embedding_300.tmp_0@GRAD"
slot_gradient: "embedding_301.tmp_0@GRAD"
slot_gradient: "embedding_302.tmp_0@GRAD"
slot_gradient: "embedding_303.tmp_0@GRAD"
slot_gradient: "embedding_304.tmp_0@GRAD"
slot_gradient: "embedding_305.tmp_0@GRAD"
slot_gradient: "embedding_306.tmp_0@GRAD"
slot_gradient: "embedding_307.tmp_0@GRAD"
slot_gradient: "embedding_308.tmp_0@GRAD"
slot_gradient: "embedding_309.tmp_0@GRAD"
slot_gradient: "embedding_310.tmp_0@GRAD"
slot_gradient: "embedding_311.tmp_0@GRAD"
slot_gradient: "embedding_312.tmp_0@GRAD"
slot_gradient: "embedding_313.tmp_0@GRAD"
slot_gradient: "embedding_314.tmp_0@GRAD"
slot_gradient: "embedding_315.tmp_0@GRAD"
slot_gradient: "embedding_316.tmp_0@GRAD"
slot_gradient: "embedding_317.tmp_0@GRAD"
slot_gradient: "embedding_318.tmp_0@GRAD"
slot_gradient: "embedding_319.tmp_0@GRAD"
slot_gradient: "embedding_320.tmp_0@GRAD"
slot_gradient: "embedding_321.tmp_0@GRAD"
slot_gradient: "embedding_322.tmp_0@GRAD"
slot_gradient: "embedding_323.tmp_0@GRAD"
slot_gradient: "embedding_324.tmp_0@GRAD"
slot_gradient: "embedding_325.tmp_0@GRAD"
slot_gradient: "embedding_326.tmp_0@GRAD"
slot_gradient: "embedding_327.tmp_0@GRAD"
slot_gradient: "embedding_328.tmp_0@GRAD"
slot_gradient: "embedding_329.tmp_0@GRAD"
slot_gradient: "embedding_330.tmp_0@GRAD"
slot_gradient: "embedding_331.tmp_0@GRAD"
slot_gradient: "embedding_332.tmp_0@GRAD"
slot_gradient: "embedding_333.tmp_0@GRAD"
slot_gradient: "embedding_334.tmp_0@GRAD"
slot_gradient: "embedding_335.tmp_0@GRAD"
slot_gradient: "embedding_336.tmp_0@GRAD"
slot_gradient: "embedding_337.tmp_0@GRAD"
slot_gradient: "embedding_338.tmp_0@GRAD"
slot_gradient: "embedding_339.tmp_0@GRAD"
slot_gradient: "embedding_340.tmp_0@GRAD"
slot_gradient: "embedding_341.tmp_0@GRAD"
slot_gradient: "embedding_342.tmp_0@GRAD"
slot_gradient: "embedding_343.tmp_0@GRAD"
slot_gradient: "embedding_344.tmp_0@GRAD"
slot_gradient: "embedding_345.tmp_0@GRAD"
slot_gradient: "embedding_346.tmp_0@GRAD"
slot_gradient: "embedding_347.tmp_0@GRAD"
slot_gradient: "embedding_348.tmp_0@GRAD"
slot_gradient: "embedding_349.tmp_0@GRAD"
slot_gradient: "embedding_350.tmp_0@GRAD"
slot_gradient: "embedding_351.tmp_0@GRAD"
slot_gradient: "embedding_352.tmp_0@GRAD"
slot_gradient: "embedding_353.tmp_0@GRAD"
slot_gradient: "embedding_354.tmp_0@GRAD"
slot_gradient: "embedding_355.tmp_0@GRAD"
slot_gradient: "embedding_356.tmp_0@GRAD"
slot_gradient: "embedding_357.tmp_0@GRAD"
slot_gradient: "embedding_358.tmp_0@GRAD"
slot_gradient: "embedding_359.tmp_0@GRAD"
slot_gradient: "embedding_360.tmp_0@GRAD"
slot_gradient: "embedding_361.tmp_0@GRAD"
slot_gradient: "embedding_362.tmp_0@GRAD"
slot_gradient: "embedding_363.tmp_0@GRAD"
slot_gradient: "embedding_364.tmp_0@GRAD"
slot_gradient: "embedding_365.tmp_0@GRAD"
slot_gradient: "embedding_366.tmp_0@GRAD"
slot_gradient: "embedding_367.tmp_0@GRAD"
slot_gradient: "embedding_368.tmp_0@GRAD"
slot_gradient: "embedding_369.tmp_0@GRAD"
slot_gradient: "embedding_370.tmp_0@GRAD"
slot_gradient: "embedding_371.tmp_0@GRAD"
slot_gradient: "embedding_372.tmp_0@GRAD"
slot_gradient: "embedding_373.tmp_0@GRAD"
slot_gradient: "embedding_374.tmp_0@GRAD"
slot_gradient: "embedding_375.tmp_0@GRAD"
slot_gradient: "embedding_376.tmp_0@GRAD"
slot_gradient: "embedding_377.tmp_0@GRAD"
slot_gradient: "embedding_378.tmp_0@GRAD"
slot_gradient: "embedding_379.tmp_0@GRAD"
slot_gradient: "embedding_380.tmp_0@GRAD"
slot_gradient: "embedding_381.tmp_0@GRAD"
slot_gradient: "embedding_382.tmp_0@GRAD"
slot_gradient: "embedding_383.tmp_0@GRAD"
slot_gradient: "embedding_384.tmp_0@GRAD"
slot_gradient: "embedding_385.tmp_0@GRAD"
slot_gradient: "embedding_386.tmp_0@GRAD"
slot_gradient: "embedding_387.tmp_0@GRAD"
slot_gradient: "embedding_388.tmp_0@GRAD"
slot_gradient: "embedding_389.tmp_0@GRAD"
slot_gradient: "embedding_390.tmp_0@GRAD"
slot_gradient: "embedding_391.tmp_0@GRAD"
slot_gradient: "embedding_392.tmp_0@GRAD"
slot_gradient: "embedding_393.tmp_0@GRAD"
slot_gradient: "embedding_394.tmp_0@GRAD"
slot_gradient: "embedding_395.tmp_0@GRAD"
slot_gradient: "embedding_396.tmp_0@GRAD"
slot_gradient: "embedding_397.tmp_0@GRAD"
slot_gradient: "embedding_398.tmp_0@GRAD"
slot_gradient: "embedding_399.tmp_0@GRAD"
slot_gradient: "embedding_400.tmp_0@GRAD"
slot_gradient: "embedding_401.tmp_0@GRAD"
slot_gradient: "embedding_402.tmp_0@GRAD"
slot_gradient: "embedding_403.tmp_0@GRAD"
slot_gradient: "embedding_404.tmp_0@GRAD"
slot_gradient: "embedding_405.tmp_0@GRAD"
slot_gradient: "embedding_406.tmp_0@GRAD"
slot_gradient: "embedding_407.tmp_0@GRAD"
}
skip_op: "lookup_table"
skip_op: "lookup_table_grad"
}
fs_client_param {
uri: "afs://xingtian.afs.baidu.com:9902"
user: "mlarch"
passwd: "Fv1M87"
hadoop_bin: "$HADOOP_HOME/bin/hadoop"
}
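The fs_client_param block above is the AFS/HDFS client configuration the trainer uses when loading and saving models. As a rough sketch of how such fields are typically consumed (the flag mapping below is an assumption for illustration, not pslib's actual implementation):

    # Hypothetical sketch: turn fs_client_param-style fields into a hadoop fs call.
    # The -D fs.default.name / hadoop.job.ugi mapping is an assumption, not pslib's code.
    import os
    import subprocess

    def fs_ls(uri, user, passwd, hadoop_bin, path):
        cmd = [
            os.path.expandvars(hadoop_bin), "fs",
            "-D", "fs.default.name=" + uri,                 # cluster endpoint
            "-D", "hadoop.job.ugi=%s,%s" % (user, passwd),  # credentials
            "-ls", path,                                    # hypothetical remote path
        ]
        return subprocess.check_output(cmd)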
feed_deploy/news_jingpai/package/my_nets/model_new.py
0 → 100644
import paddle.fluid as fluid
from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet


class Model(object):
    def __init__(self, slot_file_name, all_slot_file, use_cvm, ins_tag, is_update_model):
        self._slot_file_name = slot_file_name
        self._use_cvm = use_cvm
        self._dict_dim = 10  # it's fake
        self._emb_dim = 9 + 2
        self._init_range = 0.2
        self._all_slot_file = all_slot_file
        self._not_use_slots = []
        self._not_use_slotemb = []
        self._all_slots = []
        self._ins_tag_value = ins_tag
        self._is_update_model = is_update_model
        self._train_program = fluid.Program()
        self._startup_program = fluid.Program()
        self.save_vars = []
        with fluid.program_guard(self._train_program, self._startup_program):
            with fluid.unique_name.guard():
                self.show = fluid.layers.data(name="show", shape=[-1, 1], dtype="int64",
                                              lod_level=0, append_batch_size=False)
                self.label = fluid.layers.data(name="click", shape=[-1, 1], dtype="int64",
                                               lod_level=0, append_batch_size=False)
                self.ins_weight = fluid.layers.data(name="12345", shape=[-1, 1], dtype="float32",
                                                    lod_level=0, append_batch_size=False,
                                                    stop_gradient=True)
                self.ins_tag = fluid.layers.data(name="23456", shape=[-1, 1], dtype="int64",
                                                 lod_level=0, append_batch_size=False,
                                                 stop_gradient=True)
                self.slots = []
                self.slots_name = []
                self.embs = []

                if self._ins_tag_value != 0:
                    self.x3_ts = fluid.layers.create_global_var(shape=[1, 1], value=self._ins_tag_value,
                                                                dtype='int64', persistable=True,
                                                                force_cpu=True, name='X3')
                    self.x3_ts.stop_gradient = True
                    self.label_after_filter, self.filter_loss = fluid.layers.filter_by_instag(
                        self.label, self.ins_tag, self.x3_ts, True)
                    self.label_after_filter.stop_gradient = True
                    self.show_after_filter, _ = fluid.layers.filter_by_instag(
                        self.show, self.ins_tag, self.x3_ts, True)
                    self.show_after_filter.stop_gradient = True
                    self.ins_weight_after_filter, _ = fluid.layers.filter_by_instag(
                        self.ins_weight, self.ins_tag, self.x3_ts, True)
                    self.ins_weight_after_filter.stop_gradient = True

                for line in open(self._slot_file_name, 'r'):
                    slot = line.strip()
                    self.slots_name.append(slot)
                self.all_slots_name = []
                for line in open(self._all_slot_file, 'r'):
                    self.all_slots_name.append(line.strip())
                for i in self.all_slots_name:
                    if i == self.ins_weight.name or i == self.ins_tag.name:
                        pass
                    elif i not in self.slots_name:
                        pass
                    else:
                        l = fluid.layers.data(name=i, shape=[1], dtype="int64", lod_level=1)
                        emb = fluid.layers.embedding(input=l, size=[self._dict_dim, self._emb_dim],
                                                     is_sparse=True, is_distributed=True,
                                                     param_attr=fluid.ParamAttr(name="embedding"))
                        self.slots.append(l)
                        self.embs.append(emb)

                if self._ins_tag_value != 0:
                    self.emb = self.slot_net(self.slots, self.label_after_filter)
                else:
                    self.emb = self.slot_net(self.slots, self.label)
                self.similarity_norm = fluid.layers.sigmoid(
                    fluid.layers.clip(self.emb, min=-15.0, max=15.0), name="similarity_norm")
                if self._ins_tag_value != 0:
                    self.cost = fluid.layers.log_loss(
                        input=self.similarity_norm,
                        label=fluid.layers.cast(x=self.label_after_filter, dtype='float32'))
                else:
                    self.cost = fluid.layers.log_loss(
                        input=self.similarity_norm,
                        label=fluid.layers.cast(x=self.label, dtype='float32'))
                if self._ins_tag_value != 0:
                    self.cost = fluid.layers.elementwise_mul(self.cost, self.ins_weight_after_filter)
                else:
                    self.cost = fluid.layers.elementwise_mul(self.cost, self.ins_weight)
                if self._ins_tag_value != 0:
                    self.cost = fluid.layers.elementwise_mul(self.cost, self.filter_loss)
                self.avg_cost = fluid.layers.mean(x=self.cost)

                binary_predict = fluid.layers.concat(
                    input=[fluid.layers.elementwise_sub(
                        fluid.layers.ceil(self.similarity_norm), self.similarity_norm),
                        self.similarity_norm],
                    axis=1)
                if self._ins_tag_value != 0:
                    self.auc, batch_auc, [self.batch_stat_pos, self.batch_stat_neg,
                                          self.stat_pos, self.stat_neg] = \
                        fluid.layers.auc(input=binary_predict, label=self.label_after_filter,
                                         curve='ROC', num_thresholds=4096)
                    self.sqrerr, self.abserr, self.prob, self.q, self.pos, self.total = \
                        fluid.contrib.layers.ctr_metric_bundle(
                            self.similarity_norm,
                            fluid.layers.cast(x=self.label_after_filter, dtype='float32'))
                    #self.precise_ins_num = fluid.layers.create_global_var(persistable=True, dtype='float32', shape=[1])
                    #batch_ins_num = fluid.layers.reduce_sum(self.filter_loss)
                    #self.precise_ins_num = fluid.layers.elementwise_add(batch_ins_num, self.precise_ins_num)
                else:
                    self.auc, batch_auc, [self.batch_stat_pos, self.batch_stat_neg,
                                          self.stat_pos, self.stat_neg] = \
                        fluid.layers.auc(input=binary_predict, label=self.label,
                                         curve='ROC', num_thresholds=4096)
                    self.sqrerr, self.abserr, self.prob, self.q, self.pos, self.total = \
                        fluid.contrib.layers.ctr_metric_bundle(
                            self.similarity_norm,
                            fluid.layers.cast(x=self.label, dtype='float32'))

        self.tmp_train_program = fluid.Program()
        self.tmp_startup_program = fluid.Program()
        with fluid.program_guard(self.tmp_train_program, self.tmp_startup_program):
            with fluid.unique_name.guard():
                self._all_slots = [self.show, self.label]
                self._merge_slots = []
                for i in self.all_slots_name:
                    if i == self.ins_weight.name:
                        self._all_slots.append(self.ins_weight)
                    elif i == self.ins_tag.name:
                        self._all_slots.append(self.ins_tag)
                    else:
                        l = fluid.layers.data(name=i, shape=[1], dtype="int64", lod_level=1)
                        self._all_slots.append(l)
                        self._merge_slots.append(l)

    def slot_net(self, slots, label, lr_x=1.0):
        input_data = []
        cvms = []
        cast_label = fluid.layers.cast(label, dtype='float32')
        cast_label.stop_gradient = True
        ones = fluid.layers.fill_constant_batch_size_like(input=label, shape=[-1, 1],
                                                          dtype="float32", value=1)
        show_clk = fluid.layers.cast(fluid.layers.concat([ones, cast_label], axis=1),
                                     dtype='float32')
        show_clk.stop_gradient = True
        for index in range(len(slots)):
            input_data.append(slots[index])
            emb = self.embs[index]
            bow = fluid.layers.sequence_pool(input=emb, pool_type='sum')
            cvm = fluid.layers.continuous_value_model(bow, show_clk, self._use_cvm)
            cvms.append(cvm)
        concat = None
        if self._ins_tag_value != 0:
            concat = fluid.layers.concat(cvms, axis=1)
            concat, _ = fluid.layers.filter_by_instag(concat, self.ins_tag, self.x3_ts, False)
        else:
            concat = fluid.layers.concat(cvms, axis=1)
        bn = concat
        if self._use_cvm:
            bn = fluid.layers.data_norm(input=concat, name="bn6048", epsilon=1e-4,
                                        param_attr={"batch_size": 1e4,
                                                    "batch_sum_default": 0.0,
                                                    "batch_square": 1e4})
        self.save_vars.append(bn)
        fc_layers_input = [bn]
        if self._is_update_model:
            fc_layers_size = [511, 255, 127, 127, 127, 1]
        else:
            fc_layers_size = [511, 255, 255, 127, 127, 127, 127, 1]
        fc_layers_act = ["relu"] * (len(fc_layers_size) - 1) + [None]
        scales_tmp = [bn.shape[1]] + fc_layers_size
        scales = []
        for i in range(len(scales_tmp)):
            scales.append(self._init_range / (scales_tmp[i] ** 0.5))
        for i in range(len(fc_layers_size)):
            fc = fluid.layers.fc(
                input=fc_layers_input[-1],
                size=fc_layers_size[i],
                act=fc_layers_act[i],
                param_attr=fluid.ParamAttr(
                    learning_rate=lr_x,
                    initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=1.0 * scales[i])),
                bias_attr=fluid.ParamAttr(
                    learning_rate=lr_x,
                    initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=1.0 * scales[i])))
            fc_layers_input.append(fc)
            self.save_vars.append(fc)
        return fc_layers_input[-1]
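A minimal usage sketch, assuming the slot list and all-slot dictionary shipped in this package ("slot/slot", "all_slot.dict"); the real wiring lives in the trainer entry scripts:

    # Sketch only: ins_tag=0 skips the filter_by_instag branch,
    # is_update_model=False selects the deeper 8-layer fc stack.
    model = Model(slot_file_name="slot/slot",
                  all_slot_file="all_slot.dict",
                  use_cvm=True,
                  ins_tag=0,
                  is_update_model=False)
    # model._train_program / model._startup_program are handed to the fleet
    # optimizer; model.avg_cost is the loss, model.auc the evaluation metric.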
feed_deploy/news_jingpai/package/my_nets/model_new_jc.py
0 → 100644
import paddle.fluid as fluid
from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet


class ModelJoinCommon(object):
    def __init__(self, slot_file_name, slot_common_file_name, all_slot_file, join_ins_tag):
        self.slot_file_name = slot_file_name
        self.slot_common_file_name = slot_common_file_name
        self.dict_dim = 10  # it's fake
        self.emb_dim = 9 + 2
        self.init_range = 0.2
        self.all_slot_file = all_slot_file
        self.ins_tag_v = join_ins_tag
        self._train_program = fluid.Program()
        self._startup_program = fluid.Program()
        with fluid.program_guard(self._train_program, self._startup_program):
            with fluid.unique_name.guard():
                self.show = fluid.layers.data(name="show", shape=[-1, 1], dtype="int64",
                                              lod_level=0, append_batch_size=False)
                self.label = fluid.layers.data(name="click", shape=[-1, 1], dtype="int64",
                                               lod_level=0, append_batch_size=False)
                self.ins_weight = fluid.layers.data(name="12345", shape=[-1, 1], dtype="float32",
                                                    lod_level=0, append_batch_size=False,
                                                    stop_gradient=True)
                self.ins_tag = fluid.layers.data(name="23456", shape=[-1, 1], dtype="int64",
                                                 lod_level=0, append_batch_size=False,
                                                 stop_gradient=True)
                self.x3_ts = fluid.layers.create_global_var(shape=[1, 1], value=self.ins_tag_v,
                                                            dtype='int64', persistable=True,
                                                            force_cpu=True, name='X3')
                self.x3_ts.stop_gradient = True
                self.label_after_filter, self.filter_loss = fluid.layers.filter_by_instag(
                    self.label, self.ins_tag, self.x3_ts, True)
                self.label_after_filter.stop_gradient = True
                self.show_after_filter, _ = fluid.layers.filter_by_instag(
                    self.show, self.ins_tag, self.x3_ts, True)
                self.show_after_filter.stop_gradient = True
                self.ins_weight_after_filter, _ = fluid.layers.filter_by_instag(
                    self.ins_weight, self.ins_tag, self.x3_ts, True)
                self.ins_weight_after_filter.stop_gradient = True

                self.slots_name = []
                for line in open(self.slot_file_name, 'r'):
                    slot = line.strip()
                    self.slots_name.append(slot)
                self.all_slots_name = []
                for line in open(self.all_slot_file, 'r'):
                    self.all_slots_name.append(line.strip())
                self.slots = []
                self.embs = []
                for i in self.all_slots_name:
                    if i == self.ins_weight.name or i == self.ins_tag.name:
                        pass
                    elif i not in self.slots_name:
                        pass
                    else:
                        l = fluid.layers.data(name=i, shape=[1], dtype="int64", lod_level=1)
                        emb = fluid.layers.embedding(input=l, size=[self.dict_dim, self.emb_dim],
                                                     is_sparse=True, is_distributed=True,
                                                     param_attr=fluid.ParamAttr(name="embedding"))
                        self.slots.append(l)
                        self.embs.append(emb)
                self.common_slot_name = []
                for i in open(self.slot_common_file_name, 'r'):
                    self.common_slot_name.append(i.strip())

                cvms = []
                cast_label = fluid.layers.cast(self.label, dtype='float32')
                cast_label.stop_gradient = True
                ones = fluid.layers.fill_constant_batch_size_like(input=self.label, shape=[-1, 1],
                                                                  dtype="float32", value=1)
                show_clk = fluid.layers.cast(fluid.layers.concat([ones, cast_label], axis=1),
                                             dtype='float32')
                show_clk.stop_gradient = True
                for index in range(len(self.embs)):
                    emb = self.embs[index]
                    emb.stop_gradient = True
                    bow = fluid.layers.sequence_pool(input=emb, pool_type='sum')
                    bow.stop_gradient = True
                    cvm = fluid.layers.continuous_value_model(bow, show_clk, True)
                    cvm.stop_gradient = True
                    cvms.append(cvm)
                concat_join = fluid.layers.concat(cvms, axis=1)
                concat_join.stop_gradient = True

                cvms_common = []
                for index in range(len(self.common_slot_name)):
                    cvms_common.append(cvms[index])
                concat_common = fluid.layers.concat(cvms_common, axis=1)
                concat_common.stop_gradient = True
                bn_common = fluid.layers.data_norm(input=concat_common, name="common", epsilon=1e-4,
                                                   param_attr={"batch_size": 1e4,
                                                               "batch_sum_default": 0.0,
                                                               "batch_square": 1e4})

                concat_join, _ = fluid.layers.filter_by_instag(concat_join, self.ins_tag,
                                                               self.x3_ts, False)
                concat_join.stop_gradient = True
                bn_join = fluid.layers.data_norm(input=concat_join, name="join", epsilon=1e-4,
                                                 param_attr={"batch_size": 1e4,
                                                             "batch_sum_default": 0.0,
                                                             "batch_square": 1e4})

                join_fc = self.fcs(bn_join, "join")
                join_similarity_norm = fluid.layers.sigmoid(
                    fluid.layers.clip(join_fc, min=-15.0, max=15.0), name="join_similarity_norm")
                join_cost = fluid.layers.log_loss(
                    input=join_similarity_norm,
                    label=fluid.layers.cast(x=self.label_after_filter, dtype='float32'))
                join_cost = fluid.layers.elementwise_mul(join_cost, self.ins_weight_after_filter)
                join_cost = fluid.layers.elementwise_mul(join_cost, self.filter_loss)
                join_avg_cost = fluid.layers.mean(x=join_cost)

                common_fc = self.fcs(bn_common, "common")
                common_similarity_norm = fluid.layers.sigmoid(
                    fluid.layers.clip(common_fc, min=-15.0, max=15.0),
                    name="common_similarity_norm")
                common_cost = fluid.layers.log_loss(
                    input=common_similarity_norm,
                    label=fluid.layers.cast(x=self.label, dtype='float32'))
                common_cost = fluid.layers.elementwise_mul(common_cost, self.ins_weight)
                common_avg_cost = fluid.layers.mean(x=common_cost)

                self.joint_cost = join_avg_cost + common_avg_cost

                join_binary_predict = fluid.layers.concat(
                    input=[fluid.layers.elementwise_sub(
                        fluid.layers.ceil(join_similarity_norm), join_similarity_norm),
                        join_similarity_norm],
                    axis=1)
                self.join_auc, batch_auc, [self.join_batch_stat_pos, self.join_batch_stat_neg,
                                           self.join_stat_pos, self.join_stat_neg] = \
                    fluid.layers.auc(input=join_binary_predict, label=self.label_after_filter,
                                     curve='ROC', num_thresholds=4096)
                self.join_sqrerr, self.join_abserr, self.join_prob, self.join_q, self.join_pos, self.join_total = \
                    fluid.contrib.layers.ctr_metric_bundle(
                        join_similarity_norm,
                        fluid.layers.cast(x=self.label_after_filter, dtype='float32'))

                common_binary_predict = fluid.layers.concat(
                    input=[fluid.layers.elementwise_sub(
                        fluid.layers.ceil(common_similarity_norm), common_similarity_norm),
                        common_similarity_norm],
                    axis=1)
                self.common_auc, batch_auc, [self.common_batch_stat_pos, self.common_batch_stat_neg,
                                             self.common_stat_pos, self.common_stat_neg] = \
                    fluid.layers.auc(input=common_binary_predict, label=self.label,
                                     curve='ROC', num_thresholds=4096)
                self.common_sqrerr, self.common_abserr, self.common_prob, self.common_q, self.common_pos, self.common_total = \
                    fluid.contrib.layers.ctr_metric_bundle(
                        common_similarity_norm,
                        fluid.layers.cast(x=self.label, dtype='float32'))

        self.tmp_train_program = fluid.Program()
        self.tmp_startup_program = fluid.Program()
        with fluid.program_guard(self.tmp_train_program, self.tmp_startup_program):
            with fluid.unique_name.guard():
                self._all_slots = [self.show, self.label]
                self._merge_slots = []
                for i in self.all_slots_name:
                    if i == self.ins_weight.name:
                        self._all_slots.append(self.ins_weight)
                    elif i == self.ins_tag.name:
                        self._all_slots.append(self.ins_tag)
                    else:
                        l = fluid.layers.data(name=i, shape=[1], dtype="int64", lod_level=1)
                        self._all_slots.append(l)
                        self._merge_slots.append(l)

    def fcs(self, bn, prefix):
        fc_layers_input = [bn]
        fc_layers_size = [511, 255, 255, 127, 127, 127, 127, 1]
        fc_layers_act = ["relu"] * (len(fc_layers_size) - 1) + [None]
        scales_tmp = [bn.shape[1]] + fc_layers_size
        scales = []
        for i in range(len(scales_tmp)):
            scales.append(self.init_range / (scales_tmp[i] ** 0.5))
        for i in range(len(fc_layers_size)):
            name = prefix + "_" + str(i)
            fc = fluid.layers.fc(
                input=fc_layers_input[-1],
                size=fc_layers_size[i],
                act=fc_layers_act[i],
                param_attr=fluid.ParamAttr(
                    learning_rate=1.0,
                    initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=1.0 * scales[i])),
                bias_attr=fluid.ParamAttr(
                    learning_rate=1.0,
                    initializer=fluid.initializer.NormalInitializer(loc=0.0, scale=1.0 * scales[i])),
                name=name)
            fc_layers_input.append(fc)
        return fc_layers_input[-1]
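A minimal usage sketch under the same assumptions (file names taken from this package):

    # Sketch only: join_ins_tag is the instag value the join tower keeps;
    # the common tower always trains on the full, unfiltered batch.
    jc = ModelJoinCommon(slot_file_name="slot/slot",
                         slot_common_file_name="slot/slot_common",
                         all_slot_file="all_slot.dict",
                         join_ins_tag=1)
    # jc.joint_cost = join_avg_cost + common_avg_cost is what gets minimized.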
feed_deploy/news_jingpai/package/my_nets/my_data_generator_str.py
0 → 100644
import sys
import os
import paddle
import re
import collections
import time
#import paddle.fluid.incubate.data_generator as dg
import data_generate_base as dg


class MyDataset(dg.MultiSlotDataGenerator):
    def load_resource(self, dictf):
        self._all_slots_dict = collections.OrderedDict()
        with open(dictf, 'r') as f:
            slots = f.readlines()
        for index, slot in enumerate(slots):
            #self._all_slots_dict[slot.strip()] = [False, index + 3] #+3 #
            self._all_slots_dict[slot.strip()] = [False, index + 2]

    def generate_sample(self, line):
        def data_iter_str():
            s = line.split('\t')[0].split()  #[1:]
            lineid = s[0]
            elements = s[1:]  #line.split('\t')[0].split()[1:]
            padding = "0"
            # output = [("lineid", [lineid]), ("show", [elements[0]]), ("click", [elements[1]])]
            output = [("show", [elements[0]]), ("click", [elements[1]])]
            output.extend([(slot, []) for slot in self._all_slots_dict])
            for elem in elements[2:]:
                if elem.startswith("*"):
                    feasign = elem[1:]
                    slot = "12345"
                elif elem.startswith("$"):
                    feasign = elem[1:]
                    if feasign == "D":
                        feasign = "0"
                    slot = "23456"
                else:
                    feasign, slot = elem.split(':')
                #feasign, slot = elem.split(':')
                if not self._all_slots_dict.has_key(slot):
                    continue
                self._all_slots_dict[slot][0] = True
                index = self._all_slots_dict[slot][1]
                output[index][1].append(feasign)
            for slot in self._all_slots_dict:
                visit, index = self._all_slots_dict[slot]
                if visit:
                    self._all_slots_dict[slot][0] = False
                else:
                    output[index][1].append(padding)
            #print output
            yield output
        return data_iter_str

        def data_iter():
            elements = line.split('\t')[0].split()[1:]
            padding = 0
            output = [("show", [int(elements[0])]), ("click", [int(elements[1])])]
            #output += [(slot, []) for slot in self._all_slots_dict]
            output.extend([(slot, []) for slot in self._all_slots_dict])
            for elem in elements[2:]:
                feasign, slot = elem.split(':')
                if slot == "12345":
                    feasign = float(feasign)
                else:
                    feasign = int(feasign)
                if not self._all_slots_dict.has_key(slot):
                    continue
                self._all_slots_dict[slot][0] = True
                index = self._all_slots_dict[slot][1]
                output[index][1].append(feasign)
            for slot in self._all_slots_dict:
                visit, index = self._all_slots_dict[slot]
                if visit:
                    self._all_slots_dict[slot][0] = False
                else:
                    output[index][1].append(padding)
            yield output
        return data_iter


if __name__ == "__main__":
    #start = time.clock()
    d = MyDataset()
    d.load_resource("all_slot.dict")
    d.run_from_stdin()
    #elapsed = (time.clock() - start)
    #print("Time used:",elapsed)
feed_deploy/news_jingpai/package/my_nets/old_program/old_join_common_startup_program.bin
0 → 100644
File added
feed_deploy/news_jingpai/package/my_nets/old_program/old_join_common_train_program.bin
0 → 100644
File added
feed_deploy/news_jingpai/package/my_nets/old_program/old_update_main_program.bin
0 → 100644
File added
feed_deploy/news_jingpai/package/my_nets/old_program/old_update_startup_program.bin
0 → 100644
File added
feed_deploy/news_jingpai/package/my_nets/old_slot/slot
0 → 100644
6048
6002
6145
6202
6201
6121
6738
6119
6146
6120
6147
6122
6123
6118
6142
6143
6008
6148
6151
6127
6144
6094
6083
6952
6739
6150
6109
6003
6099
6149
6129
6203
6153
6152
6128
6106
6251
7082
7515
6951
6949
7080
6066
7507
6186
6007
7514
6125
7506
10001
6006
7023
6085
10000
6098
6250
6110
6124
6090
6082
6067
6101
6004
6191
7075
6948
6157
6126
6188
7077
6070
6111
6087
6103
6107
6194
6156
6005
6247
6814
6158
7122
6058
6189
7058
6059
6115
7079
7081
6833
7024
6108
13342
13345
13412
13343
13350
13346
13409
6009
6011
6012
6013
6014
6015
6019
6023
6024
6027
6029
6031
6050
6060
6068
6069
6089
6095
6105
6112
6130
6131
6132
6134
6161
6162
6163
6166
6182
6183
6185
6190
6212
6213
6231
6233
6234
6236
6238
6239
6240
6241
6242
6243
6244
6245
6354
7002
7005
7008
7010
7012
7013
7015
7016
7017
7018
7019
7020
7045
7046
7048
7049
7052
7054
7056
7064
7066
7076
7078
7083
7084
7085
7086
7087
7088
7089
7090
7099
7100
7101
7102
7103
7104
7105
7109
7124
7126
7136
7142
7143
7144
7145
7146
7147
7148
7150
7151
7152
7153
7154
7155
7156
7157
7047
7050
6253
6254
6255
6256
6257
6259
6260
6261
7170
7185
7186
6751
6755
6757
6759
6760
6763
6764
6765
6766
6767
6768
6769
6770
7502
7503
7504
7505
7510
7511
7512
7513
6806
6807
6808
6809
6810
6811
6812
6813
6815
6816
6817
6819
6823
6828
6831
6840
6845
6875
6879
6881
6888
6889
6947
6950
6956
6957
6959
10006
10008
10009
10010
10011
10016
10017
10018
10019
10020
10021
10022
10023
10024
10029
10030
10031
10032
10033
10034
10035
10036
10037
10038
10039
10040
10041
10042
10044
10045
10046
10051
10052
10053
10054
10055
10056
10057
10060
10066
10069
6820
6821
6822
13333
13334
13335
13336
13337
13338
13339
13340
13341
13351
13352
13353
13359
13361
13362
13363
13366
13367
13368
13369
13370
13371
13375
13376
5700
5702
13400
13401
13402
13403
13404
13406
13407
13408
13410
13417
13418
13419
13420
13422
13425
13427
13428
13429
13430
13431
13433
13434
13436
13437
13326
13330
13331
5717
13442
13451
13452
13455
13456
13457
13458
13459
13460
13461
13462
13463
13464
13465
13466
13467
13468
1104
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
13812
13813
6740
1490
1491
feed_deploy/news_jingpai/package/my_nets/old_slot/slot_common
0 → 100644
6048
6002
6145
6202
6201
6121
6738
6119
6146
6120
6147
6122
6123
6118
6142
6143
6008
6148
6151
6127
6144
6094
6083
6952
6739
6150
6109
6003
6099
6149
6129
6203
6153
6152
6128
6106
6251
7082
7515
6951
6949
7080
6066
7507
6186
6007
7514
6125
7506
10001
6006
7023
6085
10000
6098
6250
6110
6124
6090
6082
6067
6101
6004
6191
7075
6948
6157
6126
6188
7077
6070
6111
6087
6103
6107
6194
6156
6005
6247
6814
6158
7122
6058
6189
7058
6059
6115
7079
7081
6833
7024
6108
13342
13345
13412
13343
13350
13346
13409
feed_deploy/news_jingpai/package/my_nets/old_slot/to.py
0 → 100644
with open("session_slot", "r") as fin:
    res = []
    for i in fin:
        res.append("\"" + i.strip() + "\"")
    print ", ".join(res)
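This helper just reformats a slot list: given a session_slot file with one slot id per line (for example 6048 and 6002), it prints "6048", "6002", a quoted, comma-separated form ready to paste into a config or fleet desc.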
feed_deploy/news_jingpai/package/my_nets/reqi_fleet_desc
0 → 100644
server_param {
downpour_server_param {
downpour_table_param {
table_id: 0
table_class: "DownpourSparseSSDTable"
shard_num: 1950
sparse_table_cache_rate: 0.0035
accessor {
accessor_class: "DownpourCtrAccessor"
sparse_sgd_param {
learning_rate: 0.05
initial_g2sum: 3.0
initial_range: 0.0001
weight_bounds: -10.0
weight_bounds: 10.0
}
fea_dim: 11
embedx_dim: 8
embedx_threshold: 10
downpour_accessor_param {
nonclk_coeff: 0.1
click_coeff: 1
base_threshold: 1.5
delta_threshold: 0.25
delta_keep_days: 16
delete_after_unseen_days: 30
show_click_decay_rate: 0.98
delete_threshold: 0.8
}
table_accessor_save_param {
param: 1
converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)"
deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)"
}
table_accessor_save_param {
param: 2
converter: "(scripts/xbox_compressor_mf.py | bin/xbox_pb_converter)"
deconverter: "(bin/xbox_pb_deconverter | scripts/xbox_decompressor_mf.awk)"
}
}
type: PS_SPARSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 1
table_class: "DownpourDenseTable"
accessor {
accessor_class: "DownpourDenseValueAccessor"
dense_sgd_param {
name: "adam"
adam {
learning_rate: 5e-06
avg_decay_rate: 0.999993
ada_decay_rate: 0.9999
ada_epsilon: 1e-08
mom_decay_rate: 0.99
}
naive {
learning_rate: 0.0002
}
}
}
type: PS_DENSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 2
table_class: "DownpourDenseDoubleTable"
accessor {
accessor_class: "DownpourDenseValueDoubleAccessor"
dense_sgd_param {
name: "summarydouble"
summary {
summary_decay_rate: 0.999999
}
}
}
type: PS_DENSE_TABLE
compress_in_save: true
}
downpour_table_param {
table_id: 3
table_class: "DownpourDenseTable"
accessor {
accessor_class: "DownpourDenseValueAccessor"
dense_sgd_param {
name: "adam"
adam {
learning_rate: 5e-06
avg_decay_rate: 0.999993
ada_decay_rate: 0.9999
ada_epsilon: 1e-08
mom_decay_rate: 0.99
}
naive {
learning_rate: 0.0002
}
}
}
type: PS_DENSE_TABLE
compress_in_save: true
}
service_param {
server_class: "DownpourBrpcPsServer"
client_class: "DownpourBrpcPsClient"
service_class: "DownpourPsService"
start_server_port: 0
server_thread_num: 12
}
}
}
trainer_param {
dense_table {
table_id: 1
dense_variable_name: "join_0.w_0"
dense_variable_name: "join_0.b_0"
dense_variable_name: "join_1.w_0"
dense_variable_name: "join_1.b_0"
dense_variable_name: "join_2.w_0"
dense_variable_name: "join_2.b_0"
dense_variable_name: "join_3.w_0"
dense_variable_name: "join_3.b_0"
dense_variable_name: "join_4.w_0"
dense_variable_name: "join_4.b_0"
dense_variable_name: "join_5.w_0"
dense_variable_name: "join_5.b_0"
dense_variable_name: "join_6.w_0"
dense_variable_name: "join_6.b_0"
dense_variable_name: "join_7.w_0"
dense_variable_name: "join_7.b_0"
dense_variable_name: "common_0.w_0"
dense_variable_name: "common_0.b_0"
dense_variable_name: "common_1.w_0"
dense_variable_name: "common_1.b_0"
dense_variable_name: "common_2.w_0"
dense_variable_name: "common_2.b_0"
dense_variable_name: "common_3.w_0"
dense_variable_name: "common_3.b_0"
dense_variable_name: "common_4.w_0"
dense_variable_name: "common_4.b_0"
dense_variable_name: "common_5.w_0"
dense_variable_name: "common_5.b_0"
dense_variable_name: "common_6.w_0"
dense_variable_name: "common_6.b_0"
dense_variable_name: "common_7.w_0"
dense_variable_name: "common_7.b_0"
dense_gradient_variable_name: "join_0.w_0@GRAD"
dense_gradient_variable_name: "join_0.b_0@GRAD"
dense_gradient_variable_name: "join_1.w_0@GRAD"
dense_gradient_variable_name: "join_1.b_0@GRAD"
dense_gradient_variable_name: "join_2.w_0@GRAD"
dense_gradient_variable_name: "join_2.b_0@GRAD"
dense_gradient_variable_name: "join_3.w_0@GRAD"
dense_gradient_variable_name: "join_3.b_0@GRAD"
dense_gradient_variable_name: "join_4.w_0@GRAD"
dense_gradient_variable_name: "join_4.b_0@GRAD"
dense_gradient_variable_name: "join_5.w_0@GRAD"
dense_gradient_variable_name: "join_5.b_0@GRAD"
dense_gradient_variable_name: "join_6.w_0@GRAD"
dense_gradient_variable_name: "join_6.b_0@GRAD"
dense_gradient_variable_name: "join_7.w_0@GRAD"
dense_gradient_variable_name: "join_7.b_0@GRAD"
dense_gradient_variable_name: "common_0.w_0@GRAD"
dense_gradient_variable_name: "common_0.b_0@GRAD"
dense_gradient_variable_name: "common_1.w_0@GRAD"
dense_gradient_variable_name: "common_1.b_0@GRAD"
dense_gradient_variable_name: "common_2.w_0@GRAD"
dense_gradient_variable_name: "common_2.b_0@GRAD"
dense_gradient_variable_name: "common_3.w_0@GRAD"
dense_gradient_variable_name: "common_3.b_0@GRAD"
dense_gradient_variable_name: "common_4.w_0@GRAD"
dense_gradient_variable_name: "common_4.b_0@GRAD"
dense_gradient_variable_name: "common_5.w_0@GRAD"
dense_gradient_variable_name: "common_5.b_0@GRAD"
dense_gradient_variable_name: "common_6.w_0@GRAD"
dense_gradient_variable_name: "common_6.b_0@GRAD"
dense_gradient_variable_name: "common_7.w_0@GRAD"
dense_gradient_variable_name: "common_7.b_0@GRAD"
}
dense_table {
table_id: 2
dense_variable_name: "join.batch_size"
dense_variable_name: "join.batch_sum"
dense_variable_name: "join.batch_square_sum"
dense_variable_name: "common.batch_size"
dense_variable_name: "common.batch_sum"
dense_variable_name: "common.batch_square_sum"
dense_gradient_variable_name: "join.batch_size@GRAD"
dense_gradient_variable_name: "join.batch_sum@GRAD"
dense_gradient_variable_name: "join.batch_square_sum@GRAD"
dense_gradient_variable_name: "common.batch_size@GRAD"
dense_gradient_variable_name: "common.batch_sum@GRAD"
dense_gradient_variable_name: "common.batch_square_sum@GRAD"
}
dense_table {
table_id: 3
dense_variable_name: "fc_0.w_0"
dense_variable_name: "fc_0.b_0"
dense_variable_name: "fc_1.w_0"
dense_variable_name: "fc_1.b_0"
dense_variable_name: "fc_2.w_0"
dense_variable_name: "fc_2.b_0"
dense_variable_name: "fc_3.w_0"
dense_variable_name: "fc_3.b_0"
dense_variable_name: "fc_4.w_0"
dense_variable_name: "fc_4.b_0"
dense_variable_name: "fc_5.w_0"
dense_variable_name: "fc_5.b_0"
dense_gradient_variable_name: "fc_0.w_0@GRAD"
dense_gradient_variable_name: "fc_0.b_0@GRAD"
dense_gradient_variable_name: "fc_1.w_0@GRAD"
dense_gradient_variable_name: "fc_1.b_0@GRAD"
dense_gradient_variable_name: "fc_2.w_0@GRAD"
dense_gradient_variable_name: "fc_2.b_0@GRAD"
dense_gradient_variable_name: "fc_3.w_0@GRAD"
dense_gradient_variable_name: "fc_3.b_0@GRAD"
dense_gradient_variable_name: "fc_4.w_0@GRAD"
dense_gradient_variable_name: "fc_4.b_0@GRAD"
dense_gradient_variable_name: "fc_5.w_0@GRAD"
dense_gradient_variable_name: "fc_5.b_0@GRAD"
}
sparse_table {
table_id: 0
slot_key: "6048"
slot_key: "6002"
slot_key: "6145"
slot_key: "6202"
slot_key: "6201"
slot_key: "6121"
slot_key: "6738"
slot_key: "6119"
slot_key: "6146"
slot_key: "6120"
slot_key: "6147"
slot_key: "6122"
slot_key: "6123"
slot_key: "6118"
slot_key: "6142"
slot_key: "6143"
slot_key: "6008"
slot_key: "6148"
slot_key: "6151"
slot_key: "6127"
slot_key: "6144"
slot_key: "6094"
slot_key: "6083"
slot_key: "6952"
slot_key: "6739"
slot_key: "6150"
slot_key: "6109"
slot_key: "6003"
slot_key: "6099"
slot_key: "6149"
slot_key: "6129"
slot_key: "6203"
slot_key: "6153"
slot_key: "6152"
slot_key: "6128"
slot_key: "6106"
slot_key: "6251"
slot_key: "7082"
slot_key: "7515"
slot_key: "6951"
slot_key: "6949"
slot_key: "7080"
slot_key: "6066"
slot_key: "7507"
slot_key: "6186"
slot_key: "6007"
slot_key: "7514"
slot_key: "6125"
slot_key: "7506"
slot_key: "10001"
slot_key: "6006"
slot_key: "7023"
slot_key: "6085"
slot_key: "10000"
slot_key: "6098"
slot_key: "6250"
slot_key: "6110"
slot_key: "6124"
slot_key: "6090"
slot_key: "6082"
slot_key: "6067"
slot_key: "6101"
slot_key: "6004"
slot_key: "6191"
slot_key: "7075"
slot_key: "6948"
slot_key: "6157"
slot_key: "6126"
slot_key: "6188"
slot_key: "7077"
slot_key: "6070"
slot_key: "6111"
slot_key: "6087"
slot_key: "6103"
slot_key: "6107"
slot_key: "6194"
slot_key: "6156"
slot_key: "6005"
slot_key: "6247"
slot_key: "6814"
slot_key: "6158"
slot_key: "7122"
slot_key: "6058"
slot_key: "6189"
slot_key: "7058"
slot_key: "6059"
slot_key: "6115"
slot_key: "7079"
slot_key: "7081"
slot_key: "6833"
slot_key: "7024"
slot_key: "6108"
slot_key: "13342"
slot_key: "13345"
slot_key: "13412"
slot_key: "13343"
slot_key: "13350"
slot_key: "13346"
slot_key: "13409"
slot_key: "6009"
slot_key: "6011"
slot_key: "6012"
slot_key: "6013"
slot_key: "6014"
slot_key: "6015"
slot_key: "6019"
slot_key: "6023"
slot_key: "6024"
slot_key: "6027"
slot_key: "6029"
slot_key: "6031"
slot_key: "6050"
slot_key: "6060"
slot_key: "6068"
slot_key: "6069"
slot_key: "6089"
slot_key: "6095"
slot_key: "6105"
slot_key: "6112"
slot_key: "6130"
slot_key: "6131"
slot_key: "6132"
slot_key: "6134"
slot_key: "6161"
slot_key: "6162"
slot_key: "6163"
slot_key: "6166"
slot_key: "6182"
slot_key: "6183"
slot_key: "6185"
slot_key: "6190"
slot_key: "6212"
slot_key: "6213"
slot_key: "6231"
slot_key: "6233"
slot_key: "6234"
slot_key: "6236"
slot_key: "6238"
slot_key: "6239"
slot_key: "6240"
slot_key: "6241"
slot_key: "6242"
slot_key: "6243"
slot_key: "6244"
slot_key: "6245"
slot_key: "6354"
slot_key: "7002"
slot_key: "7005"
slot_key: "7008"
slot_key: "7010"
slot_key: "7013"
slot_key: "7015"
slot_key: "7019"
slot_key: "7020"
slot_key: "7045"
slot_key: "7046"
slot_key: "7048"
slot_key: "7049"
slot_key: "7052"
slot_key: "7054"
slot_key: "7056"
slot_key: "7064"
slot_key: "7066"
slot_key: "7076"
slot_key: "7078"
slot_key: "7083"
slot_key: "7084"
slot_key: "7085"
slot_key: "7086"
slot_key: "7087"
slot_key: "7088"
slot_key: "7089"
slot_key: "7090"
slot_key: "7099"
slot_key: "7100"
slot_key: "7101"
slot_key: "7102"
slot_key: "7103"
slot_key: "7104"
slot_key: "7105"
slot_key: "7109"
slot_key: "7124"
slot_key: "7126"
slot_key: "7136"
slot_key: "7142"
slot_key: "7143"
slot_key: "7144"
slot_key: "7145"
slot_key: "7146"
slot_key: "7147"
slot_key: "7148"
slot_key: "7150"
slot_key: "7151"
slot_key: "7152"
slot_key: "7153"
slot_key: "7154"
slot_key: "7155"
slot_key: "7156"
slot_key: "7157"
slot_key: "7047"
slot_key: "7050"
slot_key: "6257"
slot_key: "6259"
slot_key: "6260"
slot_key: "6261"
slot_key: "7170"
slot_key: "7185"
slot_key: "7186"
slot_key: "6751"
slot_key: "6755"
slot_key: "6757"
slot_key: "6759"
slot_key: "6760"
slot_key: "6763"
slot_key: "6764"
slot_key: "6765"
slot_key: "6766"
slot_key: "6767"
slot_key: "6768"
slot_key: "6769"
slot_key: "6770"
slot_key: "7502"
slot_key: "7503"
slot_key: "7504"
slot_key: "7505"
slot_key: "7510"
slot_key: "7511"
slot_key: "7512"
slot_key: "7513"
slot_key: "6806"
slot_key: "6807"
slot_key: "6808"
slot_key: "6809"
slot_key: "6810"
slot_key: "6811"
slot_key: "6812"
slot_key: "6813"
slot_key: "6815"
slot_key: "6816"
slot_key: "6817"
slot_key: "6819"
slot_key: "6823"
slot_key: "6828"
slot_key: "6831"
slot_key: "6840"
slot_key: "6845"
slot_key: "6875"
slot_key: "6879"
slot_key: "6881"
slot_key: "6888"
slot_key: "6889"
slot_key: "6947"
slot_key: "6950"
slot_key: "6956"
slot_key: "6957"
slot_key: "6959"
slot_key: "10006"
slot_key: "10008"
slot_key: "10009"
slot_key: "10010"
slot_key: "10011"
slot_key: "10016"
slot_key: "10017"
slot_key: "10018"
slot_key: "10019"
slot_key: "10020"
slot_key: "10021"
slot_key: "10022"
slot_key: "10023"
slot_key: "10024"
slot_key: "10029"
slot_key: "10030"
slot_key: "10031"
slot_key: "10032"
slot_key: "10033"
slot_key: "10034"
slot_key: "10035"
slot_key: "10036"
slot_key: "10037"
slot_key: "10038"
slot_key: "10039"
slot_key: "10040"
slot_key: "10041"
slot_key: "10042"
slot_key: "10044"
slot_key: "10045"
slot_key: "10046"
slot_key: "10051"
slot_key: "10052"
slot_key: "10053"
slot_key: "10054"
slot_key: "10055"
slot_key: "10056"
slot_key: "10057"
slot_key: "10060"
slot_key: "10066"
slot_key: "10069"
slot_key: "6820"
slot_key: "6821"
slot_key: "6822"
slot_key: "13333"
slot_key: "13334"
slot_key: "13335"
slot_key: "13336"
slot_key: "13337"
slot_key: "13338"
slot_key: "13339"
slot_key: "13340"
slot_key: "13341"
slot_key: "13351"
slot_key: "13352"
slot_key: "13353"
slot_key: "13359"
slot_key: "13361"
slot_key: "13362"
slot_key: "13363"
slot_key: "13366"
slot_key: "13367"
slot_key: "13368"
slot_key: "13369"
slot_key: "13370"
slot_key: "13371"
slot_key: "13375"
slot_key: "13376"
slot_key: "5700"
slot_key: "5702"
slot_key: "13400"
slot_key: "13401"
slot_key: "13402"
slot_key: "13403"
slot_key: "13404"
slot_key: "13406"
slot_key: "13407"
slot_key: "13408"
slot_key: "13410"
slot_key: "13417"
slot_key: "13418"
slot_key: "13419"
slot_key: "13420"
slot_key: "13422"
slot_key: "13425"
slot_key: "13427"
slot_key: "13428"
slot_key: "13429"
slot_key: "13430"
slot_key: "13431"
slot_key: "13433"
slot_key: "13434"
slot_key: "13436"
slot_key: "13437"
slot_key: "13326"
slot_key: "13330"
slot_key: "13331"
slot_key: "5717"
slot_key: "13442"
slot_key: "13451"
slot_key: "13452"
slot_key: "13455"
slot_key: "13456"
slot_key: "13457"
slot_key: "13458"
slot_key: "13459"
slot_key: "13460"
slot_key: "13461"
slot_key: "13462"
slot_key: "13463"
slot_key: "13464"
slot_key: "13465"
slot_key: "13466"
slot_key: "13467"
slot_key: "13468"
slot_key: "1104"
slot_key: "1106"
slot_key: "1107"
slot_key: "1108"
slot_key: "1109"
slot_key: "1110"
slot_key: "1111"
slot_key: "1112"
slot_key: "1113"
slot_key: "1114"
slot_key: "1115"
slot_key: "1116"
slot_key: "1117"
slot_key: "1119"
slot_key: "1120"
slot_key: "1121"
slot_key: "1122"
slot_key: "1123"
slot_key: "1124"
slot_key: "1125"
slot_key: "1126"
slot_key: "1127"
slot_key: "1128"
slot_key: "1129"
slot_key: "13812"
slot_key: "13813"
slot_key: "6740"
slot_key: "1490"
slot_key: "32915"
slot_key: "32950"
slot_key: "32952"
slot_key: "32953"
slot_key: "32954"
slot_key: "33077"
slot_key: "33085"
slot_key: "33086"
slot_value: "embedding_0.tmp_0"
slot_value: "embedding_1.tmp_0"
slot_value: "embedding_2.tmp_0"
slot_value: "embedding_3.tmp_0"
slot_value: "embedding_4.tmp_0"
slot_value: "embedding_5.tmp_0"
slot_value: "embedding_6.tmp_0"
slot_value: "embedding_7.tmp_0"
slot_value: "embedding_8.tmp_0"
slot_value: "embedding_9.tmp_0"
slot_value: "embedding_10.tmp_0"
slot_value: "embedding_11.tmp_0"
slot_value: "embedding_12.tmp_0"
slot_value: "embedding_13.tmp_0"
slot_value: "embedding_14.tmp_0"
slot_value: "embedding_15.tmp_0"
slot_value: "embedding_16.tmp_0"
slot_value: "embedding_17.tmp_0"
slot_value: "embedding_18.tmp_0"
slot_value: "embedding_19.tmp_0"
slot_value: "embedding_20.tmp_0"
slot_value: "embedding_21.tmp_0"
slot_value: "embedding_22.tmp_0"
slot_value: "embedding_23.tmp_0"
slot_value: "embedding_24.tmp_0"
slot_value: "embedding_25.tmp_0"
slot_value: "embedding_26.tmp_0"
slot_value: "embedding_27.tmp_0"
slot_value: "embedding_28.tmp_0"
slot_value: "embedding_29.tmp_0"
slot_value: "embedding_30.tmp_0"
slot_value: "embedding_31.tmp_0"
slot_value: "embedding_32.tmp_0"
slot_value: "embedding_33.tmp_0"
slot_value: "embedding_34.tmp_0"
slot_value: "embedding_35.tmp_0"
slot_value: "embedding_36.tmp_0"
slot_value: "embedding_37.tmp_0"
slot_value: "embedding_38.tmp_0"
slot_value: "embedding_39.tmp_0"
slot_value: "embedding_40.tmp_0"
slot_value: "embedding_41.tmp_0"
slot_value: "embedding_42.tmp_0"
slot_value: "embedding_43.tmp_0"
slot_value: "embedding_44.tmp_0"
slot_value: "embedding_45.tmp_0"
slot_value: "embedding_46.tmp_0"
slot_value: "embedding_47.tmp_0"
slot_value: "embedding_48.tmp_0"
slot_value: "embedding_49.tmp_0"
slot_value: "embedding_50.tmp_0"
slot_value: "embedding_51.tmp_0"
slot_value: "embedding_52.tmp_0"
slot_value: "embedding_53.tmp_0"
slot_value: "embedding_54.tmp_0"
slot_value: "embedding_55.tmp_0"
slot_value: "embedding_56.tmp_0"
slot_value: "embedding_57.tmp_0"
slot_value: "embedding_58.tmp_0"
slot_value: "embedding_59.tmp_0"
slot_value: "embedding_60.tmp_0"
slot_value: "embedding_61.tmp_0"
slot_value: "embedding_62.tmp_0"
slot_value: "embedding_63.tmp_0"
slot_value: "embedding_64.tmp_0"
slot_value: "embedding_65.tmp_0"
slot_value: "embedding_66.tmp_0"
slot_value: "embedding_67.tmp_0"
slot_value: "embedding_68.tmp_0"
slot_value: "embedding_69.tmp_0"
slot_value: "embedding_70.tmp_0"
slot_value: "embedding_71.tmp_0"
slot_value: "embedding_72.tmp_0"
slot_value: "embedding_73.tmp_0"
slot_value: "embedding_74.tmp_0"
slot_value: "embedding_75.tmp_0"
slot_value: "embedding_76.tmp_0"
slot_value: "embedding_77.tmp_0"
slot_value: "embedding_78.tmp_0"
slot_value: "embedding_79.tmp_0"
slot_value: "embedding_80.tmp_0"
slot_value: "embedding_81.tmp_0"
slot_value: "embedding_82.tmp_0"
slot_value: "embedding_83.tmp_0"
slot_value: "embedding_84.tmp_0"
slot_value: "embedding_85.tmp_0"
slot_value: "embedding_86.tmp_0"
slot_value: "embedding_87.tmp_0"
slot_value: "embedding_88.tmp_0"
slot_value: "embedding_89.tmp_0"
slot_value: "embedding_90.tmp_0"
slot_value: "embedding_91.tmp_0"
slot_value: "embedding_92.tmp_0"
slot_value: "embedding_93.tmp_0"
slot_value: "embedding_94.tmp_0"
slot_value: "embedding_95.tmp_0"
slot_value: "embedding_96.tmp_0"
slot_value: "embedding_97.tmp_0"
slot_value: "embedding_98.tmp_0"
slot_value: "embedding_99.tmp_0"
slot_value: "embedding_100.tmp_0"
slot_value: "embedding_101.tmp_0"
slot_value: "embedding_102.tmp_0"
slot_value: "embedding_103.tmp_0"
slot_value: "embedding_104.tmp_0"
slot_value: "embedding_105.tmp_0"
slot_value: "embedding_106.tmp_0"
slot_value: "embedding_107.tmp_0"
slot_value: "embedding_108.tmp_0"
slot_value: "embedding_109.tmp_0"
slot_value: "embedding_110.tmp_0"
slot_value: "embedding_111.tmp_0"
slot_value: "embedding_112.tmp_0"
slot_value: "embedding_113.tmp_0"
slot_value: "embedding_114.tmp_0"
slot_value: "embedding_115.tmp_0"
slot_value: "embedding_116.tmp_0"
slot_value: "embedding_117.tmp_0"
slot_value: "embedding_118.tmp_0"
slot_value: "embedding_119.tmp_0"
slot_value: "embedding_120.tmp_0"
slot_value: "embedding_121.tmp_0"
slot_value: "embedding_122.tmp_0"
slot_value: "embedding_123.tmp_0"
slot_value: "embedding_124.tmp_0"
slot_value: "embedding_125.tmp_0"
slot_value: "embedding_126.tmp_0"
slot_value: "embedding_127.tmp_0"
slot_value: "embedding_128.tmp_0"
slot_value: "embedding_129.tmp_0"
slot_value: "embedding_130.tmp_0"
slot_value: "embedding_131.tmp_0"
slot_value: "embedding_132.tmp_0"
slot_value: "embedding_133.tmp_0"
slot_value: "embedding_134.tmp_0"
slot_value: "embedding_135.tmp_0"
slot_value: "embedding_136.tmp_0"
slot_value: "embedding_137.tmp_0"
slot_value: "embedding_138.tmp_0"
slot_value: "embedding_139.tmp_0"
slot_value: "embedding_140.tmp_0"
slot_value: "embedding_141.tmp_0"
slot_value: "embedding_142.tmp_0"
slot_value: "embedding_143.tmp_0"
slot_value: "embedding_144.tmp_0"
slot_value: "embedding_145.tmp_0"
slot_value: "embedding_146.tmp_0"
slot_value: "embedding_147.tmp_0"
slot_value: "embedding_148.tmp_0"
slot_value: "embedding_149.tmp_0"
slot_value: "embedding_150.tmp_0"
slot_value: "embedding_151.tmp_0"
slot_value: "embedding_152.tmp_0"
slot_value: "embedding_153.tmp_0"
slot_value: "embedding_154.tmp_0"
slot_value: "embedding_155.tmp_0"
slot_value: "embedding_156.tmp_0"
slot_value: "embedding_157.tmp_0"
slot_value: "embedding_158.tmp_0"
slot_value: "embedding_159.tmp_0"
slot_value: "embedding_160.tmp_0"
slot_value: "embedding_161.tmp_0"
slot_value: "embedding_162.tmp_0"
slot_value: "embedding_163.tmp_0"
slot_value: "embedding_164.tmp_0"
slot_value: "embedding_165.tmp_0"
slot_value: "embedding_166.tmp_0"
slot_value: "embedding_167.tmp_0"
slot_value: "embedding_168.tmp_0"
slot_value: "embedding_169.tmp_0"
slot_value: "embedding_170.tmp_0"
slot_value: "embedding_171.tmp_0"
slot_value: "embedding_172.tmp_0"
slot_value: "embedding_173.tmp_0"
slot_value: "embedding_174.tmp_0"
slot_value: "embedding_175.tmp_0"
slot_value: "embedding_176.tmp_0"
slot_value: "embedding_177.tmp_0"
slot_value: "embedding_178.tmp_0"
slot_value: "embedding_179.tmp_0"
slot_value: "embedding_180.tmp_0"
slot_value: "embedding_181.tmp_0"
slot_value: "embedding_182.tmp_0"
slot_value: "embedding_183.tmp_0"
slot_value: "embedding_184.tmp_0"
slot_value: "embedding_185.tmp_0"
slot_value: "embedding_186.tmp_0"
slot_value: "embedding_187.tmp_0"
slot_value: "embedding_188.tmp_0"
slot_value: "embedding_189.tmp_0"
slot_value: "embedding_190.tmp_0"
slot_value: "embedding_191.tmp_0"
slot_value: "embedding_192.tmp_0"
slot_value: "embedding_193.tmp_0"
slot_value: "embedding_194.tmp_0"
slot_value: "embedding_195.tmp_0"
slot_value: "embedding_196.tmp_0"
slot_value: "embedding_197.tmp_0"
slot_value: "embedding_198.tmp_0"
slot_value: "embedding_199.tmp_0"
slot_value: "embedding_200.tmp_0"
slot_value: "embedding_201.tmp_0"
slot_value: "embedding_202.tmp_0"
slot_value: "embedding_203.tmp_0"
slot_value: "embedding_204.tmp_0"
slot_value: "embedding_205.tmp_0"
slot_value: "embedding_206.tmp_0"
slot_value: "embedding_207.tmp_0"
slot_value: "embedding_208.tmp_0"
slot_value: "embedding_209.tmp_0"
slot_value: "embedding_210.tmp_0"
slot_value: "embedding_211.tmp_0"
slot_value: "embedding_212.tmp_0"
slot_value: "embedding_213.tmp_0"
slot_value: "embedding_214.tmp_0"
slot_value: "embedding_215.tmp_0"
slot_value: "embedding_216.tmp_0"
slot_value: "embedding_217.tmp_0"
slot_value: "embedding_218.tmp_0"
slot_value: "embedding_219.tmp_0"
slot_value: "embedding_220.tmp_0"
slot_value: "embedding_221.tmp_0"
slot_value: "embedding_222.tmp_0"
slot_value: "embedding_223.tmp_0"
slot_value: "embedding_224.tmp_0"
slot_value: "embedding_225.tmp_0"
slot_value: "embedding_226.tmp_0"
slot_value: "embedding_227.tmp_0"
slot_value: "embedding_228.tmp_0"
slot_value: "embedding_229.tmp_0"
slot_value: "embedding_230.tmp_0"
slot_value: "embedding_231.tmp_0"
slot_value: "embedding_232.tmp_0"
slot_value: "embedding_233.tmp_0"
slot_value: "embedding_234.tmp_0"
slot_value: "embedding_235.tmp_0"
slot_value: "embedding_236.tmp_0"
slot_value: "embedding_237.tmp_0"
slot_value: "embedding_238.tmp_0"
slot_value: "embedding_239.tmp_0"
slot_value: "embedding_240.tmp_0"
slot_value: "embedding_241.tmp_0"
slot_value: "embedding_242.tmp_0"
slot_value: "embedding_243.tmp_0"
slot_value: "embedding_244.tmp_0"
slot_value: "embedding_245.tmp_0"
slot_value: "embedding_246.tmp_0"
slot_value: "embedding_247.tmp_0"
slot_value: "embedding_248.tmp_0"
slot_value: "embedding_249.tmp_0"
slot_value: "embedding_250.tmp_0"
slot_value: "embedding_251.tmp_0"
slot_value: "embedding_252.tmp_0"
slot_value: "embedding_253.tmp_0"
slot_value: "embedding_254.tmp_0"
slot_value: "embedding_255.tmp_0"
slot_value: "embedding_256.tmp_0"
slot_value: "embedding_257.tmp_0"
slot_value: "embedding_258.tmp_0"
slot_value: "embedding_259.tmp_0"
slot_value: "embedding_260.tmp_0"
slot_value: "embedding_261.tmp_0"
slot_value: "embedding_262.tmp_0"
slot_value: "embedding_263.tmp_0"
slot_value: "embedding_264.tmp_0"
slot_value: "embedding_265.tmp_0"
slot_value: "embedding_266.tmp_0"
slot_value: "embedding_267.tmp_0"
slot_value: "embedding_268.tmp_0"
slot_value: "embedding_269.tmp_0"
slot_value: "embedding_270.tmp_0"
slot_value: "embedding_271.tmp_0"
slot_value: "embedding_272.tmp_0"
slot_value: "embedding_273.tmp_0"
slot_value: "embedding_274.tmp_0"
slot_value: "embedding_275.tmp_0"
slot_value: "embedding_276.tmp_0"
slot_value: "embedding_277.tmp_0"
slot_value: "embedding_278.tmp_0"
slot_value: "embedding_279.tmp_0"
slot_value: "embedding_280.tmp_0"
slot_value: "embedding_281.tmp_0"
slot_value: "embedding_282.tmp_0"
slot_value: "embedding_283.tmp_0"
slot_value: "embedding_284.tmp_0"
slot_value: "embedding_285.tmp_0"
slot_value: "embedding_286.tmp_0"
slot_value: "embedding_287.tmp_0"
slot_value: "embedding_288.tmp_0"
slot_value: "embedding_289.tmp_0"
slot_value: "embedding_290.tmp_0"
slot_value: "embedding_291.tmp_0"
slot_value: "embedding_292.tmp_0"
slot_value: "embedding_293.tmp_0"
slot_value: "embedding_294.tmp_0"
slot_value: "embedding_295.tmp_0"
slot_value: "embedding_296.tmp_0"
slot_value: "embedding_297.tmp_0"
slot_value: "embedding_298.tmp_0"
slot_value: "embedding_299.tmp_0"
slot_value: "embedding_300.tmp_0"
slot_value: "embedding_301.tmp_0"
slot_value: "embedding_302.tmp_0"
slot_value: "embedding_303.tmp_0"
slot_value: "embedding_304.tmp_0"
slot_value: "embedding_305.tmp_0"
slot_value: "embedding_306.tmp_0"
slot_value: "embedding_307.tmp_0"
slot_value: "embedding_308.tmp_0"
slot_value: "embedding_309.tmp_0"
slot_value: "embedding_310.tmp_0"
slot_value: "embedding_311.tmp_0"
slot_value: "embedding_312.tmp_0"
slot_value: "embedding_313.tmp_0"
slot_value: "embedding_314.tmp_0"
slot_value: "embedding_315.tmp_0"
slot_value: "embedding_316.tmp_0"
slot_value: "embedding_317.tmp_0"
slot_value: "embedding_318.tmp_0"
slot_value: "embedding_319.tmp_0"
slot_value: "embedding_320.tmp_0"
slot_value: "embedding_321.tmp_0"
slot_value: "embedding_322.tmp_0"
slot_value: "embedding_323.tmp_0"
slot_value: "embedding_324.tmp_0"
slot_value: "embedding_325.tmp_0"
slot_value: "embedding_326.tmp_0"
slot_value: "embedding_327.tmp_0"
slot_value: "embedding_328.tmp_0"
slot_value: "embedding_329.tmp_0"
slot_value: "embedding_330.tmp_0"
slot_value: "embedding_331.tmp_0"
slot_value: "embedding_332.tmp_0"
slot_value: "embedding_333.tmp_0"
slot_value: "embedding_334.tmp_0"
slot_value: "embedding_335.tmp_0"
slot_value: "embedding_336.tmp_0"
slot_value: "embedding_337.tmp_0"
slot_value: "embedding_338.tmp_0"
slot_value: "embedding_339.tmp_0"
slot_value: "embedding_340.tmp_0"
slot_value: "embedding_341.tmp_0"
slot_value: "embedding_342.tmp_0"
slot_value: "embedding_343.tmp_0"
slot_value: "embedding_344.tmp_0"
slot_value: "embedding_345.tmp_0"
slot_value: "embedding_346.tmp_0"
slot_value: "embedding_347.tmp_0"
slot_value: "embedding_348.tmp_0"
slot_value: "embedding_349.tmp_0"
slot_value: "embedding_350.tmp_0"
slot_value: "embedding_351.tmp_0"
slot_value: "embedding_352.tmp_0"
slot_value: "embedding_353.tmp_0"
slot_value: "embedding_354.tmp_0"
slot_value: "embedding_355.tmp_0"
slot_value: "embedding_356.tmp_0"
slot_value: "embedding_357.tmp_0"
slot_value: "embedding_358.tmp_0"
slot_value: "embedding_359.tmp_0"
slot_value: "embedding_360.tmp_0"
slot_value: "embedding_361.tmp_0"
slot_value: "embedding_362.tmp_0"
slot_value: "embedding_363.tmp_0"
slot_value: "embedding_364.tmp_0"
slot_value: "embedding_365.tmp_0"
slot_value: "embedding_366.tmp_0"
slot_value: "embedding_367.tmp_0"
slot_value: "embedding_368.tmp_0"
slot_value: "embedding_369.tmp_0"
slot_value: "embedding_370.tmp_0"
slot_value: "embedding_371.tmp_0"
slot_value: "embedding_372.tmp_0"
slot_value: "embedding_373.tmp_0"
slot_value: "embedding_374.tmp_0"
slot_value: "embedding_375.tmp_0"
slot_value: "embedding_376.tmp_0"
slot_value: "embedding_377.tmp_0"
slot_value: "embedding_378.tmp_0"
slot_value: "embedding_379.tmp_0"
slot_value: "embedding_380.tmp_0"
slot_value: "embedding_381.tmp_0"
slot_value: "embedding_382.tmp_0"
slot_value: "embedding_383.tmp_0"
slot_value: "embedding_384.tmp_0"
slot_value: "embedding_385.tmp_0"
slot_value: "embedding_386.tmp_0"
slot_value: "embedding_387.tmp_0"
slot_value: "embedding_388.tmp_0"
slot_value: "embedding_389.tmp_0"
slot_value: "embedding_390.tmp_0"
slot_value: "embedding_391.tmp_0"
slot_value: "embedding_392.tmp_0"
slot_value: "embedding_393.tmp_0"
slot_value: "embedding_394.tmp_0"
slot_value: "embedding_395.tmp_0"
slot_value: "embedding_396.tmp_0"
slot_value: "embedding_397.tmp_0"
slot_value: "embedding_398.tmp_0"
slot_value: "embedding_399.tmp_0"
slot_value: "embedding_400.tmp_0"
slot_value: "embedding_401.tmp_0"
slot_value: "embedding_402.tmp_0"
slot_value: "embedding_403.tmp_0"
slot_value: "embedding_404.tmp_0"
slot_value: "embedding_405.tmp_0"
slot_value: "embedding_406.tmp_0"
slot_gradient: "embedding_0.tmp_0@GRAD"
slot_gradient: "embedding_1.tmp_0@GRAD"
slot_gradient: "embedding_2.tmp_0@GRAD"
slot_gradient: "embedding_3.tmp_0@GRAD"
slot_gradient: "embedding_4.tmp_0@GRAD"
slot_gradient: "embedding_5.tmp_0@GRAD"
slot_gradient: "embedding_6.tmp_0@GRAD"
slot_gradient: "embedding_7.tmp_0@GRAD"
slot_gradient: "embedding_8.tmp_0@GRAD"
slot_gradient: "embedding_9.tmp_0@GRAD"
slot_gradient: "embedding_10.tmp_0@GRAD"
slot_gradient: "embedding_11.tmp_0@GRAD"
slot_gradient: "embedding_12.tmp_0@GRAD"
slot_gradient: "embedding_13.tmp_0@GRAD"
slot_gradient: "embedding_14.tmp_0@GRAD"
slot_gradient: "embedding_15.tmp_0@GRAD"
slot_gradient: "embedding_16.tmp_0@GRAD"
slot_gradient: "embedding_17.tmp_0@GRAD"
slot_gradient: "embedding_18.tmp_0@GRAD"
slot_gradient: "embedding_19.tmp_0@GRAD"
slot_gradient: "embedding_20.tmp_0@GRAD"
slot_gradient: "embedding_21.tmp_0@GRAD"
slot_gradient: "embedding_22.tmp_0@GRAD"
slot_gradient: "embedding_23.tmp_0@GRAD"
slot_gradient: "embedding_24.tmp_0@GRAD"
slot_gradient: "embedding_25.tmp_0@GRAD"
slot_gradient: "embedding_26.tmp_0@GRAD"
slot_gradient: "embedding_27.tmp_0@GRAD"
slot_gradient: "embedding_28.tmp_0@GRAD"
slot_gradient: "embedding_29.tmp_0@GRAD"
slot_gradient: "embedding_30.tmp_0@GRAD"
slot_gradient: "embedding_31.tmp_0@GRAD"
slot_gradient: "embedding_32.tmp_0@GRAD"
slot_gradient: "embedding_33.tmp_0@GRAD"
slot_gradient: "embedding_34.tmp_0@GRAD"
slot_gradient: "embedding_35.tmp_0@GRAD"
slot_gradient: "embedding_36.tmp_0@GRAD"
slot_gradient: "embedding_37.tmp_0@GRAD"
slot_gradient: "embedding_38.tmp_0@GRAD"
slot_gradient: "embedding_39.tmp_0@GRAD"
slot_gradient: "embedding_40.tmp_0@GRAD"
slot_gradient: "embedding_41.tmp_0@GRAD"
slot_gradient: "embedding_42.tmp_0@GRAD"
slot_gradient: "embedding_43.tmp_0@GRAD"
slot_gradient: "embedding_44.tmp_0@GRAD"
slot_gradient: "embedding_45.tmp_0@GRAD"
slot_gradient: "embedding_46.tmp_0@GRAD"
slot_gradient: "embedding_47.tmp_0@GRAD"
slot_gradient: "embedding_48.tmp_0@GRAD"
slot_gradient: "embedding_49.tmp_0@GRAD"
slot_gradient: "embedding_50.tmp_0@GRAD"
slot_gradient: "embedding_51.tmp_0@GRAD"
slot_gradient: "embedding_52.tmp_0@GRAD"
slot_gradient: "embedding_53.tmp_0@GRAD"
slot_gradient: "embedding_54.tmp_0@GRAD"
slot_gradient: "embedding_55.tmp_0@GRAD"
slot_gradient: "embedding_56.tmp_0@GRAD"
slot_gradient: "embedding_57.tmp_0@GRAD"
slot_gradient: "embedding_58.tmp_0@GRAD"
slot_gradient: "embedding_59.tmp_0@GRAD"
slot_gradient: "embedding_60.tmp_0@GRAD"
slot_gradient: "embedding_61.tmp_0@GRAD"
slot_gradient: "embedding_62.tmp_0@GRAD"
slot_gradient: "embedding_63.tmp_0@GRAD"
slot_gradient: "embedding_64.tmp_0@GRAD"
slot_gradient: "embedding_65.tmp_0@GRAD"
slot_gradient: "embedding_66.tmp_0@GRAD"
slot_gradient: "embedding_67.tmp_0@GRAD"
slot_gradient: "embedding_68.tmp_0@GRAD"
slot_gradient: "embedding_69.tmp_0@GRAD"
slot_gradient: "embedding_70.tmp_0@GRAD"
slot_gradient: "embedding_71.tmp_0@GRAD"
slot_gradient: "embedding_72.tmp_0@GRAD"
slot_gradient: "embedding_73.tmp_0@GRAD"
slot_gradient: "embedding_74.tmp_0@GRAD"
slot_gradient: "embedding_75.tmp_0@GRAD"
slot_gradient: "embedding_76.tmp_0@GRAD"
slot_gradient: "embedding_77.tmp_0@GRAD"
slot_gradient: "embedding_78.tmp_0@GRAD"
slot_gradient: "embedding_79.tmp_0@GRAD"
slot_gradient: "embedding_80.tmp_0@GRAD"
slot_gradient: "embedding_81.tmp_0@GRAD"
slot_gradient: "embedding_82.tmp_0@GRAD"
slot_gradient: "embedding_83.tmp_0@GRAD"
slot_gradient: "embedding_84.tmp_0@GRAD"
slot_gradient: "embedding_85.tmp_0@GRAD"
slot_gradient: "embedding_86.tmp_0@GRAD"
slot_gradient: "embedding_87.tmp_0@GRAD"
slot_gradient: "embedding_88.tmp_0@GRAD"
slot_gradient: "embedding_89.tmp_0@GRAD"
slot_gradient: "embedding_90.tmp_0@GRAD"
slot_gradient: "embedding_91.tmp_0@GRAD"
slot_gradient: "embedding_92.tmp_0@GRAD"
slot_gradient: "embedding_93.tmp_0@GRAD"
slot_gradient: "embedding_94.tmp_0@GRAD"
slot_gradient: "embedding_95.tmp_0@GRAD"
slot_gradient: "embedding_96.tmp_0@GRAD"
slot_gradient: "embedding_97.tmp_0@GRAD"
slot_gradient: "embedding_98.tmp_0@GRAD"
slot_gradient: "embedding_99.tmp_0@GRAD"
slot_gradient: "embedding_100.tmp_0@GRAD"
slot_gradient: "embedding_101.tmp_0@GRAD"
slot_gradient: "embedding_102.tmp_0@GRAD"
slot_gradient: "embedding_103.tmp_0@GRAD"
slot_gradient: "embedding_104.tmp_0@GRAD"
slot_gradient: "embedding_105.tmp_0@GRAD"
slot_gradient: "embedding_106.tmp_0@GRAD"
slot_gradient: "embedding_107.tmp_0@GRAD"
slot_gradient: "embedding_108.tmp_0@GRAD"
slot_gradient: "embedding_109.tmp_0@GRAD"
slot_gradient: "embedding_110.tmp_0@GRAD"
slot_gradient: "embedding_111.tmp_0@GRAD"
slot_gradient: "embedding_112.tmp_0@GRAD"
slot_gradient: "embedding_113.tmp_0@GRAD"
slot_gradient: "embedding_114.tmp_0@GRAD"
slot_gradient: "embedding_115.tmp_0@GRAD"
slot_gradient: "embedding_116.tmp_0@GRAD"
slot_gradient: "embedding_117.tmp_0@GRAD"
slot_gradient: "embedding_118.tmp_0@GRAD"
slot_gradient: "embedding_119.tmp_0@GRAD"
slot_gradient: "embedding_120.tmp_0@GRAD"
slot_gradient: "embedding_121.tmp_0@GRAD"
slot_gradient: "embedding_122.tmp_0@GRAD"
slot_gradient: "embedding_123.tmp_0@GRAD"
slot_gradient: "embedding_124.tmp_0@GRAD"
slot_gradient: "embedding_125.tmp_0@GRAD"
slot_gradient: "embedding_126.tmp_0@GRAD"
slot_gradient: "embedding_127.tmp_0@GRAD"
slot_gradient: "embedding_128.tmp_0@GRAD"
slot_gradient: "embedding_129.tmp_0@GRAD"
slot_gradient: "embedding_130.tmp_0@GRAD"
slot_gradient: "embedding_131.tmp_0@GRAD"
slot_gradient: "embedding_132.tmp_0@GRAD"
slot_gradient: "embedding_133.tmp_0@GRAD"
slot_gradient: "embedding_134.tmp_0@GRAD"
slot_gradient: "embedding_135.tmp_0@GRAD"
slot_gradient: "embedding_136.tmp_0@GRAD"
slot_gradient: "embedding_137.tmp_0@GRAD"
slot_gradient: "embedding_138.tmp_0@GRAD"
slot_gradient: "embedding_139.tmp_0@GRAD"
slot_gradient: "embedding_140.tmp_0@GRAD"
slot_gradient: "embedding_141.tmp_0@GRAD"
slot_gradient: "embedding_142.tmp_0@GRAD"
slot_gradient: "embedding_143.tmp_0@GRAD"
slot_gradient: "embedding_144.tmp_0@GRAD"
slot_gradient: "embedding_145.tmp_0@GRAD"
slot_gradient: "embedding_146.tmp_0@GRAD"
slot_gradient: "embedding_147.tmp_0@GRAD"
slot_gradient: "embedding_148.tmp_0@GRAD"
slot_gradient: "embedding_149.tmp_0@GRAD"
slot_gradient: "embedding_150.tmp_0@GRAD"
slot_gradient: "embedding_151.tmp_0@GRAD"
slot_gradient: "embedding_152.tmp_0@GRAD"
slot_gradient: "embedding_153.tmp_0@GRAD"
slot_gradient: "embedding_154.tmp_0@GRAD"
slot_gradient: "embedding_155.tmp_0@GRAD"
slot_gradient: "embedding_156.tmp_0@GRAD"
slot_gradient: "embedding_157.tmp_0@GRAD"
slot_gradient: "embedding_158.tmp_0@GRAD"
slot_gradient: "embedding_159.tmp_0@GRAD"
slot_gradient: "embedding_160.tmp_0@GRAD"
slot_gradient: "embedding_161.tmp_0@GRAD"
slot_gradient: "embedding_162.tmp_0@GRAD"
slot_gradient: "embedding_163.tmp_0@GRAD"
slot_gradient: "embedding_164.tmp_0@GRAD"
slot_gradient: "embedding_165.tmp_0@GRAD"
slot_gradient: "embedding_166.tmp_0@GRAD"
slot_gradient: "embedding_167.tmp_0@GRAD"
slot_gradient: "embedding_168.tmp_0@GRAD"
slot_gradient: "embedding_169.tmp_0@GRAD"
slot_gradient: "embedding_170.tmp_0@GRAD"
slot_gradient: "embedding_171.tmp_0@GRAD"
slot_gradient: "embedding_172.tmp_0@GRAD"
slot_gradient: "embedding_173.tmp_0@GRAD"
slot_gradient: "embedding_174.tmp_0@GRAD"
slot_gradient: "embedding_175.tmp_0@GRAD"
slot_gradient: "embedding_176.tmp_0@GRAD"
slot_gradient: "embedding_177.tmp_0@GRAD"
slot_gradient: "embedding_178.tmp_0@GRAD"
slot_gradient: "embedding_179.tmp_0@GRAD"
slot_gradient: "embedding_180.tmp_0@GRAD"
slot_gradient: "embedding_181.tmp_0@GRAD"
slot_gradient: "embedding_182.tmp_0@GRAD"
slot_gradient: "embedding_183.tmp_0@GRAD"
slot_gradient: "embedding_184.tmp_0@GRAD"
slot_gradient: "embedding_185.tmp_0@GRAD"
slot_gradient: "embedding_186.tmp_0@GRAD"
slot_gradient: "embedding_187.tmp_0@GRAD"
slot_gradient: "embedding_188.tmp_0@GRAD"
slot_gradient: "embedding_189.tmp_0@GRAD"
slot_gradient: "embedding_190.tmp_0@GRAD"
slot_gradient: "embedding_191.tmp_0@GRAD"
slot_gradient: "embedding_192.tmp_0@GRAD"
slot_gradient: "embedding_193.tmp_0@GRAD"
slot_gradient: "embedding_194.tmp_0@GRAD"
slot_gradient: "embedding_195.tmp_0@GRAD"
slot_gradient: "embedding_196.tmp_0@GRAD"
slot_gradient: "embedding_197.tmp_0@GRAD"
slot_gradient: "embedding_198.tmp_0@GRAD"
slot_gradient: "embedding_199.tmp_0@GRAD"
slot_gradient: "embedding_200.tmp_0@GRAD"
slot_gradient: "embedding_201.tmp_0@GRAD"
slot_gradient: "embedding_202.tmp_0@GRAD"
slot_gradient: "embedding_203.tmp_0@GRAD"
slot_gradient: "embedding_204.tmp_0@GRAD"
slot_gradient: "embedding_205.tmp_0@GRAD"
slot_gradient: "embedding_206.tmp_0@GRAD"
slot_gradient: "embedding_207.tmp_0@GRAD"
slot_gradient: "embedding_208.tmp_0@GRAD"
slot_gradient: "embedding_209.tmp_0@GRAD"
slot_gradient: "embedding_210.tmp_0@GRAD"
slot_gradient: "embedding_211.tmp_0@GRAD"
slot_gradient: "embedding_212.tmp_0@GRAD"
slot_gradient: "embedding_213.tmp_0@GRAD"
slot_gradient: "embedding_214.tmp_0@GRAD"
slot_gradient: "embedding_215.tmp_0@GRAD"
slot_gradient: "embedding_216.tmp_0@GRAD"
slot_gradient: "embedding_217.tmp_0@GRAD"
slot_gradient: "embedding_218.tmp_0@GRAD"
slot_gradient: "embedding_219.tmp_0@GRAD"
slot_gradient: "embedding_220.tmp_0@GRAD"
slot_gradient: "embedding_221.tmp_0@GRAD"
slot_gradient: "embedding_222.tmp_0@GRAD"
slot_gradient: "embedding_223.tmp_0@GRAD"
slot_gradient: "embedding_224.tmp_0@GRAD"
slot_gradient: "embedding_225.tmp_0@GRAD"
slot_gradient: "embedding_226.tmp_0@GRAD"
slot_gradient: "embedding_227.tmp_0@GRAD"
slot_gradient: "embedding_228.tmp_0@GRAD"
slot_gradient: "embedding_229.tmp_0@GRAD"
slot_gradient: "embedding_230.tmp_0@GRAD"
slot_gradient: "embedding_231.tmp_0@GRAD"
slot_gradient: "embedding_232.tmp_0@GRAD"
slot_gradient: "embedding_233.tmp_0@GRAD"
slot_gradient: "embedding_234.tmp_0@GRAD"
slot_gradient: "embedding_235.tmp_0@GRAD"
slot_gradient: "embedding_236.tmp_0@GRAD"
slot_gradient: "embedding_237.tmp_0@GRAD"
slot_gradient: "embedding_238.tmp_0@GRAD"
slot_gradient: "embedding_239.tmp_0@GRAD"
slot_gradient: "embedding_240.tmp_0@GRAD"
slot_gradient: "embedding_241.tmp_0@GRAD"
slot_gradient: "embedding_242.tmp_0@GRAD"
slot_gradient: "embedding_243.tmp_0@GRAD"
slot_gradient: "embedding_244.tmp_0@GRAD"
slot_gradient: "embedding_245.tmp_0@GRAD"
slot_gradient: "embedding_246.tmp_0@GRAD"
slot_gradient: "embedding_247.tmp_0@GRAD"
slot_gradient: "embedding_248.tmp_0@GRAD"
slot_gradient: "embedding_249.tmp_0@GRAD"
slot_gradient: "embedding_250.tmp_0@GRAD"
slot_gradient: "embedding_251.tmp_0@GRAD"
slot_gradient: "embedding_252.tmp_0@GRAD"
slot_gradient: "embedding_253.tmp_0@GRAD"
slot_gradient: "embedding_254.tmp_0@GRAD"
slot_gradient: "embedding_255.tmp_0@GRAD"
slot_gradient: "embedding_256.tmp_0@GRAD"
slot_gradient: "embedding_257.tmp_0@GRAD"
slot_gradient: "embedding_258.tmp_0@GRAD"
slot_gradient: "embedding_259.tmp_0@GRAD"
slot_gradient: "embedding_260.tmp_0@GRAD"
slot_gradient: "embedding_261.tmp_0@GRAD"
slot_gradient: "embedding_262.tmp_0@GRAD"
slot_gradient: "embedding_263.tmp_0@GRAD"
slot_gradient: "embedding_264.tmp_0@GRAD"
slot_gradient: "embedding_265.tmp_0@GRAD"
slot_gradient: "embedding_266.tmp_0@GRAD"
slot_gradient: "embedding_267.tmp_0@GRAD"
slot_gradient: "embedding_268.tmp_0@GRAD"
slot_gradient: "embedding_269.tmp_0@GRAD"
slot_gradient: "embedding_270.tmp_0@GRAD"
slot_gradient: "embedding_271.tmp_0@GRAD"
slot_gradient: "embedding_272.tmp_0@GRAD"
slot_gradient: "embedding_273.tmp_0@GRAD"
slot_gradient: "embedding_274.tmp_0@GRAD"
slot_gradient: "embedding_275.tmp_0@GRAD"
slot_gradient: "embedding_276.tmp_0@GRAD"
slot_gradient: "embedding_277.tmp_0@GRAD"
slot_gradient: "embedding_278.tmp_0@GRAD"
slot_gradient: "embedding_279.tmp_0@GRAD"
slot_gradient: "embedding_280.tmp_0@GRAD"
slot_gradient: "embedding_281.tmp_0@GRAD"
slot_gradient: "embedding_282.tmp_0@GRAD"
slot_gradient: "embedding_283.tmp_0@GRAD"
slot_gradient: "embedding_284.tmp_0@GRAD"
slot_gradient: "embedding_285.tmp_0@GRAD"
slot_gradient: "embedding_286.tmp_0@GRAD"
slot_gradient: "embedding_287.tmp_0@GRAD"
slot_gradient: "embedding_288.tmp_0@GRAD"
slot_gradient: "embedding_289.tmp_0@GRAD"
slot_gradient: "embedding_290.tmp_0@GRAD"
slot_gradient: "embedding_291.tmp_0@GRAD"
slot_gradient: "embedding_292.tmp_0@GRAD"
slot_gradient: "embedding_293.tmp_0@GRAD"
slot_gradient: "embedding_294.tmp_0@GRAD"
slot_gradient: "embedding_295.tmp_0@GRAD"
slot_gradient: "embedding_296.tmp_0@GRAD"
slot_gradient: "embedding_297.tmp_0@GRAD"
slot_gradient: "embedding_298.tmp_0@GRAD"
slot_gradient: "embedding_299.tmp_0@GRAD"
slot_gradient: "embedding_300.tmp_0@GRAD"
slot_gradient: "embedding_301.tmp_0@GRAD"
slot_gradient: "embedding_302.tmp_0@GRAD"
slot_gradient: "embedding_303.tmp_0@GRAD"
slot_gradient: "embedding_304.tmp_0@GRAD"
slot_gradient: "embedding_305.tmp_0@GRAD"
slot_gradient: "embedding_306.tmp_0@GRAD"
slot_gradient: "embedding_307.tmp_0@GRAD"
slot_gradient: "embedding_308.tmp_0@GRAD"
slot_gradient: "embedding_309.tmp_0@GRAD"
slot_gradient: "embedding_310.tmp_0@GRAD"
slot_gradient: "embedding_311.tmp_0@GRAD"
slot_gradient: "embedding_312.tmp_0@GRAD"
slot_gradient: "embedding_313.tmp_0@GRAD"
slot_gradient: "embedding_314.tmp_0@GRAD"
slot_gradient: "embedding_315.tmp_0@GRAD"
slot_gradient: "embedding_316.tmp_0@GRAD"
slot_gradient: "embedding_317.tmp_0@GRAD"
slot_gradient: "embedding_318.tmp_0@GRAD"
slot_gradient: "embedding_319.tmp_0@GRAD"
slot_gradient: "embedding_320.tmp_0@GRAD"
slot_gradient: "embedding_321.tmp_0@GRAD"
slot_gradient: "embedding_322.tmp_0@GRAD"
slot_gradient: "embedding_323.tmp_0@GRAD"
slot_gradient: "embedding_324.tmp_0@GRAD"
slot_gradient: "embedding_325.tmp_0@GRAD"
slot_gradient: "embedding_326.tmp_0@GRAD"
slot_gradient: "embedding_327.tmp_0@GRAD"
slot_gradient: "embedding_328.tmp_0@GRAD"
slot_gradient: "embedding_329.tmp_0@GRAD"
slot_gradient: "embedding_330.tmp_0@GRAD"
slot_gradient: "embedding_331.tmp_0@GRAD"
slot_gradient: "embedding_332.tmp_0@GRAD"
slot_gradient: "embedding_333.tmp_0@GRAD"
slot_gradient: "embedding_334.tmp_0@GRAD"
slot_gradient: "embedding_335.tmp_0@GRAD"
slot_gradient: "embedding_336.tmp_0@GRAD"
slot_gradient: "embedding_337.tmp_0@GRAD"
slot_gradient: "embedding_338.tmp_0@GRAD"
slot_gradient: "embedding_339.tmp_0@GRAD"
slot_gradient: "embedding_340.tmp_0@GRAD"
slot_gradient: "embedding_341.tmp_0@GRAD"
slot_gradient: "embedding_342.tmp_0@GRAD"
slot_gradient: "embedding_343.tmp_0@GRAD"
slot_gradient: "embedding_344.tmp_0@GRAD"
slot_gradient: "embedding_345.tmp_0@GRAD"
slot_gradient: "embedding_346.tmp_0@GRAD"
slot_gradient: "embedding_347.tmp_0@GRAD"
slot_gradient: "embedding_348.tmp_0@GRAD"
slot_gradient: "embedding_349.tmp_0@GRAD"
slot_gradient: "embedding_350.tmp_0@GRAD"
slot_gradient: "embedding_351.tmp_0@GRAD"
slot_gradient: "embedding_352.tmp_0@GRAD"
slot_gradient: "embedding_353.tmp_0@GRAD"
slot_gradient: "embedding_354.tmp_0@GRAD"
slot_gradient: "embedding_355.tmp_0@GRAD"
slot_gradient: "embedding_356.tmp_0@GRAD"
slot_gradient: "embedding_357.tmp_0@GRAD"
slot_gradient: "embedding_358.tmp_0@GRAD"
slot_gradient: "embedding_359.tmp_0@GRAD"
slot_gradient: "embedding_360.tmp_0@GRAD"
slot_gradient: "embedding_361.tmp_0@GRAD"
slot_gradient: "embedding_362.tmp_0@GRAD"
slot_gradient: "embedding_363.tmp_0@GRAD"
slot_gradient: "embedding_364.tmp_0@GRAD"
slot_gradient: "embedding_365.tmp_0@GRAD"
slot_gradient: "embedding_366.tmp_0@GRAD"
slot_gradient: "embedding_367.tmp_0@GRAD"
slot_gradient: "embedding_368.tmp_0@GRAD"
slot_gradient: "embedding_369.tmp_0@GRAD"
slot_gradient: "embedding_370.tmp_0@GRAD"
slot_gradient: "embedding_371.tmp_0@GRAD"
slot_gradient: "embedding_372.tmp_0@GRAD"
slot_gradient: "embedding_373.tmp_0@GRAD"
slot_gradient: "embedding_374.tmp_0@GRAD"
slot_gradient: "embedding_375.tmp_0@GRAD"
slot_gradient: "embedding_376.tmp_0@GRAD"
slot_gradient: "embedding_377.tmp_0@GRAD"
slot_gradient: "embedding_378.tmp_0@GRAD"
slot_gradient: "embedding_379.tmp_0@GRAD"
slot_gradient: "embedding_380.tmp_0@GRAD"
slot_gradient: "embedding_381.tmp_0@GRAD"
slot_gradient: "embedding_382.tmp_0@GRAD"
slot_gradient: "embedding_383.tmp_0@GRAD"
slot_gradient: "embedding_384.tmp_0@GRAD"
slot_gradient: "embedding_385.tmp_0@GRAD"
slot_gradient: "embedding_386.tmp_0@GRAD"
slot_gradient: "embedding_387.tmp_0@GRAD"
slot_gradient: "embedding_388.tmp_0@GRAD"
slot_gradient: "embedding_389.tmp_0@GRAD"
slot_gradient: "embedding_390.tmp_0@GRAD"
slot_gradient: "embedding_391.tmp_0@GRAD"
slot_gradient: "embedding_392.tmp_0@GRAD"
slot_gradient: "embedding_393.tmp_0@GRAD"
slot_gradient: "embedding_394.tmp_0@GRAD"
slot_gradient: "embedding_395.tmp_0@GRAD"
slot_gradient: "embedding_396.tmp_0@GRAD"
slot_gradient: "embedding_397.tmp_0@GRAD"
slot_gradient: "embedding_398.tmp_0@GRAD"
slot_gradient: "embedding_399.tmp_0@GRAD"
slot_gradient: "embedding_400.tmp_0@GRAD"
slot_gradient: "embedding_401.tmp_0@GRAD"
slot_gradient: "embedding_402.tmp_0@GRAD"
slot_gradient: "embedding_403.tmp_0@GRAD"
slot_gradient: "embedding_404.tmp_0@GRAD"
slot_gradient: "embedding_405.tmp_0@GRAD"
slot_gradient: "embedding_406.tmp_0@GRAD"
}
skip_op: "lookup_table"
skip_op: "lookup_table_grad"
}
fs_client_param {
uri: "afs://xingtian.afs.baidu.com:9902"
user: "mlarch_pro"
passwd: "proisvip"
hadoop_bin: "$HADOOP_HOME/bin/hadoop"
}
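
The fs_client_param block above closes the fleet descriptor (reqi_fleet_desc). The descriptor is not parsed by hand anywhere in this package; trainer_online.py (below) hands its filename to the PSLib distributed optimizer through the fleet_desc_file strategy key. A minimal sketch of that wiring, mirroring the calls that appear in trainer_online.py (the learning rate and strategy keys are the ones used there; this is an illustration, not an extra config file):

import paddle.fluid as fluid
from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet

# sketch: mirrors the optimizer wiring in trainer_online.py below
adam = fluid.optimizer.Adam(learning_rate=0.000005)
adam = fleet.distributed_optimizer(adam, strategy={
    "use_cvm": True,
    "dump_slot": True,
    "fleet_desc_file": "reqi_fleet_desc",  # the prototxt whose tail is shown above
})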
feed_deploy/news_jingpai/package/my_nets/scripts/xbox_compressor_mf.py
0 → 100755
#!/usr/bin/python
"""
xbox model compressor
"""
import sys
import math
import time
import re

#WISE
#SHOW_COMPRESS_RATIO : 8192
#CLICK_COMPRESS_RATIO : 8192
#LR_COMPRESS_RATIO : 1048576
#MIO_COMPRESS_RATIO:8192

#PC
#MIO_COMPRESS_RATIO : 1024
#SHOW_COMPRESS_RATIO : 128
#CLICK_COMPRESS_RATIO : 1024
#LR_COMPRESS_RATIO : 8192

#STAMP_COL = 2
SHOW_COL = 3
CLICK_COL = 4
LR_W_COL = 5
LR_G2SUM_COL = 6
FM_COL = 9
#DAY_SPAN = 300

#show clk lr = float
SHOW_RATIO = 1
#SHOW_RATIO = 1024
CLK_RATIO = 8
#CLK_RATIO = 1024
LR_RATIO = 1024
MF_RATIO = 1024

base_update_threshold = 0.965
base_xbox_clk_cof = 1
base_xbox_nonclk_cof = 0.2


def as_num(x):
    y = '{:.5f}'.format(x)
    return (y)


def compress_show(xx):
    """
    compress show
    """
    preci = SHOW_RATIO
    x = float(xx)
    return str(int(math.floor(x * preci + 0.5)))


def compress_clk(xx):
    """
    compress clk
    """
    preci = CLK_RATIO
    x = float(xx)
    clk = int(math.floor(x * preci + 0.5))
    if clk == 0:
        return ""
    return str(clk)


def compress_lr(xx):
    """
    compress lr
    """
    preci = LR_RATIO
    x = float(xx)
    lr = int(math.floor(x * preci + 0.5))
    if lr == 0:
        return ""
    return str(lr)


def compress_mf(xx):
    """
    compress mf
    """
    preci = MF_RATIO
    x = float(xx)
    return int(math.floor(x * preci + 0.5))


def show_clk_score(show, clk):
    """
    calculate show_clk score
    """
    return (show - clk) * 0.2 + clk


for l in sys.stdin:
    cols = re.split(r'\s+', l.strip())
    key = cols[0].strip()
    #day = int(cols[STAMP_COL].strip())
    #cur_day = int(time.time()/3600/24)
    #if (day + DAY_SPAN) <= cur_day:
    #    continue

    # cvm features
    show = cols[SHOW_COL]
    click = cols[CLICK_COL]
    pred = ""

    f_show = float(show)
    f_clk = float(click)
    """
    if f_show != 0:
        show_log = math.log(f_show)
    else:
        show_log = 0
    if f_clk != 0:
        click_log = math.log(f_clk) - show_log
    else:
        click_log = 0
    """
    show_log = f_show
    click_log = f_clk
    #print f_show, f_clk
    #if show_clk_score(f_show, f_clk) < base_update_threshold:
    #    continue

    #show = compress_show(show)
    show = compress_show(show_log)
    #clk = compress_clk(click)
    clk = compress_clk(click_log)

    # personal lr weight
    lr_w = cols[LR_W_COL].strip()
    lr_wei = compress_lr(lr_w)

    # fm weight
    fm_wei = []
    fm_sum = 0
    if len(cols) > 7:
        #fm_dim = int(cols[FM_COL].strip())
        #if fm_dim != 0:
        for v in xrange(FM_COL, len(cols), 1):
            mf_v = compress_mf(cols[v])
            #print mf_v
            fm_wei.append(str(mf_v))
            fm_sum += (mf_v * mf_v)

    sys.stdout.write("%s\t%s\t%s\t%s" % (key, show, clk, pred))
    sys.stdout.write("\t")
    sys.stdout.write("%s" % lr_wei)
    if len(fm_wei) > 0 and fm_sum > 0:
        sys.stdout.write("\t%s" % "\t".join(fm_wei))
    else:
        sys.stdout.write("\t[\t]")
    sys.stdout.write("\n")
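
The compressor stores each float as round-to-nearest of x * RATIO, and xbox_decompressor_mf.awk (next file) divides by the same ratio, so the reconstruction error is bounded by 0.5 / RATIO. A self-contained round-trip sketch of that scheme (the RATIO value mirrors LR_RATIO / MF_RATIO above):

import math

RATIO = 1024  # same scale as LR_RATIO / MF_RATIO above

def compress(x):
    # round-to-nearest fixed-point quantization, as in compress_lr / compress_mf
    return int(math.floor(x * RATIO + 0.5))

def decompress(q):
    # inverse mapping used by xbox_decompressor_mf.awk
    return q * 1.0 / RATIO

w = 0.123456
q = compress(w)                              # -> 126
assert abs(decompress(q) - w) <= 0.5 / RATIO  # worst-case quantization error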
feed_deploy/news_jingpai/package/my_nets/scripts/xbox_decompressor_mf.awk
0 → 100755
#!/bin/awk -f
{
    OFS = "\t";
    SHOW_RATIO = 1;
    CLK_RATIO = 8;
    LR_RATIO = 1024;
    MF_RATIO = 1024;
}

function decompress_show(x)
{
    x = x * 1.0 / SHOW_RATIO;
    return x;
}

function decompress_clk(x)
{
    if (x == "") {
        x = 0;
    }
    x = x * 1.0 / CLK_RATIO;
    return x;
}

function decompress_lr(x)
{
    return x * 1.0 / LR_RATIO;
}

function decompress_mf(x)
{
    return x * 1.0 / MF_RATIO;
}

function show_clk_sore(show, clk, nonclk_coeff, clk_coeff)
{
    return (show - clk) * nonclk_coeff + clk * clk_coeff;
}

#key, show, clk, pred, lr_w, mf_w or [\t]
{
    l = split($0, a, "\t");
    show = decompress_show(a[2]);
    click = decompress_clk(a[3]);
    lr = decompress_lr(a[5]);
    printf("%s\t0\t0\t%s\t%s\t%s\t0\t", a[1], show, click, lr);
    if (l == 7) {
        printf("\n");
    } else {
        printf("%d", l - 5)
        for (i = 6; i <= l; i++) {
            printf("\t%s", decompress_mf(a[i]));
        }
        printf("\n");
    }
}
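
For readers more comfortable in Python, here is a line-for-line equivalent sketch of the awk logic above (assuming the same tab-separated compressed layout noted in the comment: key, show, clk, pred, lr_w, then either mf columns or the "[", "]" placeholders):

import sys

# same scale factors as the awk header block above
SHOW_RATIO, CLK_RATIO, LR_RATIO, MF_RATIO = 1.0, 8.0, 1024.0, 1024.0

for line in sys.stdin:
    a = line.rstrip("\n").split("\t")
    l = len(a)                                # awk's split() field count
    show = float(a[1]) / SHOW_RATIO           # awk a[2]
    click = (float(a[2]) if a[2] != "" else 0.0) / CLK_RATIO
    lr = float(a[4]) / LR_RATIO               # awk a[5]
    sys.stdout.write("%s\t0\t0\t%s\t%s\t%s\t0\t" % (a[0], show, click, lr))
    if l == 7:                                # no mf columns, only "[" and "]"
        sys.stdout.write("\n")
    else:
        sys.stdout.write("%d" % (l - 5))
        for v in a[5:]:                       # awk fields 6..l
            sys.stdout.write("\t%s" % (float(v) / MF_RATIO))
        sys.stdout.write("\n")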
feed_deploy/news_jingpai/package/my_nets/slot/slot
0 → 100644
6048
6002
6145
6202
6201
6121
6738
6119
6146
6120
6147
6122
6123
6118
6142
6143
6008
6148
6151
6127
6144
6094
6083
6952
6739
6150
6109
6003
6099
6149
6129
6203
6153
6152
6128
6106
6251
7082
7515
6951
6949
7080
6066
7507
6186
6007
7514
6125
7506
10001
6006
7023
6085
10000
6098
6250
6110
6124
6090
6082
6067
6101
6004
6191
7075
6948
6157
6126
6188
7077
6070
6111
6087
6103
6107
6194
6156
6005
6247
6814
6158
7122
6058
6189
7058
6059
6115
7079
7081
6833
7024
6108
13342
13345
13412
13343
13350
13346
13409
6009
6011
6012
6013
6014
6015
6019
6023
6024
6027
6029
6031
6050
6060
6068
6069
6089
6095
6105
6112
6130
6131
6132
6134
6161
6162
6163
6166
6182
6183
6185
6190
6212
6213
6231
6233
6234
6236
6238
6239
6240
6241
6242
6243
6244
6245
6354
7002
7005
7008
7010
7013
7015
7019
7020
7045
7046
7048
7049
7052
7054
7056
7064
7066
7076
7078
7083
7084
7085
7086
7087
7088
7089
7090
7099
7100
7101
7102
7103
7104
7105
7109
7124
7126
7136
7142
7143
7144
7145
7146
7147
7148
7150
7151
7152
7153
7154
7155
7156
7157
7047
7050
6257
6259
6260
6261
7170
7185
7186
6751
6755
6757
6759
6760
6763
6764
6765
6766
6767
6768
6769
6770
7502
7503
7504
7505
7510
7511
7512
7513
6806
6807
6808
6809
6810
6811
6812
6813
6815
6816
6817
6819
6823
6828
6831
6840
6845
6875
6879
6881
6888
6889
6947
6950
6956
6957
6959
10006
10008
10009
10010
10011
10016
10017
10018
10019
10020
10021
10022
10023
10024
10029
10030
10031
10032
10033
10034
10035
10036
10037
10038
10039
10040
10041
10042
10044
10045
10046
10051
10052
10053
10054
10055
10056
10057
10060
10066
10069
6820
6821
6822
13333
13334
13335
13336
13337
13338
13339
13340
13341
13351
13352
13353
13359
13361
13362
13363
13366
13367
13368
13369
13370
13371
13375
13376
5700
5702
13400
13401
13402
13403
13404
13406
13407
13408
13410
13417
13418
13419
13420
13422
13425
13427
13428
13429
13430
13431
13433
13434
13436
13437
13326
13330
13331
5717
13442
13451
13452
13455
13456
13457
13458
13459
13460
13461
13462
13463
13464
13465
13466
13467
13468
1104
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
13812
13813
6740
1490
32915
32950
32952
32953
32954
33077
33085
33086
feed_deploy/news_jingpai/package/my_nets/slot/slot_common
0 → 100644
6048
6002
6145
6202
6201
6121
6738
6119
6146
6120
6147
6122
6123
6118
6142
6143
6008
6148
6151
6127
6144
6094
6083
6952
6739
6150
6109
6003
6099
6149
6129
6203
6153
6152
6128
6106
6251
7082
7515
6951
6949
7080
6066
7507
6186
6007
7514
6125
7506
10001
6006
7023
6085
10000
6098
6250
6110
6124
6090
6082
6067
6101
6004
6191
7075
6948
6157
6126
6188
7077
6070
6111
6087
6103
6107
6194
6156
6005
6247
6814
6158
7122
6058
6189
7058
6059
6115
7079
7081
6833
7024
6108
13342
13345
13412
13343
13350
13346
13409
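
slot/slot and slot/slot_common are plain lists of feature slot ids, one per line; trainer_online.py passes their paths ("slot/slot", "slot/slot_common") straight into Model and ModelJoinCommon. A minimal sketch of reading such a list (load_slot_file is an illustrative helper, not part of this package):

def load_slot_file(path):
    # one slot id per line, e.g. "6048"; order is preserved
    with open(path, "r") as fin:
        return [line.strip() for line in fin if line.strip()]

slots = load_slot_file("slot/slot")
common_slots = load_slot_file("slot/slot_common")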
feed_deploy/news_jingpai/package/my_nets/tmp/slot/slot
0 → 100644
6048
6002
6145
6202
6201
6121
6738
6119
6146
6120
6147
6122
6123
6118
6142
6143
6008
6148
6151
6127
6144
6094
6083
6952
6739
6150
6109
6003
6099
6149
6129
6203
6153
6152
6128
6106
6251
7082
7515
6951
6949
7080
6066
7507
6186
6007
7514
6125
7506
10001
6006
7023
6085
10000
6098
6250
6110
6124
6090
6082
6067
6101
6004
6191
7075
6948
6157
6126
6188
7077
6070
6111
6087
6103
6107
6194
6156
6005
6247
6814
6158
7122
6058
6189
7058
6059
6115
7079
7081
6833
7024
6108
13342
13345
13412
13343
13350
13346
13409
6009
6011
6012
6013
6014
6015
6019
6023
6024
6027
6029
6031
6050
6060
6068
6069
6089
6095
6105
6112
6130
6131
6132
6134
6161
6162
6163
6166
6182
6183
6185
6190
6212
6213
6231
6233
6234
6236
6238
6239
6240
6241
6242
6243
6244
6245
6354
7002
7005
7008
7010
7012
7013
7015
7016
7017
7018
7019
7020
7045
7046
7048
7049
7052
7054
7056
7064
7066
7076
7078
7083
7084
7085
7086
7087
7088
7089
7090
7099
7100
7101
7102
7103
7104
7105
7109
7124
7126
7136
7142
7143
7144
7145
7146
7147
7148
7150
7151
7152
7153
7154
7155
7156
7157
7047
7050
6253
6254
6255
6256
6257
6259
6260
6261
7170
7185
7186
6751
6755
6757
6759
6760
6763
6764
6765
6766
6767
6768
6769
6770
7502
7503
7504
7505
7510
7511
7512
7513
6806
6807
6808
6809
6810
6811
6812
6813
6815
6816
6817
6819
6823
6828
6831
6840
6845
6875
6879
6881
6888
6889
6947
6950
6956
6957
6959
10006
10008
10009
10010
10011
10016
10017
10018
10019
10020
10021
10022
10023
10024
10029
10030
10031
10032
10033
10034
10035
10036
10037
10038
10039
10040
10041
10042
10044
10045
10046
10051
10052
10053
10054
10055
10056
10057
10060
10066
10069
6820
6821
6822
13333
13334
13335
13336
13337
13338
13339
13340
13341
13351
13352
13353
13359
13361
13362
13363
13366
13367
13368
13369
13370
13371
13375
13376
5700
5702
13400
13401
13402
13403
13404
13406
13407
13408
13410
13417
13418
13419
13420
13422
13425
13427
13428
13429
13430
13431
13433
13434
13436
13437
13326
13330
13331
5717
13442
13451
13452
13455
13456
13457
13458
13459
13460
13461
13462
13463
13464
13465
13466
13467
13468
1104
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
13812
13813
6740
1490
1491
feed_deploy/news_jingpai/package/my_nets/tmp/slot/slot_common
0 → 100644
6048
6002
6145
6202
6201
6121
6738
6119
6146
6120
6147
6122
6123
6118
6142
6143
6008
6148
6151
6127
6144
6094
6083
6952
6739
6150
6109
6003
6099
6149
6129
6203
6153
6152
6128
6106
6251
7082
7515
6951
6949
7080
6066
7507
6186
6007
7514
6125
7506
10001
6006
7023
6085
10000
6098
6250
6110
6124
6090
6082
6067
6101
6004
6191
7075
6948
6157
6126
6188
7077
6070
6111
6087
6103
6107
6194
6156
6005
6247
6814
6158
7122
6058
6189
7058
6059
6115
7079
7081
6833
7024
6108
13342
13345
13412
13343
13350
13346
13409
feed_deploy/news_jingpai/package/my_nets/tmp/slot/to.py
0 → 100644
with open("session_slot", "r") as fin:
    res = []
    for i in fin:
        res.append("\"" + i.strip() + "\"")
    print ", ".join(res)
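
Given a session_slot file with one slot id per line, to.py prints the ids as a quoted, comma-separated list suitable for pasting into a Python source file. An illustrative run (the input values are hypothetical):

# session_slot contains:   6048
#                          6002
#                          6145
# to.py stdout:            "6048", "6002", "6145"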
feed_deploy/news_jingpai/package/my_nets/trainer_online.py
0 → 100644
import numpy as np
import os
import sys
import paddle
import paddle.fluid as fluid
import threading
import time
import config
from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet
from paddle.fluid.incubate.fleet.utils.fleet_util import FleetUtil
from paddle.fluid.incubate.fleet.utils.hdfs import HDFSClient
from model_new import Model
from model_new_jc import ModelJoinCommon
import util
from util import *

fleet_util = FleetUtil()


def time_prefix_str():
    return "\n" + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + "[0]:"


def create_model(slot_file, slot_common_file, all_slot_file):
    join_common_model = ModelJoinCommon(slot_file, slot_common_file, all_slot_file, 20)
    update_model = Model(slot_file, all_slot_file, False, 0, True)
    with open("join_common_main_program.pbtxt", "w") as fout:
        print >> fout, join_common_model._train_program
    with open("join_common_startup_program.pbtxt", "w") as fout:
        print >> fout, join_common_model._startup_program
    with open("update_main_program.pbtxt", "w") as fout:
        print >> fout, update_model._train_program
    with open("update_startup_program.pbtxt", "w") as fout:
        print >> fout, update_model._startup_program
    return [join_common_model, update_model]


def create_dataset(use_var_list, my_filelist):
    dataset = fluid.DatasetFactory().create_dataset(config.dataset_type)
    dataset.set_batch_size(config.batch_size)
    dataset.set_thread(config.thread_num)
    dataset.set_hdfs_config(config.fs_name, config.fs_ugi)
    dataset.set_pipe_command(config.pipe_command)
    dataset.set_filelist(my_filelist)
    dataset.set_use_var(use_var_list)
    #dataset.set_fleet_send_sleep_seconds(2)
    #dataset.set_fleet_send_batch_size(80000)
    return dataset


def hdfs_ls(path):
    configs = {"fs.default.name": config.fs_name, "hadoop.job.ugi": config.fs_ugi}
    hdfs_client = HDFSClient("$HADOOP_HOME", configs)
    filelist = []
    for i in path:
        cur_path = hdfs_client.ls(i)
        if config.fs_name.startswith("hdfs:"):
            cur_path = ["hdfs:" + j for j in cur_path]
        elif config.fs_name.startswith("afs:"):
            cur_path = ["afs:" + j for j in cur_path]
        filelist += cur_path
    return filelist


def get_avg_cost_mins(value):
    t1 = time.time()
    local_cost = np.array([value])
    global_cost = np.copy(local_cost) * 0
    t2 = time.time()
    fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost)
    t3 = time.time()
    avg_cost = float(global_cost[0]) / fleet.worker_num()
    avg_cost /= 60.0
    t4 = time.time()
    tc = (t2 - t1 + t4 - t3) / 60.0
    tb = (t3 - t2) / 60.0
    fleet_util.rank0_print("get_avg_cost_mins calc time %s barrier time %s" % (tc, tb))
    return avg_cost


def get_max_cost_mins(value):
    from mpi4py import MPI
    local_cost = np.array([value])
    global_cost = np.copy(local_cost) * 0
    fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MAX)
    fleet_util.rank0_print("max train time %s mins" % (float(global_cost[0]) / 60.0))


def get_min_cost_mins(value):
    from mpi4py import MPI
    local_cost = np.array([value])
    global_cost = np.copy(local_cost) * 0
    fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MIN)
    fleet_util.rank0_print("min train time %s mins" % (float(global_cost[0]) / 60.0))


def get_data_max(value):
    from mpi4py import MPI
    local_cost = np.array([value])
    global_cost = np.copy(local_cost) * 0
    fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MAX)
    fleet_util.rank0_print("data size max %s" % global_cost[0])


def get_data_min(value):
    from mpi4py import MPI
    local_cost = np.array([value])
    global_cost = np.copy(local_cost) * 0
    fleet._role_maker._node_type_comm.Allreduce(local_cost, global_cost, op=MPI.MIN)
    fleet_util.rank0_print("data size min %s" % global_cost[0])


def clear_metrics(fleet_util, model, scope):
    fleet_util.set_zero(model.stat_pos.name, scope)
    fleet_util.set_zero(model.stat_neg.name, scope)
    fleet_util.set_zero(model.batch_stat_pos.name, scope)
    fleet_util.set_zero(model.batch_stat_neg.name, scope)
    fleet_util.set_zero(model.abserr.name, scope, param_type="float32")
    fleet_util.set_zero(model.sqrerr.name, scope, param_type="float32")
    fleet_util.set_zero(model.prob.name, scope, param_type="float32")
    fleet_util.set_zero(model.q.name, scope, param_type="float32")
    fleet_util.set_zero(model.pos.name, scope, param_type="float32")
    fleet_util.set_zero(model.total.name, scope, param_type="float32")


def clear_metrics_2(fleet_util, model, scope):
    fleet_util.set_zero(model.join_stat_pos.name, scope)
    fleet_util.set_zero(model.join_stat_neg.name, scope)
    fleet_util.set_zero(model.join_batch_stat_pos.name, scope)
    fleet_util.set_zero(model.join_batch_stat_neg.name, scope)
    fleet_util.set_zero(model.join_abserr.name, scope, param_type="float32")
    fleet_util.set_zero(model.join_sqrerr.name, scope, param_type="float32")
    fleet_util.set_zero(model.join_prob.name, scope, param_type="float32")
    fleet_util.set_zero(model.join_q.name, scope, param_type="float32")
    fleet_util.set_zero(model.join_pos.name, scope, param_type="float32")
    fleet_util.set_zero(model.join_total.name, scope, param_type="float32")
    fleet_util.set_zero(model.common_stat_pos.name, scope)
    fleet_util.set_zero(model.common_stat_neg.name, scope)
    fleet_util.set_zero(model.common_batch_stat_pos.name, scope)
    fleet_util.set_zero(model.common_batch_stat_neg.name, scope)
    fleet_util.set_zero(model.common_abserr.name, scope, param_type="float32")
    fleet_util.set_zero(model.common_sqrerr.name, scope, param_type="float32")
    fleet_util.set_zero(model.common_prob.name, scope, param_type="float32")
    fleet_util.set_zero(model.common_q.name, scope, param_type="float32")
    fleet_util.set_zero(model.common_pos.name, scope, param_type="float32")
    fleet_util.set_zero(model.common_total.name, scope, param_type="float32")


def save_delta(day, pass_index, xbox_base_key, cur_path, exe, scope_join,
               scope_common, scope_update, join_model, join_common_model,
               update_model, join_save_params, common_save_params,
               update_save_params, monitor_data):
    stdout_str = ""
    fleet_util.rank0_print("begin save delta model")
    begin = time.time()
    if pass_index == -1:
        fleet_util.save_xbox_base_model(config.output_path, day)
    else:
        fleet_util.save_delta_model(config.output_path, day, pass_index)
    end = time.time()
    fleet_util.save_paddle_params(exe, scope_join, join_model._train_program,
                                  "paddle_dense.model.0", config.output_path,
                                  day, pass_index, config.fs_name, config.fs_ugi,
                                  var_names=join_save_params)
    fleet_util.save_paddle_params(exe, scope_common, join_common_model._train_program,
                                  "paddle_dense.model.1", config.output_path,
                                  day, pass_index, config.fs_name, config.fs_ugi,
                                  var_names=common_save_params)
    fleet_util.save_paddle_params(exe, scope_update, update_model._train_program,
                                  "paddle_dense.model.2", config.output_path,
                                  day, pass_index, config.fs_name, config.fs_ugi,
                                  var_names=update_save_params)
    log_str = "end save delta cost %s min" % ((end - begin) / 60.0)
    fleet_util.rank0_print(log_str)
    stdout_str += time_prefix_str() + log_str
    fleet_util.rank0_print("begin save cache")
    begin = time.time()
    if pass_index == -1:
        key_num = fleet_util.save_cache_base_model(config.output_path, day)
    else:
        key_num = fleet_util.save_cache_model(config.output_path, day, pass_index)
    fleet_util.write_cache_donefile(config.output_path, day, pass_index, key_num,
                                    config.fs_name, config.fs_ugi)
    end = time.time()
    log_str = "end save cache cost %s min, key_num=%s" % ((end - begin) / 60.0, key_num)
    fleet_util.rank0_print(log_str)
    stdout_str += time_prefix_str() + log_str
    write_xbox_donefile(day, pass_index, xbox_base_key, ",".join(cur_path),
                        monitor_data=monitor_data)
    return stdout_str


if __name__ == "__main__":
    place = fluid.CPUPlace()
    exe = fluid.Executor(place)
    fleet.init(exe)

    slot_file = "slot/slot"
    slot_common_file = "slot/slot_common"
    all_slot_file = "all_slot.dict"
    join_common_model, update_model = create_model(slot_file, slot_common_file, all_slot_file)
    scope2 = fluid.Scope()
    scope3 = fluid.Scope()

    adjust_ins_weight = {"need_adjust": True,
                         "nid_slot": "6002",
                         "nid_adjw_threshold": 1000,
                         "nid_adjw_ratio": 20,
                         "ins_weight_slot": update_model.ins_weight.name}

    thread_stat_var_names = []
    thread_stat_var_names.append(join_common_model.join_stat_pos.name)
    thread_stat_var_names.append(join_common_model.join_stat_neg.name)
    thread_stat_var_names.append(join_common_model.join_sqrerr.name)
    thread_stat_var_names.append(join_common_model.join_abserr.name)
    thread_stat_var_names.append(join_common_model.join_prob.name)
    thread_stat_var_names.append(join_common_model.join_q.name)
    thread_stat_var_names.append(join_common_model.join_pos.name)
    thread_stat_var_names.append(join_common_model.join_total.name)
    thread_stat_var_names.append(join_common_model.common_stat_pos.name)
    thread_stat_var_names.append(join_common_model.common_stat_neg.name)
    thread_stat_var_names.append(join_common_model.common_sqrerr.name)
    thread_stat_var_names.append(join_common_model.common_abserr.name)
    thread_stat_var_names.append(join_common_model.common_prob.name)
    thread_stat_var_names.append(join_common_model.common_q.name)
    thread_stat_var_names.append(join_common_model.common_pos.name)
    thread_stat_var_names.append(join_common_model.common_total.name)
    thread_stat_var_names.append(update_model.stat_pos.name)
    thread_stat_var_names.append(update_model.stat_neg.name)
    thread_stat_var_names.append(update_model.sqrerr.name)
    thread_stat_var_names.append(update_model.abserr.name)
    thread_stat_var_names.append(update_model.prob.name)
    thread_stat_var_names.append(update_model.q.name)
    thread_stat_var_names.append(update_model.pos.name)
    thread_stat_var_names.append(update_model.total.name)
    thread_stat_var_names = list(set(thread_stat_var_names))

    adam = fluid.optimizer.Adam(learning_rate=0.000005)
    adam = fleet.distributed_optimizer(adam, strategy={
        "use_cvm": True,
        "adjust_ins_weight": adjust_ins_weight,
        "scale_datanorm": 1e-4,
        "dump_slot": True,
        "stat_var_names": thread_stat_var_names,
        "fleet_desc_file": "reqi_fleet_desc"})
    adam.minimize([join_common_model.joint_cost, update_model.avg_cost], [scope2, scope3])
    join_common_model._train_program._fleet_opt["program_configs"][
        str(id(join_common_model.joint_cost.block.program))]["push_sparse"] = []

    join_save_params = ["join.batch_size", "join.batch_sum", "join.batch_square_sum",
                        "join_0.w_0", "join_0.b_0", "join_1.w_0", "join_1.b_0",
                        "join_2.w_0", "join_2.b_0", "join_3.w_0", "join_3.b_0",
                        "join_4.w_0", "join_4.b_0", "join_5.w_0", "join_5.b_0",
                        "join_6.w_0", "join_6.b_0", "join_7.w_0", "join_7.b_0"]
    common_save_params = ["common.batch_size", "common.batch_sum", "common.batch_square_sum",
                          "common_0.w_0", "common_0.b_0", "common_1.w_0", "common_1.b_0",
                          "common_2.w_0", "common_2.b_0", "common_3.w_0", "common_3.b_0",
                          "common_4.w_0", "common_4.b_0", "common_5.w_0", "common_5.b_0",
                          "common_6.w_0", "common_6.b_0", "common_7.w_0", "common_7.b_0"]
    update_save_params = ["fc_0.w_0", "fc_0.b_0", "fc_1.w_0", "fc_1.b_0",
                          "fc_2.w_0", "fc_2.b_0", "fc_3.w_0", "fc_3.b_0",
                          "fc_4.w_0", "fc_4.b_0", "fc_5.w_0", "fc_5.b_0"]

    if fleet.is_server():
        fleet.run_server()
    elif fleet.is_worker():
        with fluid.scope_guard(scope3):
            exe.run(update_model._startup_program)
        with fluid.scope_guard(scope2):
            exe.run(join_common_model._startup_program)
        configs = {"fs.default.name": config.fs_name, "hadoop.job.ugi": config.fs_ugi}
        hdfs_client = HDFSClient("$HADOOP_HOME", configs)
        save_first_base = config.save_first_base
        path = config.train_data_path
        online_pass_interval = fleet_util.get_online_pass_interval(
            config.days, config.hours, config.split_interval,
            config.split_per_pass, False)
        pass_per_day = len(online_pass_interval)
        last_day, last_pass, last_path, xbox_base_key = \
            fleet_util.get_last_save_model(config.output_path, config.fs_name, config.fs_ugi)
        reqi = True if last_day != -1 else False
        if config.need_reqi_changeslot and config.reqi_dnn_plugin_day >= last_day \
                and config.reqi_dnn_plugin_pass >= last_pass:
            util.reqi_changeslot(config.hdfs_dnn_plugin_path, join_save_params,
                                 common_save_params, update_save_params, scope2, scope3)
        fleet.init_worker()

        dataset = None
        next_dataset = None
        cur_path = None
        next_path = None
        start_train = False
        days = os.popen("echo -n " + config.days).read().split(" ")
        hours = os.popen("echo -n " + config.hours).read().split(" ")
        stdout_str = ""
        begin_days = {}
        for day_index in range(len(days)):
            day = days[day_index]
            if last_day != -1 and int(day) < last_day:
                continue
            for pass_index in range(1, pass_per_day + 1):
                dataset = next_dataset
                next_dataset = None
                cur_path = next_path
                next_path = None
                if (last_day != -1 and int(day) == last_day) and \
                        (last_pass != -1 and int(pass_index) < last_pass):
                    continue
                if reqi:
                    begin = time.time()
                    log_str = "going to load model %s" % last_path
                    fleet_util.rank0_print(log_str)
                    if config.need_reqi_changeslot and config.reqi_dnn_plugin_day >= last_day \
                            and config.reqi_dnn_plugin_pass >= last_pass:
                        fleet.load_one_table(0, last_path)
                    else:
                        fleet_util.load_fleet_model(last_path)
                    end = time.time()
                    log_str = "load model cost %s min" % ((end - begin) / 60.0)
                    fleet_util.rank0_print(log_str)
                    stdout_str += time_prefix_str() + log_str
                    reqi = False
                if (last_day != -1 and int(day) == last_day) and \
                        (last_pass != -1 and int(pass_index) == last_pass):
                    continue
                #log_str = "===========going to train day/pass %s/%s===========" % (day, pass_index)
                if begin_days.get(day) is None:
                    log_str = "======== BEGIN DAY:%s ========" % day
                    fleet_util.rank0_print(log_str)
                    stdout_str += time_prefix_str() + log_str
                    begin_days[day] = True
                log_str = " ==== begin delta:%s ========" % pass_index
                fleet_util.rank0_print(log_str)
                stdout_str += time_prefix_str() + log_str
                if save_first_base:
                    log_str = "save_first_base=True"
                    fleet_util.rank0_print(log_str)
                    save_first_base = False
                    last_base_day, last_base_path, tmp_xbox_base_key = \
                        fleet_util.get_last_save_xbox_base(config.output_path,
                                                           config.fs_name, config.fs_ugi)
                    if int(day) > last_base_day:
                        log_str = "going to save xbox base model"
                        fleet_util.rank0_print(log_str)
                        stdout_str += time_prefix_str() + log_str
                        xbox_base_key = int(time.time())
                        cur = []
                        for interval in online_pass_interval[pass_index - 1]:
                            for p in path:
                                cur.append(p + "/" + day + "/" + interval)
                        stdout_str += save_delta(day, -1, xbox_base_key, cur, exe,
                                                 scope2, scope2, scope3,
                                                 join_common_model, join_common_model,
                                                 update_model, join_save_params,
                                                 common_save_params, update_save_params, "")
                    elif int(day) == last_base_day:
                        xbox_base_key = tmp_xbox_base_key
                        log_str = "xbox base model exists"
                        fleet_util.rank0_print(log_str)
                        stdout_str += time_prefix_str() + log_str
                    else:
                        log_str = "xbox base model exists"
                        fleet_util.rank0_print(log_str)
                        stdout_str += time_prefix_str() + log_str
                start_train = True
                train_begin = time.time()
                if dataset is not None:
                    begin = time.time()
                    dataset.wait_preload_done()
                    end = time.time()
                    log_str = "wait data preload done cost %s min" % ((end - begin) / 60.0)
                    fleet_util.rank0_print(log_str)
                    stdout_str += time_prefix_str() + log_str
                if dataset is None:
                    cur_pass = online_pass_interval[pass_index - 1]
                    cur_path = []
                    for interval in cur_pass:
                        for p in path:
                            cur_path.append(p + "/" + day + "/" + interval)
                    log_str = "data path: " + ",".join(cur_path)
                    fleet_util.rank0_print(log_str)
                    stdout_str += time_prefix_str() + log_str
                    for i in cur_path:
                        while not hdfs_client.is_exist(i + "/to.hadoop.done"):
                            fleet_util.rank0_print("wait for data ready: %s" % i)
                            time.sleep(config.check_exist_seconds)
                    my_filelist = fleet.split_files(hdfs_ls(cur_path))
                    dataset = create_dataset(join_common_model._all_slots, my_filelist)
                    fleet_util.rank0_print("going to load into memory")
                    begin = time.time()
                    dataset.load_into_memory()
                    end = time.time()
                    log_str = "load into memory done, cost %s min" % ((end - begin) / 60.0)
                    fleet_util.rank0_print(log_str)
                    stdout_str += time_prefix_str() + log_str
                fleet_util.rank0_print("going to global shuffle")
                begin = time.time()
                dataset.global_shuffle(fleet, config.shuffle_thread)
                end = time.time()
                log_str = "global shuffle done, cost %s min, data size %s" % \
                          ((end - begin) / 60.0, dataset.get_shuffle_data_size(fleet))
                fleet_util.rank0_print(log_str)
                stdout_str += time_prefix_str() + log_str
                get_data_max(dataset.get_shuffle_data_size())
                get_data_min(dataset.get_shuffle_data_size())
                if config.prefetch and (pass_index < pass_per_day or
                        pass_index == pass_per_day and day_index < len(days) - 1):
                    if pass_index < pass_per_day:
                        next_pass = online_pass_interval[pass_index]
                        next_day = day
                    else:
                        next_pass = online_pass_interval[0]
                        next_day = days[day_index + 1]
                    next_path = []
                    for interval in next_pass:
                        for p in path:
                            next_path.append(p + "/" + next_day + "/" + interval)
                    next_data_ready = True
                    for i in next_path:
                        if not hdfs_client.is_exist(i + "/to.hadoop.done"):
                            next_data_ready = False
                            fleet_util.rank0_print("next data not ready: %s" % i)
                    if not next_data_ready:
                        next_dataset = None
                    else:
                        my_filelist = fleet.split_files(hdfs_ls(next_path))
                        next_dataset = create_dataset(join_common_model._all_slots, my_filelist)
                        log_str = "next pass data preload %s " % ",".join(next_path)
                        fleet_util.rank0_print(log_str)
                        stdout_str += time_prefix_str() + log_str
                        next_dataset.preload_into_memory(config.preload_thread)
                join_cost = 0
                common_cost = 0
                update_cost = 0
                monitor_data = ""
                with fluid.scope_guard(scope2):
                    fleet_util.rank0_print("Begin join + common pass")
                    begin = time.time()
                    exe.train_from_dataset(join_common_model._train_program, dataset,
                                           scope2, thread=config.join_common_thread,
                                           debug=False)
                    end = time.time()
                    avg_cost = get_avg_cost_mins(end - begin)
                    fleet_util.rank0_print("avg train time %s mins" % avg_cost)
                    get_max_cost_mins(end - begin)
                    get_min_cost_mins(end - begin)
                    common_cost = avg_cost
                    monitor_data = ""
                    log_str = print_global_metrics(scope2,
                                                   join_common_model.join_stat_pos.name,
                                                   join_common_model.join_stat_neg.name,
                                                   join_common_model.join_sqrerr.name,
                                                   join_common_model.join_abserr.name,
                                                   join_common_model.join_prob.name,
                                                   join_common_model.join_q.name,
                                                   join_common_model.join_pos.name,
                                                   join_common_model.join_total.name,
                                                   "joining pass:")
                                                   #"join pass:")
                    monitor_data += log_str
                    stdout_str += time_prefix_str() + "joining pass:"
                    stdout_str += time_prefix_str() + log_str
                    log_str = print_global_metrics(scope2,
                                                   join_common_model.common_stat_pos.name,
                                                   join_common_model.common_stat_neg.name,
                                                   join_common_model.common_sqrerr.name,
                                                   join_common_model.common_abserr.name,
                                                   join_common_model.common_prob.name,
                                                   join_common_model.common_q.name,
                                                   join_common_model.common_pos.name,
                                                   join_common_model.common_total.name,
                                                   "common pass:")
                    monitor_data += " " + log_str
                    stdout_str += time_prefix_str() + "common pass:"
                    stdout_str += time_prefix_str() + log_str
                    fleet_util.rank0_print("End join+common pass")
                    clear_metrics_2(fleet_util, join_common_model, scope2)
                    if config.save_xbox_before_update and \
                            pass_index % config.save_delta_frequency == 0:
                        fleet_util.rank0_print("going to save delta model")
                        last_xbox_day, last_xbox_pass, last_xbox_path, _ = \
                            fleet_util.get_last_save_xbox(config.output_path,
                                                          config.fs_name, config.fs_ugi)
                        if int(day) < last_xbox_day or int(day) == last_xbox_day \
                                and int(pass_index) <= last_xbox_pass:
                            log_str = "delta model exists"
                            fleet_util.rank0_print(log_str)
                            stdout_str += time_prefix_str() + log_str
                        else:
                            stdout_str += save_delta(day, pass_index, xbox_base_key,
                                                     cur_path, exe, scope2, scope2, scope3,
                                                     join_common_model, join_common_model,
                                                     update_model, join_save_params,
                                                     common_save_params, update_save_params,
                                                     monitor_data)
                with fluid.scope_guard(scope3):
                    fleet_util.rank0_print("Begin update pass")
                    begin = time.time()
                    exe.train_from_dataset(update_model._train_program, dataset,
                                           scope3, thread=config.update_thread,
                                           debug=False)
                    end = time.time()
                    avg_cost = get_avg_cost_mins(end - begin)
                    get_max_cost_mins(end - begin)
                    get_min_cost_mins(end - begin)
                    update_cost = avg_cost
                    log_str = print_global_metrics(scope3,
                                                   update_model.stat_pos.name,
                                                   update_model.stat_neg.name,
                                                   update_model.sqrerr.name,
                                                   update_model.abserr.name,
                                                   update_model.prob.name,
                                                   update_model.q.name,
                                                   update_model.pos.name,
                                                   update_model.total.name,
                                                   "updating pass:")
                                                   #"update pass:")
                    stdout_str += time_prefix_str() + "updating pass:"
                    stdout_str += time_prefix_str() + log_str
                    fleet_util.rank0_print("End update pass")
                    clear_metrics(fleet_util, update_model, scope3)
                begin = time.time()
                dataset.release_memory()
                end = time.time()
                fleet_util.rank0_print("release_memory cost %s min" % ((end - begin) / 60.0))
                if (pass_index % config.checkpoint_per_pass) == 0 and pass_index != pass_per_day:
                    begin = time.time()
                    fleet_util.save_model(config.output_path, day, pass_index)
                    fleet_util.write_model_donefile(config.output_path, day, pass_index,
                                                    xbox_base_key, config.fs_name, config.fs_ugi)
                    end = time.time()
                    log_str = "save model cost %s min" % ((end - begin) / 60.0)
                    fleet_util.rank0_print(log_str)
                    stdout_str += time_prefix_str() + log_str
                if not config.save_xbox_before_update and \
                        pass_index % config.save_delta_frequency == 0:
                    fleet_util.rank0_print("going to save delta model")
                    last_xbox_day, last_xbox_pass, last_xbox_path, _ = \
                        fleet_util.get_last_save_xbox(config.output_path,
                                                      config.fs_name, config.fs_ugi)
                    if int(day) < last_xbox_day or int(day) == last_xbox_day \
                            and int(pass_index) <= last_xbox_pass:
                        log_str = "delta model exists"
                        fleet_util.rank0_print(log_str)
                        stdout_str += time_prefix_str() + log_str
                    else:
                        stdout_str += save_delta(day, pass_index, xbox_base_key,
                                                 cur_path, exe, scope2, scope2, scope3,
                                                 join_common_model, join_common_model,
                                                 update_model, join_save_params,
                                                 common_save_params, update_save_params,
                                                 monitor_data)
                train_end = time.time()
                train_cost = (train_end - train_begin) / 60.0
                other_cost = train_cost - join_cost - common_cost - update_cost
                log_str = "finished train day %s pass %s time cost:%s min job time cost" \
                          ":[join:%s min][join_common:%s min][update:%s min][other:%s min]" \
                          % (day, pass_index, train_cost, join_cost, common_cost,
                             update_cost, other_cost)
                fleet_util.rank0_print(log_str)
                stdout_str += time_prefix_str() + log_str
                if pass_index % config.write_stdout_frequency == 0:
                    write_stdout(stdout_str)
                    stdout_str = ""
            xbox_base_key = int(time.time())
            if not start_train:
                write_stdout(stdout_str)
                stdout_str = ""
                continue
            fleet_util.rank0_print("shrink table")
            begin = time.time()
            fleet.shrink_sparse_table()
            fleet.shrink_dense_table(0.98, scope=scope2, table_id=1)
            fleet.shrink_dense_table(0.98, scope=scope2, table_id=2)
            fleet.shrink_dense_table(0.98, scope=scope3, table_id=3)
            end = time.time()
            log_str = "shrink table done, cost %s min" % ((end - begin) / 60.0)
            fleet_util.rank0_print(log_str)
            stdout_str += time_prefix_str() + log_str
            fleet_util.rank0_print("going to save batch model/base xbox model")
            last_base_day, last_base_path, _ = \
                fleet_util.get_last_save_xbox_base(config.output_path,
                                                   config.fs_name, config.fs_ugi)
            nextday = int(days[day_index + 1])
            if nextday <= last_base_day:
                log_str = "batch model/base xbox model exists"
                fleet_util.rank0_print(log_str)
                stdout_str += time_prefix_str() + log_str
            else:
                stdout_str += save_delta(nextday, -1, xbox_base_key, cur_path, exe,
                                         scope2, scope2, scope3,
                                         join_common_model, join_common_model,
                                         update_model, join_save_params,
                                         common_save_params, update_save_params,
                                         monitor_data)
                begin = time.time()
                fleet_util.save_batch_model(config.output_path, nextday)
                fleet_util.write_model_donefile(config.output_path, nextday, -1,
                                                xbox_base_key, config.fs_name, config.fs_ugi)
                end = time.time()
                log_str = "save batch model cost %s min" % ((end - begin) / 60.0)
                fleet_util.rank0_print(log_str)
                stdout_str += time_prefix_str() + log_str
            write_stdout(stdout_str)
            stdout_str = ""
feed_deploy/news_jingpai/package/my_nets/trainer_online_local.py
0 → 100644
import
numpy
as
np
import
os
import
sys
import
paddle
import
paddle.fluid
as
fluid
import
threading
import
time
import
config
from
paddle.fluid.incubate.fleet.parameter_server.pslib
import
fleet
from
paddle.fluid.incubate.fleet.utils.fleet_util
import
FleetUtil
from
paddle.fluid.incubate.fleet.utils.hdfs
import
HDFSClient
from
model_new
import
Model
from
model_new_jc
import
ModelJoinCommon
fleet_util
=
FleetUtil
()
def
create_model
(
slot_file
,
slot_common_file
,
all_slot_file
):
join_common_model
=
ModelJoinCommon
(
slot_file
,
slot_common_file
,
all_slot_file
,
20
)
update_model
=
Model
(
slot_file
,
all_slot_file
,
False
,
0
,
True
)
with
open
(
"join_common_main_program.pbtxt"
,
"w"
)
as
fout
:
print
>>
fout
,
join_common_model
.
_train_program
with
open
(
"join_common_startup_program.pbtxt"
,
"w"
)
as
fout
:
print
>>
fout
,
join_common_model
.
_startup_program
with
open
(
"update_main_program.pbtxt"
,
"w"
)
as
fout
:
print
>>
fout
,
update_model
.
_train_program
with
open
(
"update_startup_program.pbtxt"
,
"w"
)
as
fout
:
print
>>
fout
,
update_model
.
_startup_program
return
[
join_common_model
,
update_model
]
def
create_dataset
(
use_var_list
,
my_filelist
):
dataset
=
fluid
.
DatasetFactory
().
create_dataset
(
config
.
dataset_type
)
dataset
.
set_batch_size
(
config
.
batch_size
)
dataset
.
set_thread
(
config
.
thread_num
)
dataset
.
set_hdfs_config
(
config
.
fs_name
,
config
.
fs_ugi
)
dataset
.
set_pipe_command
(
config
.
pipe_command
)
dataset
.
set_filelist
(
my_filelist
)
dataset
.
set_use_var
(
use_var_list
)
return
dataset
def
hdfs_ls
(
path
):
configs
=
{
"fs.default.name"
:
config
.
fs_name
,
"hadoop.job.ugi"
:
config
.
fs_ugi
}
hdfs_client
=
HDFSClient
(
"$HADOOP_HOME"
,
configs
)
filelist
=
[]
for
i
in
path
:
cur_path
=
hdfs_client
.
ls
(
i
)
if
config
.
fs_name
.
startswith
(
"hdfs:"
):
cur_path
=
[
"hdfs:"
+
j
for
j
in
cur_path
]
elif
config
.
fs_name
.
startswith
(
"afs:"
):
cur_path
=
[
"afs:"
+
j
for
j
in
cur_path
]
filelist
+=
cur_path
return
filelist
def
get_avg_cost_mins
(
value
):
t1
=
time
.
time
()
local_cost
=
np
.
array
([
value
])
global_cost
=
np
.
copy
(
local_cost
)
*
0
t2
=
time
.
time
()
fleet
.
_role_maker
.
_node_type_comm
.
Allreduce
(
local_cost
,
global_cost
)
t3
=
time
.
time
()
avg_cost
=
float
(
global_cost
[
0
])
/
fleet
.
worker_num
()
avg_cost
/=
60.0
t4
=
time
.
time
()
tc
=
(
t2
-
t1
+
t4
-
t3
)
/
60.0
tb
=
(
t3
-
t2
)
/
60.0
fleet_util
.
rank0_print
(
"get_avg_cost_mins calc time %s barrier time %s"
%
(
tc
,
tb
))
return
avg_cost
def
get_max_cost_mins
(
value
):
from
mpi4py
import
MPI
local_cost
=
np
.
array
([
value
])
global_cost
=
np
.
copy
(
local_cost
)
*
0
fleet
.
_role_maker
.
_node_type_comm
.
Allreduce
(
local_cost
,
global_cost
,
op
=
MPI
.
MAX
)
fleet_util
.
rank0_print
(
"max train time %s mins"
%
(
float
(
global_cost
[
0
])
/
60.0
))
def
get_min_cost_mins
(
value
):
from
mpi4py
import
MPI
local_cost
=
np
.
array
([
value
])
global_cost
=
np
.
copy
(
local_cost
)
*
0
fleet
.
_role_maker
.
_node_type_comm
.
Allreduce
(
local_cost
,
global_cost
,
op
=
MPI
.
MIN
)
fleet_util
.
rank0_print
(
"min train time %s mins"
%
(
float
(
global_cost
[
0
])
/
60.0
))
def
get_data_max
(
value
):
from
mpi4py
import
MPI
local_cost
=
np
.
array
([
value
])
global_cost
=
np
.
copy
(
local_cost
)
*
0
fleet
.
_role_maker
.
_node_type_comm
.
Allreduce
(
local_cost
,
global_cost
,
op
=
MPI
.
MAX
)
fleet_util
.
rank0_print
(
"data size max %s"
%
global_cost
[
0
])
def
get_data_min
(
value
):
from
mpi4py
import
MPI
local_cost
=
np
.
array
([
value
])
global_cost
=
np
.
copy
(
local_cost
)
*
0
fleet
.
_role_maker
.
_node_type_comm
.
Allreduce
(
local_cost
,
global_cost
,
op
=
MPI
.
MIN
)
fleet_util
.
rank0_print
(
"data size min %s"
%
global_cost
[
0
])
def
clear_metrics
(
fleet_util
,
model
,
scope
):
fleet_util
.
set_zero
(
model
.
stat_pos
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
stat_neg
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
batch_stat_pos
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
batch_stat_neg
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
abserr
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
sqrerr
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
prob
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
q
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
pos
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
total
.
name
,
scope
,
param_type
=
"float32"
)
def
clear_metrics_2
(
fleet_util
,
model
,
scope
):
fleet_util
.
set_zero
(
model
.
join_stat_pos
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
join_stat_neg
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
join_batch_stat_pos
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
join_batch_stat_neg
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
join_abserr
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
join_sqrerr
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
join_prob
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
join_q
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
join_pos
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
join_total
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
common_stat_pos
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
common_stat_neg
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
common_batch_stat_pos
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
common_batch_stat_neg
.
name
,
scope
)
fleet_util
.
set_zero
(
model
.
common_abserr
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
common_sqrerr
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
common_prob
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
common_q
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
common_pos
.
name
,
scope
,
param_type
=
"float32"
)
fleet_util
.
set_zero
(
model
.
common_total
.
name
,
scope
,
param_type
=
"float32"
)
def
save_delta
(
day
,
pass_index
,
xbox_base_key
,
cur_path
,
exe
,
scope_join
,
scope_common
,
scope_update
,
join_model
,
join_common_model
,
update_model
,
join_save_params
,
common_save_params
,
update_save_params
):
fleet_util
.
rank0_print
(
"begin save delta model"
)
begin
=
time
.
time
()
if
pass_index
==
-
1
:
fleet_util
.
save_xbox_base_model
(
config
.
output_path
,
day
)
else
:
fleet_util
.
save_delta_model
(
config
.
output_path
,
day
,
pass_index
)
end
=
time
.
time
()
fleet_util
.
save_paddle_params
(
exe
,
scope_join
,
join_model
.
_train_program
,
"paddle_dense.model.0"
,
config
.
output_path
,
day
,
pass_index
,
config
.
fs_name
,
config
.
fs_ugi
,
var_names
=
join_save_params
)
fleet_util
.
save_paddle_params
(
exe
,
scope_common
,
join_common_model
.
_train_program
,
"paddle_dense.model.1"
,
config
.
output_path
,
day
,
pass_index
,
config
.
fs_name
,
config
.
fs_ugi
,
var_names
=
common_save_params
)
fleet_util
.
save_paddle_params
(
exe
,
scope_update
,
update_model
.
_train_program
,
"paddle_dense.model.2"
,
config
.
output_path
,
day
,
pass_index
,
config
.
fs_name
,
config
.
fs_ugi
,
var_names
=
update_save_params
)
fleet_util
.
rank0_print
(
"end save delta cost %s min"
%
((
end
-
begin
)
/
60.0
))
fleet_util
.
rank0_print
(
"begin save cache"
)
begin
=
time
.
time
()
if
pass_index
==
-
1
:
key_num
=
fleet_util
.
save_cache_base_model
(
config
.
output_path
,
day
)
else
:
key_num
=
fleet_util
.
save_cache_model
(
config
.
output_path
,
day
,
pass_index
)
fleet_util
.
write_cache_donefile
(
config
.
output_path
,
day
,
pass_index
,
key_num
,
config
.
fs_name
,
config
.
fs_ugi
)
end
=
time
.
time
()
fleet_util
.
rank0_print
(
"end save cache cost %s min, key_num=%s"
%
((
end
-
begin
)
/
60.0
,
key_num
))
fleet_util
.
write_xbox_donefile
(
config
.
output_path
,
day
,
pass_index
,
xbox_base_key
,
","
.
join
(
cur_path
),
config
.
fs_name
,
config
.
fs_ugi
)
if __name__ == "__main__":
    place = fluid.CPUPlace()
    exe = fluid.Executor(place)
    fleet.init(exe)

    slot_file = "slot/slot"
    slot_common_file = "slot/slot_common"
    all_slot_file = "all_slot.dict"
    join_common_model, update_model = create_model(slot_file,
                                                   slot_common_file,
                                                   all_slot_file)
    scope2 = fluid.Scope()
    scope3 = fluid.Scope()

    adjust_ins_weight = {"need_adjust": True,
                         "nid_slot": "6002",
                         "nid_adjw_threshold": 1000,
                         "nid_adjw_ratio": 20,
                         "ins_weight_slot": update_model.ins_weight.name}
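    # adjust_ins_weight boosts the weight of instances whose news id (slot
    # 6002) has been shown fewer than nid_adjw_threshold times, so fresh
    # items influence training more. The exact formula is applied inside the
    # downpour worker; a plausible sketch of the idea (illustrative only,
    # not the worker's actual code):
    #
    #     import math
    #     def adjusted_weight(nid_show, threshold=1000, ratio=20):
    #         if nid_show < 0 or nid_show >= threshold:
    #             return 1.0
    #         return max(1.0, math.log(math.e + (threshold - nid_show)
    #                                  / float(threshold) * ratio))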
    # collect the per-thread metric vars of all three networks so the
    # trainer threads can accumulate them
    thread_stat_var_names = []
    thread_stat_var_names.append(join_common_model.join_stat_pos.name)
    thread_stat_var_names.append(join_common_model.join_stat_neg.name)
    thread_stat_var_names.append(join_common_model.join_sqrerr.name)
    thread_stat_var_names.append(join_common_model.join_abserr.name)
    thread_stat_var_names.append(join_common_model.join_prob.name)
    thread_stat_var_names.append(join_common_model.join_q.name)
    thread_stat_var_names.append(join_common_model.join_pos.name)
    thread_stat_var_names.append(join_common_model.join_total.name)
    thread_stat_var_names.append(join_common_model.common_stat_pos.name)
    thread_stat_var_names.append(join_common_model.common_stat_neg.name)
    thread_stat_var_names.append(join_common_model.common_sqrerr.name)
    thread_stat_var_names.append(join_common_model.common_abserr.name)
    thread_stat_var_names.append(join_common_model.common_prob.name)
    thread_stat_var_names.append(join_common_model.common_q.name)
    thread_stat_var_names.append(join_common_model.common_pos.name)
    thread_stat_var_names.append(join_common_model.common_total.name)
    thread_stat_var_names.append(update_model.stat_pos.name)
    thread_stat_var_names.append(update_model.stat_neg.name)
    thread_stat_var_names.append(update_model.sqrerr.name)
    thread_stat_var_names.append(update_model.abserr.name)
    thread_stat_var_names.append(update_model.prob.name)
    thread_stat_var_names.append(update_model.q.name)
    thread_stat_var_names.append(update_model.pos.name)
    thread_stat_var_names.append(update_model.total.name)
    thread_stat_var_names = list(set(thread_stat_var_names))
    adam = fluid.optimizer.Adam(learning_rate=0.000005)
    adam = fleet.distributed_optimizer(
        adam,
        strategy={"use_cvm": True,
                  "adjust_ins_weight": adjust_ins_weight,
                  "scale_datanorm": 1e-4,
                  "dump_slot": True,
                  "stat_var_names": thread_stat_var_names,
                  "fleet_desc_file": "fleet_desc_combinejoincommon.prototxt"})
    adam.minimize([join_common_model.joint_cost, update_model.avg_cost],
                  [scope2, scope3])
    join_common_model._train_program._fleet_opt["program_configs"][
        str(id(join_common_model.joint_cost.block.program))]["push_sparse"] = []
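    # Clearing "push_sparse" for the join/common program makes its workers
    # pull the shared sparse embeddings without pushing sparse gradients
    # back; presumably only the update program is meant to update the
    # sparse table, so the combined pass does not double-push.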
    join_save_params = ["join.batch_size", "join.batch_sum",
                        "join.batch_square_sum",
                        "join_0.w_0", "join_0.b_0", "join_1.w_0", "join_1.b_0",
                        "join_2.w_0", "join_2.b_0", "join_3.w_0", "join_3.b_0",
                        "join_4.w_0", "join_4.b_0", "join_5.w_0", "join_5.b_0",
                        "join_6.w_0", "join_6.b_0", "join_7.w_0", "join_7.b_0"]
    common_save_params = ["common.batch_size", "common.batch_sum",
                          "common.batch_square_sum",
                          "common_0.w_0", "common_0.b_0",
                          "common_1.w_0", "common_1.b_0",
                          "common_2.w_0", "common_2.b_0",
                          "common_3.w_0", "common_3.b_0",
                          "common_4.w_0", "common_4.b_0",
                          "common_5.w_0", "common_5.b_0",
                          "common_6.w_0", "common_6.b_0",
                          "common_7.w_0", "common_7.b_0"]
    update_save_params = ["fc_0.w_0", "fc_0.b_0", "fc_1.w_0", "fc_1.b_0",
                          "fc_2.w_0", "fc_2.b_0", "fc_3.w_0", "fc_3.b_0",
                          "fc_4.w_0", "fc_4.b_0", "fc_5.w_0", "fc_5.b_0"]
    if fleet.is_server():
        fleet.run_server()
    elif fleet.is_worker():
        with fluid.scope_guard(scope3):
            exe.run(update_model._startup_program)
        with fluid.scope_guard(scope2):
            exe.run(join_common_model._startup_program)
        fleet.init_worker()

        configs = {"fs.default.name": config.fs_name,
                   "hadoop.job.ugi": config.fs_ugi}
        hdfs_client = HDFSClient("$HADOOP_HOME", configs)
        save_first_base = config.save_first_base
        path = config.train_data_path
        online_pass_interval = fleet_util.get_online_pass_interval(
            config.days, config.hours, config.split_interval,
            config.split_per_pass, False)
        pass_per_day = len(online_pass_interval)
        last_day, last_pass, last_path, xbox_base_key = \
            fleet_util.get_last_save_model(config.output_path,
                                           config.fs_name, config.fs_ugi)
        reqi = True if last_day != -1 else False
        dataset = None
        next_dataset = None
        cur_path = None
        next_path = None
        start_train = False
        # config.days/hours hold shell-expandable strings, so expand them
        # through echo before splitting
        days = os.popen("echo -n " + config.days).read().split(" ")
        hours = os.popen("echo -n " + config.hours).read().split(" ")
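        # online_pass_interval groups each day's time splits into passes.
        # For example, with split_interval=5 (minutes), split_per_pass=2 and
        # hours "0 .. 23", it would look like (illustrative):
        #
        #     [["0000", "0005"], ["0010", "0015"], ..., ["2350", "2355"]]
        #
        # and pass k reads the directories <path>/<day>/<interval> for every
        # interval in online_pass_interval[k - 1].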
        for day_index in range(len(days)):
            day = days[day_index]
            if last_day != -1 and int(day) < last_day:
                continue
            for pass_index in range(1, pass_per_day + 1):
                dataset = next_dataset
                next_dataset = None
                cur_path = next_path
                next_path = None
                if (last_day != -1 and int(day) == last_day) and \
                        (last_pass != -1 and int(pass_index) < last_pass):
                    continue
                if reqi:
                    begin = time.time()
                    fleet_util.rank0_print("going to load model %s" % last_path)
                    # fleet_util.load_fleet_model(last_path)
                    # fleet.load_one_table(0, last_path)
                    # tmppath = "afs:/user/feed/mlarch/sequence_generator/wuzhihua02/xujiaqi/test_combinejoincommon_0921_72/new_model"
                    # "afs:/user/feed/mlarch/sequence_generator/wuzhihua02/xujiaqi/test_combinejoincommon_0920_108/new_model"
                    # "afs:/user/feed/mlarch/sequence_generator/wuzhihua02/xujiaqi/test_combinejoincommon_0915/new_model"
                    # fleet.load_one_table(1, tmppath)
                    # fleet.load_one_table(2, tmppath)
                    # fleet.load_one_table(3, tmppath)
                    end = time.time()
                    fleet_util.rank0_print("load model cost %s min" % ((end - begin) / 60.0))
                    reqi = False
                if (last_day != -1 and int(day) == last_day) and \
                        (last_pass != -1 and int(pass_index) == last_pass):
                    continue
                fleet_util.rank0_print(
                    "===========going to train day/pass %s/%s===========" %
                    (day, pass_index))
                if save_first_base:
                    fleet_util.rank0_print("save_first_base=True")
                    save_first_base = False
                    last_base_day, last_base_path, tmp_xbox_base_key = \
                        fleet_util.get_last_save_xbox_base(config.output_path,
                                                           config.fs_name,
                                                           config.fs_ugi)
                    if int(day) > last_base_day:
                        fleet_util.rank0_print("going to save xbox base model")
                        xbox_base_key = int(time.time())
                        cur = []
                        for interval in online_pass_interval[pass_index - 1]:
                            for p in path:
                                cur.append(p + "/" + day + "/" + interval)
                        save_delta(day, -1, xbox_base_key, cur, exe,
                                   scope2, scope2, scope3,
                                   join_common_model, join_common_model,
                                   update_model, join_save_params,
                                   common_save_params, update_save_params)
                    elif int(day) == last_base_day:
                        xbox_base_key = tmp_xbox_base_key
                        fleet_util.rank0_print("xbox base model exists")
                    else:
                        fleet_util.rank0_print("xbox base model exists")
                start_train = True
                train_begin = time.time()
                if dataset is not None:
                    begin = time.time()
                    dataset.wait_preload_done()
                    end = time.time()
                    fleet_util.rank0_print("wait data preload done cost %s min" % ((end - begin) / 60.0))
                if dataset is None:
                    cur_pass = online_pass_interval[pass_index - 1]
                    cur_path = []
                    for interval in cur_pass:
                        for p in path:
                            cur_path.append(p + "/" + day + "/" + interval)
                    fleet_util.rank0_print("data path: " + ",".join(cur_path))
                    #for i in cur_path:
                    #    while not hdfs_client.is_exist(i + "/to.hadoop.done"):
                    #        fleet_util.rank0_print("wait for data ready: %s" % i)
                    #        time.sleep(config.check_exist_seconds)
                    my_filelist = ["part-00000_1"]  # fleet.split_files(hdfs_ls(cur_path))
                    dataset = create_dataset(join_common_model._all_slots, my_filelist)
                    fleet_util.rank0_print("going to load into memory")
                    begin = time.time()
                    dataset.load_into_memory()
                    end = time.time()
                    fleet_util.rank0_print("load into memory done, cost %s min" % ((end - begin) / 60.0))
                if config.prefetch and (pass_index < pass_per_day or
                        pass_index == pass_per_day and day_index < len(days) - 1):
                    if pass_index < pass_per_day:
                        next_pass = online_pass_interval[pass_index]
                        next_day = day
                    else:
                        next_pass = online_pass_interval[0]
                        next_day = days[day_index + 1]
                    next_path = []
                    for interval in next_pass:
                        for p in path:
                            next_path.append(p + "/" + next_day + "/" + interval)
                    next_data_ready = True
                    #for i in next_path:
                    #    if not hdfs_client.is_exist(i + "/to.hadoop.done"):
                    #        next_data_ready = False
                    #        fleet_util.rank0_print("next data not ready: %s" % i)
                    if not next_data_ready:
                        next_dataset = None
                    else:
                        my_filelist = ["part-00000_1"]  # fleet.split_files(hdfs_ls(next_path))
                        next_dataset = create_dataset(join_common_model._all_slots, my_filelist)
                        fleet_util.rank0_print("next pass data preload %s " % ",".join(next_path))
                        next_dataset.preload_into_memory(config.preload_thread)
                fleet_util.rank0_print("going to global shuffle")
                begin = time.time()
                dataset.global_shuffle(fleet, config.shuffle_thread)
                end = time.time()
                fleet_util.rank0_print(
                    "global shuffle done, cost %s min, data size %s" %
                    ((end - begin) / 60.0, dataset.get_shuffle_data_size(fleet)))
                get_data_max(dataset.get_shuffle_data_size())
                get_data_min(dataset.get_shuffle_data_size())
                join_cost = 0
                common_cost = 0
                update_cost = 0
                with fluid.scope_guard(scope2):
                    fleet_util.rank0_print("Begin join + common pass")
                    begin = time.time()
                    exe.train_from_dataset(join_common_model._train_program,
                                           dataset, scope2,
                                           thread=config.join_common_thread,
                                           debug=False)
                    end = time.time()
                    avg_cost = get_avg_cost_mins(end - begin)
                    fleet_util.rank0_print("avg train time %s mins" % avg_cost)
                    get_max_cost_mins(end - begin)
                    get_min_cost_mins(end - begin)
                    common_cost = avg_cost
                    fleet_util.print_global_metrics(
                        scope2,
                        join_common_model.join_stat_pos.name,
                        join_common_model.join_stat_neg.name,
                        join_common_model.join_sqrerr.name,
                        join_common_model.join_abserr.name,
                        join_common_model.join_prob.name,
                        join_common_model.join_q.name,
                        join_common_model.join_pos.name,
                        join_common_model.join_total.name,
                        "join pass:")
                    fleet_util.print_global_metrics(
                        scope2,
                        join_common_model.common_stat_pos.name,
                        join_common_model.common_stat_neg.name,
                        join_common_model.common_sqrerr.name,
                        join_common_model.common_abserr.name,
                        join_common_model.common_prob.name,
                        join_common_model.common_q.name,
                        join_common_model.common_pos.name,
                        join_common_model.common_total.name,
                        "common pass:")
                    fleet_util.rank0_print("End join+common pass")
                    clear_metrics_2(fleet_util, join_common_model, scope2)
                if config.save_xbox_before_update and \
                        pass_index % config.save_delta_frequency == 0:
                    fleet_util.rank0_print("going to save delta model")
                    last_xbox_day, last_xbox_pass, last_xbox_path, _ = \
                        fleet_util.get_last_save_xbox(config.output_path,
                                                      config.fs_name,
                                                      config.fs_ugi)
                    if int(day) < last_xbox_day or int(day) == last_xbox_day \
                            and int(pass_index) <= last_xbox_pass:
                        fleet_util.rank0_print("delta model exists")
                    else:
                        save_delta(day, pass_index, xbox_base_key, cur_path,
                                   exe, scope2, scope2, scope3,
                                   join_common_model, join_common_model,
                                   update_model, join_save_params,
                                   common_save_params, update_save_params)
                with fluid.scope_guard(scope3):
                    fleet_util.rank0_print("Begin update pass")
                    begin = time.time()
                    exe.train_from_dataset(update_model._train_program,
                                           dataset, scope3,
                                           thread=config.update_thread,
                                           debug=False)
                    end = time.time()
                    avg_cost = get_avg_cost_mins(end - begin)
                    update_cost = avg_cost
                    fleet_util.print_global_metrics(
                        scope3,
                        update_model.stat_pos.name,
                        update_model.stat_neg.name,
                        update_model.sqrerr.name,
                        update_model.abserr.name,
                        update_model.prob.name,
                        update_model.q.name,
                        update_model.pos.name,
                        update_model.total.name,
                        "update pass:")
                    fleet_util.rank0_print("End update pass")
                    clear_metrics(fleet_util, update_model, scope3)
                begin = time.time()
                dataset.release_memory()
                end = time.time()
                print(pass_index)
                print(config.checkpoint_per_pass)
                if (pass_index % config.checkpoint_per_pass) == 0 and \
                        pass_index != pass_per_day:
                    print("save")
                    begin = time.time()
                    fleet_util.save_model(config.output_path, day, pass_index)
                    fleet_util.write_model_donefile(config.output_path, day,
                                                    pass_index, xbox_base_key,
                                                    config.fs_name,
                                                    config.fs_ugi)
                    end = time.time()
                    fleet_util.rank0_print("save model cost %s min" % ((end - begin) / 60.0))
                if not config.save_xbox_before_update and \
                        pass_index % config.save_delta_frequency == 0:
                    fleet_util.rank0_print("going to save delta model")
                    last_xbox_day, last_xbox_pass, last_xbox_path, _ = \
                        fleet_util.get_last_save_xbox(config.output_path,
                                                      config.fs_name,
                                                      config.fs_ugi)
                    if int(day) < last_xbox_day or int(day) == last_xbox_day \
                            and int(pass_index) <= last_xbox_pass:
                        fleet_util.rank0_print("delta model exists")
                    else:
                        save_delta(day, pass_index, xbox_base_key, cur_path,
                                   exe, scope2, scope2, scope3,
                                   join_common_model, join_common_model,
                                   update_model, join_save_params,
                                   common_save_params, update_save_params)
                train_end = time.time()
                train_cost = (train_end - train_begin) / 60.0
                other_cost = train_cost - join_cost - common_cost - update_cost
                # join_cost stays 0 here: the join and common networks are
                # trained in one combined pass whose time is recorded in
                # common_cost, so the [join:...] field below always prints 0
                fleet_util.rank0_print(
                    "finished train day %s pass %s time cost:%s min job time cost"
                    ":[join:%s min][join_common:%s min][update:%s min][other:%s min]"
                    % (day, pass_index, train_cost, join_cost, common_cost,
                       update_cost, other_cost))
            xbox_base_key = int(time.time())
            if not start_train:
                continue
            fleet_util.rank0_print("shrink table")
            begin = time.time()
            fleet.shrink_sparse_table()
            fleet.shrink_dense_table(0.98, scope=scope2, table_id=1)
            fleet.shrink_dense_table(0.98, scope=scope2, table_id=2)
            fleet.shrink_dense_table(0.98, scope=scope3, table_id=3)
            end = time.time()
            fleet_util.rank0_print("shrink table done, cost %s min" % ((end - begin) / 60.0))
            fleet_util.rank0_print("going to save batch model/base xbox model")
            last_base_day, last_base_path, _ = \
                fleet_util.get_last_save_xbox_base(config.output_path,
                                                   config.fs_name,
                                                   config.fs_ugi)
            # note: this assumes another day follows in config.days; on the
            # final configured day the index below runs past the list
            nextday = int(days[day_index + 1])
            if nextday <= last_base_day:
                fleet_util.rank0_print("batch model/base xbox model exists")
            else:
                save_delta(nextday, -1, xbox_base_key, cur_path, exe,
                           scope2, scope2, scope3,
                           join_common_model, join_common_model, update_model,
                           join_save_params, common_save_params,
                           update_save_params)
                begin = time.time()
                fleet_util.save_batch_model(config.output_path, nextday)
                fleet_util.write_model_donefile(config.output_path, nextday,
                                                -1, xbox_base_key,
                                                config.fs_name, config.fs_ugi)
                end = time.time()
                fleet_util.rank0_print("save batch model cost %s min" % ((end - begin) / 60.0))
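A note on the pass schedule: fleet_util.get_online_pass_interval slices each
training day into fixed-length time splits and groups them into passes. The
sketch below is a simplified, stand-alone reimplementation of that bucketing
as it is used above (minute-level "HHMM" directories, hourly placement
disabled); it illustrates the layout only and is not the library's exact code:

    def pass_intervals(hours, split_interval, split_per_pass):
        """Group the trained hours into passes of HHMM time splits."""
        splits = []
        for h in range(int(hours[0]), int(hours[-1]) + 1):
            for m in range(0, 60, split_interval):
                splits.append("%02d%02d" % (h, m))
        return [splits[i:i + split_per_pass]
                for i in range(0, len(splits), split_per_pass)]

    # pass_intervals(["0", "23"], 5, 2)[0] -> ["0000", "0005"]
    # len(pass_intervals(["0", "23"], 5, 2)) -> 144, i.e. pass_per_day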
feed_deploy/news_jingpai/package/my_nets/util.bak.py
import paddle
import paddle.fluid as fluid
from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet
import os
import numpy as np
import config


def jingpai_load_paddle_model(old_startup_program_bin,
                              old_train_program_bin,
                              old_model_path,
                              old_slot_list,
                              new_slot_list,
                              model_all_vars,
                              new_scope,
                              modify_layer_names):
    place = fluid.CPUPlace()
    exe = fluid.Executor(place)
    old_scope = fluid.Scope()
    old_program = fluid.Program()
    old_program = old_program.parse_from_string(
        open(old_train_program_bin, "rb").read())
    old_startup_program = fluid.Program()
    old_startup_program = old_startup_program.parse_from_string(
        open(old_startup_program_bin, "rb").read())
    with fluid.scope_guard(old_scope):
        exe.run(old_startup_program)
        variables = [old_program.global_block().var(i) for i in model_all_vars]
        if os.path.isfile(old_model_path):
            path = os.path.dirname(old_model_path)
            path = "./" if path == "" else path
            filename = os.path.basename(old_model_path)
            fluid.io.load_vars(exe, path, old_program, vars=variables,
                               filename=filename)
        else:
            fluid.io.load_vars(exe, old_model_path, old_program,
                               vars=variables)
    old_pos = {}
    idx = 0
    for i in old_slot_list:
        old_pos[i] = idx
        idx += 1
    for i in modify_layer_names:
        if old_scope.find_var(i) is None:
            print("%s not found in old scope, skip" % i)
            continue
        elif new_scope.find_var(i) is None:
            print("%s not found in new scope, skip" % i)
            continue
        old_param = old_scope.var(i).get_tensor()
        old_param_array = np.array(old_param).astype("float32")
        old_shape = old_param_array.shape
        #print i," old_shape ", old_shape
        new_param = new_scope.var(i).get_tensor()
        new_param_array = np.array(new_param).astype("float32")
        new_shape = new_param_array.shape
        #print i," new_shape ", new_shape
        # rows (or values) per slot; floor division keeps this an int under
        # Python 3 as well
        per_dim = len(new_param_array) // len(new_slot_list)
        #print "len(new_param_array) ",len(new_param_array),\
        #    "len(new_slot_list) ", len(new_slot_list)," per_dim ", per_dim
        idx = -per_dim
        for s in new_slot_list:
            idx += per_dim
            if old_pos.get(s) is None:
                continue
            for j in range(0, per_dim):
                #print i," row/value ", idx + j, " copy from ", old_pos[s] * per_dim + j
                # a row or a value
                new_param_array[idx + j] = \
                    old_param_array[old_pos[s] * per_dim + j]
        new_param.set(new_param_array, place)
    for i in model_all_vars:
        if i in modify_layer_names:
            continue
        old_param = old_scope.find_var(i).get_tensor()
        old_param_array = np.array(old_param).astype("float32")
        new_param = new_scope.find_var(i).get_tensor()
        new_param.set(old_param_array, place)
def reqi_changeslot(hdfs_dnn_plugin_path, join_save_params,
                    common_save_params, update_save_params, scope2, scope3):
    if fleet.worker_index() != 0:
        return
    print("load paddle model %s" % hdfs_dnn_plugin_path)
    os.system("rm -rf dnn_plugin/ ; hadoop fs -D hadoop.job.ugi=%s "
              "-D fs.default.name=%s -get %s ."
              % (config.fs_ugi, config.fs_name, hdfs_dnn_plugin_path))
    new_join_slot = []
    for line in open("slot/slot", 'r'):
        slot = line.strip()
        new_join_slot.append(slot)
    old_join_slot = []
    for line in open("old_slot/slot", 'r'):
        slot = line.strip()
        old_join_slot.append(slot)
    new_common_slot = []
    for line in open("slot/slot_common", 'r'):
        slot = line.strip()
        new_common_slot.append(slot)
    old_common_slot = []
    for line in open("old_slot/slot_common", 'r'):
        slot = line.strip()
        old_common_slot.append(slot)

    jingpai_load_paddle_model("old_program/old_join_common_startup_program.bin",
                              "old_program/old_join_common_train_program.bin",
                              "dnn_plugin/paddle_dense.model.0",
                              old_join_slot,
                              new_join_slot,
                              join_save_params,
                              scope2,
                              ["join.batch_size", "join.batch_sum",
                               "join.batch_square_sum", "join_0.w_0"])
    jingpai_load_paddle_model("old_program/old_join_common_startup_program.bin",
                              "old_program/old_join_common_train_program.bin",
                              "dnn_plugin/paddle_dense.model.1",
                              old_common_slot,
                              new_common_slot,
                              common_save_params,
                              scope2,
                              ["common.batch_size", "common.batch_sum",
                               "common.batch_square_sum", "common_0.w_0"])
    jingpai_load_paddle_model("old_program/old_update_startup_program.bin",
                              "old_program/old_update_main_program.bin",
                              "dnn_plugin/paddle_dense.model.2",
                              old_join_slot,
                              new_join_slot,
                              update_save_params,
                              scope3,
                              ["fc_0.w_0"])
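The per-slot row copy in jingpai_load_paddle_model is easiest to see on a toy
example. The sketch below mimics the remapping for a first layer whose rows
are laid out slot by slot (slot names and sizes here are invented for
illustration):

    import numpy as np

    old_slots = ["101", "102", "103"]
    new_slots = ["102", "104", "101"]   # reordered, slot 104 is new
    per_dim = 2                         # rows per slot in this layer
    old_w = np.arange(len(old_slots) * per_dim, dtype="float32")

    new_w = np.zeros(len(new_slots) * per_dim, dtype="float32")
    old_pos = {s: i for i, s in enumerate(old_slots)}
    for new_idx, s in enumerate(new_slots):
        if s not in old_pos:            # new slots keep their fresh init
            continue
        for j in range(per_dim):
            new_w[new_idx * per_dim + j] = old_w[old_pos[s] * per_dim + j]

Rows of slots present in both lists are copied across; rows of brand-new
slots keep whatever the new startup program initialized them to.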
feed_deploy/news_jingpai/package/my_nets/util.py
import paddle
import paddle.fluid as fluid
from paddle.fluid.incubate.fleet.parameter_server.pslib import fleet
import os
import numpy as np
import config
from paddle.fluid.incubate.fleet.utils.fleet_util import FleetUtil
from paddle.fluid.incubate.fleet.utils.hdfs import HDFSClient
import collections
import json
import time

fleet_util = FleetUtil()


def print_global_metrics(scope, stat_pos_name, stat_neg_name, sqrerr_name,
                         abserr_name, prob_name, q_name, pos_ins_num_name,
                         total_ins_num_name, print_prefix):
    auc, bucket_error, mae, rmse, actual_ctr, predicted_ctr, copc, \
        mean_predict_qvalue, total_ins_num = fleet_util.get_global_metrics(
            scope, stat_pos_name, stat_neg_name, sqrerr_name, abserr_name,
            prob_name, q_name, pos_ins_num_name, total_ins_num_name)
    log_str = "AUC=%.6f BUCKET_ERROR=%.6f MAE=%.6f " \
              "RMSE=%.6f Actual_CTR=%.6f Predicted_CTR=%.6f " \
              "COPC=%.6f MEAN Q_VALUE=%.6f Ins number=%s" % \
              (auc, bucket_error, mae, rmse, actual_ctr, predicted_ctr,
               copc, mean_predict_qvalue, total_ins_num)
    fleet_util.rank0_print(print_prefix + " " + log_str)
    return print_prefix + " " + log_str
    #print_prefix + "\n " + log_str
def write_stdout(stdout_str):
    if fleet.worker_index() != 0:
        fleet._role_maker._barrier_worker()
        return
    hadoop_home = "$HADOOP_HOME"
    configs = {"fs.default.name": config.fs_name,
               "hadoop.job.ugi": config.fs_ugi}
    client = HDFSClient(hadoop_home, configs)
    out_dir = config.output_path + "/stdout/"
    if not client.is_exist(out_dir):
        client.makedirs(out_dir)
    job_id_with_host = os.popen("echo -n ${JOB_ID}").read().strip()
    instance_id = os.popen("echo -n ${INSTANCE_ID}").read().strip()
    start_pos = instance_id.find(job_id_with_host)
    end_pos = instance_id.find("--")
    if start_pos != -1 and end_pos != -1:
        job_id_with_host = instance_id[start_pos:end_pos]
    file_path = out_dir + job_id_with_host
    if client.is_file(file_path):
        pre_content = client.cat(file_path)
        with open(job_id_with_host, "w") as f:
            f.write(pre_content + "\n")
            f.write(stdout_str + "\n")
        client.delete(file_path)
        client.upload(out_dir, job_id_with_host, multi_processes=1,
                      overwrite=False)
    else:
        with open(job_id_with_host, "w") as f:
            f.write(stdout_str + "\n")
        client.upload(out_dir, job_id_with_host, multi_processes=1,
                      overwrite=False)
    fleet_util.rank0_info("write %s succeed" % file_path)
    fleet._role_maker._barrier_worker()
def _get_xbox_str(day, model_path, xbox_base_key, data_path, monitor_data,
                  mode="patch"):
    xbox_dict = collections.OrderedDict()
    if mode == "base":
        xbox_dict["id"] = str(xbox_base_key)
    elif mode == "patch":
        xbox_dict["id"] = str(int(time.time()))
    else:
        print("warning: unknown mode %s, set it to patch" % mode)
        mode = "patch"
        xbox_dict["id"] = str(int(time.time()))
    xbox_dict["key"] = str(xbox_base_key)
    if model_path.startswith("hdfs:") or model_path.startswith("afs:"):
        model_path = model_path[model_path.find(":") + 1:]
    xbox_dict["input"] = config.fs_name + model_path.rstrip("/") + "/000"
    xbox_dict["record_count"] = "111111"
    xbox_dict["partition_type"] = "2"
    xbox_dict["job_name"] = "default_job_name"
    xbox_dict["ins_tag"] = "feasign"
    xbox_dict["ins_path"] = data_path
    job_id_with_host = os.popen("echo -n ${JOB_ID}").read().strip()
    instance_id = os.popen("echo -n ${INSTANCE_ID}").read().strip()
    start_pos = instance_id.find(job_id_with_host)
    end_pos = instance_id.find("--")
    if start_pos != -1 and end_pos != -1:
        job_id_with_host = instance_id[start_pos:end_pos]
    xbox_dict["job_id"] = job_id_with_host
    xbox_dict["monitor_data"] = monitor_data
    xbox_dict["monitor_path"] = config.output_path.rstrip("/") + "/monitor/" \
                                + day + ".txt"
    xbox_dict["mpi_size"] = str(fleet.worker_num())
    return json.dumps(xbox_dict)
def write_xbox_donefile(day, pass_id, xbox_base_key, data_path,
                        donefile_name=None, monitor_data=""):
    if fleet.worker_index() != 0:
        fleet._role_maker._barrier_worker()
        return
    day = str(day)
    pass_id = str(pass_id)
    xbox_base_key = int(xbox_base_key)
    mode = None
    if pass_id != "-1":
        mode = "patch"
        suffix_name = "/%s/delta-%s/" % (day, pass_id)
        model_path = config.output_path.rstrip("/") + suffix_name
        if donefile_name is None:
            donefile_name = "xbox_patch_done.txt"
    else:
        mode = "base"
        suffix_name = "/%s/base/" % day
        model_path = config.output_path.rstrip("/") + suffix_name
        if donefile_name is None:
            donefile_name = "xbox_base_done.txt"
    if isinstance(data_path, list):
        data_path = ",".join(data_path)
    if fleet.worker_index() == 0:
        donefile_path = config.output_path + "/" + donefile_name
        xbox_str = _get_xbox_str(day, model_path, xbox_base_key, data_path,
                                 monitor_data, mode)
        configs = {"fs.default.name": config.fs_name,
                   "hadoop.job.ugi": config.fs_ugi}
        client = HDFSClient("$HADOOP_HOME", configs)
        if client.is_file(donefile_path):
            pre_content = client.cat(donefile_path)
            last_dict = json.loads(pre_content.split("\n")[-1])
            last_day = last_dict["input"].split("/")[-3]
            last_pass = last_dict["input"].split("/")[-2].split("-")[-1]
            exist = False
            if int(day) < int(last_day) or \
                    int(day) == int(last_day) and \
                    int(pass_id) <= int(last_pass):
                exist = True
            if not exist:
                with open(donefile_name, "w") as f:
                    f.write(pre_content + "\n")
                    f.write(xbox_str + "\n")
                client.delete(donefile_path)
                client.upload(config.output_path, donefile_name,
                              multi_processes=1, overwrite=False)
                fleet_util.rank0_info("write %s/%s %s succeed" %
                                      (day, pass_id, donefile_name))
            else:
                fleet_util.rank0_error("not write %s because %s/%s already "
                                       "exists" % (donefile_name, day, pass_id))
        else:
            with open(donefile_name, "w") as f:
                f.write(xbox_str + "\n")
            client.upload(config.output_path, donefile_name,
                          multi_processes=1, overwrite=False)
            fleet_util.rank0_error("write %s/%s %s succeed" %
                                   (day, pass_id, donefile_name))
    fleet._role_maker._barrier_worker()
def jingpai_load_paddle_model(old_startup_program_bin,
                              old_train_program_bin,
                              old_model_path,
                              old_slot_list,
                              new_slot_list,
                              model_all_vars,
                              new_scope,
                              modify_layer_names):
    place = fluid.CPUPlace()
    exe = fluid.Executor(place)
    old_scope = fluid.Scope()
    old_program = fluid.Program()
    old_program = old_program.parse_from_string(
        open(old_train_program_bin, "rb").read())
    old_startup_program = fluid.Program()
    old_startup_program = old_startup_program.parse_from_string(
        open(old_startup_program_bin, "rb").read())
    with fluid.scope_guard(old_scope):
        exe.run(old_startup_program)
        variables = [old_program.global_block().var(i) for i in model_all_vars]
        if os.path.isfile(old_model_path):
            path = os.path.dirname(old_model_path)
            path = "./" if path == "" else path
            filename = os.path.basename(old_model_path)
            fluid.io.load_vars(exe, path, old_program, vars=variables,
                               filename=filename)
        else:
            fluid.io.load_vars(exe, old_model_path, old_program,
                               vars=variables)
    old_pos = {}
    idx = 0
    for i in old_slot_list:
        old_pos[i] = idx
        idx += 1
    for i in modify_layer_names:
        if old_scope.find_var(i) is None:
            print("%s not found in old scope, skip" % i)
            continue
        elif new_scope.find_var(i) is None:
            print("%s not found in new scope, skip" % i)
            continue
        old_param = old_scope.var(i).get_tensor()
        old_param_array = np.array(old_param).astype("float32")
        old_shape = old_param_array.shape
        #print i," old_shape ", old_shape
        new_param = new_scope.var(i).get_tensor()
        new_param_array = np.array(new_param).astype("float32")
        new_shape = new_param_array.shape
        #print i," new_shape ", new_shape
        # rows (or values) per slot; floor division keeps this an int under
        # Python 3 as well
        per_dim = len(new_param_array) // len(new_slot_list)
        #print "len(new_param_array) ",len(new_param_array),\
        #    "len(new_slot_list) ", len(new_slot_list)," per_dim ", per_dim
        idx = -per_dim
        for s in new_slot_list:
            idx += per_dim
            if old_pos.get(s) is None:
                continue
            for j in range(0, per_dim):
                #print i," row/value ", idx + j, " copy from ", old_pos[s] * per_dim + j
                # a row or a value
                new_param_array[idx + j] = \
                    old_param_array[old_pos[s] * per_dim + j]
        new_param.set(new_param_array, place)
    for i in model_all_vars:
        if i in modify_layer_names:
            continue
        old_param = old_scope.find_var(i).get_tensor()
        old_param_array = np.array(old_param).astype("float32")
        new_param = new_scope.find_var(i).get_tensor()
        new_param.set(old_param_array, place)
def reqi_changeslot(hdfs_dnn_plugin_path, join_save_params,
                    common_save_params, update_save_params, scope2, scope3):
    if fleet.worker_index() != 0:
        return
    print("load paddle model %s" % hdfs_dnn_plugin_path)
    os.system("rm -rf dnn_plugin/ ; hadoop fs -D hadoop.job.ugi=%s "
              "-D fs.default.name=%s -get %s ."
              % (config.fs_ugi, config.fs_name, hdfs_dnn_plugin_path))
    new_join_slot = []
    for line in open("slot/slot", 'r'):
        slot = line.strip()
        new_join_slot.append(slot)
    old_join_slot = []
    for line in open("old_slot/slot", 'r'):
        slot = line.strip()
        old_join_slot.append(slot)
    new_common_slot = []
    for line in open("slot/slot_common", 'r'):
        slot = line.strip()
        new_common_slot.append(slot)
    old_common_slot = []
    for line in open("old_slot/slot_common", 'r'):
        slot = line.strip()
        old_common_slot.append(slot)

    jingpai_load_paddle_model("old_program/old_join_common_startup_program.bin",
                              "old_program/old_join_common_train_program.bin",
                              "dnn_plugin/paddle_dense.model.0",
                              old_join_slot,
                              new_join_slot,
                              join_save_params,
                              scope2,
                              ["join.batch_size", "join.batch_sum",
                               "join.batch_square_sum", "join_0.w_0"])
    jingpai_load_paddle_model("old_program/old_join_common_startup_program.bin",
                              "old_program/old_join_common_train_program.bin",
                              "dnn_plugin/paddle_dense.model.1",
                              old_common_slot,
                              new_common_slot,
                              common_save_params,
                              scope2,
                              ["common.batch_size", "common.batch_sum",
                               "common.batch_square_sum", "common_0.w_0"])
    jingpai_load_paddle_model("old_program/old_update_startup_program.bin",
                              "old_program/old_update_main_program.bin",
                              "dnn_plugin/paddle_dense.model.2",
                              old_join_slot,
                              new_join_slot,
                              update_save_params,
                              scope3,
                              ["fc_0.w_0"])
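Each call to write_xbox_donefile appends one JSON line built by
_get_xbox_str. An illustrative patch-mode line, with every value a
placeholder rather than a real path or id, would look like:

    {"id": "1574150400", "key": "1574121600",
     "input": "afs://fs.example:9902/user/feed/model/20191119/delta-6/000",
     "record_count": "111111", "partition_type": "2",
     "job_name": "default_job_name", "ins_tag": "feasign",
     "ins_path": "/data/20191119/0000,/data/20191119/0005",
     "job_id": "job-123-host", "monitor_data": "",
     "monitor_path": "/user/feed/model/monitor/20191119.txt",
     "mpi_size": "100"}

The dedup logic above parses day and pass back out of the "input" field
(third- and second-to-last path components), which is why the suffix layout
"/<day>/delta-<pass>/" must stay stable.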
feed_deploy/news_jingpai/qsub_f.conf
SERVER=yq01-hpc-lvliang01-smart-master.dmop.baidu.com
QUEUE=feed5
PRIORITY=very_high
USE_FLAGS_ADVRES=yes
feed_deploy/news_jingpai/run.sh
#!/bin/sh
source ~/.bashrc
# Author: Wu.ZG
# Created Time : 2017-08-14 21:31:56
# File Name: guard.sh
# Version:
# Description:
# Last modified: 2018-01-29 11:00:42
set -x

SLEEP=10
HOST=`hostname`
WORKROOT=${PWD}
RUN_SCRIPT="${WORKROOT}/submit.sh"
#ALARM="./alarm.sh"

# phone numbers of people on duty; $email is expected to come from the
# environment
on_duty=(
# RD
# OP
# QA
15101120768
)

function alarm()
{
    content=$1
    for phone_num in ${on_duty[@]}; do
        echo ${phone_num} ${content}
        gsmsend -s emp01.baidu.com:15001 "${phone_num}"@"$1"
    done
    echo "$1" | mail -s "$1" $email
}

pid=$$
echo ${pid} > pid

if [ ! -d "./log" ]; then
    mkdir log
fi

while [ 1 ]
do
    sh ${RUN_SCRIPT} > log/"`date +"%Y%m%d_%H%M%S"`".log
    RET=$?
    #source ${ALARM}
    if [ ${RET} -ne 0 ]; then
        content="`date +"%Y%m%d %H:%M:%S "` Job fail. Exit ${RET}. From ${HOST}:${WORKROOT}. Pid=${pid}"
        echo "${content}"
        alarm "${content}"
    else
        content="`date +"%Y%m%d %H:%M:%S "` Job finish. From ${HOST}:${WORKROOT}. Pid=${pid}"
        echo "${content}"
        alarm "${content}"
        break
    fi
    sleep ${SLEEP}
done

echo "`date +"%Y%m%d %H:%M:%S "` guard exit."
feed_deploy/news_jingpai/submit.sh
#!/bin/bash
source ./package/my_nets/config.py

rm -r tmp/*
mkdir tmp
cd tmp
mkdir ./package
cp -r ../package/python ./package
cp -r ../package/my_nets/* ./package
cp ../qsub_f.conf ./
cp ../job.sh ./
cp ../job.sh ./package

if [ "a${sparse_table_storage}" = "assd" ]; then
    # fixed: the s command was missing its closing delimiter
    sed -i 's/DownpourSparseTable/DownpourSparseSSDTable/' ./package/my_nets/reqi_fleet_desc
fi

current=`date "+%Y-%m-%d %H:%M:%S"`
timeStamp=`date -d "$current" +%s`
output_path=${output_path#*:}
hdfs_output=${output_path}/$timeStamp

export HADOOP_HOME="${local_hadoop_home}"
MPI_NODE_MEM=${node_memory}

echo "SERVER=${mpi_server}" > qsub_f.conf
echo "QUEUE=${mpi_queue}" >> qsub_f.conf
echo "PRIORITY=${mpi_priority}" >> qsub_f.conf
echo "USE_FLAGS_ADVRES=yes" >> qsub_f.conf

if [ "a${sparse_table_storage}" = "assd" ]; then
    ${smart_client_home}/bin/qsub_f \
        -N $task_name \
        --conf ./qsub_f.conf \
        --hdfs $fs_name \
        --ugi $fs_ugi \
        --hout $hdfs_output \
        --am-type smart_am \
        --files ./package \
        --workspace /home/ssd1/normandy/maybach \
        -l nodes=$nodes,walltime=1000:00:00,pmem-hard=$MPI_NODE_MEM,pcpu-soft=280,pnetin-soft=1000,pnetout-soft=1000 ./job.sh
else
    ${smart_client_home}/bin/qsub_f \
        -N $task_name \
        --conf ./qsub_f.conf \
        --hdfs $fs_name \
        --ugi $fs_ugi \
        --hout $hdfs_output \
        --am-type smart_am \
        --files ./package \
        -l nodes=$nodes,walltime=1000:00:00,pmem-hard=$MPI_NODE_MEM,pcpu-soft=280,pnetin-soft=1000,pnetout-soft=1000 ./job.sh
fi
paddle/fluid/feed/CMakeLists.txt
add_subdirectory(src)
add_subdirectory(pybind)
add_subdirectory(tool)
paddle/fluid/feed/tool/CMakeLists.txt
add_executable(parse_feasign parse_feasign.cpp)
paddle/fluid/feed/tool/parse_feasign.cpp
#include <stdlib.h>
#include <stdio.h>
#include <unordered_map>
#include <fstream>
#include <iostream>
#include <vector>

using namespace std;

// Convert feasign lines produced by the extractor into paddle instances.
int main(int argc, char* argv[]) {
    // argv[1] lists the slot ids to keep, one per line, in output order
    ifstream fin(argv[1]);
    int slot_idx = 0;
    unordered_map<int, int> slot_map;
    int slot = 0;
    while (fin >> slot) {
        slot_map[slot] = slot_idx++;
    }
    int slot_num = slot_map.size();
    int max_feasign_num = 10000;
    vector<vector<unsigned long> > slots;
    for (int i = 0; i < slot_num; ++i) {
        vector<unsigned long> tmp;
        tmp.reserve(max_feasign_num);
        slots.push_back(tmp);
    }
    char* linebuf = (char*)calloc(1024 * 1024 * 40, sizeof(char));
    if (NULL == linebuf) {
        fprintf(stderr, "memory not enough, exit\n");
        exit(-1);
    }
    int click = 0;
    int show = 0;
    unsigned long feasign = 0;
    int i = 0;
    while (fgets(linebuf, 1024 * 1024 * 40, stdin)) {
        // skip the line id, then read show and click
        char* head_ptr = linebuf;
        for (i = 0; *(head_ptr + i) != ' '; ++i);
        head_ptr += i + 1;
        show = strtoul(head_ptr, &head_ptr, 10);
        click = strtoul(head_ptr, &head_ptr, 10);
        // collect "feasign:slot" pairs, bucketed by slot
        int feasign_num = 0;
        while (head_ptr != NULL) {
            feasign = strtoul(head_ptr, &head_ptr, 10);
            if (head_ptr != NULL && *head_ptr == ':') {
                head_ptr++;
                slot = strtoul(head_ptr, &head_ptr, 10);
                feasign_num++;
                if (slot_map.find(slot) == slot_map.end()) {
                    continue;
                }
                slots[slot_map[slot]].push_back(feasign);
            } else {
                break;
            }
        }
        // optional trailing "$tag" and "*weight" markers
        int tag = 0;
        float weight = 1;
        bool has_tag = false;
        bool has_weight = false;
        for (int j = 0; *(head_ptr + j) != '\0'; ++j) {
            if (*(head_ptr + j) == '$') {
                has_tag = true;
            } else if (*(head_ptr + j) == '*') {
                has_weight = true;
            }
        }
        if (has_tag) {
            for (i = 0; *(head_ptr + i) != '\0' && *(head_ptr + i) != '$'; ++i);
            if (*(head_ptr + i) != '\0') {  // fixed: original compared the pointer itself to '\0'
                head_ptr += i + 1;
                if (*head_ptr == 'D') {
                    tag = 0;
                    head_ptr += 1;
                } else {
                    tag = strtoul(head_ptr, &head_ptr, 10);
                }
            }
        }
        if (has_weight) {
            for (i = 0; *(head_ptr + i) != '\0' && *(head_ptr + i) != '*'; ++i);
            if (*(head_ptr + i) != '\0') {  // fixed: same pointer-vs-char comparison
                head_ptr += i + 1;
                weight = strtod(head_ptr, &head_ptr);
            }
        }
        fprintf(stdout, "1 %d 1 %d", show, click);
        // the final two slots are emitted below as the weight and tag fields
        for (size_t i = 0; i < slots.size() - 2; ++i) {
            if (slots[i].size() == 0) {
                fprintf(stdout, " 1 0");
            } else {
                fprintf(stdout, " %lu", slots[i].size());
                for (size_t j = 0; j < slots[i].size(); ++j) {
                    fprintf(stdout, " %lu", slots[i][j]);
                }
            }
            slots[i].clear();
            slots[i].reserve(max_feasign_num);
        }
        if (weight == 1.0) {
            fprintf(stdout, " 1 %d 1 %d\n", int(weight), tag);
        } else {
            fprintf(stdout, " 1 %f 1 %d\n", weight, tag);
        }
    }
}
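For reference, the converter reads the extractor's text format on stdin
("<lineid> <show> <click> <feasign>:<slot> ... [$tag] [*weight]") and writes
one Paddle instance per line. The Python sketch below models the core of the
transformation; it is illustrative only and omits the tag/weight parsing and
the special handling of the final two slots:

    def convert(line, slot_list):
        toks = line.split()
        show, click = toks[1], toks[2]
        slots = {s: [] for s in slot_list}
        for t in toks[3:]:
            if ":" not in t:
                break
            fea, slot = t.split(":")
            if slot in slots:
                slots[slot].append(fea)
        out = ["1", show, "1", click]
        for s in slot_list:
            # empty slots are padded with a single 0 feasign
            out += ["1", "0"] if not slots[s] else [str(len(slots[s]))] + slots[s]
        return " ".join(out)

    # convert("id1 3 1 777:101 888:102", ["101", "102"])
    # -> "1 3 1 1 1 777 1 888"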