BaiXuePrincess / PaddleRec (forked from PaddlePaddle / PaddleRec)
Commit a09255fb, authored May 28, 2020 by xjqbest

fix

Parent: 37a77dcd

Showing 5 changed files with 67 additions and 41 deletions:

  core/model.py                       +22  -10
  core/trainers/single_infer.py       +12  -10
  core/trainers/single_trainer.py     +20  -17
  core/utils/dataloader_instance.py   +12   -4
  models/rank/dnn/config.yaml          +1   -0
core/model.py

@@ -59,11 +59,17 @@ class Model(object):
                 dataset = i
                 break
         name = "dataset." + dataset["name"] + "."
-        sparse_slots = envs.get_global_env(name + "sparse_slots")
-        dense_slots = envs.get_global_env(name + "dense_slots")
-        if sparse_slots is not None or dense_slots is not None:
-            sparse_slots = sparse_slots.strip().split(" ")
-            dense_slots = dense_slots.strip().split(" ")
+        sparse_slots = envs.get_global_env(name + "sparse_slots", "").strip()
+        dense_slots = envs.get_global_env(name + "dense_slots", "").strip()
+        if sparse_slots != "" or dense_slots != "":
+            if sparse_slots == "":
+                sparse_slots = []
+            else:
+                sparse_slots = sparse_slots.strip().split(" ")
+            if dense_slots == "":
+                dense_slots = []
+            else:
+                dense_slots = dense_slots.strip().split(" ")
             dense_slots_shape = [[
                 int(j) for j in i.split(":")[1].strip("[]").split(",")
             ] for i in dense_slots]

@@ -151,11 +157,17 @@ class Model(object):
     def input_data(self, is_infer=False, **kwargs):
         name = "dataset." + kwargs.get("dataset_name") + "."
-        sparse_slots = envs.get_global_env(name + "sparse_slots")
-        dense_slots = envs.get_global_env(name + "dense_slots")
-        if sparse_slots is not None or dense_slots is not None:
-            sparse_slots = sparse_slots.strip().split(" ")
-            dense_slots = dense_slots.strip().split(" ")
+        sparse_slots = envs.get_global_env(name + "sparse_slots", "").strip()
+        dense_slots = envs.get_global_env(name + "dense_slots", "").strip()
+        if sparse_slots != "" or dense_slots != "":
+            if sparse_slots == "":
+                sparse_slots = []
+            else:
+                sparse_slots = sparse_slots.strip().split(" ")
+            if dense_slots == "":
+                dense_slots = []
+            else:
+                dense_slots = dense_slots.strip().split(" ")
             dense_slots_shape = [[
                 int(j) for j in i.split(":")[1].strip("[]").split(",")
             ] for i in dense_slots]
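The change in core/model.py replaces the `is not None` checks with defaulted lookups: `envs.get_global_env(key, "")` returns an empty string when a slot key is missing, so a dataset that configures only sparse or only dense slots no longer hits `.strip()` on None. A minimal sketch of the pattern follows; the config dict and `get_global_env` stub are illustrative stand-ins, not PaddleRec's implementation:

# Sketch of the defaulted slot lookup; _CONFIG and get_global_env below
# are illustrative stand-ins, not PaddleRec code.
_CONFIG = {"dataset.train.sparse_slots": "click 1 2 3"}  # no dense_slots key

def get_global_env(key, default=None):
    # Stand-in for envs.get_global_env: missing keys fall back to default.
    return _CONFIG.get(key, default)

name = "dataset." + "train" + "."
sparse_slots = get_global_env(name + "sparse_slots", "").strip()
dense_slots = get_global_env(name + "dense_slots", "").strip()

if sparse_slots != "" or dense_slots != "":
    # An empty string means "not configured"; map it to an empty list
    # instead of splitting None as the old code effectively did.
    sparse_slots = sparse_slots.split(" ") if sparse_slots else []
    dense_slots = dense_slots.split(" ") if dense_slots else []

print(sparse_slots)  # ['click', '1', '2', '3']
print(dense_slots)   # []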
core/trainers/single_infer.py

@@ -67,15 +67,14 @@ class SingleInfer(TranspileTrainer):
     def _get_dataset(self, dataset_name):
         name = "dataset." + dataset_name + "."
-        sparse_slots = envs.get_global_env(name + "sparse_slots")
-        dense_slots = envs.get_global_env(name + "dense_slots")
         thread_num = envs.get_global_env(name + "thread_num")
         batch_size = envs.get_global_env(name + "batch_size")
         reader_class = envs.get_global_env(name + "data_converter")
         abs_dir = os.path.dirname(os.path.abspath(__file__))
         reader = os.path.join(abs_dir, '../utils', 'dataset_instance.py')
-        if sparse_slots is None and dense_slots is None:
+        sparse_slots = envs.get_global_env(name + "sparse_slots", "").strip()
+        dense_slots = envs.get_global_env(name + "dense_slots", "").strip()
+        if sparse_slots == "" and dense_slots == "":
             pipe_cmd = "python {} {} {} {}".format(reader, reader_class,
                                                    "TRAIN", self._config_yaml)
         else:

@@ -107,13 +106,13 @@ class SingleInfer(TranspileTrainer):
     def _get_dataloader(self, dataset_name, dataloader):
         name = "dataset." + dataset_name + "."
-        sparse_slots = envs.get_global_env(name + "sparse_slots")
-        dense_slots = envs.get_global_env(name + "dense_slots")
         thread_num = envs.get_global_env(name + "thread_num")
         batch_size = envs.get_global_env(name + "batch_size")
         reader_class = envs.get_global_env(name + "data_converter")
         abs_dir = os.path.dirname(os.path.abspath(__file__))
-        if sparse_slots is None and dense_slots is None:
+        sparse_slots = envs.get_global_env(name + "sparse_slots", "").strip()
+        dense_slots = envs.get_global_env(name + "dense_slots", "").strip()
+        if sparse_slots == "" and dense_slots == "":
             reader = dataloader_instance.dataloader_by_name(
                 reader_class, dataset_name, self._config_yaml)
             reader_class = envs.lazy_instance_by_fliename(reader_class,

@@ -228,7 +227,9 @@ class SingleInfer(TranspileTrainer):
         model_class = self._model[model_name][3]
         fetch_vars = []
         fetch_alias = []
-        fetch_period = 20
+        fetch_period = int(
+            envs.get_global_env("runner." + self._runner_name +
+                                ".fetch_period", 20))
         metrics = model_class.get_infer_results()
         if metrics:
             fetch_vars = metrics.values()

@@ -251,14 +252,15 @@ class SingleInfer(TranspileTrainer):
             program = self._model[model_name][0].clone()
         fetch_vars = []
         fetch_alias = []
-        fetch_period = 20
         metrics = model_class.get_infer_results()
         if metrics:
             fetch_vars = metrics.values()
             fetch_alias = metrics.keys()
         metrics_varnames = []
         metrics_format = []
-        fetch_period = 20
+        fetch_period = int(
+            envs.get_global_env("runner." + self._runner_name +
+                                ".fetch_period", 20))
         metrics_format.append("{}: {{}}".format("batch"))
         for name, var in metrics.items():
             metrics_varnames.append(var.name)
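Both trainers stop hard-coding `fetch_period = 20` and instead read it from the active runner's config section, keeping 20 as the fallback. A self-contained sketch of that lookup; the config dict and `get_global_env` stub are illustrative, not PaddleRec code:

# Sketch of the runner-scoped fetch_period lookup with a default of 20.
_CONFIG = {"runner.runner2.fetch_period": "10"}  # illustrative config

def get_global_env(key, default=None):
    # Stand-in for envs.get_global_env.
    return _CONFIG.get(key, default)

runner_name = "runner2"
fetch_period = int(
    get_global_env("runner." + runner_name + ".fetch_period", 20))
print(fetch_period)  # 10; would be 20 if the key were absent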
core/trainers/single_trainer.py

@@ -61,21 +61,20 @@ class SingleTrainer(TranspileTrainer):
     def _get_dataset(self, dataset_name):
         name = "dataset." + dataset_name + "."
-        sparse_slots = envs.get_global_env(name + "sparse_slots")
-        dense_slots = envs.get_global_env(name + "dense_slots")
         thread_num = envs.get_global_env(name + "thread_num")
         batch_size = envs.get_global_env(name + "batch_size")
         reader_class = envs.get_global_env(name + "data_converter")
         abs_dir = os.path.dirname(os.path.abspath(__file__))
         reader = os.path.join(abs_dir, '../utils', 'dataset_instance.py')
-        if sparse_slots is None and dense_slots is None:
+        sparse_slots = envs.get_global_env(name + "sparse_slots", "").strip()
+        dense_slots = envs.get_global_env(name + "dense_slots", "").strip()
+        if sparse_slots != "" and dense_slots != "":
             pipe_cmd = "python {} {} {} {}".format(reader, reader_class,
                                                    "TRAIN", self._config_yaml)
         else:
-            if sparse_slots is None:
+            if sparse_slots == "":
                 sparse_slots = "#"
-            if dense_slots is None:
+            if dense_slots == "":
                 dense_slots = "#"
             padding = envs.get_global_env(name + "padding", 0)
             pipe_cmd = "python {} {} {} {} {} {} {} {}".format(

@@ -101,13 +100,13 @@ class SingleTrainer(TranspileTrainer):
     def _get_dataloader(self, dataset_name, dataloader):
         name = "dataset." + dataset_name + "."
-        sparse_slots = envs.get_global_env(name + "sparse_slots")
-        dense_slots = envs.get_global_env(name + "dense_slots")
+        sparse_slots = envs.get_global_env(name + "sparse_slots", "").strip()
+        dense_slots = envs.get_global_env(name + "dense_slots", "").strip()
         thread_num = envs.get_global_env(name + "thread_num")
         batch_size = envs.get_global_env(name + "batch_size")
         reader_class = envs.get_global_env(name + "data_converter")
         abs_dir = os.path.dirname(os.path.abspath(__file__))
-        if sparse_slots is None and dense_slots is None:
+        if sparse_slots == "" and dense_slots == "":
             reader = dataloader_instance.dataloader_by_name(
                 reader_class, dataset_name, self._config_yaml)
             reader_class = envs.lazy_instance_by_fliename(reader_class,

@@ -125,8 +124,8 @@ class SingleTrainer(TranspileTrainer):
     def _create_dataset(self, dataset_name):
         name = "dataset." + dataset_name + "."
-        sparse_slots = envs.get_global_env(name + "sparse_slots")
-        dense_slots = envs.get_global_env(name + "dense_slots")
+        sparse_slots = envs.get_global_env(name + "sparse_slots", "").strip()
+        dense_slots = envs.get_global_env(name + "dense_slots", "").strip()
         thread_num = envs.get_global_env(name + "thread_num")
         batch_size = envs.get_global_env(name + "batch_size")
         type_name = envs.get_global_env(name + "type")

@@ -225,7 +224,9 @@ class SingleTrainer(TranspileTrainer):
         model_class = self._model[model_name][3]
         fetch_vars = []
         fetch_alias = []
-        fetch_period = 20
+        fetch_period = int(
+            envs.get_global_env("runner." + self._runner_name +
+                                ".fetch_period", 20))
         metrics = model_class.get_metrics()
         if metrics:
             fetch_vars = metrics.values()

@@ -250,14 +251,15 @@ class SingleTrainer(TranspileTrainer):
                 loss_name=model_class.get_avg_cost().name)
         fetch_vars = []
         fetch_alias = []
-        fetch_period = 20
+        fetch_period = int(
+            envs.get_global_env("runner." + self._runner_name +
+                                ".fetch_period", 20))
         metrics = model_class.get_metrics()
         if metrics:
             fetch_vars = metrics.values()
             fetch_alias = metrics.keys()
         metrics_varnames = []
         metrics_format = []
-        fetch_period = 20
         metrics_format.append("{}: {{}}".format("batch"))
         for name, var in metrics.items():
             metrics_varnames.append(var.name)

@@ -312,10 +314,11 @@ class SingleTrainer(TranspileTrainer):
         if not need_save(epoch_id, save_interval, False):
             return
         feed_varnames = envs.get_global_env(
-            name + "save_inference_feed_varnames", None)
+            name + "save_inference_feed_varnames", [])
         fetch_varnames = envs.get_global_env(
-            name + "save_inference_fetch_varnames", None)
-        if feed_varnames is None or fetch_varnames is None or feed_varnames == "":
+            name + "save_inference_fetch_varnames", [])
+        if feed_varnames is None or fetch_varnames is None or feed_varnames == "" or fetch_varnames == "" or \
+                len(feed_varnames) == 0 or len(fetch_varnames) == 0:
             return
         fetch_vars = [
             fluid.default_main_program().global_block().vars[varname]
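The widened guard in the save-inference path treats None, the empty string, and an empty list all as "not configured", which matters now that the defaults are [] rather than None. A hedged sketch of the combined condition; the helper name and sample varnames are illustrative:

# Sketch of the widened "skip saving inference model" guard.
# should_skip is an illustrative helper, not a PaddleRec function.
def should_skip(feed_varnames, fetch_varnames):
    # Short-circuiting keeps len() from ever seeing None.
    return (feed_varnames is None or fetch_varnames is None or
            feed_varnames == "" or fetch_varnames == "" or
            len(feed_varnames) == 0 or len(fetch_varnames) == 0)

print(should_skip([], ["ctr"]))            # True: feed list empty
print(should_skip(["dnn_data"], []))       # True: fetch list empty
print(should_skip(["dnn_data"], ["ctr"]))  # False: both configured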
core/utils/dataloader_instance.py

@@ -68,8 +68,12 @@ def slotdataloader_by_name(readerclass, dataset_name, yaml_file):
         data_path = os.path.join(package_base, data_path.split("::")[1])
     files = [str(data_path) + "/%s" % x for x in os.listdir(data_path)]
-    sparse = get_global_env(name + "sparse_slots")
-    dense = get_global_env(name + "dense_slots")
+    sparse = get_global_env(name + "sparse_slots", "#")
+    if sparse == "":
+        sparse = "#"
+    dense = get_global_env(name + "dense_slots", "#")
+    if dense == "":
+        dense = "#"
     padding = get_global_env(name + "padding", 0)
     reader = SlotReader(yaml_file)
     reader.init(sparse, dense, int(padding))

@@ -158,8 +162,12 @@ def slotdataloader(readerclass, train, yaml_file):
     files = [str(data_path) + "/%s" % x for x in os.listdir(data_path)]
-    sparse = get_global_env("sparse_slots", None, namespace)
-    dense = get_global_env("dense_slots", None, namespace)
+    sparse = get_global_env("sparse_slots", "#", namespace)
+    if sparse == "":
+        sparse = "#"
+    dense = get_global_env("dense_slots", "#", namespace)
+    if dense == "":
+        dense = "#"
     padding = get_global_env("padding", 0, namespace)
     reader = SlotReader(yaml_file)
     reader.init(sparse, dense, int(padding))
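In the dataloader helpers, the sentinel "#" now stands for "no slots of this kind", both as the lookup default and as the replacement for an explicitly empty string, so SlotReader.init always receives a non-empty marker. A small sketch of the normalization; `normalize_slots` is an illustrative helper, not a PaddleRec function:

# Sketch of the "#" sentinel normalization applied before SlotReader.init.
def normalize_slots(value):
    # A missing key (None) or an empty string both mean "no slots": "#".
    if value is None or value == "":
        return "#"
    return value

print(normalize_slots(None))         # '#'
print(normalize_slots(""))           # '#'
print(normalize_slots("click 1 2"))  # 'click 1 2'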
models/rank/dnn/config.yaml

@@ -62,6 +62,7 @@ runner:
   save_inference_feed_varnames: [] # feed vars of save inference
   save_inference_fetch_varnames: [] # fetch vars of save inference
   init_model_path: "" # load model path
+  fetch_period: 10
 - name: runner2
   class: single_infer
   # num of epochs
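With this line, the example config sets fetch_period: 10 explicitly, so the trainer fetches and prints metrics every 10 batches instead of falling back to the hard-coded 20. A toy loop showing how such a period typically gates logging; the loop itself is illustrative, not trainer code:

# Illustrative use of a fetch_period setting: log metrics every N batches.
fetch_period = 10  # as set in models/rank/dnn/config.yaml

for batch_id in range(35):
    if batch_id != 0 and batch_id % fetch_period == 0:
        print("batch: {}".format(batch_id))  # fetched metrics would go here
# Prints at batches 10, 20, and 30.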