BaiXuePrincess / PaddleRec (forked from PaddlePaddle / PaddleRec)
Commit 99d466a1
Authored Jun 11, 2020 by tangwei
fix windows adapter
Parent: 211f7e38
Showing 4 changed files with 19 additions and 31 deletions (+19 -31)
core/model.py                          +3  -26
core/trainers/framework/network.py     +10 -4
core/trainers/framework/runner.py      +1  -1
core/utils/envs.py                     +5  -0
core/model.py
@@ -88,7 +88,7 @@ class ModelBase(object):
                 self._data_var.append(l)
                 self._sparse_data_var.append(l)
 
-        dataset_class = dataset["type"]
+        dataset_class = envs.get_global_env(name + "type")
         if dataset_class == "DataLoader":
             self._init_dataloader()
 
@@ -204,31 +204,8 @@ class ModelBase(object):
     def net(self, is_infer=False):
         return None
 
-    def _construct_reader(self, is_infer=False):
-        if is_infer:
-            self._infer_data_loader = fluid.io.DataLoader.from_generator(
-                feed_list=self._infer_data_var,
-                capacity=64,
-                use_double_buffer=False,
-                iterable=False)
-        else:
-            dataset_class = envs.get_global_env("dataset_class", None,
-                                                "train.reader")
-            if dataset_class == "DataLoader":
-                self._data_loader = fluid.io.DataLoader.from_generator(
-                    feed_list=self._data_var,
-                    capacity=64,
-                    use_double_buffer=False,
-                    iterable=False)
-
     def train_net(self):
-        input_data = self.input_data(is_infer=False)
-        self._data_var = input_data
-        self._construct_reader(is_infer=False)
-        self.net(input_data, is_infer=False)
+        pass
 
     def infer_net(self):
-        input_data = self.input_data(is_infer=True)
-        self._infer_data_var = input_data
-        self._construct_reader(is_infer=True)
-        self.net(input_data, is_infer=True)
+        pass
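
In core/model.py the commit strips reader construction out of ModelBase: _construct_reader is removed and train_net/infer_net become empty stubs, while the dataset type is now resolved through envs.get_global_env(name + "type") instead of being read straight from the dataset config. Below is a minimal sketch of that lookup-through-global-env pattern; set_global_envs and get_global_env here are simplified stand-ins for PaddleRec's envs module, not its real implementation.

# Simplified stand-ins for envs.set_global_envs / envs.get_global_env,
# only to show why reading the type via the global env matters.
global_envs = {}

def set_global_envs(config):
    # Flatten {"dataset": [{"name": ..., "type": ...}]} into dotted keys
    # such as "dataset.dataset_train.type".
    for dataset in config.get("dataset", []):
        for key, value in dataset.items():
            global_envs["dataset." + dataset["name"] + "." + key] = value

def get_global_env(name, default=None):
    return global_envs.get(name, default)

config = {"dataset": [{"name": "dataset_train", "type": "QueueDataset"}]}
set_global_envs(config)
# The model asks the global env for the type instead of reading
# dataset["type"] directly, so a platform adapter can override it centrally.
print(get_global_env("dataset.dataset_train.type"))  # -> "QueueDataset"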
core/trainers/framework/network.py
@@ -97,7 +97,8 @@ class SingleNetwork(NetworkBase):
         context["dataset"] = {}
         for dataset in context["env"]["dataset"]:
-            if dataset["type"] != "DataLoader":
+            type = envs.get_global_env("dataset." + dataset["name"] + ".type")
+            if type != "DataLoader":
                 dataset_class = QueueDataset(context)
                 context["dataset"][dataset[
                     "name"]] = dataset_class.create_dataset(dataset["name"],
@@ -155,7 +156,9 @@ class PSNetwork(NetworkBase):
             context["fleet"].init_worker()
             context["dataset"] = {}
             for dataset in context["env"]["dataset"]:
-                if dataset["type"] != "DataLoader":
+                type = envs.get_global_env("dataset." + dataset["name"] +
+                                           ".type")
+                if type != "DataLoader":
                     dataset_class = QueueDataset(context)
                     context["dataset"][dataset[
                         "name"]] = dataset_class.create_dataset(
@@ -248,7 +251,9 @@ class PslibNetwork(NetworkBase):
         else:
             context["dataset"] = {}
             for dataset in context["env"]["dataset"]:
-                if dataset["type"] != "DataLoader":
+                type = envs.get_global_env("dataset." + dataset["name"] +
+                                           ".type")
+                if type != "DataLoader":
                     dataset_class = QueueDataset(context)
                     context["dataset"][dataset[
                         "name"]] = dataset_class.create_dataset(
@@ -312,7 +317,8 @@ class CollectiveNetwork(NetworkBase):
         context["dataset"] = {}
         for dataset in context["env"]["dataset"]:
-            if dataset["type"] != "DataLoader":
+            type = envs.get_global_env("dataset." + dataset["name"] + ".type")
+            if type != "DataLoader":
                 dataset_class = QueueDataset(context)
                 context["dataset"][dataset[
                     "name"]] = dataset_class.create_dataset(dataset["name"],
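
All four network classes (SingleNetwork, PSNetwork, PslibNetwork, CollectiveNetwork) get the same substitution: instead of branching on the raw YAML field dataset["type"], each asks envs.get_global_env("dataset." + dataset["name"] + ".type"), so the platform override installed in core/utils/envs.py (next file) decides whether a QueueDataset is built. A short sketch of that shared selection logic, with get_global_env and the QueueDataset factory passed in as stand-ins:

def build_datasets(context, get_global_env, make_queue_dataset):
    # Mirrors the loop shared by the four network classes above.
    context["dataset"] = {}
    for dataset in context["env"]["dataset"]:
        # Ask the (possibly rewritten) global env rather than the raw config.
        dataset_type = get_global_env("dataset." + dataset["name"] + ".type")
        if dataset_type != "DataLoader":
            # Only the QueueDataset path needs a concrete dataset object here;
            # DataLoader-based input is wired up elsewhere.
            context["dataset"][dataset["name"]] = make_queue_dataset(dataset["name"])

# Tiny usage with stand-ins:
ctx = {"env": {"dataset": [{"name": "dataset_train"}]}}
build_datasets(ctx,
               get_global_env=lambda key: "QueueDataset",
               make_queue_dataset=lambda name: "<dataset:" + name + ">")
print(ctx["dataset"])  # {'dataset_train': '<dataset:dataset_train>'}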
core/trainers/framework/runner.py
@@ -155,7 +155,7 @@ class RunnerBase(object):
             gradient_scale_strategy = fluid.BuildStrategy.GradientScaleStrategy.Customized
         else:
             raise ValueError(
-                "Unsurpported config. gradient_scale_strategy must be one of [0, 1, 2]."
+                "Unsupported config. gradient_scale_strategy must be one of [0, 1, 2]."
             )
         _build_strategy.gradient_scale_strategy = gradient_scale_strategy
core/utils/envs.py
@@ -96,6 +96,11 @@ def set_global_envs(envs):
             value = os_path_adapter(workspace_adapter(value))
             global_envs[name] = value
 
+    if get_platform() != "LINUX":
+        for dataset in envs["dataset"]:
+            name = ".".join("dataset", dataset["name"], "type")
+            global_envs[name] = "DataLoader"
+
 
 def get_global_env(env_name, default_value=None, namespace=None):
     """
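
The envs.py hunk is the Windows adapter itself: on any platform other than Linux, set_global_envs forces every dataset's type to "DataLoader" in the flattened global env, which the lookups above then pick up, presumably because the QueueDataset path is only available on Linux. A self-contained sketch of that effect; get_platform here is a hypothetical stand-in built on the standard library, not PaddleRec's own helper.

import platform

def get_platform():
    # Stand-in: returns "LINUX", "WINDOWS" or "DARWIN", in the spirit of
    # envs.get_platform().
    return platform.system().upper()

# Flattened global env as produced earlier by set_global_envs.
global_envs = {"dataset.dataset_train.type": "QueueDataset"}
envs_config = {"dataset": [{"name": "dataset_train", "type": "QueueDataset"}]}

if get_platform() != "LINUX":
    for dataset in envs_config["dataset"]:
        # Build the dotted key; joining a list, since str.join takes one iterable.
        name = ".".join(["dataset", dataset["name"], "type"])
        global_envs[name] = "DataLoader"

print(global_envs["dataset.dataset_train.type"])
# "DataLoader" on Windows/macOS; the configured type is kept on Linux.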