magicwindyyd / mindspore (forked from MindSpore / mindspore)

Commit 620d951e
Authored Jul 28, 2020 by guansongsong

Merge branch 'ql/fix_cache_core' of https://gitee.com/qianlong21st/mindspore into ql/fix_cache_core

Parents: 16079e63, bf87b1d1
2 changed files with 36 additions and 6 deletions (+36, -6)

mindspore/ccsrc/minddata/dataset/engine/datasetops/dataset_op.cc (+3, -0)
tests/ut/python/dataset/test_cache_map.py (+33, -6)
mindspore/ccsrc/minddata/dataset/engine/datasetops/dataset_op.cc
@@ -387,6 +387,9 @@ uint32_t DatasetOp::GenerateCRC(const std::shared_ptr<DatasetOp> &op) {
   ss_str = std::regex_replace(ss_str, std::regex("Num workers.*\n"), "");
   ss_str = std::regex_replace(ss_str, std::regex("\\[workers.*\\]"), "");
 
+  // Filter out Number of rows when generating the check sum
+  ss_str = std::regex_replace(ss_str, std::regex("Number of rows.*\n"), "");
+
   // Filter out the Operator control flags field when generating the check sum
   ss_str = std::regex_replace(ss_str, std::regex("Operator control flags.*\n"), "");
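The added lines extend the set of fields that DatasetOp::GenerateCRC strips from an operator's printed description before hashing it for the cache. Fields that can legitimately differ between runs of the same pipeline (worker counts, row counts, operator control flags) would otherwise change the checksum and break cache matching; the new test added below suggests a changing row count was the trigger for the reported core dump. The following is a minimal Python sketch of that filtering idea, not MindSpore code; the description strings are invented for illustration.

# Minimal sketch (not MindSpore code) of why run-dependent fields are stripped
# before the cache checksum: two descriptions of the same pipeline that differ
# only in "Number of rows" or worker settings should yield the same CRC.
import re
import zlib

def pipeline_crc(description):
    # Mirrors the filters applied in DatasetOp::GenerateCRC shown above.
    for pattern in (r"Num workers.*\n", r"\[workers.*\]",
                    r"Number of rows.*\n", r"Operator control flags.*\n"):
        description = re.sub(pattern, "", description)
    return zlib.crc32(description.encode())

run1 = "Op: ImageFolder\nNumber of rows: 2\nNum workers: 4\n"
run2 = "Op: ImageFolder\nNumber of rows: 8\nNum workers: 1\n"
assert pipeline_crc(run1) == pipeline_crc(run2)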
tests/ut/python/dataset/test_cache_map.py
@@ -104,11 +104,11 @@ def test_cache_map_basic3():
     decode_op = c_vision.Decode()
     ds1 = ds1.repeat(4)
     ds1 = ds1.map(input_columns=["image"], operations=decode_op, cache=some_cache)
-    print("ds1.dataset_size is ", ds1.get_dataset_size())
+    logger.info("ds1.dataset_size is ", ds1.get_dataset_size())
     num_iter = 0
     for _ in ds1.create_dict_iterator():
-        print("get data from dataset")
+        logger.info("get data from dataset")
         num_iter += 1
     logger.info("Number of data in ds1: {} ".format(num_iter))
@@ -116,6 +116,31 @@ def test_cache_map_basic3():
     logger.info('test_cache_basic3 Ended.\n')
 
 
+def test_cache_map_basic4():
+    """
+    Test different rows result in core dump
+    """
+    logger.info("Test cache basic 4")
+    some_cache = ds.DatasetCache(session_id=1, size=0, spilling=True)
+
+    # This DATA_DIR only has 2 images in it
+    ds1 = ds.ImageFolderDatasetV2(dataset_dir=DATA_DIR, cache=some_cache)
+    decode_op = c_vision.Decode()
+    ds1 = ds1.repeat(4)
+    ds1 = ds1.map(input_columns=["image"], operations=decode_op)
+    logger.info("ds1.dataset_size is ", ds1.get_dataset_size())
+    shape = ds1.output_shapes()
+
+    num_iter = 0
+    for _ in ds1.create_dict_iterator():
+        logger.info("get data from dataset")
+        num_iter += 1
+
+    logger.info("Number of data in ds1: {} ".format(num_iter))
+    assert num_iter == 8
+    logger.info('test_cache_basic3 Ended.\n')
+
+
 def test_cache_map_failure1():
     """
     Test nested cache (failure)
@@ -154,10 +179,12 @@ def test_cache_map_failure1():
 
 if __name__ == '__main__':
     test_cache_map_basic1()
-    print("test_cache_map_basic1 success.")
+    logger.info("test_cache_map_basic1 success.")
     test_cache_map_basic2()
-    print("test_cache_map_basic2 success.")
+    logger.info("test_cache_map_basic2 success.")
     test_cache_map_basic3()
-    print("test_cache_map_basic3 success.")
+    logger.info("test_cache_map_basic3 success.")
+    test_cache_map_basic4()
+    logger.info("test_cache_map_basic3 success.")
     test_cache_map_failure1()
-    print("test_cache_map_failure1 success.")
+    logger.info("test_cache_map_failure1 success.")
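For reference, below is a condensed sketch of the cache-over-repeat scenario that the new test_cache_map_basic4 guards against. It assumes the 2020-era dataset API used in this file (ds.DatasetCache, ds.ImageFolderDatasetV2) and the c_vision import path that tests of that era used; DATA_DIR is a placeholder for the repository's 2-image test folder, and the dataset cache service may need to be running.

# Sketch of the scenario exercised by test_cache_map_basic4, under the
# assumptions stated above; DATA_DIR is a hypothetical path to a folder
# containing exactly 2 images.
import mindspore.dataset as ds
import mindspore.dataset.transforms.vision.c_transforms as c_vision

DATA_DIR = "../data/dataset/testImageNetData/train/"  # placeholder path

# Same cache arguments as the test: session_id=1, size=0, spilling=True.
some_cache = ds.DatasetCache(session_id=1, size=0, spilling=True)

# Cache is attached to the leaf op; repeat(4) multiplies the 2 source rows.
ds1 = ds.ImageFolderDatasetV2(dataset_dir=DATA_DIR, cache=some_cache)
ds1 = ds1.repeat(4)
ds1 = ds1.map(input_columns=["image"], operations=c_vision.Decode())

# 2 source images repeated 4 times should yield 8 rows.
num_rows = sum(1 for _ in ds1.create_dict_iterator())
assert num_rows == 2 * 4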