Commit cdfcf256 (MindSpore / mindinsight)

!280 Add minddata profiling proposer
Merge pull request !280 from yuximiao/master

Authored by mindspore-ci-bot on Jun 18, 2020; committed by Gitee on Jun 18, 2020.
Parents: 02a3e1e4, 98b5030b

Showing 4 changed files with 138 additions and 30 deletions (+138, -30):
mindinsight/backend/profiler/profile_api.py                        +8   -7
mindinsight/profiler/analyser/minddata_analyser.py                 +44  -22
mindinsight/profiler/proposer/allproposers/__init__.py             +2   -1
mindinsight/profiler/proposer/allproposers/minddata_proposer.py    +84  -0
mindinsight/backend/profiler/profile_api.py  (+8, -7)

@@ -21,23 +21,24 @@ import json
 import os
 
 from flask import Blueprint
-from flask import Response
 from flask import jsonify
 from flask import request
+from flask import Response
 from marshmallow import ValidationError
 
 from mindinsight.conf import settings
-from mindinsight.datavisual.utils.tools import get_train_id, get_profiler_dir, \
-    unquote_args, to_int, get_device_id
+from mindinsight.datavisual.utils.tools import get_train_id, get_profiler_dir, to_int, get_device_id
+from mindinsight.datavisual.utils.tools import unquote_args
 from mindinsight.profiler.analyser.analyser_factory import AnalyserFactory
 from mindinsight.profiler.analyser.minddata_analyser import MinddataAnalyser
 from mindinsight.profiler.common.exceptions.exceptions import ProfilerFileNotFoundException
-from mindinsight.profiler.proposer.compose_proposer import ComposeProposal
 from mindinsight.profiler.common.util import analyse_device_list_from_profiler_dir
-from mindinsight.profiler.common.validator.validate import validate_condition, \
-    validate_ui_proc, validate_minddata_pipeline_condition
+from mindinsight.profiler.common.validator.validate import validate_condition, validate_ui_proc
+from mindinsight.profiler.common.validator.validate import validate_minddata_pipeline_condition
 from mindinsight.profiler.common.validator.validate_path import \
-    validate_and_normalize_profiler_path, validate_and_normalize_path
+    validate_and_normalize_path
+from mindinsight.profiler.common.validator.validate_path import validate_and_normalize_profiler_path
+from mindinsight.profiler.proposer.compose_proposer import ComposeProposal
 from mindinsight.utils.exceptions import ParamValueError
 
 BLUEPRINT = Blueprint("profile", __name__, url_prefix=settings.URL_PREFIX)
mindinsight/profiler/analyser/minddata_analyser.py  (+44, -22)

@@ -21,6 +21,9 @@ from mindinsight.profiler.analyser.base_analyser import BaseAnalyser
 class MinddataAnalyser(BaseAnalyser):
     """The Minddata profiling analyser."""
 
+    DEVICE_QUEUE_EMPTY_WARNING_THRESHOLD = 0.7
+    DEVICE_QUEUE_NOT_EMPTY_THRESHOLD = 0.95
+
     def analyse_get_next_info(self, info_type="all"):
         """
         Analyse the get_next operation info.

@@ -59,7 +62,7 @@ class MinddataAnalyser(BaseAnalyser):
                 one_step_cost_time = (float(node_info[2]) - float(node_info[1])) / 1e3
                 time_list.append(one_step_cost_time)
                 total_cost += one_step_cost_time
-        if info_type in ["all", "time"]:
+        if info_type in ["all", "queue"]:
             queue_info["size"] = len(queue_size_list)
             queue_info["info"] = {"queue": queue_size_list}
             queue_info["summary"] = {

@@ -100,12 +103,12 @@ class MinddataAnalyser(BaseAnalyser):
         queue_size_list = []
         empty_step, full_step = 0, 0
 
-        device_queue_file_name = "device_queue_profiling" + self._device_id + ".txt"
+        device_queue_file_name = "device_queue_profiling_" + self._device_id + ".txt"
         device_queue_file_path = MinddataAnalyser.find_target_file(self._profiling_dir, device_queue_file_name)
         feed_file_name = "dataset_iterator_profiling_" + self._device_id + ".txt"
         feed_file_path = MinddataAnalyser.find_target_file(self._profiling_dir, feed_file_name)
         if device_queue_file_path:
-            file_path = device_queue_file_name
+            file_path = device_queue_file_path
         elif not device_queue_file_path and feed_file_path:
             file_path = feed_file_path
         else:

@@ -169,15 +172,12 @@ class MinddataAnalyser(BaseAnalyser):
         Returns:
             dict, the summary of queue.
         """
-        if not get_next_queue_info and not device_queue_info:
-            return {}
-        get_next_queue_empty_count = 0
-        if get_next_queue_info:
+        if get_next_queue_info and device_queue_info:
             result = {"data_process": {"status": "normal"},
                       "device_queue_op": {"status": "normal"},
                       "tdt": {"status": "normal"},
                       "get_next": {"status": "normal"}}
             get_next_queue_empty_count = get_next_queue_info.get(
                 "summary", {}).get("queue_summary", {}).get("empty_queue", 0)
             result["get_next_queue_info"] = {

@@ -186,27 +186,49 @@ class MinddataAnalyser(BaseAnalyser):
                     "total_batch": get_next_queue_info.get("size")
                 }
             }
-        else:
+
+            device_queue_empty_count = device_queue_info.get(
+                "summary", {}).get("queue_summary", {}).get("empty_queue", 0)
+            device_queue_full_count = device_queue_info.get(
+                "summary", {}).get("queue_summary", {}).get("full_queue", 0)
+            result["device_queue_info"] = {
+                "summary": {
+                    "empty_batch_count": device_queue_empty_count,
+                    "full_batch_count": device_queue_full_count,
+                    "total_batch": device_queue_info.get("size")
+                }
+            }
+
+            if get_next_queue_empty_count:
+                if device_queue_empty_count > device_queue_info.get("size", 0) * \
+                        MinddataAnalyser.DEVICE_QUEUE_EMPTY_WARNING_THRESHOLD:
+                    result["data_process"]["status"] = "warning"
+                elif device_queue_empty_count < device_queue_info.get("size", 0) * \
+                        MinddataAnalyser.DEVICE_QUEUE_NOT_EMPTY_THRESHOLD:
+                    result["tdt"]["status"] = "warning"
+                    result["device_queue_op"]["status"] = "warning"
+        elif device_queue_info and not get_next_queue_info:
             result = {"data_process": {"status": "normal"},
                       "fpbp": {"status": "normal"}}
 
-        device_queue_empty_count = device_queue_info.get(
-            "summary", {}).get("queue_summary", {}).get("empty_queue", 0)
-        device_queue_full_count = device_queue_info.get(
-            "summary", {}).get("queue_summary", {}).get("full_queue", 0)
-        result["device_queue_info"] = {
-            "summary": {
-                "empty_batch_count": device_queue_empty_count,
-                "full_batch_count": device_queue_full_count,
-                "total_batch": device_queue_info.get("size")
-            }
-        }
-
-        if not get_next_queue_info or (get_next_queue_info and get_next_queue_empty_count == 0):
-            if device_queue_empty_count > device_queue_info.get("size", 0)*0.7:
-                result["data_process"]["status"] = "warning"
-            elif device_queue_empty_count < device_queue_info.get("size", 0)*0.9:
-                result["fpbp"]["status"] = "warning"
+            device_queue_empty_count = device_queue_info.get(
+                "summary", {}).get("queue_summary", {}).get("empty_queue", 0)
+            device_queue_full_count = device_queue_info.get(
+                "summary", {}).get("queue_summary", {}).get("full_queue", 0)
+            result["device_queue_info"] = {
+                "summary": {
+                    "empty_batch_count": device_queue_empty_count,
+                    "full_batch_count": device_queue_full_count,
+                    "total_batch": device_queue_info.get("size")
+                }
+            }
+        else:
+            result = {}
 
         return result
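To make the new thresholds concrete, here is a minimal sketch (not part of the commit) that feeds MinddataAnalyser.analyse_queue_summary two hand-built summaries shaped like the dicts the code above reads ("summary" -> "queue_summary" -> "empty_queue"/"full_queue", plus "size"). It assumes a MindInsight installation where the analyser module is importable; in real use these dicts come from analyse_get_next_info and analyse_device_queue_info.

# Illustrative only: hand-built inputs mirroring the shape analyse_queue_summary reads.
from mindinsight.profiler.analyser.minddata_analyser import MinddataAnalyser

get_next_queue_info = {
    "size": 100,  # total batches observed on the get_next queue
    "summary": {"queue_summary": {"empty_queue": 10}},
}
device_queue_info = {
    "size": 100,  # total batches observed on the device queue
    "summary": {"queue_summary": {"empty_queue": 80, "full_queue": 5}},
}

summary = MinddataAnalyser.analyse_queue_summary(get_next_queue_info, device_queue_info)

# 80 empty device-queue batches > 100 * DEVICE_QUEUE_EMPTY_WARNING_THRESHOLD (0.7),
# so data processing is flagged as the likely bottleneck.
print(summary["data_process"])                  # {'status': 'warning'}
print(summary["device_queue_info"]["summary"])  # empty/full/total batch counts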
mindinsight/profiler/proposer/allproposers/__init__.py  (+2, -1)

@@ -14,9 +14,10 @@
 # ============================================================================
 """All proposers."""
 from mindinsight.profiler.proposer.allproposers.common_proposer import CommonProposer
+from mindinsight.profiler.proposer.allproposers.minddata_proposer import MinddataProposer
 from mindinsight.profiler.proposer.allproposers.step_trace_proposer import StepTraceProposer
 from mindinsight.profiler.proposer.allproposers.minddata_pipeline_proposer import \
     MinddataPipelineProposer
 
-__all__ = ["CommonProposer", "StepTraceProposer", "MinddataPipelineProposer"]
+__all__ = ["CommonProposer", "StepTraceProposer", "MinddataProposer", "MinddataPipelineProposer"]
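As a quick check of the export change (a sketch, assuming the package is installed), the new proposer can now be pulled straight from the allproposers package like the existing ones:

from mindinsight.profiler.proposer import allproposers

# MinddataProposer is imported in __init__.py and listed in __all__.
assert "MinddataProposer" in allproposers.__all__
print(allproposers.MinddataProposer.__name__)  # MinddataProposer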
mindinsight/profiler/proposer/allproposers/minddata_proposer.py  (new file, mode 100644, +84, -0)

# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The minddata proposer."""
from collections import OrderedDict

from mindinsight.profiler.analyser.analyser_factory import AnalyserFactory
from mindinsight.profiler.analyser.minddata_analyser import MinddataAnalyser
from mindinsight.profiler.proposer.allproposers.base_proposer import Proposer


class MinddataProposer(Proposer):
    """The Minddata proposer."""

    def __init__(self, profiling_dir, device_id):
        super().__init__(profiling_dir, device_id)
        self.__proposer_type = "minddata"
        self.__proposal_dict = OrderedDict()

    def analyze(self, options=None):
        """
        Get the proposal from proposer.

        Args:
            options (dict): The options for proposer analysis.

        Returns:
            dict, the proposal from proposer instance, the dictionary key is a language internationalization
                label, and the value is used to format the value in the language internationalization string.

        Examples:
            >>> proposer_type = 'minddata'
            >>> proposer = ProposerFactory.instance().get_proposer(proposer_type, self.profiling_dir, self.device_id)
            >>> result = proposer.analyze(options)
        """
        self.minddata_outer_bounds_analyze()
        return self.__proposal_dict

    def minddata_outer_bounds_analyze(self):
        """Get the proposals of minddata outer bounds."""
        minddata_dict = OrderedDict()
        minddata_analyser = AnalyserFactory.instance().get_analyser(
            'minddata', self.profiling_path, self.device_id)
        get_next_queue_info, _ = minddata_analyser.analyse_get_next_info(info_type="queue")
        device_queue_info, _ = minddata_analyser.analyse_device_queue_info(info_type="queue")
        result = MinddataAnalyser.analyse_queue_summary(get_next_queue_info, device_queue_info)

        if "get_next_queue_info" in result:
            get_next_queue_info_summary = result.get("get_next_queue_info").get("summary", {})
            empty_batch = get_next_queue_info_summary.get("empty_batch_count")
            total_batch = get_next_queue_info_summary.get("total_batch")
            minddata_dict["minddata_get_next_queue"] = [empty_batch, total_batch]
            self.__proposal_dict.update(minddata_dict)

        if "device_queue_info" in result:
            get_next_queue_info_summary = result.get("device_queue_info").get("summary", {})
            full_batch = get_next_queue_info_summary.get("full_batch_count", 0)
            empty_batch = get_next_queue_info_summary.get("empty_batch_count", 0)
            total_batch = get_next_queue_info_summary.get("total_batch", 0)
            minddata_dict["minddata_device_queue"] = [empty_batch, total_batch, full_batch, total_batch]
            self.__proposal_dict.update(minddata_dict)

        warning_op = list()
        for key, value in result.items():
            if isinstance(value, dict):
                status = value.get("status")
                if status == "warning":
                    warning_op.append(key)

        if warning_op:
            minddata_dict["minddata_warning_op"] = [",".join(warning_op)]
            self.__proposal_dict.update(minddata_dict)
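For orientation, a small usage sketch of the new proposer follows. The directory and device id are placeholders; it assumes profiler output files (e.g. device_queue_profiling_<device_id>.txt) already exist under the directory, and that the base Proposer exposes the constructor arguments as profiling_path and device_id, as the code above relies on.

# Usage sketch; "/path/to/profiler_output" and "0" are placeholders.
from mindinsight.profiler.proposer.allproposers import MinddataProposer

proposer = MinddataProposer("/path/to/profiler_output", "0")
proposal = proposer.analyze()

# Expected shape, based on minddata_outer_bounds_analyze above:
#   {"minddata_get_next_queue": [empty_batch, total_batch],
#    "minddata_device_queue": [empty_batch, total_batch, full_batch, total_batch],
#    "minddata_warning_op": ["data_process"]}   # present only when something is flagged
print(proposal)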