magicwindyyd / mindspore (forked from MindSpore / mindspore)
Commit 3822b483
Authored on April 20, 2020 by mindspore-ci-bot; committed via Gitee on April 20, 2020.

!340 Add a HistogramSummary ops to record tensor value
Merge pull request !340 from ougongchang/master

Parents: 15c8ef66, 0ed6d917

Showing 22 changed files with 181 additions and 98 deletions (+181 / -98):
- mindspore/ccsrc/ir/anf.cc (+7 / -4)
- mindspore/ccsrc/operator/ops.cc (+1 / -0)
- mindspore/ccsrc/operator/ops.h (+1 / -0)
- mindspore/ccsrc/operator/prim_debug.cc (+1 / -1)
- mindspore/ccsrc/optimizer/irpass/branch_culling.cc (+8 / -19)
- mindspore/ccsrc/parallel/node_check.cc (+1 / -0)
- mindspore/ccsrc/parallel/ops_info/ops_utils.h (+1 / -0)
- mindspore/ccsrc/pipeline/static_analysis/prim.cc (+1 / -0)
- mindspore/ccsrc/session/anf_runtime_algorithm.cc (+2 / -1)
- mindspore/ccsrc/session/session_basic.cc (+2 / -2)
- mindspore/ccsrc/transform/convert.cc (+1 / -0)
- mindspore/ccsrc/utils/callbacks_ge.cc (+1 / -1)
- mindspore/ops/_grad/grad_debug_ops.py (+9 / -0)
- mindspore/ops/operations/__init__.py (+2 / -1)
- mindspore/ops/operations/debug_ops.py (+27 / -0)
- tests/st/summary/test_gpu_summary.py (+8 / -49)
- tests/ut/cpp/transform/convert_test.cc (+6 / -0)
- tests/ut/cpp/transform/transform_base_test.cc (+2 / -1)
- tests/ut/python/ops/test_nn_ops.py (+17 / -0)
- tests/ut/python/ops/test_ops.py (+19 / -0)
- tests/ut/python/train/summary/test_summary.py (+2 / -0)
- tests/ut/python/train/summary/test_summary_ops_params_valid_check.py (+62 / -19)
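
Before the per-file diffs, here is a minimal sketch of how the new operator is used from Python. It mirrors the HistogramSummaryNet test case added in tests/ut/python/ops/test_ops.py below; the tag string and input values are arbitrary examples.

import numpy as np
import mindspore.nn as nn
from mindspore.common.tensor import Tensor
from mindspore.ops import operations as P


class HistogramSummaryNet(nn.Cell):
    """Adds two tensors and records a histogram of the result."""

    def __init__(self):
        super(HistogramSummaryNet, self).__init__()
        self.summary = P.HistogramSummary()
        self.add = P.TensorAdd()

    def construct(self, x, y):
        out = self.add(x, y)
        # First argument is the tag (a string), second is the tensor to record.
        self.summary("out", out)
        return out


net = HistogramSummaryNet()
net(Tensor(np.array([1.1]).astype(np.float32)), Tensor(np.array([1.2]).astype(np.float32)))
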
mindspore/ccsrc/ir/anf.cc

@@ -103,7 +103,8 @@ std::string CNode::fullname_with_scope() {
     return fullname_with_scope_;
   }
-  if (IsApply(prim::kPrimScalarSummary) || IsApply(prim::kPrimTensorSummary) || IsApply(prim::kPrimImageSummary)) {
+  if (IsApply(prim::kPrimScalarSummary) || IsApply(prim::kPrimTensorSummary) || IsApply(prim::kPrimImageSummary) ||
+      IsApply(prim::kPrimHistogramSummary)) {
     std::string tag = GetValue<std::string>(GetValueNode(input(1)));
     if (tag == "") {
       MS_LOG(EXCEPTION) << "The tag name is null, should be valid string";
@@ -111,10 +112,12 @@ std::string CNode::fullname_with_scope() {
     std::string name;
     if (IsApply(prim::kPrimScalarSummary)) {
       name = tag + "[:Scalar]";
-    } else if (IsApply(prim::kPrimTensorSummary)) {
-      name = tag + "[:Tensor]";
-    } else {
+    } else if (IsApply(prim::kPrimImageSummary)) {
       name = tag + "[:Image]";
+    } else if (IsApply(prim::kPrimHistogramSummary)) {
+      name = tag + "[:Histogram]";
+    } else {
+      name = tag + "[:Tensor]";
     }
     fullname_with_scope_ = name;
   } else {

mindspore/ccsrc/operator/ops.cc

@@ -236,6 +236,7 @@ const PrimitivePtr kPrimAllReduce = std::make_shared<Primitive>("AllReduce");
 const PrimitivePtr kPrimScalarSummary = std::make_shared<Primitive>("ScalarSummary");
 const PrimitivePtr kPrimImageSummary = std::make_shared<Primitive>("ImageSummary");
 const PrimitivePtr kPrimTensorSummary = std::make_shared<Primitive>("TensorSummary");
+const PrimitivePtr kPrimHistogramSummary = std::make_shared<Primitive>("HistogramSummary");
 
 ValuePtr GetPythonOps(const std::string &op_name, const std::string &module_name) {
   py::object obj = parse::python_adapter::GetPyFn(module_name, op_name);

mindspore/ccsrc/operator/ops.h

@@ -225,6 +225,7 @@ extern const PrimitivePtr kPrimStateSetItem;
 extern const PrimitivePtr kPrimScalarSummary;
 extern const PrimitivePtr kPrimImageSummary;
 extern const PrimitivePtr kPrimTensorSummary;
+extern const PrimitivePtr kPrimHistogramSummary;
 extern const PrimitivePtr kPrimBroadcastGradientArgs;
 extern const PrimitivePtr kPrimControlDepend;
 extern const PrimitivePtr kPrimIs_;

mindspore/ccsrc/operator/prim_debug.cc

@@ -69,7 +69,7 @@ AbstractBasePtr InferImplTensorSummary(const AnalysisEnginePtr &, const Primitiv
   int tensor_rank = SizeToInt(tensor_value->shape()->shape().size());
   if (tensor_rank == 0) {
-    MS_LOG(EXCEPTION) << "Tensor/Image Summary evaluator second arg should be an tensor, but got a scalar";
+    MS_LOG(EXCEPTION) << op_name << " summary evaluator second arg should be an tensor, but got a scalar, rank is 0";
   }
 
   // Reomve the force check to support batch set summary use 'for' loop

mindspore/ccsrc/optimizer/irpass/branch_culling.cc

@@ -51,25 +51,14 @@ bool InConvertWhiteList(const AnfNodePtr &node, size_t index) {
   // node because it is attribute or ge specific reason.
   // Example : when convert CNode(kPrimReduceSum, x, axis), node of index 2 in CNode->inputs is axis which should not be
   // converted to switch guarded.
-  std::vector<std::pair<PrimitivePtr, std::vector<size_t>>> white_list({{prim::kPrimApplyMomentum, {1, 2}},
-                                                                        {prim::kPrimMomentum, {2, 3}},
-                                                                        {prim::kPrimStateSetItem, {1}},
-                                                                        {prim::kPrimEnvGetItem, {1}},
-                                                                        {prim::kPrimEnvSetItem, {1}},
-                                                                        {prim::kPrimReduceSum, {2}},
-                                                                        {prim::kPrimReduceMean, {2}},
-                                                                        {prim::kPrimReduceAll, {2}},
-                                                                        {prim::kPrimCast, {2}},
-                                                                        {prim::kPrimTranspose, {2}},
-                                                                        {prim::kPrimOneHot, {2}},
-                                                                        {prim::kPrimGatherV2, {3}},
-                                                                        {prim::kPrimReshape, {2}},
-                                                                        {prim::kPrimAssign, {1}},
-                                                                        {prim::kPrimAssignAdd, {1}},
-                                                                        {prim::kPrimAssignSub, {1}},
-                                                                        {prim::kPrimTensorSummary, {1}},
-                                                                        {prim::kPrimImageSummary, {1}},
-                                                                        {prim::kPrimScalarSummary, {1}}});
+  std::vector<std::pair<PrimitivePtr, std::vector<size_t>>> white_list(
+    {{prim::kPrimApplyMomentum, {1, 2}}, {prim::kPrimMomentum, {2, 3}}, {prim::kPrimStateSetItem, {1}},
+     {prim::kPrimEnvGetItem, {1}}, {prim::kPrimEnvSetItem, {1}}, {prim::kPrimReduceSum, {2}},
+     {prim::kPrimReduceMean, {2}}, {prim::kPrimReduceAll, {2}}, {prim::kPrimCast, {2}}, {prim::kPrimTranspose, {2}},
+     {prim::kPrimOneHot, {2}}, {prim::kPrimGatherV2, {3}}, {prim::kPrimReshape, {2}}, {prim::kPrimAssign, {1}},
+     {prim::kPrimAssignAdd, {1}}, {prim::kPrimAssignSub, {1}}, {prim::kPrimTensorSummary, {1}},
+     {prim::kPrimImageSummary, {1}}, {prim::kPrimScalarSummary, {1}}, {prim::kPrimHistogramSummary, {1}}});
   for (auto &item : white_list) {
     auto matched = std::any_of(item.second.begin(), item.second.end(), [&item, &node, &index](size_t idx) {
       return IsPrimitiveCNode(node, item.first) && idx == index;

mindspore/ccsrc/parallel/node_check.cc

@@ -66,6 +66,7 @@ const std::set<std::string> BLACK_LIST = {TUPLE_GETITEM,
                                           SCALARSUMMARY,
                                           IMAGESUMMARY,
                                           TENSORSUMMARY,
+                                          HISTOGRAMSUMMARY,
                                           COL2IMV1,
                                           RESOLVE,
                                           BROADCASTGRADIENTARGS,

mindspore/ccsrc/parallel/ops_info/ops_utils.h

@@ -246,6 +246,7 @@ constexpr char STATESETITEM[] = "state_setitem";
 constexpr char SCALARSUMMARY[] = "ScalarSummary";
 constexpr char IMAGESUMMARY[] = "ImageSummary";
 constexpr char TENSORSUMMARY[] = "TensorSummary";
+constexpr char HISTOGRAMSUMMARY[] = "HistogramSummary";
 constexpr char BROADCASTGRADIENTARGS[] = "BroadcastGradientArgs";
 constexpr char INVERTPERMUTATION[] = "InvertPermutation";
 constexpr char CONTROLDEPEND[] = "ControlDepend";

mindspore/ccsrc/pipeline/static_analysis/prim.cc

@@ -131,6 +131,7 @@ PrimitiveEvalImplMap &GetPrimitiveToEvalImplMap() {
     {prim::kPrimScalarSummary, {InferImplScalarSummary, true}},
     {prim::kPrimImageSummary, {InferImplTensorSummary, true}},
     {prim::kPrimTensorSummary, {InferImplTensorSummary, true}},
+    {prim::kPrimHistogramSummary, {InferImplTensorSummary, true}},
   };
   return prim_eval_implement_map;
 }

mindspore/ccsrc/session/anf_runtime_algorithm.cc

@@ -714,7 +714,8 @@ bool AnfRuntimeAlgorithm::IsRealKernel(const AnfNodePtr &node) {
   }
   auto input = cnode->inputs()[0];
   bool is_virtual_node = IsPrimitive(input, prim::kPrimImageSummary) || IsPrimitive(input, prim::kPrimScalarSummary) ||
-                         IsPrimitive(input, prim::kPrimTensorSummary) || IsPrimitive(input, prim::kPrimMakeTuple) ||
+                         IsPrimitive(input, prim::kPrimTensorSummary) || IsPrimitive(input, prim::kPrimHistogramSummary) ||
+                         IsPrimitive(input, prim::kPrimMakeTuple) ||
                          IsPrimitive(input, prim::kPrimStateSetItem) || IsPrimitive(input, prim::kPrimDepend) ||
                          IsPrimitive(input, prim::kPrimTupleGetItem) || IsPrimitive(input, prim::kPrimControlDepend) ||
                          IsPrimitive(input, prim::kPrimReturn);

mindspore/ccsrc/session/session_basic.cc

@@ -45,7 +45,7 @@ void GetSummaryNodes(const KernelGraph *graph, std::unordered_map<std::string, s
   for (auto &n : apply_list) {
     MS_EXCEPTION_IF_NULL(n);
     if (IsPrimitiveCNode(n, prim::kPrimScalarSummary) || IsPrimitiveCNode(n, prim::kPrimTensorSummary) ||
-        IsPrimitiveCNode(n, prim::kPrimImageSummary)) {
+        IsPrimitiveCNode(n, prim::kPrimImageSummary) || IsPrimitiveCNode(n, prim::kPrimHistogramSummary)) {
       int index = 0;
       auto cnode = n->cast<CNodePtr>();
       MS_EXCEPTION_IF_NULL(cnode);
@@ -83,7 +83,7 @@ bool ExistSummaryNode(const KernelGraph *graph) {
   auto all_nodes = DeepLinkedGraphSearch(ret);
   for (auto &n : all_nodes) {
     if (IsPrimitiveCNode(n, prim::kPrimScalarSummary) || IsPrimitiveCNode(n, prim::kPrimTensorSummary) ||
-        IsPrimitiveCNode(n, prim::kPrimImageSummary)) {
+        IsPrimitiveCNode(n, prim::kPrimImageSummary) || IsPrimitiveCNode(n, prim::kPrimHistogramSummary)) {
       return true;
     }
   }

mindspore/ccsrc/transform/convert.cc

@@ -353,6 +353,7 @@ std::unordered_map<std::string, OpAdapterDescPtr> &DfGraphConvertor::get_adpt_ma
     {prim::kPrimScalarSummary->name(), ADPT_DESC(Summary)},
     {prim::kPrimImageSummary->name(), ADPT_DESC(Summary)},
     {prim::kPrimTensorSummary->name(), ADPT_DESC(Summary)},
+    {prim::kPrimHistogramSummary->name(), ADPT_DESC(Summary)},
     {prim::kPrimTensorAdd->name(),
      std::make_shared<OpAdapterDesc>(std::make_shared<OpAdapter<Add>>(ExtraAttr({{"mode", MakeValue(1)}})),
                                      std::make_shared<OpAdapter<Add>>(ExtraAttr({{"mode", MakeValue(1)}})))},

mindspore/ccsrc/utils/callbacks_ge.cc

@@ -131,7 +131,7 @@ static TensorPtr GetMeTensorForSummary(const std::string& name, const std::share
     auto shape = std::vector<int>({ONE_SHAPE});
     return TransformUtil::ConvertGeTensor(ge_tensor_ptr, shape);
   }
-  if (tname == "[:Tensor]") {
+  if (tname == "[:Tensor]" || tname == "[:Histogram]") {
     MS_LOG(DEBUG) << "The summary(" << name << ") is Tensor";
     // process the tensor summary
     // Now we can't get the real shape, so we keep same shape with GE

mindspore/ops/_grad/grad_debug_ops.py

@@ -49,6 +49,15 @@ def get_bprop_image_summary(self):
     return bprop
 
 
+@bprop_getters.register(P.HistogramSummary)
+def get_bprop_histogram_summary(self):
+    """Generate bprop for HistogramSummary"""
+
+    def bprop(tag, x, out, dout):
+        return tag, zeros_like(x)
+
+    return bprop
+
 @bprop_getters.register(P.InsertGradientOf)
 def get_bprop_insert_gradient_of(self):
     """Generate bprop for InsertGradientOf"""

mindspore/ops/operations/__init__.py

@@ -34,7 +34,7 @@ from .comm_ops import (AllGather, AllReduce, _AlltoAll, ReduceScatter, Broadcast
                        _MirrorOperator, ReduceOp, _VirtualDataset,
                        _VirtualDiv, _GetTensorSlice)
 from .debug_ops import (ImageSummary, InsertGradientOf, ScalarSummary,
-                        TensorSummary, Print)
+                        TensorSummary, HistogramSummary, Print)
 from .control_ops import ControlDepend, GeSwitch, Merge
 from .inner_ops import ScalarCast
 from .math_ops import (Abs, ACos, AddN, AssignAdd, AssignSub, Atan2, BatchMatMul,
@@ -148,6 +148,7 @@ __all__ = [
     'ScalarSummary',
     'ImageSummary',
     'TensorSummary',
+    'HistogramSummary',
     "Print",
     'InsertGradientOf',
     'InvertPermutation',

mindspore/ops/operations/debug_ops.py

@@ -98,6 +98,33 @@ class TensorSummary(Primitive):
         """init"""
 
 
+class HistogramSummary(Primitive):
+    """
+    Output tensor to protocol buffer through histogram summary operator.
+
+    Inputs:
+        - **name** (str) - The name of the input variable.
+        - **value** (Tensor) - The value of tensor, and the rank of tensor should be greater than 0.
+
+    Examples:
+        >>> class SummaryDemo(nn.Cell):
+        >>>     def __init__(self,):
+        >>>         super(SummaryDemo, self).__init__()
+        >>>         self.summary = P.HistogramSummary()
+        >>>         self.add = P.TensorAdd()
+        >>>
+        >>>     def construct(self, x, y):
+        >>>         x = self.add(x, y)
+        >>>         name = "x"
+        >>>         self.summary(name, x)
+        >>>         return x
+    """
+
+    @prim_attr_register
+    def __init__(self):
+        """init"""
+
+
 class InsertGradientOf(PrimitiveWithInfer):
     """
     Attach callback to graph node that will be invoked on the node's gradient.

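For a fuller picture of how the operator feeds the summary pipeline exercised by the tests below, a short end-to-end sketch follows. It is assembled from the SummaryNet and SummaryRecord usage in tests/st/summary/test_gpu_summary.py in this commit; the summary directory, tag, input values, and step count are arbitrary examples, and the device target is left at its default rather than "GPU".

import numpy as np
import mindspore.nn as nn
from mindspore import context
from mindspore.common.tensor import Tensor
from mindspore.ops import operations as P
from mindspore.train.summary.summary_record import SummaryRecord

context.set_context(mode=context.GRAPH_MODE)


class HistogramDemo(nn.Cell):
    """Adds two tensors and records a histogram of the sum."""

    def __init__(self):
        super(HistogramDemo, self).__init__()
        self.histogram_summary = P.HistogramSummary()
        self.add = P.TensorAdd()

    def construct(self, x, y):
        z = self.add(x, y)
        self.histogram_summary("z_histogram", z)
        return z


net = HistogramDemo()
test_writer = SummaryRecord("./histogram_summary_demo")  # output directory: arbitrary example
x = Tensor(np.array([1.1, 2.2]).astype(np.float32))
y = Tensor(np.array([3.3, 4.4]).astype(np.float32))
for step in range(1, 4):
    net(x, y)
    test_writer.record(step)  # write the summary data collected for this step
test_writer.close()
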
tests/st/summary/test_gpu_summary.py

@@ -24,17 +24,6 @@ from mindspore.common.tensor import Tensor
 from mindspore.ops import operations as P
 from mindspore.train.summary.summary_record import SummaryRecord
 
-'''
-This testcase is used for save summary data only. You need install MindData first and uncomment the commented
-packages to analyse summary data.
-Using "minddata start --datalog='./test_me_summary_event_file/' --host=0.0.0.0" to make data visible.
-'''
-# from minddata.datavisual.data_transform.data_manager import DataManager
-# from minddata.datavisual.visual.train_visual.train_task_manager import TrainTaskManager
-# from minddata.datavisual.visual.train_visual.scalars_processor import ScalarsProcessor
-# from minddata.datavisual.common.enums import PluginNameEnum
-# from minddata.datavisual.common.enums import DataManagerStatus
-
 context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
@@ -43,6 +32,7 @@ CUR_DIR = os.getcwd()
 SUMMARY_DIR_ME = CUR_DIR + "/test_me_summary_event_file/"
 SUMMARY_DIR_ME_TEMP = CUR_DIR + "/test_me_temp_summary_event_file/"
 
+
 def clean_environment_file(srcDir):
     if os.path.exists(srcDir):
         ls = os.listdir(srcDir)
@@ -50,6 +40,8 @@ def clean_environment_file(srcDir):
             filePath = os.path.join(srcDir, line)
             os.remove(filePath)
         os.removedirs(srcDir)
+
+
 def save_summary_events_file(srcDir, desDir):
     if not os.path.exists(desDir):
         print("-- create desDir")
@@ -64,12 +56,14 @@ def save_summary_events_file(srcDir, desDir):
         os.remove(filePath)
     os.removedirs(srcDir)
 
+
 class SummaryNet(nn.Cell):
     def __init__(self, tag_tuple=None, scalar=1):
         super(SummaryNet, self).__init__()
         self.summary_s = P.ScalarSummary()
         self.summary_i = P.ImageSummary()
         self.summary_t = P.TensorSummary()
+        self.histogram_summary = P.HistogramSummary()
         self.add = P.TensorAdd()
         self.tag_tuple = tag_tuple
         self.scalar = scalar
@@ -79,8 +73,10 @@ class SummaryNet(nn.Cell):
         self.summary_s("x1", x)
         z = self.add(x, y)
         self.summary_t("z1", z)
+        self.histogram_summary("histogram", z)
         return z
 
+
 def train_summary_record_scalar_for_1(test_writer, steps, fwd_x, fwd_y):
     net = SummaryNet()
     out_me_dict = {}
@@ -93,6 +89,7 @@ def train_summary_record_scalar_for_1(test_writer, steps, fwd_x, fwd_y):
         out_me_dict[i] = out_put.asnumpy()
     return out_me_dict
 
+
 def me_scalar_summary(steps, tag=None, value=None):
     test_writer = SummaryRecord(SUMMARY_DIR_ME_TEMP)
@@ -104,44 +101,6 @@ def me_scalar_summary(steps, tag=None, value=None):
     test_writer.close()
     return out_me_dict
 
-def print_scalar_data():
-    print("============start print_scalar_data\n")
-    data_manager = DataManager()
-    data_manager.start_load_data(path=SUMMARY_DIR_ME)
-    while data_manager.get_status() != DataManagerStatus.DONE:
-        time.sleep(0.1)
-    task_manager = TrainTaskManager(data_manager)
-    train_jobs = task_manager.get_all_train_tasks(PluginNameEnum.scalar)
-    print(train_jobs)
-    """
-    train_jobs
-    ['train_jobs': {
-        'id': '12-123',
-        'name': 'train_job_name',
-        'tags': ['x1', 'y1']
-    }]
-    """
-    scalar_processor = ScalarsProcessor(data_manager)
-    metadata = scalar_processor.get_metadata_list(train_job_ids=train_jobs['train_jobs'][0]['id'],
-                                                  tag=train_jobs['train_jobs'][0]['tags'][0])
-    print(metadata)
-    '''
-    metadata
-    {
-        'scalars' : [
-            {
-                'train_job_id' : '12-12',
-                'metadatas' : [
-                    {
-                        'wall_time' : 0.1,
-                        'step' : 1,
-                        'value' : 0.1
-                    }
-                ]
-            }
-        ]
-    }
-    '''
-    print("============end print_scalar_data\n")
-
 @pytest.mark.level0
 @pytest.mark.platform_x86_gpu_training

tests/ut/cpp/transform/convert_test.cc

@@ -621,6 +621,12 @@ TEST_F(TestConvert, TestTensorSummaryOps) {
   ASSERT_TRUE(ret);
 }
 
+TEST_F(TestConvert, TestHistogramSummaryOps) {
+  auto prim = prim::kPrimHistogramSummary;
+
+  bool ret = MakeDfGraph(prim, 2);
+  ASSERT_TRUE(ret);
+}
+
 TEST_F(TestConvert, TestGreaterOps) {
   auto prim = std::make_shared<Primitive>("Greater");
 
   bool ret = MakeDfGraph(prim, 2);

tests/ut/cpp/transform/transform_base_test.cc

@@ -73,7 +73,8 @@ FuncGraphPtr MakeFuncGraph(const PrimitivePtr prim, unsigned int nparam) {
   std::vector<AnfNodePtr> inputs;
   inputs.push_back(NewValueNode(prim));
   for (unsigned int i = 0; i < nparam; i++) {
-    if ((prim->name() == "ScalarSummary" || prim->name() == "TensorSummary" || prim->name() == "ImageSummary") &&
+    if ((prim->name() == "ScalarSummary" || prim->name() == "TensorSummary" || prim->name() == "ImageSummary" ||
+         prim->name() == "HistogramSummary") &&
         i == 0) {
       auto input = NewValueNode("testSummary");
       inputs.push_back(input);

tests/ut/python/ops/test_nn_ops.py

@@ -198,6 +198,19 @@ class ScalarSummaryNet(nn.Cell):
         return out
 
 
+class HistogramSummaryNet(nn.Cell):
+    """HistogramSummaryNet definition"""
+
+    def __init__(self):
+        super(HistogramSummaryNet, self).__init__()
+        self.summary = P.HistogramSummary()
+
+    def construct(self, tensor):
+        string_in = "wight_value"
+        out = self.summary(string_in, tensor)
+        return out
+
+
 class FusedBatchNormGrad(nn.Cell):
     """ FusedBatchNormGrad definition """
@@ -443,6 +456,10 @@ test_cases = [
         'block': ScalarSummaryNet(),
         'desc_inputs': [2.2],
     }),
+    ('HistogramSummary', {
+        'block': HistogramSummaryNet(),
+        'desc_inputs': [[1, 2, 3]],
+    }),
     ('FusedBatchNormGrad', {
         'block': FusedBatchNormGrad(nn.BatchNorm2d(num_features=512, eps=1e-5, momentum=0.1)),
         'desc_inputs': [[64, 512, 7, 7], [64, 512, 7, 7]],

tests/ut/python/ops/test_ops.py

@@ -160,6 +160,19 @@ class SummaryNet(nn.Cell):
         return self.add(x, y)
 
 
+class HistogramSummaryNet(nn.Cell):
+    def __init__(self,):
+        super(HistogramSummaryNet, self).__init__()
+        self.summary = P.HistogramSummary()
+        self.add = P.TensorAdd()
+
+    def construct(self, x, y):
+        out = self.add(x, y)
+        string_in = "out"
+        self.summary(string_in, out)
+        return out
+
+
 test_case_math_ops = [
     ('Neg', {
         'block': P.Neg(),
@@ -1104,6 +1117,12 @@ test_case_other_ops = [
         'desc_inputs': [Tensor(np.array([1.1]).astype(np.float32)),
                         Tensor(np.array([1.2]).astype(np.float32))],
         'skip': ['backward']}),
+    ('HistogramSummary', {
+        'block': HistogramSummaryNet(),
+        'desc_inputs': [Tensor(np.array([1.1]).astype(np.float32)),
+                        Tensor(np.array([1.2]).astype(np.float32))],
+        'skip': ['backward']}),
 ]
 
 test_case_lists = [test_case_nn_ops, test_case_math_ops, test_case_array_ops, test_case_other_ops]

tests/ut/python/train/summary/test_summary.py

@@ -132,6 +132,7 @@ class SummaryDemo(nn.Cell):
     def __init__(self,):
         super(SummaryDemo, self).__init__()
         self.s = P.ScalarSummary()
+        self.histogram_summary = P.HistogramSummary()
         self.add = P.TensorAdd()
 
     def construct(self, x, y):
@@ -139,6 +140,7 @@ class SummaryDemo(nn.Cell):
         z = self.add(x, y)
         self.s("z1", z)
         self.s("y1", y)
+        self.histogram_summary("histogram", z)
         return z

tests/ut/python/train/summary/test_summary_ops_params_valid_check.py

@@ -40,6 +40,7 @@ class SummaryDemoTag(nn.Cell):
     def __init__(self, tag1, tag2, tag3):
         super(SummaryDemoTag, self).__init__()
         self.s = P.ScalarSummary()
+        self.histogram_summary = P.HistogramSummary()
         self.add = P.TensorAdd()
         self.tag1 = tag1
         self.tag2 = tag2
@@ -50,6 +51,7 @@ class SummaryDemoTag(nn.Cell):
         z = self.add(x, y)
         self.s(self.tag2, z)
         self.s(self.tag3, y)
+        self.histogram_summary(self.tag1, x)
         return z
@@ -58,6 +60,7 @@ class SummaryDemoTagForSet(nn.Cell):
     def __init__(self, tag_tuple):
         super(SummaryDemoTagForSet, self).__init__()
         self.s = P.ScalarSummary()
+        self.histogram_summary = P.HistogramSummary()
        self.add = P.TensorAdd()
         self.tag_tuple = tag_tuple
@@ -65,6 +68,7 @@ class SummaryDemoTagForSet(nn.Cell):
         z = self.add(x, y)
         for tag in self.tag_tuple:
             self.s(tag, x)
+            self.histogram_summary(tag, x)
         return z
@@ -98,6 +102,19 @@ class SummaryDemoValueForSet(nn.Cell):
             self.s(tag, self.v)
         return z
 
+
+class HistogramSummaryNet(nn.Cell):
+    "HistogramSummaryNet definition"
+
+    def __init__(self, value):
+        self.histogram_summary = P.HistogramSummary()
+        self.add = P.TensorAdd()
+        self.value = value
+
+    def construct(self, tensors1, tensor2):
+        self.histogram_summary("value", self.value)
+        return self.add(tensors1, tensor2)
+
+
 def run_case(net):
     """ run_case """
     # step 0: create the thread
@@ -121,8 +138,8 @@ def run_case(net):
 # Test 1: use the repeat tag
-def test_scalar_summary_use_repeat_tag():
-    log.debug("begin test_scalar_summary_use_repeat_tag")
+def test_summary_use_repeat_tag():
+    log.debug("begin test_summary_use_repeat_tag")
     net = SummaryDemoTag("x", "x", "x")
     try:
         run_case(net)
@@ -130,12 +147,12 @@ def test_scalar_summary_use_repeat_tag():
         assert False
     else:
         assert True
-    log.debug("finished test_scalar_summary_use_repeat_tag")
+    log.debug("finished test_summary_use_repeat_tag")
 
 
 # Test 2: repeat tag use for set summary
-def test_scalar_summary_use_repeat_tag_for_set():
-    log.debug("begin test_scalar_summary_use_repeat_tag_for_set")
+def test_summary_use_repeat_tag_for_set():
+    log.debug("begin test_summary_use_repeat_tag_for_set")
     net = SummaryDemoTagForSet(("x", "x", "x"))
     try:
         run_case(net)
@@ -143,12 +160,12 @@ def test_scalar_summary_use_repeat_tag_for_set():
         assert False
     else:
         assert True
-    log.debug("finished test_scalar_summary_use_repeat_tag_for_set")
+    log.debug("finished test_summary_use_repeat_tag_for_set")
 
 
 # Test3: test with invalid tag(None, bool, "", int)
-def test_scalar_summary_use_invalid_tag_None():
-    log.debug("begin test_scalar_summary_use_invalid_tag_None")
+def test_summary_use_invalid_tag_None():
+    log.debug("begin test_summary_use_invalid_tag_None")
     net = SummaryDemoTag(None, None, None)
     try:
         run_case(net)
@@ -156,31 +173,31 @@ def test_scalar_summary_use_invalid_tag_None():
         assert True
     else:
         assert False
-    log.debug("finished test_scalar_summary_use_invalid_tag_None")
+    log.debug("finished test_summary_use_invalid_tag_None")
 
 
 # Test4: test with invalid tag(None, bool, "", int)
-def test_scalar_summary_use_invalid_tag_Bool():
-    log.debug("begin test_scalar_summary_use_invalid_tag_Bool")
+def test_summary_use_invalid_tag_Bool():
+    log.debug("begin test_summary_use_invalid_tag_Bool")
     net = SummaryDemoTag(True, True, True)
     run_case(net)
-    log.debug("finished test_scalar_summary_use_invalid_tag_Bool")
+    log.debug("finished test_summary_use_invalid_tag_Bool")
 
 
 # Test5: test with invalid tag(None, bool, "", int)
-def test_scalar_summary_use_invalid_tag_null():
-    log.debug("begin test_scalar_summary_use_invalid_tag_null")
+def test_summary_use_invalid_tag_null():
+    log.debug("begin test_summary_use_invalid_tag_null")
     net = SummaryDemoTag("", "", "")
     run_case(net)
-    log.debug("finished test_scalar_summary_use_invalid_tag_null")
+    log.debug("finished test_summary_use_invalid_tag_null")
 
 
 # Test6: test with invalid tag(None, bool, "", int)
-def test_scalar_summary_use_invalid_tag_Int():
-    log.debug("begin test_scalar_summary_use_invalid_tag_Int")
+def test_summary_use_invalid_tag_Int():
+    log.debug("begin test_summary_use_invalid_tag_Int")
     net = SummaryDemoTag(1, 2, 3)
     run_case(net)
-    log.debug("finished test_scalar_summary_use_invalid_tag_Int")
+    log.debug("finished test_summary_use_invalid_tag_Int")
 
 
 # Test7: test with invalid value(None, "")
@@ -196,7 +213,6 @@ def test_scalar_summary_use_invalid_value_None():
     log.debug("finished test_scalar_summary_use_invalid_tag_Int")
 
-
 # Test8: test with invalid value(None, "")
 def test_scalar_summary_use_invalid_value_None_ForSet():
     log.debug("begin test_scalar_summary_use_invalid_value_None_ForSet")
@@ -221,3 +237,30 @@ def test_scalar_summary_use_invalid_value_null():
     else:
         assert False
     log.debug("finished test_scalar_summary_use_invalid_value_null")
+
+
+def test_histogram_summary_use_valid_value():
+    """Test histogram summary with valid value"""
+    log.debug("Begin test_histogram_summary_use_valid_value")
+    try:
+        net = HistogramSummaryNet(Tensor(np.array([1, 2, 3])))
+        run_case(net)
+    except:
+        assert True
+    else:
+        assert False
+    log.debug("Finished test_histogram_summary_use_valid_value")
+
+
+def test_histogram_summary_use_scalar_value():
+    """Test histogram summary use scalar value"""
+    log.debug("Begin test_histogram_summary_use_scalar_value")
+    try:
+        scalar = Tensor(1)
+        net = HistogramSummaryNet(scalar)
+        run_case(net)
+    except:
+        assert True
+    else:
+        assert False
+    log.debug("Finished test_histogram_summary_use_scalar_value")