Project: magicwindyyd / mindspore (forked from MindSpore / mindspore; in sync with the fork source)
Commit 420ef2a3
Authored on April 28, 2020 by mindspore-ci-bot; committed by Gitee on April 28, 2020.
!731 remove extra empty string from log text
Merge pull request !731 from fary86/remove_extra_empty_string_in_log
Parents: fe900081, b314c1d6
Showing 18 changed files with 64 additions and 70 deletions (+64, -70).
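The change is mechanical and identical in every file: a redundant empty string literal at the head of an MS_LOG / MS_EXCEPTION stream chain is removed, which leaves the logged text byte-for-byte the same. The sketch below is illustrative only and is not taken from the MindSpore sources; MsLogStub is a hypothetical stand-in for the real MS_LOG sink, included just to show why the leading << "" contributes nothing.

  // Illustrative sketch; MsLogStub is a hypothetical stand-in for the MS_LOG sink.
  #include <iostream>
  #include <sstream>
  #include <string>

  struct MsLogStub {
    std::ostringstream buffer;
    template <typename T>
    MsLogStub &operator<<(const T &value) {  // chain arguments like an ostream
      buffer << value;
      return *this;
    }
    ~MsLogStub() { std::cout << buffer.str() << std::endl; }  // flush when the statement ends
  };

  int main() {
    std::string op_name = "BiasAddGrad";
    // Before this commit: a leading empty string literal in the chain.
    MsLogStub() << "" << op_name << " evaluator at least has 1 parameters";
    // After this commit: the empty literal is dropped; the output is identical.
    MsLogStub() << op_name << " evaluator at least has 1 parameters";
    return 0;
  }

Both statements print the same line, so the diff changes how the log calls read, not what they emit.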
Changed files:
  mindspore/ccsrc/ir/dtype.cc                                   +5  -5
  mindspore/ccsrc/ir/manager.cc                                 +7  -8
  mindspore/ccsrc/ir/primitive.cc                               +2  -2
  mindspore/ccsrc/operator/composite/composite.cc               +2  -3
  mindspore/ccsrc/operator/prim_nn.cc                           +2  -3
  mindspore/ccsrc/operator/prim_statement.cc                    +1  -2
  mindspore/ccsrc/optimizer/optimizer.h                         +1  -1
  mindspore/ccsrc/pipeline/parse/function_block.cc              +2  -2
  mindspore/ccsrc/pipeline/pipeline.cc                          +3  -3
  mindspore/ccsrc/pipeline/static_analysis/analysis_context.cc  +1  -1
  mindspore/ccsrc/pipeline/static_analysis/evaluator.cc         +5  -6
  mindspore/ccsrc/pipeline/static_analysis/param_validator.cc   +6  -7
  mindspore/ccsrc/pipeline/static_analysis/param_validator.h    +1  -1
  mindspore/ccsrc/pipeline/static_analysis/static_analysis.cc   +2  -2
  mindspore/ccsrc/pipeline/static_analysis/utils.cc             +1  -1
  mindspore/ccsrc/transform/convert.cc                          +1  -1
  mindspore/ccsrc/vm/vm.cc                                      +21 -21
  mindspore/ccsrc/vm/vmimpl.cc                                  +1  -1
mindspore/ccsrc/ir/dtype.cc (+5 -5)

@@ -345,7 +345,7 @@ TypePtr StringToNumberType(const std::string &type_name, const std::string &num_
       auto bits = std::stoi(type_name.substr(num_type_name.size()));
       type = std::make_shared<T>(bits);
     } catch (const std::exception &e) {
-      MS_LOG(EXCEPTION) << "" << num_type_name << " convert from string error " << e.what();
+      MS_LOG(EXCEPTION) << num_type_name << " convert from string error " << e.what();
     }
   }
   return type;
@@ -389,7 +389,7 @@ TypePtr TensorStrToType(const std::string &type_name) {
       }
       type = std::make_shared<TensorType>(element_type);
     } catch (const std::exception &e) {
-      MS_LOG(EXCEPTION) << "" << type_name << " convert from string error " << e.what();
+      MS_LOG(EXCEPTION) << type_name << " convert from string error " << e.what();
     }
   }
@@ -416,7 +416,7 @@ TypePtr ListStrToType(const std::string &type_name) {
       }
       type = std::make_shared<List>(element_types);
     } catch (const std::exception &e) {
-      MS_LOG(EXCEPTION) << "" << type_name << " convert from string error " << e.what();
+      MS_LOG(EXCEPTION) << type_name << " convert from string error " << e.what();
     }
   }
@@ -443,7 +443,7 @@ TypePtr TupleStrToType(const std::string &type_name) {
       }
       type = std::make_shared<Tuple>(element_types);
     } catch (const std::exception &e) {
-      MS_LOG(EXCEPTION) << "" << type_name << " convert from string error " << e.what();
+      MS_LOG(EXCEPTION) << type_name << " convert from string error " << e.what();
     }
   }
   return type;
@@ -484,7 +484,7 @@ TypePtr FunctionStrToType(const std::string &type_name) {
       }
       type = std::make_shared<Function>(args_type, retval);
     } catch (const std::exception &e) {
-      MS_LOG(EXCEPTION) << "" << type_name << " convert from string error " << e.what();
+      MS_LOG(EXCEPTION) << type_name << " convert from string error " << e.what();
     }
   }
   return type;
mindspore/ccsrc/ir/manager.cc (+7 -8)

@@ -888,7 +888,7 @@ void FuncGraphUserNodesCollector::OnMoveAllCNode(FuncGraphPtr src, FuncGraphPtr
 void FuncGraphJDirectCollector::OnModEdge(AnfNodePtr node, int, AnfNodePtr inp, EdgeProcessDirection direction) {
   if (IsValueNode<FuncGraph>(inp) && IsPrimitiveCNode(node, prim::kPrimJ)) {
     (void)Mod(node->func_graph(), GetValueNode<FuncGraphPtr>(inp), direction);
-    MS_LOG(DEBUG) << "" << node->func_graph()->ToString() << " users func graph "
+    MS_LOG(DEBUG) << node->func_graph()->ToString() << " users func graph "
                   << GetValueNode<FuncGraphPtr>(inp)->ToString() << " which contains J(func_graph), dir: " << direction;
   }
 }
@@ -945,7 +945,7 @@ FuncGraphSetPtr FuncGraphParentsTotalComputer::SeekParents(const FuncGraphPtr &f
 void FuncGraphParentsTotalComputer::RealRecompute(FuncGraphPtr fg) {
   MS_EXCEPTION_IF_NULL(fg);
   all_parents_direct_ = &(manager_->func_graph_parents_direct());
-  MS_LOG(DEBUG) << "" << fg->ToString() << " total func graph dep size:" << (*all_parents_direct_)[fg].size();
+  MS_LOG(DEBUG) << fg->ToString() << " total func graph dep size:" << (*all_parents_direct_)[fg].size();
   func_graph_parents_total_analysis_[fg].update(SeekParents(fg));
   MS_LOG(DEBUG) << "FuncGraphParentsTotalComputer end: " << func_graph_parents_total_analysis_[fg].size();
 }
@@ -1074,7 +1074,7 @@ void FuncGraphsUsedTotalComputer::RealRecompute(FuncGraphPtr fg) {
       if (func_graph_used_total_analysis_[fg].count(used_fg) == 0) {
        todo_new.push_back(used_fg);
       }
-      MS_LOG(DEBUG) << "" << fg->ToString() << " add func graph " << used_fg->ToString();
+      MS_LOG(DEBUG) << fg->ToString() << " add func graph " << used_fg->ToString();
      func_graph_used_total_analysis_[fg].add(used_fg);
     }
   }
@@ -1138,7 +1138,7 @@ void RecursiveComputer::CheckRecursiveGraphs(const FuncGraphPtr &fg, std::list<F
 bool FuncGraphJTotalComputer::SeekJ(const FuncGraphPtr &fg, const FuncGraphSetPtr &path) {
   MS_EXCEPTION_IF_NULL(path);
   if (path->contains(fg)) {
-    MS_LOG(DEBUG) << "" << fg->ToString() << " had been checked";
+    MS_LOG(DEBUG) << fg->ToString() << " had been checked";
     return false;
   }
   MS_EXCEPTION_IF_NULL(manager_);
@@ -1149,7 +1149,7 @@ bool FuncGraphJTotalComputer::SeekJ(const FuncGraphPtr &fg, const FuncGraphSetPt
       std::find_if(func_graph_counter_map[fg].begin(), func_graph_counter_map[fg].end(),
                    [path](const std::pair<FuncGraphPtr, int> iter) { return !path->contains(iter.first); });
     if (contains_j != func_graph_counter_map[fg].end()) {
-      MS_LOG(DEBUG) << "" << fg->ToString() << " contains J(" << contains_j->first->ToString() << ")";
+      MS_LOG(DEBUG) << fg->ToString() << " contains J(" << contains_j->first->ToString() << ")";
       return true;
     }
   }
@@ -1160,12 +1160,11 @@ bool FuncGraphJTotalComputer::SeekJ(const FuncGraphPtr &fg, const FuncGraphSetPt
   for (auto &item : used[fg]) {
     auto used_g = item.first;
     if (SeekJ(used_g, path)) {
-      MS_LOG(DEBUG) << "" << fg->ToString() << " users func graph " << used_g->ToString()
-                    << " which contains J(func_graph)";
+      MS_LOG(DEBUG) << fg->ToString() << " users func graph " << used_g->ToString() << " which contains J(func_graph)";
       return true;
     }
   }
-  MS_LOG(DEBUG) << "" << fg->ToString() << " doesn't contain J(func_graph)";
+  MS_LOG(DEBUG) << fg->ToString() << " doesn't contain J(func_graph)";
   return false;
 }
mindspore/ccsrc/ir/primitive.cc (+2 -2)

@@ -145,14 +145,14 @@ py::function PrimitivePy::GetComputeFunction() {
   static const char *const compute_func_name = "vm_impl";
   if (py::hasattr(python_obj_, compute_func_name)) {
-    MS_LOG(INFO) << "" << name() << " compute_func_name";
+    MS_LOG(INFO) << name() << " compute_func_name";
     py::function fn = python_obj_.attr(compute_func_name).cast<py::function>();
     return fn;
   }
   static const std::string vm_module = "mindspore.ops.vm_impl_registry";
   static const std::string get_vm_impl_fn = "get_vm_impl_fn";
-  MS_LOG(INFO) << "" << name() << ": get_vm_impl_fn";
+  MS_LOG(INFO) << name() << ": get_vm_impl_fn";
   py::function get_fn = parse::python_adapter::GetPyFn(vm_module, get_vm_impl_fn);
   py::function vm_fn = get_fn(python_obj_);
mindspore/ccsrc/operator/composite/composite.cc (+2 -3)

@@ -676,7 +676,7 @@ void MultitypeFuncGraph::Register(const std::vector<std::string> &types_name, co
   for (auto &type_name : types_name) {
     auto type_ptr = StringToType(type_name);
     if (type_ptr == nullptr) {
-      MS_LOG(EXCEPTION) << "" << type_name << " convert from string error ";
+      MS_LOG(EXCEPTION) << type_name << " convert from string error ";
     }
     types.push_back(type_ptr);
   }
@@ -955,8 +955,7 @@ int CheckSliceMember(const AbstractBasePtr &member, int default_value, const std
     return default_value;
   }
-  MS_LOG(EXCEPTION) << "" << member_name << " should be a AbstractScalar or AbstractNone, but got "
-                    << member->ToString();
+  MS_LOG(EXCEPTION) << member_name << " should be a AbstractScalar or AbstractNone, but got " << member->ToString();
 }

 void GenerateTupleSliceParameter(const AbstractTuplePtr &tuple, const AbstractSlicePtr &slice, int *start_index,
mindspore/ccsrc/operator/prim_nn.cc (+2 -3)

@@ -246,7 +246,7 @@ AbstractBasePtr InferImplBiasAddGrad(const AnalysisEnginePtr &, const PrimitiveP
   // Inputs: at least one tensor(y_backprop)
   // Outputs: dbias
   if (args_spec_list.empty()) {
-    MS_LOG(EXCEPTION) << "" << primitive->name() << " evaluator at least has 1 parameters, while the input size is "
+    MS_LOG(EXCEPTION) << primitive->name() << " evaluator at least has 1 parameters, while the input size is "
                       << args_spec_list.size() << ".";
   }
@@ -255,8 +255,7 @@ AbstractBasePtr InferImplBiasAddGrad(const AnalysisEnginePtr &, const PrimitiveP
   MS_EXCEPTION_IF_NULL(shape_y);
   std::vector<int> y_dims = shape_y->shape();
   if (y_dims.size() < 2) {
-    MS_LOG(EXCEPTION) << "" << primitive->name() << " input y backprop, dim should >= 2, while " << y_dims.size()
-                      << ".";
+    MS_LOG(EXCEPTION) << primitive->name() << " input y backprop, dim should >= 2, while " << y_dims.size() << ".";
   }
   std::vector<int> bias_dims = {y_dims[1]};
   ShapePtr ret_shape = std::make_shared<Shape>(bias_dims);
mindspore/ccsrc/operator/prim_statement.cc (+1 -2)

@@ -80,8 +80,7 @@ AbstractBasePtr InferImplDot(const AnalysisEnginePtr &, const PrimitivePtr &prim
   auto y_shp_value = y_shp->shape();
   // Should be matrix which shape size is 2.
   if (x_shp_value.size() != 2 || y_shp_value.size() != 2) {
-    MS_LOG(EXCEPTION) << "" << op_name
-                      << " evaluator requires input two 2D tensors, while the dimensions of two tensors are "
+    MS_LOG(EXCEPTION) << op_name << " evaluator requires input two 2D tensors, while the dimensions of two tensors are "
                       << x_shp_value.size() << ", " << y_shp_value.size() << " ";
   }
   if (x_shp_value[1] != y_shp_value[0] && x_shp_value[1] != Shape::SHP_ANY && y_shp_value[0] != Shape::SHP_ANY) {
mindspore/ccsrc/optimizer/optimizer.h (+1 -1)

@@ -171,7 +171,7 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
         };
         use_profile ? (WITH(MsProfile::GetProfile()->Step(pass_names_[i])) opt_func) : opt_func();
 #ifdef DEBUG
-        MS_LOG(DEBUG) << "" << name_ << " round " << counter << " OptPass " << pass_names_[i] << " end.";
+        MS_LOG(DEBUG) << name_ << " round " << counter << " OptPass " << pass_names_[i] << " end.";
         auto fg_name = name_ + "_r" + std::to_string(counter) + "_" + std::to_string(i) + "_" + pass_names_[i];
         func_graph->DumpFuncGraph(fg_name);
         DumpIR(fg_name + ".ir", func_graph);
mindspore/ccsrc/pipeline/parse/function_block.cc (+2 -2)

@@ -37,7 +37,7 @@ void FunctionBlock::AddPrevBlock(const FunctionBlockPtr &block) { prev_blocks_.p
 // write variable records the variable name to corresponding node
 void FunctionBlock::WriteVariable(const std::string &var_name, const AnfNodePtr &node) {
-  MS_LOG(DEBUG) << "" << func_graph_->ToString() << " write var " << var_name << " with node " << node->DebugString();
+  MS_LOG(DEBUG) << func_graph_->ToString() << " write var " << var_name << " with node " << node->DebugString();
   vars_[var_name] = node;
 }
@@ -71,7 +71,7 @@ AnfNodePtr FunctionBlock::ReadVariable(const std::string &var) {
     TraceManager::DebugTrace(std::make_shared<TracePhi>(debug_info));
     ParameterPtr phi_param = std::make_shared<Parameter>(func_graph());
     TraceManager::EndTrace();
-    MS_LOG(DEBUG) << "" << func_graph_->ToString() << " generate phi node " << phi_param->ToString() << " for " << var;
+    MS_LOG(DEBUG) << func_graph_->ToString() << " generate phi node " << phi_param->ToString() << " for " << var;
     func_graph()->add_parameter(phi_param);
     phi_nodes_[phi_param] = var;
     WriteVariable(var, phi_param);
mindspore/ccsrc/pipeline/pipeline.cc (+3 -3)

@@ -333,7 +333,7 @@ void ExecutorPy::GetGeBackendPolicy() const {
   MS_EXCEPTION_IF_NULL(ms_context);
   std::string backend = ms_context->backend_policy();
   if (backend != "ge") {
-    MS_LOG(EXCEPTION) << "" << backend << " backend policy is not supported under ge backend!";
+    MS_LOG(EXCEPTION) << backend << " backend policy is not supported under ge backend!";
   }
 }
@@ -491,10 +491,10 @@ void RunPipelineAction(const ActionItem &action, pipeline::ResourcePtr resource,
   // load MindSpore IR from file
   if (action.first == "symbol_resolve") {
-    MS_LOG(DEBUG) << "" << action.first << " read ir file: " << ir_file;
+    MS_LOG(DEBUG) << action.first << " read ir file: " << ir_file;
     std::vector<FuncGraphPtr> graphs = ImportIR(ir_file);
     if (graphs.size() == 0) {
-      MS_LOG(EXCEPTION) << "" << action.first << " read ir file " << ir_file << " failed as no graph found";
+      MS_LOG(EXCEPTION) << action.first << " read ir file " << ir_file << " failed as no graph found";
     }
     auto manager = resource->manager();
     MS_EXCEPTION_IF_NULL(manager);
mindspore/ccsrc/pipeline/static_analysis/analysis_context.cc (+1 -1)

@@ -78,7 +78,7 @@ AnalysisContextPtr AnalysisContext::Filter(const FuncGraphPtr &func_graph) {
       oss << ", context: " << iter.second.lock()->ToString() << "]";
     }
     oss << "}";
-    MS_LOG(EXCEPTION) << "" << oss.str() << " NodeInfo: " << trace::GetDebugInfo(func_graph->debug_info());
+    MS_LOG(EXCEPTION) << oss.str() << " NodeInfo: " << trace::GetDebugInfo(func_graph->debug_info());
   }
   return parent_context;
 }
mindspore/ccsrc/pipeline/static_analysis/evaluator.cc (+5 -6)

@@ -33,8 +33,7 @@ void InferEntryLogging(const EvaluatorPtr &evaluator, const AbstractBasePtrList
     MS_LOG(DEBUG) << "Evaluator " << evaluator->ToString() << " run for " << out_conf->node()->scope()->name();
   }
   for (size_t i = 0; i < arg_spec_list.size(); i++) {
-    MS_LOG(DEBUG) << "" << evaluator->ToString() << " input[" << i
-                  << "] abstract value: " << arg_spec_list[i]->ToString();
+    MS_LOG(DEBUG) << evaluator->ToString() << " input[" << i << "] abstract value: " << arg_spec_list[i]->ToString();
   }
 }
@@ -137,7 +136,7 @@ AbstractBasePtrList FuncGraphEvaluator::NormalizeArgs(const AbstractBasePtrList
                          MS_EXCEPTION_IF_NULL(arg);
                          return arg->Broaden();
                        });
-  MS_LOG(DEBUG) << "" << func_graph_->ToString() << " original: " << mindspore::ToString(args_spec_list)
+  MS_LOG(DEBUG) << func_graph_->ToString() << " original: " << mindspore::ToString(args_spec_list)
                 << ", broaded: " << mindspore::ToString(broaded_list);
   return broaded_list;
 }
@@ -230,20 +229,20 @@ AbstractBasePtr Evaluator::Run(AnalysisEnginePtr engine, const ConfigPtrList &ar
   MS_EXCEPTION_IF_NULL(cache_);
   auto iter = cache_->find(args_spec_list);
   if (iter == cache_->end()) {
-    MS_LOG(DEBUG) << "" << evaluator_name << " cache miss, call Infer().";
+    MS_LOG(DEBUG) << evaluator_name << " cache miss, call Infer().";
     AbstractBasePtr ret = Infer(engine, args_spec_list);
     if (ret == nullptr) {
       InferFailLogging(shared_from_base<Evaluator>(), args_spec_list, out_conf);
       MS_LOG(EXCEPTION) << "Evaluator " << evaluator_name << " result is nullptr.";
     }
     MS_EXCEPTION_IF_NULL(ret);
-    MS_LOG(DEBUG) << "" << evaluator_name << " set cache. return: " << ret->ToString() << ".";
+    MS_LOG(DEBUG) << evaluator_name << " set cache. return: " << ret->ToString() << ".";
     (*cache_)[args_spec_list] = ret;
     trace::TraceGraphInferLeave(shared_from_base<Evaluator>());
     return ret;
   } else {
     MS_EXCEPTION_IF_NULL(iter->second);
-    MS_LOG(DEBUG) << "" << evaluator_name << " cache hit. return: " << iter->second->ToString() << ".";
+    MS_LOG(DEBUG) << evaluator_name << " cache hit. return: " << iter->second->ToString() << ".";
     trace::TraceGraphInferLeave(shared_from_base<Evaluator>());
     return iter->second;
   }
mindspore/ccsrc/pipeline/static_analysis/param_validator.cc (+6 -7)

@@ -103,7 +103,7 @@ ShapePtr CheckShapeSame(const std::string &op, const AbstractTensorPtr &tensor_b
   ShapePtr shape_base = tensor_base->shape();
   ShapePtr shape = tensor->shape();
   if (*shape != *shape_base) {
-    MS_LOG(EXCEPTION) << "" << op << " evaluator first arg shape " << tensor->shape()->ToString()
+    MS_LOG(EXCEPTION) << op << " evaluator first arg shape " << tensor->shape()->ToString()
                       << " are not consistent with second arg shape " << tensor_base->shape()->ToString();
   }
   return shape_base;
@@ -113,7 +113,7 @@ TypePtr CheckDtypeSame(const std::string &op, const AbstractTensorPtr &tensor_ba
   TypePtr type_base = tensor_base->element()->BuildType();
   TypePtr type = tensor->element()->BuildType();
   if (*type != *type_base) {
-    MS_LOG(EXCEPTION) << "" << op << " evaluator first arg dtype " << type_base->ToString()
+    MS_LOG(EXCEPTION) << op << " evaluator first arg dtype " << type_base->ToString()
                       << " are not consistent with second arg dtype " << type->ToString();
   }
   return type_base;
@@ -121,14 +121,14 @@ TypePtr CheckDtypeSame(const std::string &op, const AbstractTensorPtr &tensor_ba
 int CheckAxis(const std::string &op, const ValuePtr &axis, int minimum, int max) {
   if (axis == nullptr) {
-    MS_LOG(EXCEPTION) << "" << op << " evaluator axis is null";
+    MS_LOG(EXCEPTION) << op << " evaluator axis is null";
   }
   if (!axis->isa<Int32Imm>()) {
-    MS_LOG(EXCEPTION) << "" << op << " evaluator axis should be int, but got " << axis->type_name();
+    MS_LOG(EXCEPTION) << op << " evaluator axis should be int, but got " << axis->type_name();
   }
   int axis_value = GetValue<int>(axis);
   if (axis_value > max || axis_value < minimum) {
-    MS_LOG(EXCEPTION) << "" << op << " evaluator axis value should be in the range [" << minimum << ", " << max
+    MS_LOG(EXCEPTION) << op << " evaluator axis value should be in the range [" << minimum << ", " << max
                       << "], but get " << axis_value;
   }
   return axis_value;
@@ -136,8 +136,7 @@ int CheckAxis(const std::string &op, const ValuePtr &axis, int minimum, int max)
 void CheckArgsSize(const std::string &op, const mindspore::abstract::AbstractBasePtrList &args_spec_list,
                    size_t size_expect) {
   if (args_spec_list.size() != size_expect) {
-    MS_LOG(EXCEPTION) << "" << op << " input args size should be " << size_expect << ", but got "
-                      << args_spec_list.size();
+    MS_LOG(EXCEPTION) << op << " input args size should be " << size_expect << ", but got " << args_spec_list.size();
   }
   for (size_t i = 0; i < size_expect; i++) {
mindspore/ccsrc/pipeline/static_analysis/param_validator.h (+1 -1)

@@ -70,7 +70,7 @@ ABSTRACT_REPORT_NAME_TRAITS(Class)
 template <typename T>
 std::shared_ptr<T> CheckArg(const std::string &op, const AbstractBasePtrList &args_spec_list, size_t index) {
   if (index >= args_spec_list.size()) {
-    MS_EXCEPTION(ValueError) << "" << op << " evaluator args list index out of bound, size " << args_spec_list.size()
+    MS_EXCEPTION(ValueError) << op << " evaluator args list index out of bound, size " << args_spec_list.size()
                              << ", index " << index;
   }
   auto arg = dyn_cast<T>(args_spec_list[index]);
mindspore/ccsrc/pipeline/static_analysis/static_analysis.cc (+2 -2)

@@ -122,7 +122,7 @@ AnalysisResult AnalysisEngine::Run(const FuncGraphPtr &func_graph, const Abstrac
   MS_EXCEPTION_IF_NULL(root_context->func_graph());
   AnfNodeConfigPtr output_conf = MakeConfig(root_context->func_graph()->get_return(), root_context);
   MS_EXCEPTION_IF_NULL(func_graph);
-  MS_LOG(INFO) << "" << func_graph->ToString() << ": Run finished.";
+  MS_LOG(INFO) << func_graph->ToString() << ": Run finished.";
   AnalysisResult result;
   MS_EXCEPTION_IF_NULL(output_conf);
@@ -167,7 +167,7 @@ AbstractBasePtr AnalysisEngine::Eval(const AnfNodeConfigPtr &conf) {
   for (auto iter : compute_conf_stack_) {
     buffer << " -> " << iter->DebugString();
   }
-  MS_LOG(DEBUG) << "" << buffer.str();
+  MS_LOG(DEBUG) << buffer.str();
 #endif
   MS_LOG(DEBUG) << "Begin Eval NodeConfig " << conf->ToString();
   MS_EXCEPTION_IF_NULL(node);
mindspore/ccsrc/pipeline/static_analysis/utils.cc (+1 -1)

@@ -175,7 +175,7 @@ std::vector<int> RealBroadcast(const std::string &op, std::vector<int> x_shape,
       output_i = x_i;
     } else {
-      MS_LOG(EXCEPTION) << "" << op
+      MS_LOG(EXCEPTION) << op
                         << " evaluator the shape of first tensor and the shape of second tensor do not meet the broadcasting "
                            "requirements";
     }
mindspore/ccsrc/transform/convert.cc (+1 -1)

@@ -623,7 +623,7 @@ void DfGraphConvertor::InitParamWithData(const TensorOrderMap &tensors) {
     auto node_itor = params_.find(name);
     // if name not in params_, create a node in graph
     if (node_itor == params_.end()) {
-      MS_LOG(WARNING) << "" << name << " is not in params, and create a new node.";
+      MS_LOG(WARNING) << name << " is not in params, and create a new node.";
       ParameterPtr param = anf_graph_->add_parameter();
       name = name + "_temp";
       param->set_name(name);
mindspore/ccsrc/vm/vm.cc (+21 -21)

@@ -216,8 +216,8 @@ void FinalVM::InstCall(const VectorRef &args) {
   MS_LOG(DEBUG) << "Start";
   const size_t args_size = 1;
   if (args.size() != args_size) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires " << args_size << " parameter, while the input size is "
-                  << args.size() << ".";
+    MS_LOG(ERROR) << __FUNCTION__ << " requires " << args_size << " parameter, while the input size is " << args.size()
+                  << ".";
     return;
   }
@@ -232,8 +232,8 @@ void FinalVM::InstTailCall(const VectorRef &args) {
   MS_LOG(DEBUG) << "Start";
   const size_t args_size = 3;
   if (args.size() != args_size) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires " << args_size << " parameters, while the input size is "
-                  << args.size() << ".";
+    MS_LOG(ERROR) << __FUNCTION__ << " requires " << args_size << " parameters, while the input size is " << args.size()
+                  << ".";
     return;
   }
@@ -261,7 +261,7 @@ void FinalVM::InstTailCall(const VectorRef &args) {
 void FinalVM::InstSwitchReturn(const VectorRef &args) {
   MS_LOG(DEBUG) << "Start";
   if (args.size() != 1) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires one parameter, while the input size is " << args.size() << ".";
+    MS_LOG(ERROR) << __FUNCTION__ << " requires one parameter, while the input size is " << args.size() << ".";
     return;
   }
   Pop(1);
@@ -272,8 +272,8 @@ void FinalVM::InstReturn(const VectorRef &args) {
   MS_LOG(DEBUG) << "Start";
   const size_t args_size = 2;
   if (args.size() != args_size) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires " << args_size << " parameters, while the input size is "
-                  << args.size() << ".";
+    MS_LOG(ERROR) << __FUNCTION__ << " requires " << args_size << " parameters, while the input size is " << args.size()
+                  << ".";
     return;
   }
@@ -295,7 +295,7 @@ void FinalVM::InstPartial(const VectorRef &args) {
   MS_LOG(DEBUG) << "Start";
   const size_t args_size = 1;
   if (args.size() < args_size) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires " << args_size << " or more parameters, while the input size is "
+    MS_LOG(ERROR) << __FUNCTION__ << " requires " << args_size << " or more parameters, while the input size is "
                   << args.size() << ".";
     return;
   }
@@ -314,8 +314,8 @@ void FinalVM::InstPartial(const VectorRef &args) {
 void FinalVM::InstSimuSwitch(const VectorRef &args) {
   const size_t args_size = 4;
   if (args.size() != args_size) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires " << args_size << " parameters, while the input size is "
-                  << args.size() << ".";
+    MS_LOG(ERROR) << __FUNCTION__ << " requires " << args_size << " parameters, while the input size is " << args.size()
+                  << ".";
     return;
   }
   bool cond = utils::cast<bool>(args[0]);
@@ -368,8 +368,8 @@ void FinalVM::InstSimuSwitch(const VectorRef &args) {
 void FinalVM::InstRealSwitch(const VectorRef &args) {
   const size_t args_size = 3;
   if (args.size() != args_size) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires " << args_size << " parameters, while the input size is "
-                  << args.size() << ".";
+    MS_LOG(ERROR) << __FUNCTION__ << " requires " << args_size << " parameters, while the input size is " << args.size()
+                  << ".";
     return;
   }
@@ -378,7 +378,7 @@ void FinalVM::InstRealSwitch(const VectorRef &args) {
   int vfalse = utils::cast<int>(args[2]);
   BaseRef c = Ref(cond);
-  MS_LOG(DEBUG) << "" << vtrue << " false:" << vfalse << " InstSwitch: " << c.ToString();
+  MS_LOG(DEBUG) << vtrue << " false:" << vfalse << " InstSwitch: " << c.ToString();
   bool bool_value = false;
   if (backend_->GetCond(c, &bool_value)) {
     MS_LOG(DEBUG) << "Cond:" << bool_value;
@@ -417,8 +417,8 @@ void FinalVM::InstPush(const VectorRef &args) {
   MS_LOG(DEBUG) << "Start";
   const size_t args_size = 1;
   if (args.size() != args_size) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires " << args_size << " parameter, while the input size is "
-                  << args.size() << ".";
+    MS_LOG(ERROR) << __FUNCTION__ << " requires " << args_size << " parameter, while the input size is " << args.size()
+                  << ".";
     return;
   }
@@ -431,8 +431,8 @@ void FinalVM::InstInput(const VectorRef &args) {
   MS_LOG(DEBUG) << "Start";
   const size_t args_size = 1;
   if (args.size() != args_size) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires " << args_size << " parameter, while the input size is "
-                  << args.size() << ".";
+    MS_LOG(ERROR) << __FUNCTION__ << " requires " << args_size << " parameter, while the input size is " << args.size()
+                  << ".";
     return;
   }
@@ -445,13 +445,13 @@ void FinalVM::InstPadStack(const VectorRef &args) {
   MS_LOG(DEBUG) << "Start";
   const size_t args_size = 1;
   if (args.size() != args_size) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires " << args_size << " parameter, while the input size is "
-                  << args.size() << ".";
+    MS_LOG(ERROR) << __FUNCTION__ << " requires " << args_size << " parameter, while the input size is " << args.size()
+                  << ".";
     return;
   }
   int sz = utils::cast<int>(args[0]);
-  MS_LOG(DEBUG) << "" << insts_stack_.size() << " need padstack " << sz << " sp_ " << sp_;
+  MS_LOG(DEBUG) << insts_stack_.size() << " need padstack " << sz << " sp_ " << sp_;
   size_t stack_size = insts_stack_.size();
   int need = sz - (static_cast<int>(stack_size) - sp_);
   if (need > 0) {
@@ -501,7 +501,7 @@ void FinalVM::InstPushPrim(const VectorRef &args) {
   MS_LOG(DEBUG) << "Start: " << args.size();
   const size_t args_size = 2;
   if (args.size() < args_size) {
-    MS_LOG(ERROR) << "" << __FUNCTION__ << " requires " << args_size << " or more parameters, while the input size is "
+    MS_LOG(ERROR) << __FUNCTION__ << " requires " << args_size << " or more parameters, while the input size is "
                   << args.size() << ".";
     return;
   }
mindspore/ccsrc/vm/vmimpl.cc (+1 -1)

@@ -445,7 +445,7 @@ BaseRef RunOperation(const PrimitivePtr &prim, const VectorRef &args) {
   MS_LOG(DEBUG) << "operation start " << prim->name();
   auto func = operation != nullptr ? operation->GetComputeFunction() : prim->GetComputeFunction();
   if (py::isinstance<py::none>(func)) {
-    MS_LOG(EXCEPTION) << "" << prim->name() << " 's compute function is not implemented";
+    MS_LOG(EXCEPTION) << prim->name() << " 's compute function is not implemented";
   }
   py::tuple py_args = py::tuple(args.size());