Commit 6748ef00

!1776 fix codex

Merge pull request !1776 from fary86/fix_codex_06.01

Authored on Jun 02, 2020 by mindspore-ci-bot; committed via Gitee on Jun 02, 2020.
Parents: e79df7c1, a2b882bc
Showing 13 changed files with 13 additions and 30 deletions (+13 -30).
Changed files:

  mindspore/ccsrc/debug/info.cc                               +4 -4
  mindspore/ccsrc/debug/info.h                                +2 -2
  mindspore/ccsrc/ir/func_graph.h                             +0 -3
  mindspore/ccsrc/ir/manager.cc                               +3 -3
  mindspore/ccsrc/ir/tensor.cc                                +0 -3
  mindspore/ccsrc/ir/tensor.h                                 +0 -5
  mindspore/ccsrc/minnie/param_value_minnie.h                 +0 -1
  mindspore/ccsrc/minnie/tensor_minnie.h                      +0 -1
  mindspore/ccsrc/operator/composite/multitype_funcgraph.cc   +0 -2
  mindspore/ccsrc/operator/composite/multitype_funcgraph.h    +0 -2
  mindspore/ccsrc/optimizer/opt.cc                            +1 -1
  mindspore/ccsrc/utils/primitive_utils.cc                    +1 -1
  mindspore/ccsrc/vm/vm.cc                                    +2 -2
mindspore/ccsrc/debug/info.cc

@@ -126,10 +126,10 @@ int64_t DebugInfo::debug_id() {
 }
 
 int64_t DebugInfo::unique_id_through_copy() const {
-  TraceInfoPtr trace_info = const_cast<DebugInfo *>(this)->trace_info();
-  if (trace_info != nullptr) {
-    if (trace_info->isa<TraceCopy>() && trace_info->debug_info() != nullptr) {
-      return trace_info->debug_info()->unique_id_through_copy();
+  auto info = trace_info();
+  if (info != nullptr) {
+    if (info->isa<TraceCopy>() && info->debug_info() != nullptr) {
+      return info->debug_info()->unique_id_through_copy();
     }
   }
   return unique_id();
mindspore/ccsrc/debug/info.h

@@ -118,7 +118,7 @@ class TraceContext {
   void set_location(const LocationPtr &loc) { location_ = loc; }
   LocationPtr location() { return location_; }
   void set_trace_info(const TraceInfoPtr &trace_info) { trace_info_ = trace_info; }
-  TraceInfoPtr trace_info() { return trace_info_; }
+  TraceInfoPtr trace_info() const { return trace_info_; }
   void set_func_name(const std::string &func_name) { func_name_ = func_name; }
   std::string func_name() { return func_name_; }
 };

@@ -139,7 +139,7 @@ class DebugInfo : public Base {
   std::string get_id() { return std::to_string(debug_id()); }
   void set_trace_info(const TraceInfoPtr &trace_info) { trace_info_ = trace_info; }
-  TraceInfoPtr trace_info() { return trace_info_; }
+  TraceInfoPtr trace_info() const { return trace_info_; }
   void set_location(const LocationPtr &loc) { location_ = loc; }
   virtual LocationPtr location() { return location_; }
   std::string name() { return name_; }
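The info.h hunks above add a const overload of trace_info() alongside the existing non-const accessor, and the info.cc hunk uses it so the const method unique_id_through_copy() no longer needs const_cast. A minimal standalone sketch of that pattern follows; Widget, label() and describe() are made-up names for illustration, not MindSpore code.

#include <iostream>
#include <string>

class Widget {
 public:
  void set_label(const std::string &label) { label_ = label; }

  // Non-const and const overloads of the same accessor. The const overload is
  // selected when *this is const, so const member functions can call it directly.
  std::string label() { return label_; }
  std::string label() const { return label_; }

  // Without the const overload this method would have to write
  // const_cast<Widget *>(this)->label(), which is the kind of cast the patch removes.
  std::string describe() const { return "widget: " + label(); }

 private:
  std::string label_;
};

int main() {
  Widget w;
  w.set_label("demo");
  const Widget &cw = w;
  std::cout << cw.describe() << std::endl;  // prints "widget: demo"
  return 0;
}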
mindspore/ccsrc/ir/func_graph.h

@@ -57,9 +57,6 @@ class AbstractFunction;
 using AbstractFunctionPtr = std::shared_ptr<AbstractFunction>;
 }  // namespace abstract
 
-class FuncGraphManager;
-using FuncGraphManagerPtr = std::shared_ptr<FuncGraphManager>;
-
 // ANF transform class
 // either a primitive or a func_graph
 class FuncGraphTransform {
mindspore/ccsrc/ir/manager.cc

@@ -464,7 +464,7 @@ void FuncGraphManager::MoveAllCNodeDropGraph(FuncGraphPtr source, FuncGraphPtr t
   }
 }
 
-inline void FuncGraphManager::AddEdge(AnfNodePtr node, int index, AnfNodePtr input) {
+void FuncGraphManager::AddEdge(AnfNodePtr node, int index, AnfNodePtr input) {
   auto fg = node->func_graph();
   if (input->isa<ValueNode>()) {
     fg->AddValueNode(input);

@@ -485,7 +485,7 @@ inline void FuncGraphManager::AddEdge(AnfNodePtr node, int index, AnfNodePtr inp
   }
 }
 
-inline void FuncGraphManager::DropEdge(AnfNodePtr node, int index, AnfNodePtr input) {
+void FuncGraphManager::DropEdge(AnfNodePtr node, int index, AnfNodePtr input) {
   auto fg = node->func_graph();
   if (input->isa<ValueNode>()) {
     fg->DropValueNode(input);

@@ -506,7 +506,7 @@ inline void FuncGraphManager::DropEdge(AnfNodePtr node, int index, AnfNodePtr in
   }
 }
 
-inline void FuncGraphManager::MoveAllNodes(FuncGraphPtr source, FuncGraphPtr target) {
+void FuncGraphManager::MoveAllNodes(FuncGraphPtr source, FuncGraphPtr target) {
   target->CopyNodes(source);
   target->CopyValueNodes(source);
   target->CopyFuncGraphCNodesIndex(source);
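The three manager.cc hunks only drop the inline keyword from member functions that are declared in the header but defined out of line in this .cc file. A rough sketch of why that matters, using a hypothetical GraphManagerDemo class rather than the real FuncGraphManager: an inline function must be defined in every translation unit that uses it, so an inline definition hidden inside one .cc file can produce undefined-reference link errors for callers in other files; dropping inline restores ordinary external linkage.

// Illustrative only; GraphManagerDemo is a stand-in, not FuncGraphManager.

// --- what would live in the header ---
class GraphManagerDemo {
 public:
  void AddEdgeDemo(int from, int to);  // declared here, defined in the .cc below
 private:
  int edge_count_ = 0;
};

// --- what would live in the .cc file ---
// Before the change the definition read `inline void GraphManagerDemo::AddEdgeDemo(...)`;
// such a definition is only usable from translation units that also see it.
// Without `inline`, the definition has normal external linkage and can be called
// from any file that includes the header.
void GraphManagerDemo::AddEdgeDemo(int from, int to) {
  (void)from;
  (void)to;
  ++edge_count_;
}

int main() {
  GraphManagerDemo m;
  m.AddEdgeDemo(0, 1);
  return 0;
}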
mindspore/ccsrc/ir/tensor.cc

@@ -28,9 +28,7 @@
 #include "pipeline/static_analysis/abstract_value.h"
 
 namespace mindspore {
-
 namespace tensor {
-
 void DataBuf2Contiguous(const py::array &src, py::array *const dest) {
   if (dest == nullptr) {
     MS_LOG(EXCEPTION) << "Failed to copy data to a contiguous buffer as dest is nullptr!";

@@ -493,6 +491,5 @@ REGISTER_PYBIND_DEFINE(Tensor, ([](const py::module *m) {
                            .def("dtype", &MetaTensor::Dtype, "Get the MetaTensor's dtype.")
                            .def("shape", &MetaTensor::shape, "Get the MetaTensor's shape.");
                        }));
 }  // namespace tensor
-
 }  // namespace mindspore
mindspore/ccsrc/ir/tensor.h

@@ -34,9 +34,7 @@ namespace py = pybind11;
 using float16 = Eigen::half;
 
 namespace pybind11 {
-
 namespace detail {
-
 // Similar to enums in `pybind11/numpy.h`. Determined by doing:
 // python3 -c 'import numpy as np; print(np.dtype(np.float16).num)'
 constexpr int NPY_FLOAT16 = 23;

@@ -85,7 +83,6 @@ template <>
 struct type_caster<float16> : public npy_scalar_caster<float16> {
   static constexpr auto name = "float16";
 };
-
 }  // namespace detail
 }  // namespace pybind11

@@ -96,7 +93,6 @@ using DeviceAddressPtr = std::shared_ptr<mindspore::device::DeviceAddress>;
 // mindspore namespace is the top level namespace of Mindsporeession project.
 // Other namespace should be a sub namespace of mindspore namespace in the ME project.
 namespace mindspore {
-
 // brief mindspore::tensor namespace
 //
 // A sub namespace in ME to support tensor related definition.

@@ -273,7 +269,6 @@ class Tensor : public MetaTensor {
 using TensorPtr = std::shared_ptr<Tensor>;
 using TensorPtrList = std::vector<std::shared_ptr<Tensor>>;
-
 }  // namespace tensor
 }  // namespace mindspore
mindspore/ccsrc/minnie/param_value_minnie.h

@@ -39,6 +39,5 @@ class ParamValueMinnie : public ParamValue {
 };
 
 using ParamValueMinniePtr = std::shared_ptr<ParamValueMinnie>;
-
 }  // namespace mindspore
 #endif  // MINDSPORE_CCSRC_MINNIE_PARAM_VALUE_MINNIE_H_
mindspore/ccsrc/minnie/tensor_minnie.h

@@ -70,7 +70,6 @@ class TensorMinnie : public MetaTensor {
 };
 
 using TensorMinniePtr = std::shared_ptr<TensorMinnie>;
-
 }  // namespace tensor
 }  // namespace mindspore
mindspore/ccsrc/operator/composite/multitype_funcgraph.cc

@@ -39,7 +39,6 @@
 namespace mindspore {
 // namespace to support composite operators definition
 namespace prim {
-
 MultitypeFuncGraph::MultitypeFuncGraph(const std::string &name) : MetaFuncGraph(name) {
   fn_cache_.clear();
   signatures_ = std::vector<Signature>({// def multitype(*args:ref):

@@ -148,6 +147,5 @@ REGISTER_PYBIND_DEFINE(MultitypeFuncGraph_, ([](const py::module *m) {
                            .def(py::init<std::string &>())
                            .def("register_fn", &MultitypeFuncGraph::PyRegister);
                        }));
 }  // namespace prim
-
 }  // namespace mindspore
mindspore/ccsrc/operator/composite/multitype_funcgraph.h

@@ -34,7 +34,6 @@
 namespace mindspore {
 // namespace to support composite operators definition
 namespace prim {
-
 class MultitypeFuncGraph : public MetaFuncGraph {
  public:
   explicit MultitypeFuncGraph(const std::string &name);

@@ -59,7 +58,6 @@ class MultitypeFuncGraph : public MetaFuncGraph {
   std::unordered_map<TypePtrList, py::function, TypeListHasher, TypeListEqual> fn_cache_py_;
 };
 using MultitypeFuncGraphPtr = std::shared_ptr<MultitypeFuncGraph>;
-
 }  // namespace prim
 }  // namespace mindspore
mindspore/ccsrc/optimizer/opt.cc

@@ -88,7 +88,7 @@ AnfNodePtr Substitution::operator()(const OptimizerPtr &optimizer, const AnfNode
   return result;
 }
 
-inline bool isTraversable(const AnfNodePtr &node) {
+static bool isTraversable(const AnfNodePtr &node) {
   if (node == nullptr) {
     return false;
   }
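The opt.cc hunk changes the free helper isTraversable from inline to static. A hedged sketch of the difference, with a made-up helper name: static gives a file-local function internal linkage, so the .cc file keeps its own private copy and cannot collide with a same-named symbol in another translation unit, whereas inline keeps external linkage and merely relaxes the one-definition rule.

#include <string>

// Hypothetical file-local helper, mirroring the opt.cc pattern.
// `static` => internal linkage: invisible to other translation units, so
// another .cc file may define its own isUsableDemo without any conflict.
static bool isUsableDemo(const std::string *node) {
  if (node == nullptr) {
    return false;
  }
  return !node->empty();
}

int main() {
  std::string name = "node";
  return isUsableDemo(&name) ? 0 : 1;
}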
mindspore/ccsrc/utils/primitive_utils.cc

@@ -41,7 +41,7 @@ py::function GetComputeFunction(std::string name) {
   if (!py::hasattr(mod, common::SafeCStr(name))) {
     PyErr_SetString(PyExc_NotImplementedError, common::SafeCStr(name));
     // If raise AttributeError, user can't understand. This case need raise NotImplementedError.
-    throw py::error_already_set();
+    throw(py::error_already_set());
   }
   py::object fn = mod.attr(common::SafeCStr(name));
   return fn;
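The primitive_utils.cc hunk only wraps the thrown expression in parentheses: throw py::error_already_set(); becomes throw(py::error_already_set());. The two spellings are equivalent in C++ (throw takes an expression and the parentheses are purely syntactic), so this looks like a style-checker accommodation rather than a behavior change. A small sketch of the equivalence without pybind11:

#include <iostream>
#include <stdexcept>

int main() {
  try {
    // Equivalent to `throw std::runtime_error("not implemented");` --
    // the extra parentheses around the thrown expression change nothing.
    throw (std::runtime_error("not implemented"));
  } catch (const std::runtime_error &e) {
    std::cout << "caught: " << e.what() << std::endl;
  }
  return 0;
}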
mindspore/ccsrc/vm/vm.cc

@@ -619,7 +619,7 @@ void FinalVM::SyncData(const py::object &arg) {
 BaseRef FinalVM::RunHook(const PrimitivePtr &prim, const VectorRef &args) {
   MS_LOG(DEBUG) << "input for operation:";
   std::size_t args_size = args.size();
-  py::tuple py_args = py::tuple(args_size);
+  auto py_args = py::tuple(args_size);
   size_t i = 0;
   for (auto &arg : args) {
     py_args[i] = BaseRefToPyData(arg);

@@ -643,7 +643,7 @@ BaseRef FinalVM::RunHook(const PrimitivePtr &prim, const VectorRef &args) {
   std::string cell_id = GetValue<std::string>(prim->GetAttr("cell_id"));
   if (_hook_grad.find(cell_id) != _hook_grad.end()) {
     std::size_t hook_args_size = 3;
-    py::tuple hook_args = py::tuple(hook_args_size);
+    auto hook_args = py::tuple(hook_args_size);
     hook_args[0] = cell_id;
     hook_args[1] = py::make_tuple(_hook_grad[cell_id]);
     hook_args[2] = py::make_tuple(py_args[2]);
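Both vm.cc hunks replace py::tuple x = py::tuple(n); with auto x = py::tuple(n);, removing the redundant repetition of the type name on the left-hand side. A standalone sketch of the same idiom using only the standard library (pybind11 is not needed to show the point; the variable names are illustrative):

#include <cstddef>
#include <iostream>
#include <vector>

int main() {
  std::size_t args_size = 3;

  // Before: the type name is spelled twice.
  std::vector<int> py_args_old = std::vector<int>(args_size);

  // After: `auto` deduces the same type from the initializer; behavior is identical,
  // and there is one less place to update if the type ever changes.
  auto py_args_new = std::vector<int>(args_size);

  std::cout << py_args_old.size() << " " << py_args_new.size() << std::endl;
  return 0;
}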