Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
MegEngine 天元
MegEngine
提交
8fe865d8
MegEngine
项目概览
MegEngine 天元
/
MegEngine
1 年多前同步成功
通知
403
Star
4705
Fork
582
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
DevOps
流水线
流水线任务
计划
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
MegEngine
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
DevOps
DevOps
流水线
流水线任务
计划
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
流水线任务
提交
Issue看板
提交
8fe865d8
编写于
1月 13, 2021
作者:
M
Megvii Engine Team
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
feat(imperative/ops): add infer_output_attrs for Reshape
GitOrigin-RevId: 9150d7f84d1f0a4e50f5160213c660fdca904224
上级
267d6127
变更
2
显示空白变更内容
内联
并排
Showing
2 changed files
with
66 additions
and
16 deletions
+66
-16
imperative/src/impl/ops/broadcast.cpp
imperative/src/impl/ops/broadcast.cpp
+66
-3
imperative/src/impl/ops/specializations.cpp
imperative/src/impl/ops/specializations.cpp
+0
-13
未找到文件。
imperative/src/impl/ops/broadcast.cpp
浏览文件 @
8fe865d8
...
@@ -17,7 +17,7 @@
...
@@ -17,7 +17,7 @@
namespace
mgb
{
namespace
mgb
{
namespace
imperative
{
namespace
imperative
{
namespace
{
namespace
broadcast
{
std
::
shared_ptr
<
OpDef
>
make_from_op_node
(
cg
::
OperatorNodeBase
*
node_
)
{
std
::
shared_ptr
<
OpDef
>
make_from_op_node
(
cg
::
OperatorNodeBase
*
node_
)
{
node_
->
cast_final_safe
<
opr
::
Broadcast
>
();
node_
->
cast_final_safe
<
opr
::
Broadcast
>
();
...
@@ -39,7 +39,7 @@ bool valid_broadcast(const TensorShape& src_shape,
...
@@ -39,7 +39,7 @@ bool valid_broadcast(const TensorShape& src_shape,
if
(
src_ndim
>
tar_ndim
)
{
if
(
src_ndim
>
tar_ndim
)
{
return
false
;
return
false
;
}
}
size_t
min_ndim
=
src_ndim
<
tar_ndim
?
src_ndim
:
tar_ndim
;
size_t
min_ndim
=
src_ndim
;
for
(
size_t
i
=
0
;
i
<
min_ndim
;
++
i
)
{
for
(
size_t
i
=
0
;
i
<
min_ndim
;
++
i
)
{
if
(
src_shape
[
src_ndim
-
i
-
1
]
!=
1
&&
if
(
src_shape
[
src_ndim
-
i
-
1
]
!=
1
&&
src_shape
[
src_ndim
-
i
-
1
]
!=
tar_shape
[
tar_ndim
-
i
-
1
])
{
src_shape
[
src_ndim
-
i
-
1
]
!=
tar_shape
[
tar_ndim
-
i
-
1
])
{
...
@@ -87,7 +87,70 @@ OP_TRAIT_REG(Broadcast, Broadcast, opr::Broadcast)
...
@@ -87,7 +87,70 @@ OP_TRAIT_REG(Broadcast, Broadcast, opr::Broadcast)
.
apply_on_var_node
(
apply_on_var_node
)
.
apply_on_var_node
(
apply_on_var_node
)
.
infer_output_attrs_fallible
(
infer_output_attrs_fallible
)
.
infer_output_attrs_fallible
(
infer_output_attrs_fallible
)
.
fallback
();
.
fallback
();
}
// anonymous namespace
}
// broadcast
namespace
reshape
{
auto
apply_on_var_node
(
const
OpDef
&
def
,
const
VarNodeArray
&
inputs
)
{
auto
&&
op
=
static_cast
<
const
Reshape
&>
(
def
);
mgb_assert
(
inputs
.
size
()
==
2
);
return
opr
::
Reshape
::
make
(
inputs
[
0
],
inputs
[
1
],
op
.
param
());
}
// Infer the output layout of Reshape from the (src, target_shape) input
// descriptors.
//
// Returns {descs, validated}. `validated` is false when the result cannot be
// fully determined yet — either the target shape is not known at trace time
// (no host value) or the source shape itself is unknown — so the caller must
// fall back to runtime shape inference.
std::tuple<SmallVector<LogicalTensorDesc>, bool> infer_output_attrs_fallible(
        const OpDef& def,
        const SmallVector<LogicalTensorDesc>& inputs) {
    auto&& op = def.cast_final_safe<Reshape>();
    size_t nr_inp = inputs.size();
    mgb_assert(nr_inp == 2, "Reshape expects 2 inputs; got %lu actually", nr_inp);
    auto&& src = inputs[0];
    auto&& tshp = inputs[1];
    TensorLayout out_layout = src.layout;
    if (tshp.layout.ndim == 0 || tshp.value.empty()) {
        // Target shape not available yet: report an empty layout, unvalidated.
        out_layout.ndim = 0;
        return {{{out_layout, src.comp_node}}, false};
    }
    // The target shape must be a rank-1 integer tensor whose length is the
    // output rank. (Message fixed: it previously said "Broadcast", a
    // copy-paste from the broadcast namespace above.)
    mgb_assert(
            tshp.layout.ndim == 1,
            "target shape of Reshape expects ndim=1; got ndim=%lu actually",
            tshp.layout.ndim);
    size_t target_ndim = tshp.layout.shape[0];
    out_layout.ndim = target_ndim;
    auto* ptr = tshp.value.ptr<dt_int32>();
    for (size_t i = 0; i < target_ndim; ++i) {
        out_layout.shape[i] = ptr[i];
    }
    if (src.layout.ndim == 0) {
        // Source shape unknown: the target layout (possibly still holding a
        // -1 placeholder) cannot be validated here.
        return {{{out_layout, src.comp_node}}, false};
    }
    if (op.axis != opr::Reshape::Param::INVALID_AXIS) {
        // One axis is marked -1 and must be deduced so that the total element
        // count matches the source. (shape[] is unsigned; the == -1 check
        // matches the wrapped value written from the int32 -1 above.)
        mgb_assert(out_layout.shape[op.axis] == -1);
        out_layout.shape[op.axis] = 1;
        mgb_assert(
                src.layout.total_nr_elems() % out_layout.total_nr_elems() == 0,
                "can not reshape from %s to %s",
                src.layout.to_string().c_str(),
                out_layout.to_string().c_str());
        out_layout.shape[op.axis] =
                src.layout.total_nr_elems() / out_layout.total_nr_elems();
    } else {
        // No deduced axis: element counts must match exactly.
        mgb_assert(
                src.layout.total_nr_elems() == out_layout.total_nr_elems(),
                "can not reshape from %s to %s",
                src.layout.to_string().c_str(),
                out_layout.to_string().c_str());
    }
    return {{{out_layout, src.comp_node}}, true};
}
// Register the Reshape op trait: graph lowering via apply_on_var_node and
// host-side shape inference via infer_output_attrs_fallible; all remaining
// hooks take the default fallback implementations.
OP_TRAIT_REG(Reshape, Reshape)
        .apply_on_var_node(apply_on_var_node)
        .infer_output_attrs_fallible(infer_output_attrs_fallible)
        .fallback();
}  // namespace reshape
}
// namespace imperative
}
// namespace imperative
}
// namespace mgb
}
// namespace mgb
...
...
imperative/src/impl/ops/specializations.cpp
浏览文件 @
8fe865d8
...
@@ -548,19 +548,6 @@ OP_TRAIT_REG(Remap, Remap)
...
@@ -548,19 +548,6 @@ OP_TRAIT_REG(Remap, Remap)
.
fallback
();
.
fallback
();
}}
// remap
}}
// remap
namespace { namespace reshape {
// Lower an imperative Reshape OpDef onto the computing graph: two inputs
// (source var, target-shape var) feed an opr::Reshape node.
auto apply_on_var_node(const OpDef& def, const VarNodeArray& inputs) {
    auto&& reshape_def = static_cast<const Reshape&>(def);
    mgb_assert(inputs.size() == 2);
    auto param = reshape_def.param();
    return opr::Reshape::make(inputs[0], inputs[1], param);
}
// Registration: only graph lowering is specialized; everything else falls
// back to the defaults.
OP_TRAIT_REG(Reshape, Reshape)
        .apply_on_var_node(apply_on_var_node)
        .fallback();
}}  // namespace reshape
namespace
{
namespace
{
auto
get_index
(
auto
get_index
(
const
VarNodeArray
&
inputs
,
size_t
vidx
,
const
VarNodeArray
&
inputs
,
size_t
vidx
,
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录