MegEngine 天元 / MegEngine
Commit d540d38b

Authored on Sep 24, 2021 by XindaH
Committed by GitHub on Sep 24, 2021
Revert "feat(imperative): add pixel_shuffle opr"
This reverts commit
d17cd60d
.
上级
d17cd60d
Showing 3 changed files with 0 additions and 160 deletions:

imperative/python/megengine/functional/nn.py                +0 -65
imperative/python/megengine/module/pixel_shuffle.py         +0 -24
imperative/python/test/unit/functional/test_functional.py   +0 -71
imperative/python/megengine/functional/nn.py

@@ -15,7 +15,6 @@ from ..core._imperative_rt.ops import SubgraphBuilder as _SubgraphBuilder
 from ..core.ops import builtin
 from ..core.ops.builtin import (
     BatchNorm,
-    Dimshuffle,
     Elemwise,
     GetVarShape,
     Identity,

@@ -87,7 +86,6 @@ __all__ = [
     "sync_batch_norm",
     "warp_affine",
     "warp_perspective",
-    "pixel_shuffle",
 ]
@@ -1735,69 +1733,6 @@ def pad(
     return output
 
 
-@lru_cache(maxsize=None)
-def _get_layerPixelShuffle(device, dtype, dim_order):
-    @subgraph("LayerPixelShuffle", dtype, device, 3)
-    def layerPixelShuffle(inputs, f, c):
-        inp, shape_0, shape_1 = inputs
-        inp = f(Reshape(), inp, shape_0)
-        inp = f(Dimshuffle(dim_order), inp)
-        oup = f(Reshape(), inp, shape_1)
-        return (oup,), (True,)
-
-    return layerPixelShuffle
-
-
-def pixel_shuffle(inp: Tensor, upscale_factor: int) -> Tensor:
-    """
-    Rearranges elements in a tensor of shape (*, C x r^2, H, W) to a tensor of
-    shape (*, C, H x r, W x r), where r is an upscale factor, where * is zero
-    or more batch dimensions.
-
-    :param inp: input tensor.
-    :param upscale_factor: upscale factor of pixel_shuffle.
-    :return: output tensor.
-    """
-    assert upscale_factor > 0, "upscale_factor should larger than 0"
-    assert inp.ndim >= 3, "the input dimension of pixel_shuffle should be larger than 3"
-    assert (
-        inp.shape[-3] % (upscale_factor ** 2) == 0
-    ), "the -3 dimension should be divided by (upscale_factor ** 2)"
-    _device = inp.device
-    _dtype = inp.dtype
-    shape_ori = inp.shape
-    high_dim = shape_ori[:-3]
-    square = upscale_factor ** 2
-    n = 1
-    for item in high_dim:
-        n *= item
-    shape_0 = (
-        n,
-        int(shape_ori[-3] / square),
-        upscale_factor,
-        upscale_factor,
-        shape_ori[-2],
-        shape_ori[-1],
-    )
-    shape_1 = (
-        *high_dim,
-        shape_ori[-3] / square,
-        shape_ori[-2] * upscale_factor,
-        shape_ori[-1] * upscale_factor,
-    )
-    dim_order = (0, 1, 4, 2, 5, 3)
-
-    layerPixelShuffle = _get_layerPixelShuffle(_device, _dtype, dim_order)
-
-    shape_0 = convert_single_value(shape_0, dtype=inp.dtype, device=inp.device)
-    shape_1 = convert_single_value(shape_1, dtype=inp.dtype, device=inp.device)
-    outvar, *_ = apply(layerPixelShuffle(), inp, shape_0, shape_1)
-
-    return outvar
-
-
 from .quantized import conv_bias_activation  # isort:skip
 from .loss import *  # isort:skip
 from .metric import *  # isort:skip
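For reference, the removed pixel_shuffle builds a Reshape -> Dimshuffle -> Reshape subgraph driven by the shape_0 / dim_order / shape_1 bookkeeping above. Below is a minimal NumPy-only sketch of the same rearrangement; it is an illustration, not MegEngine code, and the name pixel_shuffle_ref is invented for this example.

# NumPy sketch of the rearrangement the deleted subgraph computes.
# The helper name pixel_shuffle_ref is made up for this illustration.
import numpy as np

def pixel_shuffle_ref(inp, upscale_factor):
    r = upscale_factor
    *high_dim, c, h, w = inp.shape          # any leading axes are batch dims
    assert c % (r * r) == 0
    n = 1
    for item in high_dim:                   # flatten leading batch dims, as in shape_0
        n *= item
    shape_0 = (n, c // (r * r), r, r, h, w)   # split channels into (C, r, r)
    dim_order = (0, 1, 4, 2, 5, 3)            # interleave the r blocks with H and W
    shape_1 = (*high_dim, c // (r * r), h * r, w * r)
    return inp.reshape(shape_0).transpose(dim_order).reshape(shape_1)

x = np.arange(2 * 18 * 3 * 3).reshape(2, 18, 3, 3)
assert pixel_shuffle_ref(x, 3).shape == (2, 2, 9, 9)

The transpose call plays the role of the Dimshuffle opr in the deleted subgraph; the two reshapes correspond to shape_0 and shape_1.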
imperative/python/megengine/module/pixel_shuffle.py (deleted, 100644 → 0)

# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
from ..functional.nn import pixel_shuffle
from .module import Module


class PixelShuffle(Module):
    r"""
    Rearranges elements in a tensor of shape (*, C x r^2, H, W) to a tensor of
    shape (*, C, H x r, W x r), where r is an upscale factor, where * is zero
    or more batch dimensions.
    """

    def __init__(self, upscale_factor: int, **kwargs):
        super().__init__(**kwargs)
        self.upscale_factor = upscale_factor

    def forward(self, x):
        return pixel_shuffle(x, self.upscale_factor)
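For context, the removed module is a thin wrapper over the functional form. A hedged usage sketch follows; it only makes sense at the parent commit d17cd60d, where this file still exists, and the import paths are taken directly from the deleted file.

# Usage sketch for the reverted API; assumes the parent commit d17cd60d,
# where megengine.module.pixel_shuffle and F.pixel_shuffle still exist.
import numpy as np
import megengine.functional as F
from megengine import tensor
from megengine.module.pixel_shuffle import PixelShuffle  # file deleted by this commit

x = tensor(np.arange(1 * 9 * 4 * 4).reshape(1, 9, 4, 4))

m = PixelShuffle(upscale_factor=3)           # module form (removed here)
y = m(x)                                     # expected shape: (1, 1, 12, 12)

y2 = F.pixel_shuffle(x, upscale_factor=3)    # functional form (also removed)
np.testing.assert_equal(y.numpy(), y2.numpy())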
imperative/python/test/unit/functional/test_functional.py

@@ -1177,74 +1177,3 @@ def test_pad():
     dst = np.pad(src, ((2, 2), (2, 2)), "reflect")
     res = F.nn.pad(tensor(src), ((2, 2), (2, 2)), "REFLECT")
     np.testing.assert_allclose(res, dst, atol=1e-5)
-
-
-def pixel_shuffle(data, r):
-    high_dim = data.shape[:-3]
-    data = data.reshape(-1, data.shape[-3], data.shape[-2], data.shape[-1])
-    inn, ic, ih, iw = data.shape
-    res = np.zeros((inn, int(ic / (r * r)), ih * r, iw * r))
-    for n in range(inn):
-        for c in range(ic):
-            for h in range(ih):
-                for w in range(iw):
-                    res[
-                        n,
-                        int(c / r / r),
-                        h * r + int((c % (r * r)) / r),
-                        w * r + c % r,
-                    ] = data[n, c, h, w]
-    if len(high_dim) > 0:
-        res = res.reshape((*high_dim, int(ic / r / r), ih * r, iw * r))
-    else:
-        res = res[0]
-    return res
-
-
-def test_pixel_shuffle():
-    # ndim = 3
-    inp = np.arange(16 * 3 * 3).reshape(16, 3, 3)
-    out = F.pixel_shuffle(tensor(inp), upscale_factor=4)
-    golden = pixel_shuffle(inp, 4)
-    np.testing.assert_equal(out.numpy(), golden)
-
-    # ndim = 4
-    inp = np.arange(3 * 18 * 3 * 3).reshape(3, 18, 3, 3)
-    out = F.pixel_shuffle(tensor(inp), upscale_factor=3)
-    golden = pixel_shuffle(inp, 3)
-    np.testing.assert_equal(out.numpy(), golden)
-
-    # ndim = 5
-    inp = np.arange(5 * 3 * 20 * 3 * 4).reshape(5, 3, 20, 3, 4)
-    out = F.pixel_shuffle(tensor(inp), upscale_factor=2)
-    golden = pixel_shuffle(inp, 2)
-    np.testing.assert_equal(out.numpy(), golden)
-
-    # ndim = 6
-    inp = np.arange(6 * 5 * 3 * 25 * 3 * 4).reshape(6, 5, 3, 25, 3, 4)
-    out = F.pixel_shuffle(tensor(inp), upscale_factor=5)
-    golden = pixel_shuffle(inp, 5)
-    np.testing.assert_equal(out.numpy(), golden)
-
-    # ndim = 7
-    inp = np.arange(2 * 3 * 5 * 3 * 20 * 3 * 4).reshape(2, 3, 5, 3, 20, 3, 4)
-    out = F.pixel_shuffle(tensor(inp), upscale_factor=2)
-    golden = pixel_shuffle(inp, 2)
-    np.testing.assert_equal(out.numpy(), golden)
-
-
-@pytest.mark.parametrize("is_symbolic", [False, True])
-def test_pixel_shuffle_symbolic(is_symbolic):
-    def fn(inp, upscale_factor):
-        return F.pixel_shuffle(inp, upscale_factor=upscale_factor)
-
-    if is_symbolic is not None:
-        fn = jit.trace(symbolic=is_symbolic)(fn)
-
-    inp = tensor(np.arange(3 * 4 * 5 * 5).reshape(3, 4, 5, 5))
-    golden = pixel_shuffle(inp, 2)
-    for _ in range(3):
-        out = fn(inp, 2)
-        np.testing.assert_equal(out.numpy(), golden)
-        if is_symbolic is None:
-            break
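The removed loop-based reference writes each input element to res[n, c // r**2, h*r + (c % r**2) // r, w*r + c % r], which is exactly the index mapping produced by the reshape/transpose route used in the reverted functional. A small standalone NumPy check of that equivalence (illustration only, not part of the deleted test):

# Standalone NumPy check: the index formula of the loop-based reference matches
# the reshape/transpose formulation for a small 4-D input.
import numpy as np

r = 2
x = np.arange(1 * 8 * 3 * 3).reshape(1, 8, 3, 3)
n, c, h, w = x.shape

# reshape/transpose route, same dim_order (0, 1, 4, 2, 5, 3) as the reverted opr
vec = (
    x.reshape(n, c // (r * r), r, r, h, w)
    .transpose(0, 1, 4, 2, 5, 3)
    .reshape(n, c // (r * r), h * r, w * r)
)

# element-wise index formula from the removed reference loop
loop = np.zeros_like(vec)
for ci in range(c):
    for hi in range(h):
        for wi in range(w):
            loop[:, ci // (r * r), hi * r + (ci % (r * r)) // r, wi * r + ci % r] = x[:, ci, hi, wi]

np.testing.assert_equal(vec, loop)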