s920243400/PaddleDetection (forked from PaddlePaddle / PaddleDetection)

Commit 197b5a9b (unverified)
Authored Sep 09, 2021 by shangliang Xu; committed via GitHub on Sep 09, 2021
[dev] add bifpn in necks (#4148)
Parent: f2ab17c0
Showing 2 changed files with 304 additions and 0 deletions (+304, -0)
ppdet/modeling/necks/__init__.py   (+2, -0)
ppdet/modeling/necks/bifpn.py      (+302, -0)
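The new module implements the bidirectional feature pyramid network (BiFPN) neck from EfficientDet (https://arxiv.org/abs/1911.09070, cited in the class docstring below) and registers it with PaddleDetection's config system; the package __init__.py is updated to import and re-export it.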
ppdet/modeling/necks/__init__.py @ 197b5a9b
@@ -18,6 +18,7 @@ from . import hrfpn
 from . import ttf_fpn
 from . import centernet_fpn
 from . import pan
+from . import bifpn

 from .fpn import *
 from .yolo_fpn import *
@@ -26,3 +27,4 @@ from .ttf_fpn import *
 from .centernet_fpn import *
 from .blazeface_fpn import *
 from .pan import *
+from .bifpn import *
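Both added lines follow the package's existing pattern: "from . import bifpn" executes the new module so that its @register decorator runs at import time, and "from .bifpn import *" re-exports the names listed in the module's __all__. A quick import check (a sketch assuming an installed ppdet, not part of the commit):

    from ppdet.modeling.necks import BiFPN  # exposed via __all__ = ['BiFPN']
    print(BiFPN.__name__)  # prints: BiFPN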
ppdet/modeling/necks/bifpn.py (new file, mode 100644) @ 197b5a9b
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from paddle import ParamAttr
from paddle.nn.initializer import Constant

from ppdet.core.workspace import register, serializable
from ppdet.modeling.layers import ConvNormLayer
from ..shape_spec import ShapeSpec

__all__ = ['BiFPN']
class SeparableConvLayer(nn.Layer):
    def __init__(self,
                 in_channels,
                 out_channels=None,
                 kernel_size=3,
                 norm_type='bn',
                 norm_groups=32,
                 act='swish'):
        super(SeparableConvLayer, self).__init__()
        assert norm_type in ['bn', 'sync_bn', 'gn', None]
        assert act in ['swish', 'relu', None]

        self.in_channels = in_channels
        if out_channels is None:
            self.out_channels = self.in_channels
        self.norm_type = norm_type
        self.norm_groups = norm_groups

        self.depthwise_conv = nn.Conv2D(
            in_channels,
            in_channels,
            kernel_size,
            padding=kernel_size // 2,
            groups=in_channels,
            bias_attr=False)
        self.pointwise_conv = nn.Conv2D(in_channels, self.out_channels, 1)

        # norm type
        if self.norm_type == 'bn':
            self.norm = nn.BatchNorm2D(self.out_channels)
        elif self.norm_type == 'sync_bn':
            self.norm = nn.SyncBatchNorm(self.out_channels)
        elif self.norm_type == 'gn':
            self.norm = nn.GroupNorm(
                num_groups=self.norm_groups, num_channels=self.out_channels)

        # activation
        if act == 'swish':
            self.act = nn.Swish()
        elif act == 'relu':
            self.act = nn.ReLU()

    def forward(self, x):
        if self.act is not None:
            x = self.act(x)
        out = self.depthwise_conv(x)
        out = self.pointwise_conv(out)
        if self.norm_type is not None:
            out = self.norm(out)
        return out
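SeparableConvLayer factors a kernel_size x kernel_size convolution into a depthwise convolution (groups=in_channels, no bias) followed by a 1x1 pointwise convolution, applying the activation before the convolutions and the normalization after them. A minimal shape sanity check (a sketch, not part of the commit):

    import paddle

    layer = SeparableConvLayer(64)     # out_channels defaults to in_channels
    x = paddle.rand([2, 64, 32, 32])   # NCHW input
    y = layer(x)                       # padding=kernel_size//2 keeps H and W
    print(y.shape)                     # [2, 64, 32, 32]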
class BiFPNCell(nn.Layer):
    def __init__(self,
                 channels=256,
                 num_levels=5,
                 eps=1e-5,
                 use_weighted_fusion=True,
                 kernel_size=3,
                 norm_type='bn',
                 norm_groups=32,
                 act='swish'):
        super(BiFPNCell, self).__init__()
        self.channels = channels
        self.num_levels = num_levels
        self.eps = eps
        self.use_weighted_fusion = use_weighted_fusion

        # up
        self.conv_up = nn.LayerList([
            SeparableConvLayer(
                self.channels,
                kernel_size=kernel_size,
                norm_type=norm_type,
                norm_groups=norm_groups,
                act=act) for _ in range(self.num_levels - 1)
        ])
        # down
        self.conv_down = nn.LayerList([
            SeparableConvLayer(
                self.channels,
                kernel_size=kernel_size,
                norm_type=norm_type,
                norm_groups=norm_groups,
                act=act) for _ in range(self.num_levels - 1)
        ])

        if self.use_weighted_fusion:
            self.up_weights = self.create_parameter(
                shape=[self.num_levels - 1, 2],
                attr=ParamAttr(initializer=Constant(1.)))
            self.down_weights = self.create_parameter(
                shape=[self.num_levels - 1, 3],
                attr=ParamAttr(initializer=Constant(1.)))

    def _feature_fusion_cell(self,
                             conv_layer,
                             lateral_feat,
                             sampling_feat,
                             route_feat=None,
                             weights=None):
        if self.use_weighted_fusion:
            weights = F.relu(weights)
            weights = weights / (weights.sum() + self.eps)
            if route_feat is not None:
                out_feat = weights[0] * lateral_feat + \
                           weights[1] * sampling_feat + \
                           weights[2] * route_feat
            else:
                out_feat = weights[0] * lateral_feat + \
                           weights[1] * sampling_feat
        else:
            if route_feat is not None:
                out_feat = lateral_feat + sampling_feat + route_feat
            else:
                out_feat = lateral_feat + sampling_feat

        out_feat = conv_layer(out_feat)
        return out_feat

    def forward(self, feats):
        # feats: [P3 - P7]
        lateral_feats = []

        # up
        up_feature = feats[-1]
        for i, feature in enumerate(feats[::-1]):
            if i == 0:
                lateral_feats.append(feature)
            else:
                shape = paddle.shape(feature)
                up_feature = F.interpolate(
                    up_feature, size=[shape[2], shape[3]])
                lateral_feature = self._feature_fusion_cell(
                    self.conv_up[i - 1],
                    feature,
                    up_feature,
                    weights=self.up_weights[i - 1]
                    if self.use_weighted_fusion else None)
                lateral_feats.append(lateral_feature)
                up_feature = lateral_feature

        out_feats = []
        # down
        down_feature = lateral_feats[-1]
        for i, (lateral_feature,
                route_feature) in enumerate(zip(lateral_feats[::-1], feats)):
            if i == 0:
                out_feats.append(lateral_feature)
            else:
                down_feature = F.max_pool2d(down_feature, 3, 2, 1)
                if i == len(feats) - 1:
                    route_feature = None
                    weights = self.down_weights[
                        i - 1][:2] if self.use_weighted_fusion else None
                else:
                    weights = self.down_weights[
                        i - 1] if self.use_weighted_fusion else None
                out_feature = self._feature_fusion_cell(
                    self.conv_down[i - 1],
                    lateral_feature,
                    down_feature,
                    route_feature,
                    weights=weights)
                out_feats.append(out_feature)
                down_feature = out_feature

        return out_feats
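_feature_fusion_cell implements the fast normalized fusion from the EfficientDet paper (linked in the docstring below): the learnable per-input scalars are clamped by ReLU, normalized to sum to (nearly) one, used to blend the input features I_i, and the blend is refined by a separable convolution. In LaTeX, with the code's eps:

    O = \mathrm{SepConv}\Big( \sum_i \frac{\max(w_i, 0)}{\epsilon + \sum_j \max(w_j, 0)} \, I_i \Big), \qquad \epsilon = 10^{-5}

The top-down pass fuses two inputs per level (lateral and upsampled), while the bottom-up pass fuses three (lateral, downsampled, and the original route feature), hence up_weights has shape [num_levels - 1, 2] and down_weights has shape [num_levels - 1, 3]; the last bottom-up level has no route feature and slices only the first two of its three down weights.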
@register
@serializable
class BiFPN(nn.Layer):
    """
    Bidirectional Feature Pyramid Network, see https://arxiv.org/abs/1911.09070

    Args:
        in_channels (list[int]): input channels of each level which can be
            derived from the output shape of backbone by from_config.
        out_channel (int): output channel of each level.
        num_extra_levels (int): the number of extra stages added to the last
            level. default: 2
        fpn_strides (List): The stride of each level.
        num_stacks (int): the number of stacks for BiFPN, default: 1.
        use_weighted_fusion (bool): use weighted feature fusion in BiFPN,
            default: True.
        norm_type (string|None): the normalization type in BiFPN module. If
            norm_type is None, norm will not be used after conv and if
            norm_type is string, bn, gn, sync_bn are available. default: bn.
        norm_groups (int): if you use gn, set this param.
        act (string|None): the activation function of BiFPN.
    """

    def __init__(self,
                 in_channels=(512, 1024, 2048),
                 out_channel=256,
                 num_extra_levels=2,
                 fpn_strides=[8, 16, 32, 64, 128],
                 num_stacks=1,
                 use_weighted_fusion=True,
                 norm_type='bn',
                 norm_groups=32,
                 act='swish'):
        super(BiFPN, self).__init__()
        assert num_stacks > 0, "The number of stacks of BiFPN is at least 1."
        assert norm_type in ['bn', 'sync_bn', 'gn', None]
        assert act in ['swish', 'relu', None]
        assert num_extra_levels >= 0, \
            "The `num_extra_levels` must be non negative(>=0)."

        self.in_channels = in_channels
        self.out_channel = out_channel
        self.num_extra_levels = num_extra_levels
        self.num_stacks = num_stacks
        self.use_weighted_fusion = use_weighted_fusion
        self.norm_type = norm_type
        self.norm_groups = norm_groups
        self.act = act
        self.num_levels = len(self.in_channels) + self.num_extra_levels
        if len(fpn_strides) != self.num_levels:
            for i in range(self.num_extra_levels):
                fpn_strides += [fpn_strides[-1] * 2]
        self.fpn_strides = fpn_strides

        self.lateral_convs = nn.LayerList()
        for in_c in in_channels:
            self.lateral_convs.append(
                ConvNormLayer(in_c, self.out_channel, 1, 1))
        if self.num_extra_levels > 0:
            self.extra_convs = nn.LayerList()
            for i in range(self.num_extra_levels):
                if i == 0:
                    self.extra_convs.append(
                        ConvNormLayer(
                            self.in_channels[-1], self.out_channel, 3, 2))
                else:
                    self.extra_convs.append(nn.MaxPool2D(3, 2, 1))

        self.bifpn_cells = nn.LayerList()
        for i in range(self.num_stacks):
            self.bifpn_cells.append(
                BiFPNCell(
                    self.out_channel,
                    self.num_levels,
                    use_weighted_fusion=self.use_weighted_fusion,
                    norm_type=self.norm_type,
                    norm_groups=self.norm_groups,
                    act=self.act))

    @classmethod
    def from_config(cls, cfg, input_shape):
        return {
            'in_channels': [i.channels for i in input_shape],
            'fpn_strides': [i.stride for i in input_shape]
        }

    @property
    def out_shape(self):
        return [
            ShapeSpec(
                channels=self.out_channel, stride=s) for s in self.fpn_strides
        ]

    def forward(self, feats):
        assert len(feats) == len(self.in_channels)
        fpn_feats = []
        for conv_layer, feature in zip(self.lateral_convs, feats):
            fpn_feats.append(conv_layer(feature))
        if self.num_extra_levels > 0:
            feat = feats[-1]
            for conv_layer in self.extra_convs:
                feat = conv_layer(feat)
                fpn_feats.append(feat)

        for bifpn_cell in self.bifpn_cells:
            fpn_feats = bifpn_cell(fpn_feats)
        return fpn_feats
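A minimal end-to-end sketch of the new neck, with assumed backbone shapes (hypothetical values, not part of the commit; requires an installed ppdet):

    import paddle
    from ppdet.modeling.necks import BiFPN
    from ppdet.modeling.shape_spec import ShapeSpec

    # from_config derives in_channels and fpn_strides from the backbone's
    # output ShapeSpecs, so configs need not hard-code them.
    backbone_shapes = [
        ShapeSpec(channels=512, stride=8),
        ShapeSpec(channels=1024, stride=16),
        ShapeSpec(channels=2048, stride=32),
    ]
    print(BiFPN.from_config(None, backbone_shapes))
    # {'in_channels': [512, 1024, 2048], 'fpn_strides': [8, 16, 32]}

    # With num_extra_levels=2, strides [8, 16, 32] are extended internally to
    # [8, 16, 32, 64, 128]; P6 comes from a stride-2 conv on the last input
    # level and P7 from a max-pool on P6.
    fpn = BiFPN(in_channels=(512, 1024, 2048), out_channel=256,
                num_extra_levels=2, fpn_strides=[8, 16, 32])
    c3 = paddle.rand([1, 512, 80, 80])    # stride 8 of a 640x640 image
    c4 = paddle.rand([1, 1024, 40, 40])   # stride 16
    c5 = paddle.rand([1, 2048, 20, 20])   # stride 32
    outs = fpn([c3, c4, c5])
    print([list(o.shape) for o in outs])  # five levels, 256 channels each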