s920243400 / PaddleDetection (forked from PaddlePaddle / PaddleDetection)

Commit ea8e8ebd (unverified)
Authored by wangxinxin08 on Nov 27, 2020; committed via GitHub on Nov 27, 2020
refine code to avoid some problem (#1772)
Parent: aa16d88a
Showing 8 changed files with 52 additions and 65 deletions (+52, -65)
configs/_base_/datasets/coco.yml          +0   -1
configs/_base_/readers/yolov3_reader.yml  +1   -0
ppdet/data/reader.py                      +24  -13
ppdet/data/source/coco.py                 +2   -11
ppdet/data/source/dataset.py              +5   -6
ppdet/modeling/backbone/darknet.py        +2   -2
ppdet/modeling/ops.py                     +17  -31
tools/train.py                            +1   -1
configs/_base_/datasets/coco.yml
@@ -6,7 +6,6 @@ TrainDataset:
     image_dir: train2017
     anno_path: annotations/instances_train2017.json
     dataset_dir: dataset/coco
-    mixup_epoch: 250

 EvalDataset:
   !COCODataSet
configs/_base_/readers/yolov3_reader.yml
@@ -21,6 +21,7 @@ TrainReader:
   batch_size: 8
   shuffle: true
   drop_last: true
+  mixup_epoch: 250

 EvalReader:
ppdet/data/reader.py
@@ -30,6 +30,7 @@ class Compose(object):
                 if hasattr(op_cls, 'num_classes'):
                     op_cls.num_classes = num_classes
+                # TODO: should be refined in the future
                 if op_cls in [transform.Gt2YoloTargetOp, transform.Gt2YoloTarget]:
@@ -89,7 +90,8 @@ class BaseDataLoader(object):
                  drop_last=False,
                  drop_empty=True,
                  num_classes=81,
-                 with_background=True):
+                 with_background=True,
+                 **kwargs):
         # out fields
         self._fields = inputs_def['fields'] if inputs_def else None
         # sample transform
@@ -107,6 +109,7 @@ class BaseDataLoader(object):
         self.shuffle = shuffle
         self.drop_last = drop_last
         self.with_background = with_background
+        self.kwargs = kwargs

     def __call__(self,
                  dataset,
@@ -120,6 +123,8 @@ class BaseDataLoader(object):
         # get data
         self._dataset.set_out(self._sample_transforms, copy.deepcopy(self._fields))
+        # set kwargs
+        self._dataset.set_kwargs(**self.kwargs)

         # batch sampler
         if batch_sampler is None:
             self._batch_sampler = DistributedBatchSampler(
@@ -154,10 +159,12 @@ class TrainReader(BaseDataLoader):
                  drop_last=True,
                  drop_empty=True,
                  num_classes=81,
-                 with_background=True):
-        super(TrainReader, self).__init__(
-            inputs_def, sample_transforms, batch_transforms, batch_size,
-            shuffle, drop_last, drop_empty, num_classes, with_background)
+                 with_background=True,
+                 **kwargs):
+        super(TrainReader, self).__init__(
+            inputs_def, sample_transforms, batch_transforms, batch_size,
+            shuffle, drop_last, drop_empty, num_classes, with_background,
+            **kwargs)


 @register
@@ -171,10 +178,12 @@ class EvalReader(BaseDataLoader):
                  drop_last=True,
                  drop_empty=True,
                  num_classes=81,
-                 with_background=True):
-        super(EvalReader, self).__init__(
-            inputs_def, sample_transforms, batch_transforms, batch_size,
-            shuffle, drop_last, drop_empty, num_classes, with_background)
+                 with_background=True,
+                 **kwargs):
+        super(EvalReader, self).__init__(
+            inputs_def, sample_transforms, batch_transforms, batch_size,
+            shuffle, drop_last, drop_empty, num_classes, with_background,
+            **kwargs)


 @register
@@ -188,7 +197,9 @@ class TestReader(BaseDataLoader):
                  drop_last=False,
                  drop_empty=True,
                  num_classes=81,
-                 with_background=True):
-        super(TestReader, self).__init__(
-            inputs_def, sample_transforms, batch_transforms, batch_size,
-            shuffle, drop_last, drop_empty, num_classes, with_background)
+                 with_background=True,
+                 **kwargs):
+        super(TestReader, self).__init__(
+            inputs_def, sample_transforms, batch_transforms, batch_size,
+            shuffle, drop_last, drop_empty, num_classes, with_background,
+            **kwargs)
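Taken together, the reader.py changes route any extra reader-level option (for example the mixup_epoch: 250 entry moved into TrainReader in yolov3_reader.yml above) through BaseDataLoader as **kwargs and hand it to the dataset via set_kwargs. A minimal sketch of that flow, using hypothetical toy classes rather than the real ppdet ones:

# Toy stand-ins (not ppdet code) illustrating the new kwargs path:
# reader options flow BaseDataLoader.__init__(**kwargs) -> self.kwargs
# -> dataset.set_kwargs(**self.kwargs).
class ToyDataset:
    def set_kwargs(self, **kwargs):
        # same defaults as DetDataset.set_kwargs in the diff below
        self.mixup_epoch = kwargs.get('mixup_epoch', -1)
        self.cutmix_epoch = kwargs.get('cutmix_epoch', -1)
        self.mosaic_epoch = kwargs.get('mosaic_epoch', -1)


class ToyReader:
    def __init__(self, **kwargs):
        # e.g. {'mixup_epoch': 250} parsed from the reader config
        self.kwargs = kwargs

    def __call__(self, dataset):
        dataset.set_kwargs(**self.kwargs)
        return dataset


dataset = ToyReader(mixup_epoch=250)(ToyDataset())
print(dataset.mixup_epoch, dataset.cutmix_epoch)  # -> 250 -1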
ppdet/data/source/coco.py
@@ -28,18 +28,9 @@ class COCODataSet(DetDataset):
                  dataset_dir=None,
                  image_dir=None,
                  anno_path=None,
-                 mixup_epoch=-1,
-                 cutmix_epoch=-1,
-                 mosaic_epoch=-1,
                  sample_num=-1):
-        super(COCODataSet, self).__init__(
-            dataset_dir,
-            image_dir,
-            anno_path,
-            sample_num,
-            mixup_epoch=mixup_epoch,
-            cutmix_epoch=cutmix_epoch,
-            mosaic_epoch=mosaic_epoch)
+        super(COCODataSet, self).__init__(dataset_dir, image_dir, anno_path,
+                                          sample_num)
         self.load_image_only = False
         self.load_semantic = False
ppdet/data/source/dataset.py
@@ -33,9 +33,6 @@ class DetDataset(Dataset):
                  anno_path=None,
                  sample_num=-1,
                  use_default_label=None,
-                 mixup_epoch=-1,
-                 cutmix_epoch=-1,
-                 mosaic_epoch=-1,
                  **kwargs):
         super(DetDataset, self).__init__()
         self.dataset_dir = dataset_dir if dataset_dir is not None else ''
@@ -44,9 +41,6 @@ class DetDataset(Dataset):
         self.sample_num = sample_num
         self.use_default_label = use_default_label
         self.epoch = 0
-        self.mixup_epoch = mixup_epoch
-        self.cutmix_epoch = cutmix_epoch
-        self.mosaic_epoch = mosaic_epoch

     def __len__(self, ):
         return len(self.roidbs)
@@ -77,6 +71,11 @@ class DetDataset(Dataset):
             out[k] = roidb[k]
         return out.values()

+    def set_kwargs(self, **kwargs):
+        self.mixup_epoch = kwargs.get('mixup_epoch', -1)
+        self.cutmix_epoch = kwargs.get('cutmix_epoch', -1)
+        self.mosaic_epoch = kwargs.get('mosaic_epoch', -1)
+
     def set_out(self, sample_transform, fields):
         self.transform = sample_transform
         self.fields = fields
ppdet/modeling/backbone/darknet.py
@@ -4,7 +4,7 @@ import paddle.nn.functional as F
 from paddle import ParamAttr
 from paddle.regularizer import L2Decay
 from ppdet.core.workspace import register, serializable
-from ppdet.modeling.ops import BatchNorm
+from ppdet.modeling.ops import batch_norm

 __all__ = ['DarkNet', 'ConvBNLayer']
@@ -31,7 +31,7 @@ class ConvBNLayer(nn.Layer):
             groups=groups,
             weight_attr=ParamAttr(name=name + '.conv.weights'),
             bias_attr=False)
-        self.batch_norm = BatchNorm(ch_out, norm_type=norm_type, name=name)
+        self.batch_norm = batch_norm(ch_out, norm_type=norm_type, name=name)
         self.act = act

     def forward(self, inputs):
ppdet/modeling/ops.py
@@ -29,40 +29,26 @@ import numpy as np
 from functools import reduce

 __all__ = [
-    'roi_pool',
-    'roi_align',
-    'prior_box',
-    'anchor_generator',
-    'generate_proposals',
-    'iou_similarity',
-    'box_coder',
-    'yolo_box',
-    'multiclass_nms',
-    'distribute_fpn_proposals',
-    'collect_fpn_proposals',
-    'matrix_nms',
-    'BatchNorm',
+    'roi_pool', 'roi_align', 'prior_box', 'anchor_generator',
+    'generate_proposals', 'iou_similarity', 'box_coder', 'yolo_box',
+    'multiclass_nms', 'distribute_fpn_proposals', 'collect_fpn_proposals',
+    'matrix_nms', 'batch_norm'
 ]


-class BatchNorm(nn.Layer):
-    def __init__(self, ch, norm_type='bn', name=None):
-        super(BatchNorm, self).__init__()
-        bn_name = name + '.bn'
-        if norm_type == 'sync_bn':
-            batch_norm = nn.SyncBatchNorm
-        else:
-            batch_norm = nn.BatchNorm2D
-
-        self.batch_norm = batch_norm(
-            ch,
-            weight_attr=ParamAttr(
-                name=bn_name + '.scale', regularizer=L2Decay(0.)),
-            bias_attr=ParamAttr(
-                name=bn_name + '.offset', regularizer=L2Decay(0.)))
-
-    def forward(self, x):
-        return self.batch_norm(x)
+def batch_norm(ch, norm_type='bn', name=None):
+    bn_name = name + '.bn'
+    if norm_type == 'sync_bn':
+        batch_norm = nn.SyncBatchNorm
+    else:
+        batch_norm = nn.BatchNorm2D
+
+    return batch_norm(
+        ch,
+        weight_attr=ParamAttr(
+            name=bn_name + '.scale', regularizer=L2Decay(0.)),
+        bias_attr=ParamAttr(
+            name=bn_name + '.offset', regularizer=L2Decay(0.)))


 def roi_pool(input,
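The replacement here is structural: the removed BatchNorm layer only wrapped the construction of a paddle norm layer, so a plain factory function returns an equivalent object. A small usage sketch, assuming paddle 2.0+ is installed; the channel count and the name 'stage.0' are made-up example values:

import paddle
from paddle import nn, ParamAttr
from paddle.regularizer import L2Decay


def batch_norm(ch, norm_type='bn', name=None):
    # Same construction as the new ppdet.modeling.ops.batch_norm above (condensed).
    bn_name = name + '.bn'
    cls = nn.SyncBatchNorm if norm_type == 'sync_bn' else nn.BatchNorm2D
    return cls(
        ch,
        weight_attr=ParamAttr(name=bn_name + '.scale', regularizer=L2Decay(0.)),
        bias_attr=ParamAttr(name=bn_name + '.offset', regularizer=L2Decay(0.)))


bn = batch_norm(32, norm_type='bn', name='stage.0')  # an nn.BatchNorm2D instance
x = paddle.rand([1, 32, 8, 8])
print(bn(x).shape)  # [1, 32, 8, 8]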
tools/train.py
@@ -156,7 +156,7 @@ def run(FLAGS, cfg, place):
     start_epoch = optimizer.state_dict()['LR_Scheduler']['last_epoch']
     for epoch_id in range(int(cfg.epoch)):
         cur_eid = epoch_id + start_epoch
-        train_loader.dataset.epoch = epoch_id
+        train_loader.dataset.epoch = cur_eid
         for iter_id, data in enumerate(train_loader):
             start_time = end_time
             end_time = time.time()
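The one-line change in tools/train.py matters when training resumes from a checkpoint: the dataset's epoch attribute is what epoch-scheduled augmentation (such as mixup_epoch) is compared against, so it should track the absolute epoch id rather than the restarted loop index. A toy illustration with hypothetical numbers:

# Resuming at epoch 3: the loop index restarts at 0, but the dataset should
# see the absolute epoch id so augmentation scheduled against mixup_epoch
# turns on/off at the intended point in training.
start_epoch = 3
for epoch_id in range(5):
    cur_eid = epoch_id + start_epoch
    # before the patch: dataset.epoch = epoch_id  (restarts from 0)
    # after the patch:  dataset.epoch = cur_eid   (continues from 3)
    print(epoch_id, cur_eid)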