Commit 89d14782 (unverified)
Authored on Jul 07, 2021 by Manuel Garcia; committed via GitHub on Jul 07, 2021

Replace deprecated methods 'warn' and 'getargspec' (#3600)

Parent: ff02c045
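The change is mechanical across all 29 files: every deprecated 'logger.warn(...)' call becomes 'logger.warning(...)', and every 'inspect.getargspec(...)' call becomes 'inspect.getfullargspec(...)'. A minimal sketch of the two replacements (illustrative only, not code from the repository; the 'transform' function below is made up):

import inspect
import logging

logger = logging.getLogger(__name__)


def transform(image, prob=0.5, replace=128):
    return image


# Deprecated spellings: Logger.warn is an alias that emits a DeprecationWarning,
# and inspect.getargspec() was removed in Python 3.11.
# The supported replacements used throughout this commit:
logger.warning("use Logger.warning instead of the deprecated Logger.warn")
argspec = inspect.getfullargspec(transform)
print(argspec.args)   # ['image', 'prob', 'replace']
print(argspec.varkw)  # None (the old ArgSpec.keywords field is named varkw here)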
Changes: 29 changed files with 82 additions and 79 deletions (+82, -79)
dataset/dota_coco/dota_to_coco.py                    +1 -1
ppdet/core/config/schema.py                          +2 -2
ppdet/core/config/yaml_helpers.py                    +1 -1
ppdet/data/reader.py                                 +8 -8
ppdet/data/source/category.py                        +1 -1
ppdet/data/source/mot.py                             +4 -4
ppdet/data/source/voc.py                             +6 -5
ppdet/data/source/widerface.py                       +3 -3
ppdet/data/transform/operators.py                    +3 -3
ppdet/engine/callbacks.py                            +1 -1
ppdet/engine/tracker.py                              +1 -1
ppdet/engine/trainer.py                              +1 -1
static/ppdet/core/config/schema.py                   +2 -2
static/ppdet/core/config/yaml_helpers.py             +1 -1
static/ppdet/core/workspace.py                       +2 -2
static/ppdet/data/parallel_map.py                    +4 -4
static/ppdet/data/reader.py                          +3 -2
static/ppdet/data/shared_queue/queue.py              +2 -2
static/ppdet/data/shared_queue/sharedmemory.py       +2 -2
static/ppdet/data/source/coco.py                     +8 -8
static/ppdet/data/source/voc.py                      +6 -5
static/ppdet/data/source/widerface.py                +3 -3
static/ppdet/data/transform/autoaugment_utils.py     +6 -6
static/ppdet/data/transform/operators.py             +2 -2
static/ppdet/modeling/losses/yolo_loss.py            +1 -1
static/ppdet/utils/download.py                       +1 -1
static/slim/sensitive/sensitive.py                   +1 -1
static/tools/anchor_cluster.py                       +3 -3
tools/anchor_cluster.py                              +3 -3
dataset/dota_coco/dota_to_coco.py
@@ -90,7 +90,7 @@ def dota_2_coco(image_dir,
         # annotations
         anno_txt_path = osp.join(txt_dir, osp.splitext(basename)[0] + '.txt')
         if not osp.exists(anno_txt_path):
-            logger.warn('path of {} not exists'.format(anno_txt_path))
+            logger.warning('path of {} not exists'.format(anno_txt_path))
         for line in open(anno_txt_path):
             line = line.strip()
ppdet/core/config/schema.py
@@ -185,12 +185,12 @@ def extract_schema(cls):
         annotations = argspec.annotations
         has_kwargs = argspec.varkw is not None
     else:
-        argspec = inspect.getargspec(ctor)
+        argspec = inspect.getfullargspec(ctor)
         # python 2 type hinting workaround, see pep-3107
         # however, since `typeguard` does not support python 2, type checking
         # is still python 3 only for now
         annotations = getattr(ctor, '__annotations__', {})
-        has_kwargs = argspec.keywords is not None
+        has_kwargs = argspec.varkw is not None
 
     names = [arg for arg in argspec.args if arg != 'self']
     defaults = argspec.defaults
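Note the second change in this hunk: besides swapping the call, the 'has_kwargs' check moves from 'argspec.keywords' to 'argspec.varkw', because the ArgSpec named tuple returned by getargspec exposed the **kwargs name as 'keywords', while the FullArgSpec returned by getfullargspec calls it 'varkw'. A small sketch of that difference (hypothetical 'ctor' function, not repository code):

import inspect


def ctor(self, name, scale=1.0, **kwargs):
    pass


spec = inspect.getfullargspec(ctor)
# FullArgSpec names the **kwargs slot 'varkw'; the old ArgSpec called it 'keywords'.
has_kwargs = spec.varkw is not None
names = [arg for arg in spec.args if arg != 'self']
print(has_kwargs)  # True
print(names)       # ['name', 'scale']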
ppdet/core/config/yaml_helpers.py
@@ -52,7 +52,7 @@ def _make_python_representer(cls):
     if hasattr(inspect, 'getfullargspec'):
         argspec = inspect.getfullargspec(cls)
     else:
-        argspec = inspect.getargspec(cls.__init__)
+        argspec = inspect.getfullargspec(cls.__init__)
     argnames = [arg for arg in argspec.args if arg != 'self']
 
     def python_representer(dumper, obj):
ppdet/data/reader.py
@@ -55,9 +55,9 @@ class Compose(object):
                 data = f(data)
             except Exception as e:
                 stack_info = traceback.format_exc()
-                logger.warn("fail to map sample transform [{}] "
-                            "with error: {} and stack:\n{}".format(
-                                f, e, str(stack_info)))
+                logger.warning("fail to map sample transform [{}] "
+                               "with error: {} and stack:\n{}".format(
+                                   f, e, str(stack_info)))
                 raise e
         return data
@@ -74,9 +74,9 @@ class BatchCompose(Compose):
                 data = f(data)
             except Exception as e:
                 stack_info = traceback.format_exc()
-                logger.warn("fail to map batch transform [{}] "
-                            "with error: {} and stack:\n{}".format(
-                                f, e, str(stack_info)))
+                logger.warning("fail to map batch transform [{}] "
+                               "with error: {} and stack:\n{}".format(
+                                   f, e, str(stack_info)))
                 raise e
         # remove keys which is not needed by model
@@ -184,8 +184,8 @@ class BaseDataLoader(object):
         if use_shared_memory:
             shm_size = _get_shared_memory_size_in_M()
             if shm_size is not None and shm_size < 1024.:
-                logger.warn("Shared memory size is less than 1G, "
-                            "disable shared_memory in DataLoader")
+                logger.warning("Shared memory size is less than 1G, "
+                               "disable shared_memory in DataLoader")
                 use_shared_memory = False
 
         self.dataloader = DataLoader(
ppdet/data/source/category.py
@@ -77,7 +77,7 @@ def get_categories(metric_type, anno_file=None, arch=None):
     elif metric_type.lower() == 'oid':
         if anno_file and os.path.isfile(anno_file):
-            logger.warn("only default categories support for OID19")
+            logger.warning("only default categories support for OID19")
         return _oid19_category()
 
     elif metric_type.lower() == 'widerface':
ppdet/data/source/mot.py
@@ -175,12 +175,12 @@ class MOTDataSet(DetDataset):
                 lbl_file = self.label_files[data_name][img_index - start_index]
 
                 if not os.path.exists(img_file):
-                    logger.warn('Illegal image file: {}, and it will be ignored'.format(img_file))
+                    logger.warning('Illegal image file: {}, and it will be ignored'.format(img_file))
                     continue
                 if not os.path.isfile(lbl_file):
-                    logger.warn('Illegal label file: {}, and it will be ignored'.format(lbl_file))
+                    logger.warning('Illegal label file: {}, and it will be ignored'.format(lbl_file))
                     continue
 
                 labels = np.loadtxt(lbl_file, dtype=np.float32).reshape(-1, 6)
ppdet/data/source/voc.py
@@ -89,13 +89,14 @@ class VOCDataSet(DetDataset):
                 img_file, xml_file = [os.path.join(image_dir, x) \
                         for x in line.strip().split()[:2]]
                 if not os.path.exists(img_file):
-                    logger.warn(
+                    logger.warning(
                         'Illegal image file: {}, and it will be ignored'.format(img_file))
                     continue
                 if not os.path.isfile(xml_file):
-                    logger.warn('Illegal xml file: {}, and it will be ignored'.format(xml_file))
+                    logger.warning(
+                        'Illegal xml file: {}, and it will be ignored'.format(xml_file))
                     continue
                 tree = ET.parse(xml_file)
                 if tree.find('id') is None:
@@ -107,7 +108,7 @@ class VOCDataSet(DetDataset):
                 im_w = float(tree.find('size').find('width').text)
                 im_h = float(tree.find('size').find('height').text)
                 if im_w < 0 or im_h < 0:
-                    logger.warn(
+                    logger.warning(
                         'Illegal width: {} or height: {} in annotation, '
                         'and {} will be ignored'.format(im_w, im_h, xml_file))
                     continue
@@ -137,7 +138,7 @@ class VOCDataSet(DetDataset):
                         gt_score.append([1.])
                         difficult.append([_difficult])
                     else:
-                        logger.warn(
+                        logger.warning(
                             'Found an invalid bbox in annotations: xml_file: {}'
                             ', x1: {}, y1: {}, x2: {}, y2: {}.'.format(
                                 xml_file, x1, y1, x2, y2))
ppdet/data/source/widerface.py
@@ -139,9 +139,9 @@ class WIDERFaceDataSet(DetDataset):
                 h = float(split_str[3])
                 # Filter out wrong labels
                 if w < 0 or h < 0:
-                    logger.warn('Illegal box with w: {}, h: {} in '
-                                'img: {}, and it will be ignored'.format(
-                                    w, h, file_dict[num_class][0]))
+                    logger.warning('Illegal box with w: {}, h: {} in '
+                                   'img: {}, and it will be ignored'.format(
+                                       w, h, file_dict[num_class][0]))
                     continue
                 xmin = max(0, xmin)
                 ymin = max(0, ymin)
ppdet/data/transform/operators.py
@@ -129,7 +129,7 @@ class Decode(BaseOperator):
         if 'h' not in sample:
             sample['h'] = im.shape[0]
         elif sample['h'] != im.shape[0]:
-            logger.warn(
+            logger.warning(
                 "The actual image height: {} is not equal to the "
                 "height: {} in annotation, and update sample['h'] by actual "
                 "image height.".format(im.shape[0], sample['h']))
@@ -137,7 +137,7 @@ class Decode(BaseOperator):
         if 'w' not in sample:
             sample['w'] = im.shape[1]
         elif sample['w'] != im.shape[1]:
-            logger.warn(
+            logger.warning(
                 "The actual image width: {} is not equal to the "
                 "width: {} in annotation, and update sample['w'] by actual "
                 "image width.".format(im.shape[1], sample['w']))
@@ -724,7 +724,7 @@ class Resize(BaseOperator):
         # apply rbox
         if 'gt_rbox2poly' in sample:
             if np.array(sample['gt_rbox2poly']).shape[1] != 8:
-                logger.warn(
+                logger.warning(
                     "gt_rbox2poly's length shoule be 8, but actually is {}".
                     format(len(sample['gt_rbox2poly'])))
             sample['gt_rbox2poly'] = self.apply_bbox(sample['gt_rbox2poly'],
ppdet/engine/callbacks.py
@@ -173,7 +173,7 @@ class Checkpointer(Callback):
                     else:
                         key = 'mask'
                     if key not in map_res:
-                        logger.warn("Evaluation results empty, this may be due to " \
+                        logger.warning("Evaluation results empty, this may be due to " \
                                     "training iterations being too few or not " \
                                     "loading the correct weights.")
                         return
ppdet/engine/tracker.py
@@ -74,7 +74,7 @@ class Tracker(object):
         if self.cfg.metric == 'MOT':
             self._metrics = [MOTMetric(), ]
         else:
-            logger.warn("Metric not support for metric type {}".format(
+            logger.warning("Metric not support for metric type {}".format(
                 self.cfg.metric))
             self._metrics = []
ppdet/engine/trainer.py
@@ -198,7 +198,7 @@ class Trainer(object):
         elif self.cfg.metric == 'ReID':
             self._metrics = [JDEReIDMetric(), ]
         else:
-            logger.warn("Metric not support for metric type {}".format(
+            logger.warning("Metric not support for metric type {}".format(
                 self.cfg.metric))
             self._metrics = []
static/ppdet/core/config/schema.py
@@ -185,12 +185,12 @@ def extract_schema(cls):
         annotations = argspec.annotations
         has_kwargs = argspec.varkw is not None
     else:
-        argspec = inspect.getargspec(ctor)
+        argspec = inspect.getfullargspec(ctor)
         # python 2 type hinting workaround, see pep-3107
         # however, since `typeguard` does not support python 2, type checking
         # is still python 3 only for now
         annotations = getattr(ctor, '__annotations__', {})
-        has_kwargs = argspec.keywords is not None
+        has_kwargs = argspec.varkw is not None
 
     names = [arg for arg in argspec.args if arg != 'self']
     defaults = argspec.defaults
static/ppdet/core/config/yaml_helpers.py
@@ -52,7 +52,7 @@ def _make_python_representer(cls):
     if hasattr(inspect, 'getfullargspec'):
         argspec = inspect.getfullargspec(cls)
     else:
-        argspec = inspect.getargspec(cls.__init__)
+        argspec = inspect.getfullargspec(cls.__init__)
     argnames = [arg for arg in argspec.args if arg != 'self']
 
     def python_representer(dumper, obj):
static/ppdet/core/workspace.py
@@ -166,8 +166,8 @@ def make_partial(cls):
     if not hasattr(op_module, op_name):
         import logging
         logger = logging.getLogger(__name__)
-        logger.warn('{} OP not found, maybe a newer version of paddle '
-                    'is required.'.format(cls.__op__))
+        logger.warning('{} OP not found, maybe a newer version of paddle '
+                       'is required.'.format(cls.__op__))
         return cls
 
     op = getattr(op_module, op_name)
static/ppdet/data/parallel_map.py
@@ -210,10 +210,10 @@ class ParallelMap(object):
                 else:
                     errmsg = "consumer[{}] exit abnormally".format(w.ident)
-                logger.warn(errmsg)
+                logger.warning(errmsg)
 
         if abnormal_num > 0:
-            logger.warn("{} consumers have exited abnormally!!!" \
+            logger.warning("{} consumers have exited abnormally!!!" \
                 .format(abnormal_num))
 
         return abnormal_num == 0
@@ -238,7 +238,7 @@ class ParallelMap(object):
             if isinstance(sample, EndSignal):
                 self._consumer_endsig[sample.id] = sample
-                logger.warn("recv endsignal from outq with errmsg[{}]" \
+                logger.warning("recv endsignal from outq with errmsg[{}]" \
                     .format(sample.errmsg))
 
                 if len(self._consumer_endsig.keys()) < len(self._consumers):
@@ -267,7 +267,7 @@ class ParallelMap(object):
                 " for some consumers exited abnormally before!!!"
 
             if not self.drained():
-                logger.warn("reset before epoch[{}] finishes".format(
+                logger.warning("reset before epoch[{}] finishes".format(
                     self._epoch))
                 self._produced = self._produced - self._consumed
             else:
static/ppdet/data/reader.py
@@ -46,8 +46,9 @@ class Compose(object):
                 data = f(data, ctx)
             except Exception as e:
                 stack_info = traceback.format_exc()
-                logger.warn("fail to map op [{}] with error: {} and stack:\n{}".
-                            format(f, e, str(stack_info)))
+                logger.warning(
+                    "fail to map op [{}] with error: {} and stack:\n{}".format(
+                        f, e, str(stack_info)))
                 raise e
         return data
static/ppdet/data/shared_queue/queue.py
@@ -74,7 +74,7 @@ class SharedQueue(Queue):
             stack_info = traceback.format_exc()
             err_msg = 'failed to put a element to SharedQueue '\
                 'with stack info[%s]' % (stack_info)
-            logger.warn(err_msg)
+            logger.warning(err_msg)
 
             if buff is not None:
                 buff.free()
@@ -94,7 +94,7 @@ class SharedQueue(Queue):
             stack_info = traceback.format_exc()
             err_msg = 'failed to get element from SharedQueue '\
                 'with stack info[%s]' % (stack_info)
-            logger.warn(err_msg)
+            logger.warning(err_msg)
             raise e
         finally:
             if buff is not None:
static/ppdet/data/shared_queue/sharedmemory.py
@@ -231,7 +231,7 @@ class PageAllocator(object):
         fname = fname + '.' + str(uuid.uuid4())[:6]
         with open(fname, 'wb') as f:
             f.write(pickle.dumps(info, -1))
-        logger.warn('dump alloc info to file[%s]' % (fname))
+        logger.warning('dump alloc info to file[%s]' % (fname))
 
     def _reset(self):
         alloc_page_pos = self._header_pages
@@ -458,7 +458,7 @@ class SharedMemoryMgr(object):
             if start is None:
                 time.sleep(0.1)
                 if ct % 100 == 0:
-                    logger.warn('not enough space for reason[%s]' % (errmsg))
+                    logger.warning('not enough space for reason[%s]' % (errmsg))
                 ct += 1
             else:
static/ppdet/data/source/coco.py
@@ -97,8 +97,8 @@ class COCODataSet(DataSet):
         if 'annotations' not in coco.dataset:
             self.load_image_only = True
-            logger.warn('Annotation file: {} does not contains ground truth '
-                        'and load image information only.'.format(anno_path))
+            logger.warning('Annotation file: {} does not contains ground truth '
+                           'and load image information only.'.format(anno_path))
 
         for img_id in img_ids:
             img_anno = coco.loadImgs([img_id])[0]
@@ -109,14 +109,14 @@ class COCODataSet(DataSet):
             im_path = os.path.join(image_dir,
                                    im_fname) if image_dir else im_fname
             if not os.path.exists(im_path):
-                logger.warn('Illegal image file: {}, and it will be '
-                            'ignored'.format(im_path))
+                logger.warning('Illegal image file: {}, and it will be '
+                               'ignored'.format(im_path))
                 continue
             if im_w < 0 or im_h < 0:
-                logger.warn('Illegal width: {} or height: {} in annotation, '
-                            'and im_id: {} will be ignored'.format(im_w, im_h,
-                                                                   img_id))
+                logger.warning(
+                    'Illegal width: {} or height: {} in annotation, '
+                    'and im_id: {} will be ignored'.format(im_w, im_h, img_id))
                 continue
 
             coco_rec = {
@@ -141,7 +141,7 @@ class COCODataSet(DataSet):
                     inst['clean_bbox'] = [x1, y1, x2, y2]
                     bboxes.append(inst)
                 else:
-                    logger.warn(
+                    logger.warning(
                         'Found an invalid bbox in annotations: im_id: {}, '
                         'x1: {}, y1: {}, x2: {}, y2: {}.'.format(
                             img_id, x1, y1, x2, y2))
static/ppdet/data/source/voc.py
@@ -111,13 +111,14 @@ class VOCDataSet(DataSet):
                 img_file, xml_file = [os.path.join(image_dir, x) \
                         for x in line.strip().split()[:2]]
                 if not os.path.exists(img_file):
-                    logger.warn(
+                    logger.warning(
                         'Illegal image file: {}, and it will be ignored'.format(img_file))
                     continue
                 if not os.path.isfile(xml_file):
-                    logger.warn('Illegal xml file: {}, and it will be ignored'.format(xml_file))
+                    logger.warning(
+                        'Illegal xml file: {}, and it will be ignored'.format(xml_file))
                     continue
                 tree = ET.parse(xml_file)
                 if tree.find('id') is None:
@@ -129,7 +130,7 @@ class VOCDataSet(DataSet):
                 im_w = float(tree.find('size').find('width').text)
                 im_h = float(tree.find('size').find('height').text)
                 if im_w < 0 or im_h < 0:
-                    logger.warn(
+                    logger.warning(
                         'Illegal width: {} or height: {} in annotation, '
                         'and {} will be ignored'.format(im_w, im_h, xml_file))
                     continue
@@ -156,7 +157,7 @@ class VOCDataSet(DataSet):
                         is_crowd.append([0])
                         difficult.append([_difficult])
                     else:
-                        logger.warn(
+                        logger.warning(
                             'Found an invalid bbox in annotations: xml_file: {}'
                             ', x1: {}, y1: {}, x2: {}, y2: {}.'.format(
                                 xml_file, x1, y1, x2, y2))
static/ppdet/data/source/widerface.py
@@ -133,9 +133,9 @@ class WIDERFaceDataSet(DataSet):
                 h = float(split_str[3])
                 # Filter out wrong labels
                 if w < 0 or h < 0:
-                    logger.warn('Illegal box with w: {}, h: {} in '
-                                'img: {}, and it will be ignored'.format(
-                                    w, h, file_dict[num_class][0]))
+                    logger.warning('Illegal box with w: {}, h: {} in '
+                                   'img: {}, and it will be ignored'.format(
+                                       w, h, file_dict[num_class][0]))
                     continue
                 xmin = max(0, xmin)
                 ymin = max(0, ymin)
static/ppdet/data/transform/autoaugment_utils.py
@@ -1451,19 +1451,19 @@ def _parse_policy_info(name, prob, level, replace_value, augmentation_hparams):
     # Check to see if prob is passed into function. This is used for operations
     # where we alter bboxes independently.
     # pytype:disable=wrong-arg-types
-    if 'prob' in inspect.getargspec(func)[0]:
+    if 'prob' in inspect.getfullargspec(func)[0]:
         args = tuple([prob] + list(args))
     # pytype:enable=wrong-arg-types
 
     # Add in replace arg if it is required for the function that is being called.
-    if 'replace' in inspect.getargspec(func)[0]:
+    if 'replace' in inspect.getfullargspec(func)[0]:
         # Make sure replace is the final argument
-        assert 'replace' == inspect.getargspec(func)[0][-1]
+        assert 'replace' == inspect.getfullargspec(func)[0][-1]
         args = tuple(list(args) + [replace_value])
 
     # Add bboxes as the second positional argument for the function if it does
     # not already exist.
-    if 'bboxes' not in inspect.getargspec(func)[0]:
+    if 'bboxes' not in inspect.getfullargspec(func)[0]:
         func = bbox_wrapper(func)
     return (func, prob, args)
@@ -1471,11 +1471,11 @@ def _parse_policy_info(name, prob, level, replace_value, augmentation_hparams):
 def _apply_func_with_prob(func, image, args, prob, bboxes):
     """Apply `func` to image w/ `args` as input with probability `prob`."""
     assert isinstance(args, tuple)
-    assert 'bboxes' == inspect.getargspec(func)[0][1]
+    assert 'bboxes' == inspect.getfullargspec(func)[0][1]
 
     # If prob is a function argument, then this randomness is being handled
     # inside the function, so make sure it is always called.
-    if 'prob' in inspect.getargspec(func)[0]:
+    if 'prob' in inspect.getfullargspec(func)[0]:
         prob = 1.0
 
     # Apply the function with probability `prob`.
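Both ArgSpec and FullArgSpec are named tuples whose first element is the list of positional argument names, so the '[0]' indexing and the membership and order checks above behave the same after the swap. A small sketch (hypothetical 'rotate_with_bboxes' op, not repository code):

import inspect


def rotate_with_bboxes(image, bboxes, degrees, replace, prob=0.5):
    return image, bboxes


arg_names = inspect.getfullargspec(rotate_with_bboxes)[0]
print(arg_names)                 # ['image', 'bboxes', 'degrees', 'replace', 'prob']
print('prob' in arg_names)       # True, same membership test as with getargspec
print(arg_names[1] == 'bboxes')  # True, matches the assert in _apply_func_with_prob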
static/ppdet/data/transform/operators.py
@@ -125,7 +125,7 @@ class DecodeImage(BaseOperator):
         if 'h' not in sample:
             sample['h'] = im.shape[0]
         elif sample['h'] != im.shape[0]:
-            logger.warn(
+            logger.warning(
                 "The actual image height: {} is not equal to the "
                 "height: {} in annotation, and update sample['h'] by actual "
                 "image height.".format(im.shape[0], sample['h']))
@@ -133,7 +133,7 @@ class DecodeImage(BaseOperator):
         if 'w' not in sample:
             sample['w'] = im.shape[1]
         elif sample['w'] != im.shape[1]:
-            logger.warn(
+            logger.warning(
                 "The actual image width: {} is not equal to the "
                 "width: {} in annotation, and update sample['w'] by actual "
                 "image width.".format(im.shape[1], sample['w']))
static/ppdet/modeling/losses/yolo_loss.py
@@ -67,7 +67,7 @@ class YOLOv3Loss(object):
         self.match_score = match_score
 
         if batch_size != -1:
-            logger.warn(
+            logger.warning(
                 "config YOLOv3Loss.batch_size is deprecated, "
                 "training batch size should be set by TrainReader.batch_size")
static/ppdet/utils/download.py
@@ -321,7 +321,7 @@ def _download(url, path, md5sum=None):
             shutil.move(tmp_fullname, fullname)
             return fullname
         else:
-            logger.warn(
+            logger.warning(
                 "Download from url imcomplete, try downloading again...")
             os.remove(tmp_fullname)
             continue
static/slim/sensitive/sensitive.py
@@ -104,7 +104,7 @@ def main():
     if cfg.weights:
         checkpoint.load_params(exe, eval_prog, cfg.weights)
     else:
-        logger.warn("Please set cfg.weights to load trained model.")
+        logger.warning("Please set cfg.weights to load trained model.")
 
     # whether output bbox is normalized in model output layer
     is_bbox_normalized = False
static/tools/anchor_cluster.py
@@ -264,9 +264,9 @@ class YOLOv5AnchorCluster(BaseAnchorCluster):
         wh0 = self.whs
         i = (wh0 < 3.0).any(1).sum()
         if i:
-            logger.warn('Extremely small objects found. %d of %d'
-                        'labels are < 3 pixels in width or height' %
-                        (i, len(wh0)))
+            logger.warning('Extremely small objects found. %d of %d'
+                           'labels are < 3 pixels in width or height' %
+                           (i, len(wh0)))
         wh = wh0[(wh0 >= 2.0).any(1)]
         logger.info('Running kmeans for %g anchors on %g points...' %
tools/anchor_cluster.py
@@ -251,9 +251,9 @@ class YOLOv5AnchorCluster(BaseAnchorCluster):
         wh0 = self.whs
         i = (wh0 < 3.0).any(1).sum()
         if i:
-            logger.warn('Extremely small objects found. %d of %d'
-                        'labels are < 3 pixels in width or height' %
-                        (i, len(wh0)))
+            logger.warning('Extremely small objects found. %d of %d'
+                           'labels are < 3 pixels in width or height' %
+                           (i, len(wh0)))
         wh = wh0[(wh0 >= 2.0).any(1)]
        logger.info('Running kmeans for %g anchors on %g points...' %