PaddlePaddle / PaddleClas

Commit 6d2de979 (unverified), authored Mar 14, 2022 by gaotingquan
Parent commit: 1a9d6229

Commit message: fix

Showing 11 changed files with 139 additions and 116 deletions (+139 / -116).
Changed files:

deploy/python/ppshitu_v2/configs/test_cls_config.yaml                          +4   -4
deploy/python/ppshitu_v2/configs/test_rec_config.yaml                          +4   -4
deploy/python/ppshitu_v2/examples/predict.py                                   +13  -7
deploy/python/ppshitu_v2/processor/algo_mod/__init__.py                        +15  -23
deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py          +5   -4
deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py    +27  -31
deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py               +27  -20
deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py              +5   -4
deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py      +22  -12
deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py           +5   -4
deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py               +12  -3

deploy/python/ppshitu_v2/configs/test_cls_config.yaml
@@ -27,10 +27,10 @@ Modules:
   - name: PaddlePredictor
     type: predictor
     inference_model_dir: "./MobileNetV2_infer"
-    input_names:
-      inputs: image
-    output_names:
-      save_infer_model/scale_0.tmp_1: logits
+    to_model_names:
+      image: inputs
+    from_model_names:
+      logits: 0
   - name: TopK
     type: postprocessor
     k: 10
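
For reference, the renamed keys change what the mapping means: `to_model_names` maps a key in the pipeline's data dict to the model's input tensor name, and `from_model_names` maps an exposed output key to the index of the model output, as implemented in the `paddle_predictor.py` change below. A minimal sketch of the input-side lookup, with a placeholder payload (not part of the commit):

    # Sketch only: direction of the renamed config keys.
    to_model_names = {"image": "inputs"}   # pipeline data key -> model input name
    from_model_names = {"logits": 0}       # exposed output key -> model output index

    data = {"image": "<preprocessed image tensor>"}  # placeholder payload

    # PaddlePredictor inverts to_model_names, so for the model input "inputs"
    # it looks up the pipeline key "image" and feeds data["image"].
    input_name_map = {v: k for k, v in to_model_names.items()}
    print(input_name_map)                  # {'inputs': 'image'}
    print(data[input_name_map["inputs"]])  # "<preprocessed image tensor>"
    print(from_model_names["logits"])      # output index 0 is exposed as "logits"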

deploy/python/ppshitu_v2/configs/test_rec_config.yaml
@@ -26,9 +26,9 @@ Modules:
   - name: PaddlePredictor
     type: predictor
     inference_model_dir: models/product_ResNet50_vd_aliproduct_v1.0_infer
-    input_names:
-      x: image
-    output_names:
-      save_infer_model/scale_0.tmp_1: features
+    to_model_names:
+      image: x
+    from_model_names:
+      features: 0
   - name: FeatureNormalizer
     type: postprocessor
\ No newline at end of file

deploy/python/ppshitu_v2/examples/predict.py
@@ -20,14 +20,20 @@ def main():
     input_data = {"input_image": img}
     data = engine.process(input_data)
 
-    # for det, cls
-    # print(data)
+    # for cls
+    if "classification_res" in data:
+        print(data["classification_res"])
+    # for det
+    elif "detection_res" in data:
+        print(data["detection_res"])
     # for rec
-    # features = data["pred"]["features"]
-    # print(features)
-    # print(features.shape)
-    # print(type(features))
-
+    elif "features" in data["pred"]:
+        features = data["pred"]["features"]
+        print(features)
+        print(features.shape)
+        print(type(features))
+    else:
+        print("ERROR")
 
 
 if __name__ == '__main__':
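
To make the new dispatch concrete, the three branches print payloads shaped roughly like the following. The values are made up; the key layouts follow the postprocessor changes later in this commit, while the bbox ordering and label text are assumptions:

    # Made-up payloads illustrating the keys each branch prints.
    classification_res = {
        "class_ids": [283, 282, 281],             # top-k class indices
        "scores": [0.91234, 0.04321, 0.01234],    # rounded to 5 decimals by TopK
        "label_names": ["label_a", "label_b", "label_c"],
    }
    detection_res = {
        "class_id": 0,
        "score": 0.87,
        "bbox": [110.0, 50.0, 420.0, 380.0],      # assumed [xmin, ymin, xmax, ymax]
        "label_name": "some_label",               # placeholder label
    }
    # Recognition configs leave a feature matrix under data["pred"]["features"],
    # e.g. a numpy array of shape (1, feature_dim) for a single image.
    print(classification_res, detection_res)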

deploy/python/ppshitu_v2/processor/algo_mod/__init__.py
-# from .postprocessor import build_postprocessor
-# from .preprocessor import build_preprocessor
-# from .predictor import build_predictor
-import importlib
-
-from processor.algo_mod import preprocessor
-from processor.algo_mod import predictor
-from processor.algo_mod import postprocessor
-from processor.algo_mod import searcher
+from .postprocessor import build_postprocessor
+from .preprocessor import build_preprocessor
+from .predictor import build_predictor
+from .searcher import build_searcher
 
 from ..base_processor import BaseProcessor
@@ -17,20 +11,18 @@ class AlgoMod(BaseProcessor):
         self.processors = []
         for processor_config in config["processors"]:
             processor_type = processor_config.get("type")
-            processor_name = processor_config.get("name")
-            _mod = importlib.import_module(__name__)
-            processor = getattr(
-                getattr(_mod, processor_type), processor_name)(processor_config)
-
-            # if processor_type == "preprocessor":
-            #     processor = build_preprocessor(processor_config)
-            # elif processor_type == "predictor":
-            #     processor = build_predictor(processor_config)
-            # elif processor_type == "postprocessor":
-            #     processor = build_postprocessor(processor_config)
-            # else:
-            #     raise NotImplemented("processor type {} unknown.".format(processor_type))
+            if processor_type == "preprocessor":
+                processor = build_preprocessor(processor_config)
+            elif processor_type == "predictor":
+                processor = build_predictor(processor_config)
+            elif processor_type == "postprocessor":
+                processor = build_postprocessor(processor_config)
+            elif processor_type == "searcher":
+                processor = build_searcher(processor_config)
+            else:
+                raise NotImplemented("processor type {} unknown.".format(
+                    processor_type))
             self.processors.append(processor)
 
     def process(self, input_data):
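
With the reflection-based lookup replaced by explicit branches, each entry under `config["processors"]` picks a builder by its `type` field. (Incidentally, `NotImplemented` in the fallback is Python's built-in constant rather than an exception class; `NotImplementedError` is the exception type usually raised in this situation.) A sketch of a config dict that would exercise all four branches; the names and extra fields are placeholders, not a working pipeline:

    # Placeholder config mirroring the structure AlgoMod iterates over.
    algo_config = {
        "processors": [
            {"type": "preprocessor", "name": "ImageProcessor"},
            {"type": "predictor", "name": "PaddlePredictor",
             "inference_model_dir": "./MobileNetV2_infer"},
            {"type": "postprocessor", "name": "TopK", "k": 10},
            {"type": "searcher", "name": "Searcher", "index_dir": "./index"},
        ]
    }

    for processor_config in algo_config["processors"]:
        processor_type = processor_config.get("type")
        # "preprocessor" -> build_preprocessor, "predictor" -> build_predictor,
        # "postprocessor" -> build_postprocessor, "searcher" -> build_searcher.
        print(processor_type, "->", "build_" + processor_type)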

deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py
@@ -4,7 +4,8 @@ from .classification import TopK
 from .det import DetPostPro
 from .rec import FeatureNormalizer
 
-# def build_postprocessor(config):
-#     processor_mod = importlib.import_module(__name__)
-#     processor_name = config.get("name")
-#     return getattr(processor_mod, processor_name)(config)
+
+def build_postprocessor(config):
+    processor_mod = importlib.import_module(__name__)
+    processor_name = config.get("name")
+    return getattr(processor_mod, processor_name)(config)
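
The builder resolves a class by name from the module's own namespace via `importlib.import_module(__name__)` and `getattr`. A self-contained sketch of the same lookup pattern, using stand-in classes and a `SimpleNamespace` in place of the real module so it runs on its own:

    from types import SimpleNamespace

    # Stand-ins for the classes the postprocessor package exports.
    class TopK:
        def __init__(self, config):
            self.topk = config.get("k", 5)

    class FeatureNormalizer:
        def __init__(self, config):
            pass

    module = SimpleNamespace(TopK=TopK, FeatureNormalizer=FeatureNormalizer)

    def build_postprocessor(config):
        # getattr(module, "TopK")(config) -> TopK(config)
        return getattr(module, config.get("name"))(config)

    print(type(build_postprocessor({"name": "TopK", "k": 10})).__name__)  # TopK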

deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py
@@ -2,6 +2,7 @@ import os
 
 import numpy as np
 
+from utils import logger
 from ...base_processor import BaseProcessor
 
@@ -20,8 +21,8 @@ class TopK(BaseProcessor):
             return None
 
         if not os.path.exists(class_id_map_file):
-            print(
-                "Warning: If want to use your own label_dict, please input legal path!\nOtherwise label_names will be empty!"
+            logger.warning(
+                "[Classification] If want to use your own label_dict, please input legal path!\nOtherwise label_names will be empty!"
             )
             return None
 
@@ -33,36 +34,31 @@ class TopK(BaseProcessor):
                 partition = line.split("\n")[0].partition(" ")
                 class_id_map[int(partition[0])] = str(partition[-1])
         except Exception as ex:
-            print(ex)
+            logger.warning(f"[Classification] {ex}")
             class_id_map = None
         return class_id_map
 
     def process(self, data):
-        x = data["pred"]["logits"]
-        # TODO(gaotingquan): support file_name
-        # if file_names is not None:
-        #     assert x.shape[0] == len(file_names)
-        y = []
-        for idx, probs in enumerate(x):
-            index = probs.argsort(axis=0)[-self.topk:][::-1].astype(
-                "int32") if not self.multilabel else np.where(
-                    probs >= 0.5)[0].astype("int32")
-            clas_id_list = []
-            score_list = []
-            label_name_list = []
-            for i in index:
-                clas_id_list.append(i.item())
-                score_list.append(probs[i].item())
-                if self.class_id_map is not None:
-                    label_name_list.append(self.class_id_map[i.item()])
-            result = {
-                "class_ids": clas_id_list,
-                "scores": np.around(
-                    score_list, decimals=5).tolist(),
-            }
-            # if file_names is not None:
-            #     result["file_name"] = file_names[idx]
-            if label_name_list is not None:
-                result["label_names"] = label_name_list
-            y.append(result)
-        return y
+        # TODO(gaotingquan): only support bs==1 when 'connector' is not implemented.
+        probs = data["pred"]["logits"][0]
+        index = probs.argsort(axis=0)[-self.topk:][::-1].astype(
+            "int32") if not self.multilabel else np.where(
+                probs >= 0.5)[0].astype("int32")
+        clas_id_list = []
+        score_list = []
+        label_name_list = []
+        for i in index:
+            clas_id_list.append(i.item())
+            score_list.append(probs[i].item())
+            if self.class_id_map is not None:
+                label_name_list.append(self.class_id_map[i.item()])
+        result = {
+            "class_ids": clas_id_list,
+            "scores": np.around(
+                score_list, decimals=5).tolist(),
+        }
+        if label_name_list is not None:
+            result["label_names"] = label_name_list
+        data["classification_res"] = result
+        return data
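
The top-k selection expression is unchanged in substance but now runs on a single sample (`logits[0]`) and writes one result dict under `data["classification_res"]`. A quick numpy check of what `argsort(axis=0)[-topk:][::-1]` produces (indices of the k largest scores, highest first), with made-up probabilities:

    import numpy as np

    probs = np.array([0.05, 0.60, 0.10, 0.20, 0.05])  # made-up single-sample scores
    topk = 3

    index = probs.argsort(axis=0)[-topk:][::-1].astype("int32")
    print(index.tolist())                                # [1, 3, 2], best first
    print(np.around(probs[index], decimals=5).tolist())  # [0.6, 0.2, 0.1]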

deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py
@@ -11,27 +11,34 @@ class DetPostPro(BaseProcessor):
         self.label_list = config["label_list"]
         self.max_det_results = config["max_det_results"]
 
-    def process(self, input_data):
-        pred = input_data["pred"]
+    def process(self, data):
+        pred = data["pred"]
         np_boxes = pred[list(pred.keys())[0]]
-        if reduce(lambda x, y: x * y, np_boxes.shape) < 6:
-            logger.warning('[Detector] No object detected.')
-            np_boxes = np.array([])
-
-        keep_indexes = np_boxes[:, 1].argsort()[::-1][:self.max_det_results]
-        results = []
-        for idx in keep_indexes:
-            single_res = np_boxes[idx]
+        if reduce(lambda x, y: x * y, np_boxes.shape) >= 6:
+            keep_indexes = np_boxes[:, 1].argsort()[::-1][:
+                                                          self.max_det_results]
+            # TODO(gaotingquan): only support bs==1
+            single_res = np_boxes[0]
             class_id = int(single_res[0])
             score = single_res[1]
             bbox = single_res[2:]
-            if score < self.threshold:
-                continue
-            label_name = self.label_list[class_id]
-            results.append({
-                "class_id": class_id,
-                "score": score,
-                "bbox": bbox,
-                "label_name": label_name,
-            })
-        return results
+            if score > self.threshold:
+                label_name = self.label_list[class_id]
+                results = {
+                    "class_id": class_id,
+                    "score": score,
+                    "bbox": bbox,
+                    "label_name": label_name,
+                }
+                data["detection_res"] = results
+                return data
+
+        logger.warning('[Detector] No object detected.')
+        results = {
+            "class_id": None,
+            "score": None,
+            "bbox": None,
+            "label_name": None,
+        }
+        data["detection_res"] = results
+        return data
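
The `reduce(lambda x, y: x * y, np_boxes.shape) >= 6` guard simply checks that the flattened prediction holds at least one 6-element row (class_id, score, and four bbox values); an empty output falls through to the "No object detected" placeholder of `None` fields. A small check of the guard, assuming the usual `(N, 6)` detector output layout:

    from functools import reduce
    import numpy as np

    boxes = np.array([[0.0, 0.87, 110.0, 50.0, 420.0, 380.0]])  # (1, 6): one box
    empty = np.zeros((0, 6))                                     # no detections

    print(reduce(lambda x, y: x * y, boxes.shape))  # 6 -> passes the guard
    print(reduce(lambda x, y: x * y, empty.shape))  # 0 -> warning branch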

deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py
@@ -3,7 +3,8 @@ import importlib
 from processor.algo_mod.predictor.paddle_predictor import PaddlePredictor
 from processor.algo_mod.predictor.onnx_predictor import ONNXPredictor
 
-# def build_predictor(config):
-#     processor_mod = importlib.import_module(__name__)
-#     processor_name = config.get("name")
-#     return getattr(processor_mod, processor_name)(config)
+
+def build_predictor(config):
+    processor_mod = importlib.import_module(__name__)
+    processor_name = config.get("name")
+    return getattr(processor_mod, processor_name)(config)

deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py
@@ -48,30 +48,40 @@ class PaddlePredictor(BaseProcessor):
         paddle_config.switch_use_feed_fetch_ops(False)
         self.predictor = create_predictor(paddle_config)
 
-        if "input_names" in config and config["input_names"]:
-            self.input_name_mapping = config["input_names"]
+        if "to_model_names" in config and config["to_model_names"]:
+            self.input_name_map = {
+                v: k
+                for k, v in config["to_model_names"].items()
+            }
         else:
-            self.input_name_mapping = []
-        if "output_names" in config and config["output_names"]:
-            self.output_name_mapping = config["output_names"]
+            self.input_name_map = {}
+        if "from_model_names" in config and config["from_model_names"]:
+            self.output_name_map = config["from_model_names"]
         else:
-            self.output_name_mapping = []
+            self.output_name_map = {}
 
     def process(self, data):
         input_names = self.predictor.get_input_names()
         for input_name in input_names:
             input_tensor = self.predictor.get_input_handle(input_name)
-            name = self.input_name_mapping[
-                input_name] if input_name in self.input_name_mapping else input_name
+            name = self.input_name_map[
                input_name] if input_name in self.input_name_map else input_name
             input_tensor.copy_from_cpu(data[name])
         self.predictor.run()
 
-        output_data = {}
+        model_output = []
         output_names = self.predictor.get_output_names()
         for output_name in output_names:
             output = self.predictor.get_output_handle(output_name)
-            name = self.output_name_mapping[
-                output_name] if output_name in self.output_name_mapping else output_name
-            output_data[name] = output.copy_to_cpu()
+            model_output.append((output_name, output.copy_to_cpu()))
+
+        if self.output_name_map:
+            output_data = {}
+            for name in self.output_name_map:
+                idx = self.output_name_map[name]
+                output_data[name] = model_output[idx][1]
+        else:
+            output_data = dict(model_output)
 
         data["pred"] = output_data
         return data
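
On the output side the predictor now gathers every fetched tensor as a `(name, array)` pair and, when `from_model_names` is configured, rebuilds `data["pred"]` by output index, otherwise keeping the raw model names. A standalone sketch of that remapping with a fake output list:

    import numpy as np

    # Fake fetched outputs in the order the model returns them.
    model_output = [("save_infer_model/scale_0.tmp_1", np.zeros((1, 1000)))]
    output_name_map = {"logits": 0}  # from_model_names in the config

    if output_name_map:
        output_data = {name: model_output[idx][1]
                       for name, idx in output_name_map.items()}
    else:
        output_data = dict(model_output)

    print(list(output_data))  # ['logits']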

deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py
@@ -2,7 +2,8 @@ import importlib
 
 from processor.algo_mod.preprocessor.image_processor import ImageProcessor
 
-# def build_preprocessor(config):
-#     processor_mod = importlib.import_module(__name__)
-#     processor_name = config.get("name")
-#     return getattr(processor_mod, processor_name)(config)
+
+def build_preprocessor(config):
+    processor_mod = importlib.import_module(__name__)
+    processor_name = config.get("name")
+    return getattr(processor_mod, processor_name)(config)

deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py
@@ -4,11 +4,15 @@ import pickle
 import faiss
 
 
+def build_searcher(config):
+    return Searcher(config)
+
+
 class Searcher:
     def __init__(self, config):
         super().__init__()
 
-        self.Searcher = faiss.read_index(
+        self.faiss_searcher = faiss.read_index(
             os.path.join(config["index_dir"], "vector.index"))
         with open(os.path.join(config["index_dir"], "id_map.pkl"), "rb") as fd:
@@ -18,6 +22,11 @@ class Searcher:
     def process(self, data):
         features = data["features"]
-        scores, docs = self.Searcher.search(features, self.return_k)
-        data["search_res"] = (scores, docs)
+        scores, docs = self.faiss_searcher.search(features, self.return_k)
+
+        preds = {}
+        preds["rec_docs"] = self.id_map[docs[0][0]].split()[1]
+        preds["rec_scores"] = scores[0][0]
+        data["search_res"] = preds
 
         return data
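
The searcher now unpacks the FAISS result itself: `search()` returns two `(num_queries, return_k)` arrays, so `docs[0][0]` and `scores[0][0]` are the best match for a single query, and the id is translated through `id_map`. A sketch with a tiny in-memory flat index (assumes the `faiss` package is installed; the "<path> <label>" id_map format is an assumption that mirrors the `.split()[1]` above):

    import numpy as np
    import faiss

    dim = 8                                   # toy feature dimension
    gallery = np.random.rand(4, dim).astype("float32")
    index = faiss.IndexFlatL2(dim)            # stand-in for the on-disk vector.index
    index.add(gallery)

    id_map = {i: f"gallery_{i}.jpg label_{i}" for i in range(4)}  # assumed format

    query = gallery[2:3]                      # query with a known gallery vector
    scores, docs = index.search(query, 1)     # both results have shape (1, 1)

    preds = {
        "rec_docs": id_map[docs[0][0]].split()[1],  # -> "label_2"
        "rec_scores": scores[0][0],
    }
    print(preds)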