PaddlePaddle / PaddleClas
Commit 6d2de979 (unverified)
Authored on Mar 14, 2022 by gaotingquan
Commit message: fix
Parent: 1a9d6229
Showing 11 changed files with 139 additions and 116 deletions (+139 −116)
Changed files:
- deploy/python/ppshitu_v2/configs/test_cls_config.yaml (+4 −4)
- deploy/python/ppshitu_v2/configs/test_rec_config.yaml (+4 −4)
- deploy/python/ppshitu_v2/examples/predict.py (+13 −7)
- deploy/python/ppshitu_v2/processor/algo_mod/__init__.py (+15 −23)
- deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py (+5 −4)
- deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py (+27 −31)
- deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py (+27 −20)
- deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py (+5 −4)
- deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py (+22 −12)
- deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py (+5 −4)
- deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py (+12 −3)
deploy/python/ppshitu_v2/configs/test_cls_config.yaml

@@ -27,10 +27,10 @@ Modules:
   - name: PaddlePredictor
     type: predictor
     inference_model_dir: "./MobileNetV2_infer"
-    input_names:
-      inputs: image
-    output_names:
-      save_infer_model/scale_0.tmp_1: logits
+    to_model_names:
+      image: inputs
+    from_model_names:
+      logits: 0
   - name: TopK
     type: postprocessor
     k: 10
deploy/python/ppshitu_v2/configs/test_rec_config.yaml

@@ -26,9 +26,9 @@ Modules:
   - name: PaddlePredictor
     type: predictor
     inference_model_dir: models/product_ResNet50_vd_aliproduct_v1.0_infer
-    input_names:
-      x: image
-    output_names:
-      save_infer_model/scale_0.tmp_1: features
+    to_model_names:
+      image: x
+    from_model_names:
+      features: 0
   - name: FeatureNormalizer
     type: postprocessor
\ No newline at end of file
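The two config changes above rename the I/O maps and flip their direction: under to_model_names the key is now the pipeline's data key and the value is the model's input tensor name, and under from_model_names the key is the name exposed to later processors and the value is the index of the model output. A minimal sketch with hypothetical values (see the PaddlePredictor diff further down for how these maps are consumed):

import yaml  # PyYAML, assumed available

snippet = """
to_model_names:
  image: inputs    # data key "image" feeds the model input tensor named "inputs"
from_model_names:
  logits: 0        # model output #0 is exposed as data["pred"]["logits"]
"""
cfg = yaml.safe_load(snippet)
print(cfg["to_model_names"])    # {'image': 'inputs'}
print(cfg["from_model_names"])  # {'logits': 0}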
deploy/python/ppshitu_v2/examples/predict.py

@@ -20,14 +20,20 @@ def main():
     input_data = {"input_image": img}
     data = engine.process(input_data)
 
-    # for det, cls
-    # print(data)
-
+    # for cls
+    if "classification_res" in data:
+        print(data["classification_res"])
+    # for det
+    elif "detection_res" in data:
+        print(data["detection_res"])
     # for rec
-    # features = data["pred"]["features"]
-    # print(features)
-    # print(features.shape)
-    # print(type(features))
+    elif "features" in data["pred"]:
+        features = data["pred"]["features"]
+        print(features)
+        print(features.shape)
+        print(type(features))
+    else:
+        print("ERROR")
 
 
 if __name__ == '__main__':
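A toy illustration (made-up result dicts, no real engine) of which branch the updated example takes for each pipeline type:

import numpy as np

def report(data):  # hypothetical helper mirroring the branches above
    if "classification_res" in data:
        print(data["classification_res"])
    elif "detection_res" in data:
        print(data["detection_res"])
    elif "features" in data["pred"]:
        print(data["pred"]["features"].shape)
    else:
        print("ERROR")

report({"pred": {}, "classification_res": {"class_ids": [8], "scores": [0.97]}})
report({"pred": {"features": np.zeros((1, 512))}})  # rec pipeline: prints (1, 512)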
deploy/python/ppshitu_v2/processor/algo_mod/__init__.py

-# from .postprocessor import build_postprocessor
-# from .preprocessor import build_preprocessor
-# from .predictor import build_predictor
-import importlib
-
-from processor.algo_mod import preprocessor
-from processor.algo_mod import predictor
-from processor.algo_mod import postprocessor
-from processor.algo_mod import searcher
+from .postprocessor import build_postprocessor
+from .preprocessor import build_preprocessor
+from .predictor import build_predictor
+from .searcher import build_searcher
 
 from ..base_processor import BaseProcessor

@@ -17,20 +11,18 @@ class AlgoMod(BaseProcessor):
         self.processors = []
         for processor_config in config["processors"]:
             processor_type = processor_config.get("type")
-            processor_name = processor_config.get("name")
-            _mod = importlib.import_module(__name__)
-            processor = getattr(
-                getattr(_mod, processor_type), processor_name)(processor_config)
-            # if processor_type == "preprocessor":
-            #     processor = build_preprocessor(processor_config)
-            # elif processor_type == "predictor":
-            #     processor = build_predictor(processor_config)
-            # elif processor_type == "postprocessor":
-            #     processor = build_postprocessor(processor_config)
-            # else:
-            #     raise NotImplemented("processor type {} unknown.".format(processor_type))
+            if processor_type == "preprocessor":
+                processor = build_preprocessor(processor_config)
+            elif processor_type == "predictor":
+                processor = build_predictor(processor_config)
+            elif processor_type == "postprocessor":
+                processor = build_postprocessor(processor_config)
+            elif processor_type == "searcher":
+                processor = build_searcher(processor_config)
+            else:
+                raise NotImplemented(
+                    "processor type {} unknown.".format(processor_type))
             self.processors.append(processor)
 
     def process(self, input_data):
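The rewritten constructor drops the importlib/getattr lookup in favor of an explicit dispatch on the configured type. Below is a self-contained sketch of that pattern with placeholder builders (names and return values are illustrative only). Note that the diff raises NotImplemented, which is a constant rather than an exception class; NotImplementedError, used in the sketch, is what actually raises cleanly:

def build_preprocessor(cfg):  # placeholder for the real builder
    return ("preprocessor", cfg.get("name"))

def build_predictor(cfg):  # placeholder for the real builder
    return ("predictor", cfg.get("name"))

def build_processor(processor_config):
    processor_type = processor_config.get("type")
    if processor_type == "preprocessor":
        return build_preprocessor(processor_config)
    elif processor_type == "predictor":
        return build_predictor(processor_config)
    else:
        raise NotImplementedError(
            "processor type {} unknown.".format(processor_type))

print(build_processor({"type": "predictor", "name": "PaddlePredictor"}))
# ('predictor', 'PaddlePredictor')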
deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/__init__.py

@@ -4,7 +4,8 @@ from .classification import TopK
 from .det import DetPostPro
 from .rec import FeatureNormalizer
 
-# def build_postprocessor(config):
-#     processor_mod = importlib.import_module(__name__)
-#     processor_name = config.get("name")
-#     return getattr(processor_mod, processor_name)(config)
+
+def build_postprocessor(config):
+    processor_mod = importlib.import_module(__name__)
+    processor_name = config.get("name")
+    return getattr(processor_mod, processor_name)(config)
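The uncommented builder resolves a class by the configured name from the module's own namespace. A standalone sketch of that importlib/getattr pattern, with a toy TopK standing in for the real postprocessor classes:

import importlib

class TopK:  # toy stand-in for the imported postprocessor classes
    def __init__(self, config):
        self.k = config.get("k", 5)

def build_postprocessor(config):
    # Look the class up by name in this module, then instantiate it with the config.
    processor_mod = importlib.import_module(__name__)
    processor_name = config.get("name")
    return getattr(processor_mod, processor_name)(config)

proc = build_postprocessor({"name": "TopK", "k": 10})
print(type(proc).__name__, proc.k)  # TopK 10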
deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/classification.py

@@ -2,6 +2,7 @@ import os
 import numpy as np
 
+from utils import logger
 from ...base_processor import BaseProcessor

@@ -20,8 +21,8 @@ class TopK(BaseProcessor):
             return None
         if not os.path.exists(class_id_map_file):
-            print(
-                "Warning: If want to use your own label_dict, please input legal path!\nOtherwise label_names will be empty!"
-            )
+            logger.warning(
+                "[Classification] If want to use your own label_dict, please input legal path!\nOtherwise label_names will be empty!"
+            )
             return None

@@ -33,17 +34,13 @@ class TopK(BaseProcessor):
                 partition = line.split("\n")[0].partition(" ")
                 class_id_map[int(partition[0])] = str(partition[-1])
         except Exception as ex:
-            print(ex)
+            logger.warning(f"[Classification] {ex}")
             class_id_map = None
         return class_id_map
 
     def process(self, data):
-        x = data["pred"]["logits"]
-        # if file_names is not None:
-        #     assert x.shape[0] == len(file_names)
-        y = []
-        for idx, probs in enumerate(x):
-            index = probs.argsort(axis=0)[-self.topk:][::-1].astype(
-                "int32") if not self.multilabel else np.where(
-                    probs >= 0.5)[0].astype("int32")
+        # TODO(gaotingquan): only support bs==1 when 'connector' is not implemented.
+        # TODO(gaotingquan): support file_name
+        probs = data["pred"]["logits"][0]
+        index = probs.argsort(axis=0)[-self.topk:][::-1].astype(
+            "int32") if not self.multilabel else np.where(
+                probs >= 0.5)[0].astype("int32")

@@ -60,9 +57,8 @@ class TopK(BaseProcessor):
             "scores": np.around(
                 score_list, decimals=5).tolist(),
         }
-        # if file_names is not None:
-        #     result["file_name"] = file_names[idx]
         if label_name_list is not None:
             result["label_names"] = label_name_list
-        y.append(result)
-        return y
+        data["classification_res"] = result
+        return data
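The reworked process() assumes batch size 1 and writes its result into data["classification_res"] instead of returning a list. A numpy-only sketch of the top-k selection it performs (dummy scores; the class-id/score list building elided by the diff is approximated here):

import numpy as np

probs = np.array([0.05, 0.62, 0.08, 0.20, 0.05])  # stand-in for data["pred"]["logits"][0]
topk, multilabel = 3, False

index = probs.argsort(axis=0)[-topk:][::-1].astype(
    "int32") if not multilabel else np.where(probs >= 0.5)[0].astype("int32")

result = {
    "class_ids": index.tolist(),
    "scores": np.around(probs[index], decimals=5).tolist(),
}
print(result)  # {'class_ids': [1, 3, 2], 'scores': [0.62, 0.2, 0.08]}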
deploy/python/ppshitu_v2/processor/algo_mod/postprocessor/det.py

@@ -11,27 +11,34 @@ class DetPostPro(BaseProcessor):
         self.label_list = config["label_list"]
         self.max_det_results = config["max_det_results"]
 
-    def process(self, input_data):
-        pred = input_data["pred"]
+    def process(self, data):
+        pred = data["pred"]
         np_boxes = pred[list(pred.keys())[0]]
-        if reduce(lambda x, y: x * y, np_boxes.shape) < 6:
-            logger.warning('[Detector] No object detected.')
-            np_boxes = np.array([])
-
-        keep_indexes = np_boxes[:, 1].argsort()[::-1][:
-                                                      self.max_det_results]
-        results = []
-        for idx in keep_indexes:
-            single_res = np_boxes[idx]
+        if reduce(lambda x, y: x * y, np_boxes.shape) >= 6:
+            # TODO(gaotingquan): only support bs==1
+            keep_indexes = np_boxes[:, 1].argsort()[::-1][:self.max_det_results]
+            single_res = np_boxes[0]
             class_id = int(single_res[0])
             score = single_res[1]
             bbox = single_res[2:]
-            if score < self.threshold:
-                continue
-            label_name = self.label_list[class_id]
-            results.append({
-                "class_id": class_id,
-                "score": score,
-                "bbox": bbox,
-                "label_name": label_name,
-            })
-        return results
+            if score > self.threshold:
+                label_name = self.label_list[class_id]
+                results = {
+                    "class_id": class_id,
+                    "score": score,
+                    "bbox": bbox,
+                    "label_name": label_name,
+                }
+                data["detection_res"] = results
+                return data
+
+        logger.warning('[Detector] No object detected.')
+        results = {
+            "class_id": None,
+            "score": None,
+            "bbox": None,
+            "label_name": None,
+        }
+        data["detection_res"] = results
+        return data
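The detector postprocessor now also assumes batch size 1, returns its result inside data["detection_res"], and falls through to a None-filled result when nothing passes the size or score checks. A sketch on a dummy box array (rows laid out as [class_id, score, x1, y1, x2, y2], values made up):

import numpy as np
from functools import reduce

np_boxes = np.array([
    [0., 0.91, 10., 20., 110., 220.],
    [1., 0.35, 15., 25., 90., 180.],
])
threshold, max_det_results = 0.5, 1
label_list = ["person", "dog"]

if reduce(lambda x, y: x * y, np_boxes.shape) >= 6:  # at least one complete box
    keep_indexes = np_boxes[:, 1].argsort()[::-1][:max_det_results]  # mirrors the diff
    single_res = np_boxes[0]  # bs == 1: only the first box is examined
    class_id, score, bbox = int(single_res[0]), single_res[1], single_res[2:]
    if score > threshold:
        print({"class_id": class_id, "score": score,
               "bbox": bbox, "label_name": label_list[class_id]})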
deploy/python/ppshitu_v2/processor/algo_mod/predictor/__init__.py

@@ -3,7 +3,8 @@ import importlib
 from processor.algo_mod.predictor.paddle_predictor import PaddlePredictor
 from processor.algo_mod.predictor.onnx_predictor import ONNXPredictor
 
-# def build_predictor(config):
-#     processor_mod = importlib.import_module(__name__)
-#     processor_name = config.get("name")
-#     return getattr(processor_mod, processor_name)(config)
+
+def build_predictor(config):
+    processor_mod = importlib.import_module(__name__)
+    processor_name = config.get("name")
+    return getattr(processor_mod, processor_name)(config)
deploy/python/ppshitu_v2/processor/algo_mod/predictor/paddle_predictor.py

@@ -48,30 +48,40 @@ class PaddlePredictor(BaseProcessor):
         paddle_config.switch_use_feed_fetch_ops(False)
         self.predictor = create_predictor(paddle_config)
-        if "input_names" in config and config["input_names"]:
-            self.input_name_mapping = config["input_names"]
+        if "to_model_names" in config and config["to_model_names"]:
+            self.input_name_map = {
+                v: k
+                for k, v in config["to_model_names"].items()
+            }
         else:
-            self.input_name_mapping = []
-        if "output_names" in config and config["output_names"]:
-            self.output_name_mapping = config["output_names"]
+            self.input_name_map = {}
+        if "from_model_names" in config and config["from_model_names"]:
+            self.output_name_map = config["from_model_names"]
         else:
-            self.output_name_mapping = []
+            self.output_name_map = {}
 
     def process(self, data):
         input_names = self.predictor.get_input_names()
         for input_name in input_names:
             input_tensor = self.predictor.get_input_handle(input_name)
-            name = self.input_name_mapping[
-                input_name] if input_name in self.input_name_mapping else input_name
+            name = self.input_name_map[
+                input_name] if input_name in self.input_name_map else input_name
             input_tensor.copy_from_cpu(data[name])
         self.predictor.run()
 
-        output_data = {}
+        model_output = []
         output_names = self.predictor.get_output_names()
         for output_name in output_names:
             output = self.predictor.get_output_handle(output_name)
-            name = self.output_name_mapping[
-                output_name] if output_name in self.output_name_mapping else output_name
-            output_data[name] = output.copy_to_cpu()
+            model_output.append((output_name, output.copy_to_cpu()))
+
+        if self.output_name_map:
+            output_data = {}
+            for name in self.output_name_map:
+                idx = self.output_name_map[name]
+                output_data[name] = model_output[idx][1]
+        else:
+            output_data = dict(model_output)
+
         data["pred"] = output_data
         return data
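A plain-dict sketch (no Paddle, made-up tensors) of how the renamed maps behave in the new code: to_model_names is inverted once so lookups go from the model's tensor name back to the data key, and from_model_names selects outputs by position and renames them:

config = {
    "to_model_names": {"image": "inputs"},   # data key -> model input tensor name
    "from_model_names": {"logits": 0},       # exposed name -> model output index
}

# __init__: invert to_model_names so process() can look up by tensor name.
input_name_map = {v: k for k, v in config["to_model_names"].items()}
print(input_name_map)  # {'inputs': 'image'}

# process(): pretend the predictor produced one output tensor.
model_output = [("save_infer_model/scale_0.tmp_1", [[0.1, 0.9]])]
output_name_map = config["from_model_names"]
if output_name_map:
    output_data = {name: model_output[idx][1] for name, idx in output_name_map.items()}
else:
    output_data = dict(model_output)
print(output_data)  # {'logits': [[0.1, 0.9]]}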
deploy/python/ppshitu_v2/processor/algo_mod/preprocessor/__init__.py

@@ -2,7 +2,8 @@ import importlib
 from processor.algo_mod.preprocessor.image_processor import ImageProcessor
 
-# def build_preprocessor(config):
-#     processor_mod = importlib.import_module(__name__)
-#     processor_name = config.get("name")
-#     return getattr(processor_mod, processor_name)(config)
+
+def build_preprocessor(config):
+    processor_mod = importlib.import_module(__name__)
+    processor_name = config.get("name")
+    return getattr(processor_mod, processor_name)(config)
deploy/python/ppshitu_v2/processor/algo_mod/searcher/__init__.py

@@ -4,11 +4,15 @@ import pickle
 import faiss
 
+
+def build_searcher(config):
+    return Searcher(config)
+
+
 class Searcher:
     def __init__(self, config):
         super().__init__()
 
-        self.Searcher = faiss.read_index(
+        self.faiss_searcher = faiss.read_index(
             os.path.join(config["index_dir"], "vector.index"))
 
         with open(os.path.join(config["index_dir"], "id_map.pkl"), "rb") as fd:

@@ -18,6 +22,11 @@ class Searcher:
     def process(self, data):
         features = data["features"]
-        scores, docs = self.Searcher.search(features, self.return_k)
-        data["search_res"] = (scores, docs)
+        scores, docs = self.faiss_searcher.search(features, self.return_k)
+
+        preds = {}
+        preds["rec_docs"] = self.id_map[docs[0][0]].split()[1]
+        preds["rec_scores"] = scores[0][0]
+        data["search_res"] = preds
         return data
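search_res is now a small dict with the top-1 document and score instead of the raw (scores, docs) tuple. A standalone sketch with a made-up id_map entry and mocked faiss results (the real id_map comes from id_map.pkl; the "<id> <doc>" string layout is assumed here):

import numpy as np

id_map = {7: "7 anmuxi"}     # faiss id -> "<id> <doc>" (format assumed)
scores = np.array([[0.83]])  # shape (num_queries, return_k)
docs = np.array([[7]])       # matching ids from the index

preds = {}
preds["rec_docs"] = id_map[docs[0][0]].split()[1]  # -> 'anmuxi'
preds["rec_scores"] = scores[0][0]                 # -> 0.83
print(preds)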