Commit a72bd05b (unverified)
Authored on Jun 30, 2022 by andyjpaddle; committed by GitHub on Jun 30, 2022

Merge pull request #6743 from andyjpaddle/fix_serving_log

[TIPC] Fix serving log

Parents: 2f99f0e1, 12060d8a
Showing 2 changed files with 12 additions and 9 deletions (+12, -9):

test_tipc/test_serving_infer_cpp.sh    (+4, -2)
test_tipc/test_serving_infer_python.sh (+8, -7)
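The change, as the diffs below show, is the same in both scripts: each Paddle Serving launch command gets a nohup prefix and a trailing &, so the server runs detached in the background while its stdout/stderr keep streaming into a per-run log under ${LOG_PATH}. A minimal sketch of the pattern, with an illustrative command in place of the scripts' real web_service.py invocation:

    # Illustrative setup (not the repository's paths or command).
    LOG_PATH="./log"
    mkdir -p ${LOG_PATH}

    # Before: the launch string runs the server in the foreground, so the
    # test script blocks on it and cannot continue to the client step.
    # server_cmd="python3 web_service.py --port 9293 > ${LOG_PATH}/server.log 2>&1"

    # After: detach the server with nohup + '&' and keep the log redirection,
    # so the test can continue, inspect the log, and kill the server later.
    server_log_path="${LOG_PATH}/cpp_server_cpu.log"
    server_cmd="nohup python3 web_service.py --port 9293 > ${server_log_path} 2>&1 &"
    eval ${server_cmd}
    last_status=${PIPESTATUS[0]}   # exit status of launching the background job
    sleep 5s                       # give the server time to come up, as the scripts do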
test_tipc/test_serving_infer_cpp.sh

@@ -87,11 +87,13 @@ function func_serving(){
     set_image_dir=$(func_set_params "${image_dir_key}" "${image_dir_value}")
     python_list=(${python_list})
     cd ${serving_dir_value}
     unset https_proxy
     unset http_proxy
     # cpp serving
     for gpu_id in ${gpu_value[*]}; do
         if [ ${gpu_id} = "null" ]; then
             server_log_path="${LOG_PATH}/cpp_server_cpu.log"
-            web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} > ${server_log_path} 2>&1 "
+            web_service_cpp_cmd="nohup ${python_list[0]} ${web_service_py} --model ${det_server_value} ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} > ${server_log_path} 2>&1 &"
             eval $web_service_cpp_cmd
             last_status=${PIPESTATUS[0]}
             status_check $last_status "${web_service_cpp_cmd}" "${status_log}" "${model_name}"
@@ -105,7 +107,7 @@ function func_serving(){
             ps ux | grep -i ${port_value} | awk '{print $2}' | xargs kill -s 9
         else
             server_log_path="${LOG_PATH}/cpp_server_gpu.log"
-            web_service_cpp_cmd="${python_list[0]} ${web_service_py} --model ${det_server_value} ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} ${gpu_key} ${gpu_id} > ${server_log_path} 2>&1 "
+            web_service_cpp_cmd="nohup ${python_list[0]} ${web_service_py} --model ${det_server_value} ${rec_server_value} ${op_key} ${op_value} ${port_key} ${port_value} ${gpu_key} ${gpu_id} > ${server_log_path} 2>&1 &"
             eval $web_service_cpp_cmd
             sleep 5s
             _save_log_path="${LOG_PATH}/cpp_client_gpu.log"
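After each launch the scripts capture ${PIPESTATUS[0]} and pass it to status_check together with the command string, the status log path, and the model name. status_check comes from the TIPC common helpers and is not part of this diff; the sketch below is a hypothetical equivalent that only illustrates the calling convention seen above:

    # Hypothetical stand-in for the status_check helper called in the diff;
    # the real helper lives in the TIPC common functions and may differ in detail.
    function status_check(){
        local last_status=$1    # exit code, e.g. ${PIPESTATUS[0]} after eval
        local run_command=$2    # the command string that was eval'd
        local run_log=$3        # status log file to append to
        local model_name=$4     # model under test, recorded for traceability
        if [ ${last_status} -eq 0 ]; then
            echo "Run successfully with command - ${model_name} - ${run_command}!" | tee -a ${run_log}
        else
            echo "Run failed with command - ${model_name} - ${run_command}!" | tee -a ${run_log}
        fi
    }

Because the server now runs detached under nohup, the unchanged context line ps ux | grep -i ${port_value} | awk '{print $2}' | xargs kill -s 9 is what tears it down again by port once the client run finishes.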
test_tipc/test_serving_infer_python.sh

@@ -112,7 +112,8 @@ function func_serving(){
     cd ${serving_dir_value}
     python=${python_list[0]}
     unset https_proxy
     unset http_proxy
     # python serving
     for use_gpu in ${web_use_gpu_list[*]}; do
         if [ ${use_gpu} = "null" ]; then
@@ -123,19 +124,19 @@ function func_serving(){
             if [ ${model_name} = "ch_PP-OCRv2" ] || [ ${model_name} = "ch_PP-OCRv3" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0" ] || [ ${model_name} = "ch_ppocr_server_v2.0" ]; then
                 set_det_model_config=$(func_set_params "${det_server_key}" "${det_server_value}")
                 set_rec_model_config=$(func_set_params "${rec_server_key}" "${rec_server_value}")
-                web_service_cmd="${python} ${web_service_py} ${web_use_gpu_key}="" ${web_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} ${set_det_model_config} ${set_rec_model_config} > ${server_log_path} 2>&1 "
+                web_service_cmd="nohup ${python} ${web_service_py} ${web_use_gpu_key}="" ${web_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} ${set_det_model_config} ${set_rec_model_config} > ${server_log_path} 2>&1 &"
                 eval $web_service_cmd
                 last_status=${PIPESTATUS[0]}
                 status_check $last_status "${web_service_cmd}" "${status_log}" "${model_name}"
             elif [[ ${model_name} =~ "det" ]]; then
                 set_det_model_config=$(func_set_params "${det_server_key}" "${det_server_value}")
-                web_service_cmd="${python} ${web_service_py} ${web_use_gpu_key}="" ${web_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} ${set_det_model_config} > ${server_log_path} 2>&1 "
+                web_service_cmd="nohup ${python} ${web_service_py} ${web_use_gpu_key}="" ${web_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} ${set_det_model_config} > ${server_log_path} 2>&1 &"
                 eval $web_service_cmd
                 last_status=${PIPESTATUS[0]}
                 status_check $last_status "${web_service_cmd}" "${status_log}" "${model_name}"
             elif [[ ${model_name} =~ "rec" ]]; then
                 set_rec_model_config=$(func_set_params "${rec_server_key}" "${rec_server_value}")
-                web_service_cmd="${python} ${web_service_py} ${web_use_gpu_key}="" ${web_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} ${set_rec_model_config} > ${server_log_path} 2>&1 "
+                web_service_cmd="nohup ${python} ${web_service_py} ${web_use_gpu_key}="" ${web_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} ${set_rec_model_config} > ${server_log_path} 2>&1 &"
                 eval $web_service_cmd
                 last_status=${PIPESTATUS[0]}
                 status_check $last_status "${web_service_cmd}" "${status_log}" "${model_name}"
@@ -174,19 +175,19 @@ function func_serving(){
             if [ ${model_name} = "ch_PP-OCRv2" ] || [ ${model_name} = "ch_PP-OCRv3" ] || [ ${model_name} = "ch_ppocr_mobile_v2.0" ] || [ ${model_name} = "ch_ppocr_server_v2.0" ]; then
                 set_det_model_config=$(func_set_params "${det_server_key}" "${det_server_value}")
                 set_rec_model_config=$(func_set_params "${rec_server_key}" "${rec_server_value}")
-                web_service_cmd="${python} ${web_service_py} ${set_tensorrt} ${set_precision} ${set_det_model_config} ${set_rec_model_config} > ${server_log_path} 2>&1 "
+                web_service_cmd="nohup ${python} ${web_service_py} ${set_tensorrt} ${set_precision} ${set_det_model_config} ${set_rec_model_config} > ${server_log_path} 2>&1 &"
                 eval $web_service_cmd
                 last_status=${PIPESTATUS[0]}
                 status_check $last_status "${web_service_cmd}" "${status_log}" "${model_name}"
             elif [[ ${model_name} =~ "det" ]]; then
                 set_det_model_config=$(func_set_params "${det_server_key}" "${det_server_value}")
-                web_service_cmd="${python} ${web_service_py} ${set_tensorrt} ${set_precision} ${set_det_model_config} > ${server_log_path} 2>&1 "
+                web_service_cmd="nohup ${python} ${web_service_py} ${set_tensorrt} ${set_precision} ${set_det_model_config} > ${server_log_path} 2>&1 &"
                 eval $web_service_cmd
                 last_status=${PIPESTATUS[0]}
                 status_check $last_status "${web_service_cmd}" "${status_log}" "${model_name}"
             elif [[ ${model_name} =~ "rec" ]]; then
                 set_rec_model_config=$(func_set_params "${rec_server_key}" "${rec_server_value}")
-                web_service_cmd="${python} ${web_service_py} ${set_tensorrt} ${set_precision} ${set_rec_model_config} > ${server_log_path} 2>&1 "
+                web_service_cmd="nohup ${python} ${web_service_py} ${set_tensorrt} ${set_precision} ${set_rec_model_config} > ${server_log_path} 2>&1 &"
                 eval $web_service_cmd
                 last_status=${PIPESTATUS[0]}
                 status_check $last_status "${web_service_cmd}" "${status_log}" "${model_name}"
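Both scripts assemble their CLI fragments with func_set_params (for example set_det_model_config and set_rec_model_config above). That helper also comes from the TIPC common functions and is untouched by this commit; the sketch below is a hypothetical equivalent showing only the behaviour the diff relies on, with illustrative key/value inputs:

    # Hypothetical equivalent of func_set_params: turn a key/value pair from
    # the TIPC config into a "key=value" CLI fragment, or return an empty
    # fragment when the key or value is missing / "null".
    function func_set_params(){
        local key=$1
        local value=$2
        if [ -z "${key}" ] || [ "${key}" = "null" ]; then
            echo " "
        elif [ -z "${value}" ] || [ "${value}" = "null" ]; then
            echo " "
        else
            echo "${key}=${value}"
        fi
    }

    # Illustrative usage (key and path are examples, not the repository's config):
    set_det_model_config=$(func_set_params "--det_model_dir" "./ppocr_det_server_infer/")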