Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
s920243400
PaddleDetection
提交
7c9c3c06
P
PaddleDetection
项目概览
s920243400
/
PaddleDetection
与 Fork 源项目一致
Fork自
PaddlePaddle / PaddleDetection
通知
2
Star
0
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
PaddleDetection
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
7c9c3c06
编写于
8月 04, 2020
作者:
J
Jack
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
add output_dir args and give warning when yaml file is not found
上级
8138f9aa
变更
4
隐藏空白更改
内联
并排
Showing
4 changed files
with
72 additions
and
8 deletions
+72
-8
deploy/cpp/include/config_parser.h
deploy/cpp/include/config_parser.h
+13
-1
deploy/cpp/include/object_detector.h
deploy/cpp/include/object_detector.h
+6
-1
deploy/cpp/src/main.cc
deploy/cpp/src/main.cc
+42
-6
deploy/cpp/src/object_detector.cc
deploy/cpp/src/object_detector.cc
+11
-0
未找到文件。
deploy/cpp/include/config_parser.h
浏览文件 @
7c9c3c06
...
...
@@ -18,6 +18,12 @@
#include <vector>
#include <string>
#include <map>
#ifdef _WIN32
#include <direct.h>
#include <io.h>
#else // Linux/Unix
#include <unistd.h>
#endif
#include "yaml-cpp/yaml.h"
...
...
@@ -38,9 +44,15 @@ class ConfigPaser {
bool
load_config
(
const
std
::
string
&
model_dir
,
const
std
::
string
&
cfg
=
"infer_cfg.yml"
)
{
std
::
string
cfg_file
=
model_dir
+
OS_PATH_SEP
+
cfg
;
if
(
access
(
cfg_file
.
c_str
(),
0
)
<
0
)
{
std
::
cerr
<<
"[WARNING] Config yaml file is not found, please check "
<<
"whether infer_cfg.yml exists in model_dir"
<<
std
::
endl
;
return
false
;
}
// Load as a YAML::Node
YAML
::
Node
config
;
config
=
YAML
::
LoadFile
(
model_dir
+
OS_PATH_SEP
+
cfg
);
config
=
YAML
::
LoadFile
(
cfg_file
);
// Get runtime mode : fluid, trt_fp16, trt_fp32
if
(
config
[
"mode"
].
IsDefined
())
{
...
...
deploy/cpp/include/object_detector.h
浏览文件 @
7c9c3c06
...
...
@@ -58,12 +58,16 @@ class ObjectDetector {
bool
use_gpu
=
false
,
const
std
::
string
&
run_mode
=
"fluid"
,
const
int
gpu_id
=
0
)
{
config_
.
load_config
(
model_dir
);
success_init_
=
config_
.
load_config
(
model_dir
);
threshold_
=
config_
.
draw_threshold_
;
preprocessor_
.
Init
(
config_
.
preprocess_info_
,
config_
.
arch_
);
LoadModel
(
model_dir
,
use_gpu
,
config_
.
min_subgraph_size_
,
1
,
run_mode
,
gpu_id
);
}
// Reports whether initialization succeeded: true only if the config
// yaml was loaded during construction (and LoadModel found the model
// and parameter files).
bool GetSuccessInit() const {
  return success_init_;
}
// Load Paddle inference model
void
LoadModel
(
const
std
::
string
&
model_dir
,
...
...
@@ -97,6 +101,7 @@ class ObjectDetector {
std
::
vector
<
float
>
output_data_
;
float
threshold_
;
ConfigPaser
config_
;
bool
success_init_
;
};
}
// namespace PaddleDetection
deploy/cpp/src/main.cc
浏览文件 @
7c9c3c06
...
...
@@ -20,6 +20,22 @@
#include "include/object_detector.h"
#ifdef _WIN32
#include <direct.h>
#include <io.h>
#else // Linux/Unix
#include <dirent.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#endif
#ifdef _WIN32
#define OS_PATH_SEP "\\"
#else
#define OS_PATH_SEP "/"
#endif
DEFINE_string
(
model_dir
,
""
,
"Path of inference model"
);
DEFINE_string
(
image_path
,
""
,
"Path of input image"
);
...
...
@@ -27,6 +43,23 @@ DEFINE_string(video_path, "", "Path of input video");
DEFINE_bool
(
use_gpu
,
false
,
"Infering with GPU or CPU"
);
DEFINE_string
(
run_mode
,
"fluid"
,
"Mode of running(fluid/trt_fp32/trt_fp16)"
);
DEFINE_int32
(
gpu_id
,
0
,
"Device id of GPU to execute"
);
DEFINE_string
(
output_dir
,
"output"
,
"Path of saved image or video"
);
// Fallback so this helper is usable even if the path-separator macro
// is not already defined by the including translation unit.
#ifndef OS_PATH_SEP
#ifdef _WIN32
#define OS_PATH_SEP "\\"
#else
#define OS_PATH_SEP "/"
#endif
#endif

// Build the full output path for a result file.
//
// Ensures `save_dir` exists (creating it with user rwx permissions on
// POSIX if missing), then joins it with the basename of `file_path`.
// Returns "<save_dir><sep><basename of file_path>".
std::string generate_save_path(const std::string& save_dir,
                               const std::string& file_path) {
  if (access(save_dir.c_str(), 0) < 0) {
#ifdef _WIN32
    mkdir(save_dir.c_str());
#else
    if (mkdir(save_dir.c_str(), S_IRWXU) < 0) {
      // Fixed missing space: previously printed e.g. "outputdirectory."
      std::cerr << "Fail to create " << save_dir << " directory."
                << std::endl;
    }
#endif
  }
  // find_last_of returns npos when no separator is present; with an
  // unsigned size_type, npos + 1 wraps to 0, so the whole file_path is
  // then (correctly) treated as the file name. The original stored the
  // result in an int, relying on implementation-defined narrowing.
  std::string::size_type pos = file_path.find_last_of(OS_PATH_SEP);
  std::string image_name(file_path.substr(pos + 1));
  return save_dir + OS_PATH_SEP + image_name;
}
void
PredictVideo
(
const
std
::
string
&
video_path
,
PaddleDetection
::
ObjectDetector
*
det
)
{
...
...
@@ -45,7 +78,7 @@ void PredictVideo(const std::string& video_path,
// Create VideoWriter for output
cv
::
VideoWriter
video_out
;
std
::
string
video_out_path
=
"output.mp4"
;
std
::
string
video_out_path
=
generate_save_path
(
FLAGS_output_dir
,
"output.mp4"
)
;
video_out
.
open
(
video_out_path
.
c_str
(),
0x00000021
,
video_fps
,
...
...
@@ -110,7 +143,8 @@ void PredictImage(const std::string& image_path,
std
::
vector
<
int
>
compression_params
;
compression_params
.
push_back
(
CV_IMWRITE_JPEG_QUALITY
);
compression_params
.
push_back
(
95
);
cv
::
imwrite
(
"output.jpg"
,
vis_img
,
compression_params
);
std
::
string
output_image_path
=
generate_save_path
(
FLAGS_output_dir
,
"output.jpg"
);
cv
::
imwrite
(
output_image_path
,
vis_img
,
compression_params
);
printf
(
"Visualized output saved as output.jpeg
\n
"
);
}
...
...
@@ -133,10 +167,12 @@ int main(int argc, char** argv) {
PaddleDetection
::
ObjectDetector
det
(
FLAGS_model_dir
,
FLAGS_use_gpu
,
FLAGS_run_mode
,
FLAGS_gpu_id
);
// Do inference on input video or image
if
(
!
FLAGS_video_path
.
empty
())
{
PredictVideo
(
FLAGS_video_path
,
&
det
);
}
else
if
(
!
FLAGS_image_path
.
empty
())
{
PredictImage
(
FLAGS_image_path
,
&
det
);
if
(
det
.
GetSuccessInit
())
{
if
(
!
FLAGS_video_path
.
empty
())
{
PredictVideo
(
FLAGS_video_path
,
&
det
);
}
else
if
(
!
FLAGS_image_path
.
empty
())
{
PredictImage
(
FLAGS_image_path
,
&
det
);
}
}
return
0
;
}
deploy/cpp/src/object_detector.cc
浏览文件 @
7c9c3c06
...
...
@@ -15,6 +15,12 @@
// for setprecision
#include <iomanip>
#include "include/object_detector.h"
#ifdef _WIN32
#include <direct.h>
#include <io.h>
#else // Linux/Unix
#include <unistd.h>
#endif
namespace
PaddleDetection
{
...
...
@@ -28,6 +34,11 @@ void ObjectDetector::LoadModel(const std::string& model_dir,
paddle
::
AnalysisConfig
config
;
std
::
string
prog_file
=
model_dir
+
OS_PATH_SEP
+
"__model__"
;
std
::
string
params_file
=
model_dir
+
OS_PATH_SEP
+
"__params__"
;
if
(
access
(
prog_file
.
c_str
(),
0
)
<
0
||
access
(
params_file
.
c_str
(),
0
)
<
0
)
{
std
::
cerr
<<
"[WARNING] Model file or parameter file can't be found."
<<
std
::
endl
;
success_init_
=
false
;
return
;
}
config
.
SetModel
(
prog_file
,
params_file
);
if
(
use_gpu
)
{
config
.
EnableUseGpu
(
100
,
gpu_id
);
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录