diff --git a/deploy/cpp/include/config_parser.h b/deploy/cpp/include/config_parser.h
index f2102dcc9f2902d319790ebae705a6d3fa3a4993..4817eb38d3bcdb1397c7d6c145988787383d7029 100644
--- a/deploy/cpp/include/config_parser.h
+++ b/deploy/cpp/include/config_parser.h
@@ -18,6 +18,12 @@
 #include <map>
 #include <string>
 #include <vector>
+#ifdef _WIN32
+#include <direct.h>
+#include <io.h>
+#else // Linux/Unix
+#include <unistd.h>
+#endif
 
 #include "yaml-cpp/yaml.h"
 
@@ -38,9 +44,15 @@ class ConfigPaser {
 
   bool load_config(const std::string& model_dir,
                    const std::string& cfg = "infer_cfg.yml") {
+    std::string cfg_file = model_dir + OS_PATH_SEP + cfg;
+    if (access(cfg_file.c_str(), 0) < 0) {
+      std::cerr << "[WARNING] Config yaml file is not found, please check "
+                << "whether infer_cfg.yml exists in model_dir" << std::endl;
+      return false;
+    }
     // Load as a YAML::Node
     YAML::Node config;
-    config = YAML::LoadFile(model_dir + OS_PATH_SEP + cfg);
+    config = YAML::LoadFile(cfg_file);
 
     // Get runtime mode : fluid, trt_fp16, trt_fp32
     if (config["mode"].IsDefined()) {
diff --git a/deploy/cpp/include/object_detector.h b/deploy/cpp/include/object_detector.h
index 82d860f8d24a92c965010d84988ceba62454b2cd..a13794abb2dd89619d86551ed10c3696656c525f 100644
--- a/deploy/cpp/include/object_detector.h
+++ b/deploy/cpp/include/object_detector.h
@@ -58,12 +58,16 @@ class ObjectDetector {
                           bool use_gpu=false,
                           const std::string& run_mode="fluid",
                           const int gpu_id=0) {
-    config_.load_config(model_dir);
+    success_init_ = config_.load_config(model_dir);
     threshold_ = config_.draw_threshold_;
     preprocessor_.Init(config_.preprocess_info_, config_.arch_);
     LoadModel(model_dir, use_gpu, config_.min_subgraph_size_, 1, run_mode, gpu_id);
   }
 
+  bool GetSuccessInit() const {
+    return success_init_;
+  }
+
   // Load Paddle inference model
   void LoadModel(
       const std::string& model_dir,
@@ -97,6 +101,7 @@
   std::vector<float> output_data_;
   float threshold_;
   ConfigPaser config_;
+  bool success_init_;
 };
 
 }  // namespace PaddleDetection
diff --git a/deploy/cpp/src/main.cc b/deploy/cpp/src/main.cc
index 94e74022a347cef6c1547ea47e8c135d72f97691..96fa3058eeb5f0e22ec0d9c25ff14c04bddf3960 100644
--- a/deploy/cpp/src/main.cc
+++ b/deploy/cpp/src/main.cc
@@ -20,6 +20,22 @@
 
 #include "include/object_detector.h"
 
+#ifdef _WIN32
+#include <direct.h>
+#include <io.h>
+#else // Linux/Unix
+#include <stdarg.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+#endif
+
+
+#ifdef _WIN32
+#define OS_PATH_SEP "\\"
+#else
+#define OS_PATH_SEP "/"
+#endif
 
 DEFINE_string(model_dir, "", "Path of inference model");
 DEFINE_string(image_path, "", "Path of input image");
@@ -27,6 +43,23 @@ DEFINE_string(video_path, "", "Path of input video");
 DEFINE_bool(use_gpu, false, "Infering with GPU or CPU");
 DEFINE_string(run_mode, "fluid", "Mode of running(fluid/trt_fp32/trt_fp16)");
 DEFINE_int32(gpu_id, 0, "Device id of GPU to execute");
+DEFINE_string(output_dir, "output", "Path of saved image or video");
+
+std::string generate_save_path(const std::string& save_dir,
+                               const std::string& file_path) {
+  if (access(save_dir.c_str(), 0) < 0) {
+#ifdef _WIN32
+    mkdir(save_dir.c_str());
+#else
+    if (mkdir(save_dir.c_str(), S_IRWXU) < 0) {
+      std::cerr << "Fail to create " << save_dir << " directory." << std::endl;
+    }
+#endif
+  }
+  int pos = file_path.find_last_of(OS_PATH_SEP);
+  std::string image_name(file_path.substr(pos + 1));
+  return save_dir + OS_PATH_SEP + image_name;
+}
 
 void PredictVideo(const std::string& video_path,
                   PaddleDetection::ObjectDetector* det) {
@@ -45,7 +78,7 @@
 
   // Create VideoWriter for output
   cv::VideoWriter video_out;
-  std::string video_out_path = "output.mp4";
+  std::string video_out_path = generate_save_path(FLAGS_output_dir, "output.mp4");
   video_out.open(video_out_path.c_str(),
                  0x00000021,
                  video_fps,
@@ -110,7 +143,8 @@
   std::vector<int> compression_params;
   compression_params.push_back(CV_IMWRITE_JPEG_QUALITY);
   compression_params.push_back(95);
-  cv::imwrite("output.jpg", vis_img, compression_params);
+  std::string output_image_path = generate_save_path(FLAGS_output_dir, "output.jpg");
+  cv::imwrite(output_image_path, vis_img, compression_params);
   printf("Visualized output saved as output.jpeg\n");
 }
 
@@ -133,10 +167,12 @@
   PaddleDetection::ObjectDetector det(FLAGS_model_dir, FLAGS_use_gpu,
                                       FLAGS_run_mode, FLAGS_gpu_id);
   // Do inference on input video or image
-  if (!FLAGS_video_path.empty()) {
-    PredictVideo(FLAGS_video_path, &det);
-  } else if (!FLAGS_image_path.empty()) {
-    PredictImage(FLAGS_image_path, &det);
+  if (det.GetSuccessInit()) {
+    if (!FLAGS_video_path.empty()) {
+      PredictVideo(FLAGS_video_path, &det);
+    } else if (!FLAGS_image_path.empty()) {
+      PredictImage(FLAGS_image_path, &det);
+    }
   }
   return 0;
 }
diff --git a/deploy/cpp/src/object_detector.cc b/deploy/cpp/src/object_detector.cc
index bffd5cc55857d61c3f1bf4aee5b76a6f8ede748a..be1b9410564f49566d4693bfec8e049595e6ef38 100644
--- a/deploy/cpp/src/object_detector.cc
+++ b/deploy/cpp/src/object_detector.cc
@@ -15,6 +15,12 @@
 // for setprecision
 #include <iomanip>
 #include "include/object_detector.h"
+#ifdef _WIN32
+#include <direct.h>
+#include <io.h>
+#else // Linux/Unix
+#include <unistd.h>
+#endif
 
 namespace PaddleDetection {
 
@@ -28,6 +34,11 @@
   paddle::AnalysisConfig config;
   std::string prog_file = model_dir + OS_PATH_SEP + "__model__";
   std::string params_file = model_dir + OS_PATH_SEP + "__params__";
+  if (access(prog_file.c_str(), 0) < 0 || access(params_file.c_str(), 0) < 0) {
+    std::cerr << "[WARNING] Model file or parameter file can't be found." << std::endl;
+    success_init_ = false;
+    return;
+  }
   config.SetModel(prog_file, params_file);
   if (use_gpu) {
     config.EnableUseGpu(100, gpu_id);
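
Note on `generate_save_path()` in `deploy/cpp/src/main.cc`: when `file_path` contains no separator (e.g. the literal `"output.mp4"` passed from `PredictVideo`), `find_last_of` returns `std::string::npos`; stored into `int pos` this truncates to `-1` on typical platforms, so `pos + 1` is `0` and `substr(0)` returns the whole name. Below is a minimal, hypothetical sketch of that splitting behavior (not part of the patch), assuming the POSIX `"/"` separator and using `size_type` instead of the patch's `int`; the helper name `join_output` is made up for illustration:

```cpp
// Hypothetical sketch: mirrors the filename splitting in generate_save_path().
#include <cassert>
#include <string>

std::string join_output(const std::string& save_dir,
                        const std::string& file_path) {
  // npos + 1 wraps to 0 for size_type, so a bare filename is returned whole.
  std::string::size_type pos = file_path.find_last_of("/");
  return save_dir + "/" + file_path.substr(pos + 1);
}

int main() {
  assert(join_output("output", "imgs/test.jpg") == "output/test.jpg");  // path with separator
  assert(join_output("output", "output.mp4") == "output/output.mp4");   // bare filename
  return 0;
}
```

With this patch, the new `--output_dir` flag (default `output`) directs `PredictImage` and `PredictVideo` to write `output.jpg` / `output.mp4` into that directory, creating it first if it does not exist.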