From 4fd696ccdf657457e396cd790156cbf1eeaddf30 Mon Sep 17 00:00:00 2001
From: WenmuZhou <572459439@qq.com>
Date: Wed, 9 Dec 2020 23:55:38 +0800
Subject: [PATCH] update inference model name

---
 deploy/cpp_infer/src/ocr_cls.cpp | 3 ++-
 deploy/cpp_infer/src/ocr_det.cpp | 3 ++-
 deploy/cpp_infer/src/ocr_rec.cpp | 3 ++-
 tools/infer/utility.py           | 4 ++--
 4 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/deploy/cpp_infer/src/ocr_cls.cpp b/deploy/cpp_infer/src/ocr_cls.cpp
index 67939727..fed2023f 100644
--- a/deploy/cpp_infer/src/ocr_cls.cpp
+++ b/deploy/cpp_infer/src/ocr_cls.cpp
@@ -81,7 +81,8 @@ cv::Mat Classifier::Run(cv::Mat &img) {
 
 void Classifier::LoadModel(const std::string &model_dir) {
   AnalysisConfig config;
-  config.SetModel(model_dir + "/cls.pdmodel", model_dir + "/cls.pdiparams");
+  config.SetModel(model_dir + "/inference.pdmodel",
+                  model_dir + "/inference.pdiparams");
 
   if (this->use_gpu_) {
     config.EnableUseGpu(this->gpu_mem_, this->gpu_id_);
diff --git a/deploy/cpp_infer/src/ocr_det.cpp b/deploy/cpp_infer/src/ocr_det.cpp
index 3ca4cc26..e253f9cc 100644
--- a/deploy/cpp_infer/src/ocr_det.cpp
+++ b/deploy/cpp_infer/src/ocr_det.cpp
@@ -18,7 +18,8 @@ namespace PaddleOCR {
 
 void DBDetector::LoadModel(const std::string &model_dir) {
   AnalysisConfig config;
-  config.SetModel(model_dir + "/det.pdmodel", model_dir + "/det.pdiparams");
+  config.SetModel(model_dir + "/inference.pdmodel",
+                  model_dir + "/inference.pdiparams");
 
   if (this->use_gpu_) {
     config.EnableUseGpu(this->gpu_mem_, this->gpu_id_);
diff --git a/deploy/cpp_infer/src/ocr_rec.cpp b/deploy/cpp_infer/src/ocr_rec.cpp
index 0b6d0532..d4deb5a1 100644
--- a/deploy/cpp_infer/src/ocr_rec.cpp
+++ b/deploy/cpp_infer/src/ocr_rec.cpp
@@ -103,7 +103,8 @@ void CRNNRecognizer::Run(std::vector<std::vector<std::vector<int>>> boxes,
 
 void CRNNRecognizer::LoadModel(const std::string &model_dir) {
   AnalysisConfig config;
-  config.SetModel(model_dir + "/rec.pdmodel", model_dir + "/rec.pdiparams");
+  config.SetModel(model_dir + "/inference.pdmodel",
+                  model_dir + "/inference.pdiparams");
 
   if (this->use_gpu_) {
     config.EnableUseGpu(this->gpu_mem_, this->gpu_id_);
diff --git a/tools/infer/utility.py b/tools/infer/utility.py
index 35b031e1..4b06b60b 100755
--- a/tools/infer/utility.py
+++ b/tools/infer/utility.py
@@ -100,8 +100,8 @@ def create_predictor(args, mode, logger):
     if model_dir is None:
         logger.info("not find {} model file path {}".format(mode, model_dir))
         sys.exit(0)
-    model_file_path = model_dir + ".pdmodel"
-    params_file_path = model_dir + ".pdiparams"
+    model_file_path = model_dir + "/inference.pdmodel"
+    params_file_path = model_dir + "/inference.pdiparams"
     if not os.path.exists(model_file_path):
         logger.info("not find model file path {}".format(model_file_path))
         sys.exit(0)
-- 
GitLab