diff --git a/deploy/cpp_infer/src/ocr_cls.cpp b/deploy/cpp_infer/src/ocr_cls.cpp
index 6793972757575e5fab2f175df52c6403dacc2939..fed2023f9f111294a07a9c841f4843404bbd9af2 100644
--- a/deploy/cpp_infer/src/ocr_cls.cpp
+++ b/deploy/cpp_infer/src/ocr_cls.cpp
@@ -81,7 +81,8 @@ cv::Mat Classifier::Run(cv::Mat &img) {
 
 void Classifier::LoadModel(const std::string &model_dir) {
   AnalysisConfig config;
-  config.SetModel(model_dir + "/cls.pdmodel", model_dir + "/cls.pdiparams");
+  config.SetModel(model_dir + "/inference.pdmodel",
+                  model_dir + "/inference.pdiparams");
 
   if (this->use_gpu_) {
     config.EnableUseGpu(this->gpu_mem_, this->gpu_id_);
diff --git a/deploy/cpp_infer/src/ocr_det.cpp b/deploy/cpp_infer/src/ocr_det.cpp
index 3ca4cc26b41373ff6cc4605e6b995969d8cd2245..e253f9cc89810f4d1adfca5be5186220a873d1a2 100644
--- a/deploy/cpp_infer/src/ocr_det.cpp
+++ b/deploy/cpp_infer/src/ocr_det.cpp
@@ -18,7 +18,8 @@ namespace PaddleOCR {
 
 void DBDetector::LoadModel(const std::string &model_dir) {
   AnalysisConfig config;
-  config.SetModel(model_dir + "/det.pdmodel", model_dir + "/det.pdiparams");
+  config.SetModel(model_dir + "/inference.pdmodel",
+                  model_dir + "/inference.pdiparams");
 
   if (this->use_gpu_) {
     config.EnableUseGpu(this->gpu_mem_, this->gpu_id_);
diff --git a/deploy/cpp_infer/src/ocr_rec.cpp b/deploy/cpp_infer/src/ocr_rec.cpp
index 0b6d0532b08a9d61222f3b71322e0468d21a78c1..d4deb5a17fc47427eb92cda02c270d268cfcafc7 100644
--- a/deploy/cpp_infer/src/ocr_rec.cpp
+++ b/deploy/cpp_infer/src/ocr_rec.cpp
@@ -103,7 +103,8 @@ void CRNNRecognizer::Run(std::vector<std::vector<std::vector<int>>> boxes,
 
 void CRNNRecognizer::LoadModel(const std::string &model_dir) {
   AnalysisConfig config;
-  config.SetModel(model_dir + "/rec.pdmodel", model_dir + "/rec.pdiparams");
+  config.SetModel(model_dir + "/inference.pdmodel",
+                  model_dir + "/inference.pdiparams");
 
   if (this->use_gpu_) {
     config.EnableUseGpu(this->gpu_mem_, this->gpu_id_);
diff --git a/tools/infer/utility.py b/tools/infer/utility.py
index 35b031e1352ee029be75e487dbf0c2c1a54166ed..4b06b60b9e25954be7375882b5fb67343312b222 100755
--- a/tools/infer/utility.py
+++ b/tools/infer/utility.py
@@ -100,8 +100,8 @@ def create_predictor(args, mode, logger):
     if model_dir is None:
         logger.info("not find {} model file path {}".format(mode, model_dir))
         sys.exit(0)
-    model_file_path = model_dir + ".pdmodel"
-    params_file_path = model_dir + ".pdiparams"
+    model_file_path = model_dir + "/inference.pdmodel"
+    params_file_path = model_dir + "/inference.pdiparams"
     if not os.path.exists(model_file_path):
         logger.info("not find model file path {}".format(model_file_path))
         sys.exit(0)
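
Note: the hunks above all switch to a single naming convention: every exported model directory is expected to hold inference.pdmodel (the program/graph) and inference.pdiparams (the weights), whether the directory belongs to the detector, classifier, or recognizer. A minimal sketch of the directory check this implies, assuming only the file names introduced in the patch (check_model_dir is a hypothetical helper, not part of this change):

    import os

    def check_model_dir(model_dir):
        # Expected layout under the new convention:
        #   <model_dir>/inference.pdmodel    - model structure
        #   <model_dir>/inference.pdiparams  - model parameters
        model_file = os.path.join(model_dir, "inference.pdmodel")
        params_file = os.path.join(model_dir, "inference.pdiparams")
        for path in (model_file, params_file):
            if not os.path.exists(path):
                # Mirrors the "not find model file path" check in create_predictor
                raise FileNotFoundError("not find model file path {}".format(path))
        return model_file, params_file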