diff --git a/lite/demo/cxx/mask_detection/mask_detection.cc b/lite/demo/cxx/mask_detection/mask_detection.cc index 869f08094d348842da35f0d7f2b5fed54501a854..b2c8a68c63b804070adab4e43411bb2763662aa5 100644 --- a/lite/demo/cxx/mask_detection/mask_detection.cc +++ b/lite/demo/cxx/mask_detection/mask_detection.cc @@ -125,8 +125,8 @@ void pre_process(const cv::Mat& img, neon_mean_scale(dimg, data, width * height, mean, scale); } -void RunModel(std::string det_model_dir, - std::string class_model_dir, +void RunModel(std::string det_model_file, + std::string class_model_file, std::string img_path) { // Prepare cv::Mat img = imread(img_path, cv::IMREAD_COLOR); @@ -138,7 +138,7 @@ void RunModel(std::string det_model_dir, // Detection MobileConfig config; - config.set_model_dir(det_model_dir); + config.set_model_from_file(det_model_file); // Create Predictor For Detction Model std::shared_ptr predictor = @@ -185,7 +185,7 @@ void RunModel(std::string det_model_dir, } // Classification - config.set_model_dir(class_model_dir); + config.set_model_from_file(class_model_file); // Create Predictor For Classification Model predictor = CreatePaddlePredictor(config); @@ -290,12 +290,12 @@ void RunModel(std::string det_model_dir, int main(int argc, char** argv) { if (argc < 3) { std::cerr << "[ERROR] usage: " << argv[0] - << " detction_model_dir classification_model_dir image_path\n"; + << " detection_model_file classification_model_file image_path\n"; exit(1); } - std::string detect_model_dir = argv[1]; - std::string classify_model_dir = argv[2]; + std::string detect_model_file = argv[1]; + std::string classify_model_file = argv[2]; std::string img_path = argv[3]; - RunModel(detect_model_dir, classify_model_dir, img_path); + RunModel(detect_model_file, classify_model_file, img_path); return 0; } diff --git a/lite/demo/cxx/mobile_classify/mobile_classify.cc b/lite/demo/cxx/mobile_classify/mobile_classify.cc index 
d0cf59e185e1330b7d8487d562afa0af29236007..518040ebd07bb4e8940f6a885cddd4f3c98143f3 100644 --- a/lite/demo/cxx/mobile_classify/mobile_classify.cc +++ b/lite/demo/cxx/mobile_classify/mobile_classify.cc @@ -126,7 +126,7 @@ void pre_process(const cv::Mat& img, neon_mean_scale(dimg, data, width * height, means, scales); } -void RunModel(std::string model_dir, +void RunModel(std::string model_file, std::string img_path, const std::vector& labels, const int topk, @@ -134,7 +134,7 @@ void RunModel(std::string model_dir, int height) { // 1. Set MobileConfig MobileConfig config; - config.set_model_dir(model_dir); + config.set_model_from_file(model_file); // 2. Create PaddlePredictor by MobileConfig std::shared_ptr predictor = @@ -169,12 +169,12 @@ void RunModel(std::string model_dir, int main(int argc, char** argv) { if (argc < 4) { std::cerr << "[ERROR] usage: " << argv[0] - << " model_dir image_path label_file\n"; + << " model_file image_path label_file\n"; exit(1); } - printf("parameter: model_dir, image_path and label_file are necessary \n"); + printf("parameter: model_file, image_path and label_file are necessary \n"); printf("parameter: topk, input_width, input_height, are optional \n"); - std::string model_dir = argv[1]; + std::string model_file = argv[1]; std::string img_path = argv[2]; std::string label_file = argv[3]; std::vector labels; @@ -190,6 +190,6 @@ int main(int argc, char** argv) { height = atoi(argv[6]); } - RunModel(model_dir, img_path, labels, topk, width, height); + RunModel(model_file, img_path, labels, topk, width, height); return 0; } diff --git a/lite/demo/cxx/ssd_detection/ssd_detection.cc b/lite/demo/cxx/ssd_detection/ssd_detection.cc index 2408afcbf64a24924eca119a9d9481dc030250c9..0be4561cd8d083f26e562c2346da217bb4b48283 100644 --- a/lite/demo/cxx/ssd_detection/ssd_detection.cc +++ b/lite/demo/cxx/ssd_detection/ssd_detection.cc @@ -162,10 +162,10 @@ std::vector detect_object(const float* data, return rect_out; } -void RunModel(std::string 
model_dir, std::string img_path) { +void RunModel(std::string model_file, std::string img_path) { // 1. Set MobileConfig MobileConfig config; - config.set_model_dir(model_dir); + config.set_model_from_file(model_file); // 2. Create PaddlePredictor by MobileConfig std::shared_ptr predictor = @@ -199,11 +199,11 @@ void RunModel(std::string model_dir, std::string img_path) { int main(int argc, char** argv) { if (argc < 3) { - std::cerr << "[ERROR] usage: " << argv[0] << " model_dir image_path\n"; + std::cerr << "[ERROR] usage: " << argv[0] << " model_file image_path\n"; exit(1); } - std::string model_dir = argv[1]; + std::string model_file = argv[1]; std::string img_path = argv[2]; - RunModel(model_dir, img_path); + RunModel(model_file, img_path); return 0; } diff --git a/lite/demo/cxx/test_cv/test_img_prepross.cc b/lite/demo/cxx/test_cv/test_img_prepross.cc index c2cbd66cc0a15a1032141641d83fbf8db85d20bf..8b33f90b6741de99f7b7f46879a737e27242b0a7 100644 --- a/lite/demo/cxx/test_cv/test_img_prepross.cc +++ b/lite/demo/cxx/test_cv/test_img_prepross.cc @@ -50,7 +50,7 @@ void test_img(std::vector cluster_id, float rotate, FlipParam flip, LayoutType layout, - std::string model_dir, + std::string model_file, int test_iter = 1) { // init // paddle::lite::DeviceInfo::Init(); @@ -65,10 +65,10 @@ void test_img(std::vector cluster_id, std::cout << "cluster: " << cls << ", threads: " << th << std::endl; // 1. Set MobileConfig MobileConfig config; - config.set_model_dir(model_dir); + config.set_model_from_file(model_file); config.set_power_mode((PowerMode)cls); config.set_threads(th); - std::cout << "model: " << model_dir; + std::cout << "model: " << model_file; // 2. 
Create PaddlePredictor by MobileConfig std::shared_ptr predictor = @@ -359,9 +359,9 @@ int main(int argc, char** argv) { int flip = -1; float rotate = 90; int layout = 1; - std::string model_dir = "mobilenet_v1"; + std::string model_file = "mobilenet_v1.nb"; if (argc > 7) { - model_dir = argv[7]; + model_file = argv[7]; } if (argc > 8) { flip = atoi(argv[8]); @@ -383,7 +383,7 @@ int main(int argc, char** argv) { rotate, (FlipParam)flip, (LayoutType)layout, - model_dir, + model_file, 20); return 0; } diff --git a/lite/demo/cxx/test_cv/test_model_cv.cc b/lite/demo/cxx/test_cv/test_model_cv.cc index 24f408bf4a55ea2d499e39902201597c0e8c6e4e..caa085eecb81e54859c1bdd5cd7c0654175b7a9a 100644 --- a/lite/demo/cxx/test_cv/test_model_cv.cc +++ b/lite/demo/cxx/test_cv/test_model_cv.cc @@ -111,7 +111,7 @@ void pre_process(const cv::Mat& img, int width, int height, Tensor dstTensor) { #endif } -void RunModel(std::string model_dir, +void RunModel(std::string model_file, std::string img_path, std::vector input_shape, PowerMode power_mode, @@ -120,7 +120,7 @@ void RunModel(std::string model_dir, int warmup = 0) { // 1. 
Set MobileConfig MobileConfig config; - config.set_model_dir(model_dir); + config.set_model_from_file(model_file); config.set_power_mode(power_mode); config.set_threads(thread_num); @@ -161,7 +161,7 @@ void RunModel(std::string model_dir, } std::cout << "================== Speed Report ===================" << std::endl; - std::cout << "Model: " << model_dir + std::cout << "Model: " << model_file << ", power_mode: " << static_cast(power_mode) << ", threads num " << thread_num << ", warmup: " << warmup << ", repeats: " << test_iter << ", avg time: " << lps / test_iter @@ -187,10 +187,10 @@ void RunModel(std::string model_dir, int main(int argc, char** argv) { if (argc < 7) { std::cerr << "[ERROR] usage: " << argv[0] - << " model_dir image_path input_shape\n"; + << " model_file image_path input_shape\n"; exit(1); } - std::string model_dir = argv[1]; + std::string model_file = argv[1]; std::string img_path = argv[2]; std::vector input_shape; input_shape.push_back(atoi(argv[3])); @@ -213,7 +213,7 @@ int main(int argc, char** argv) { if (argc > 10) { warmup = atoi(argv[10]); } - RunModel(model_dir, + RunModel(model_file, img_path, input_shape, (PowerMode)power_mode, diff --git a/lite/demo/cxx/yolov3_detection/yolov3_detection.cc b/lite/demo/cxx/yolov3_detection/yolov3_detection.cc index a9beb1ed28de1f3c28eb5c03b3b660d518ee10c5..d34319050392c74c3fa552bd24c0ea24245ced99 100644 --- a/lite/demo/cxx/yolov3_detection/yolov3_detection.cc +++ b/lite/demo/cxx/yolov3_detection/yolov3_detection.cc @@ -182,10 +182,10 @@ std::vector detect_object(const float* data, return rect_out; } -void RunModel(std::string model_dir, std::string img_path) { +void RunModel(std::string model_file, std::string img_path) { // 1. Set MobileConfig MobileConfig config; - config.set_model_dir(model_dir); + config.set_model_from_file(model_file); // 2. 
Create PaddlePredictor by MobileConfig std::shared_ptr predictor = @@ -228,11 +228,11 @@ void RunModel(std::string model_dir, std::string img_path) { int main(int argc, char** argv) { if (argc < 3) { - std::cerr << "[ERROR] usage: " << argv[0] << " model_dir image_path\n"; + std::cerr << "[ERROR] usage: " << argv[0] << " model_file image_path\n"; exit(1); } - std::string model_dir = argv[1]; + std::string model_file = argv[1]; std::string img_path = argv[2]; - RunModel(model_dir, img_path); + RunModel(model_file, img_path); return 0; }