Unverified commit 6c90792b · authored by huzhiqiang · committed by GitHub

fix cpp demo to apply new API test=develop (#3052)

Parent: aad9c8a4
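The demos previously loaded models with MobileConfig::set_model_dir(), which expects a directory containing the model files. This commit switches every C++ demo to MobileConfig::set_model_from_file(), which takes the single optimized .nb model file (as produced by Paddle-Lite's opt tool) directly. A minimal sketch of the new loading path is shown below; the header name and namespace follow the usual Paddle-Lite C++ demo layout and are assumptions, not part of this diff.

```cpp
#include <memory>
#include <string>

#include "paddle_api.h"  // Paddle-Lite C++ API header (assumed include path)

using namespace paddle::lite_api;  // NOLINT

// Build a predictor from an optimized model file such as "mobilenet_v1.nb".
std::shared_ptr<PaddlePredictor> LoadPredictor(const std::string& model_file) {
  MobileConfig config;
  // New API: pass the .nb file itself instead of calling set_model_dir()
  // with the directory that holds the model.
  config.set_model_from_file(model_file);
  return CreatePaddlePredictor<MobileConfig>(config);
}
```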
@@ -125,8 +125,8 @@ void pre_process(const cv::Mat& img,
   neon_mean_scale(dimg, data, width * height, mean, scale);
 }
-void RunModel(std::string det_model_dir,
-              std::string class_model_dir,
+void RunModel(std::string det_model_file,
+              std::string class_model_file,
               std::string img_path) {
   // Prepare
   cv::Mat img = imread(img_path, cv::IMREAD_COLOR);
@@ -138,7 +138,7 @@ void RunModel(std::string det_model_dir,
   // Detection
   MobileConfig config;
-  config.set_model_dir(det_model_dir);
+  config.set_model_from_file(det_model_file);
   // Create Predictor For Detction Model
   std::shared_ptr<PaddlePredictor> predictor =
@@ -185,7 +185,7 @@ void RunModel(std::string det_model_dir,
   }
   // Classification
-  config.set_model_dir(class_model_dir);
+  config.set_model_from_file(class_model_file);
   // Create Predictor For Classification Model
   predictor = CreatePaddlePredictor<MobileConfig>(config);
@@ -290,12 +290,12 @@ void RunModel(std::string det_model_dir,
 int main(int argc, char** argv) {
   if (argc < 3) {
     std::cerr << "[ERROR] usage: " << argv[0]
-              << " detction_model_dir classification_model_dir image_path\n";
+              << " detction_model_file classification_model_file image_path\n";
     exit(1);
   }
-  std::string detect_model_dir = argv[1];
-  std::string classify_model_dir = argv[2];
+  std::string detect_model_file = argv[1];
+  std::string classify_model_file = argv[2];
   std::string img_path = argv[3];
-  RunModel(detect_model_dir, classify_model_dir, img_path);
+  RunModel(detect_model_file, classify_model_file, img_path);
   return 0;
 }
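With this change the two-model demo above (detection followed by classification) is invoked with two .nb files instead of two model directories, e.g. `./demo_binary detection_model.nb classification_model.nb input.jpg` (the binary and file names here are illustrative, not taken from this diff).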
@@ -126,7 +126,7 @@ void pre_process(const cv::Mat& img,
   neon_mean_scale(dimg, data, width * height, means, scales);
 }
-void RunModel(std::string model_dir,
+void RunModel(std::string model_file,
               std::string img_path,
               const std::vector<std::string>& labels,
               const int topk,
@@ -134,7 +134,7 @@ void RunModel(std::string model_dir,
               int height) {
   // 1. Set MobileConfig
   MobileConfig config;
-  config.set_model_dir(model_dir);
+  config.set_model_from_file(model_file);
   // 2. Create PaddlePredictor by MobileConfig
   std::shared_ptr<PaddlePredictor> predictor =
@@ -169,12 +169,12 @@ void RunModel(std::string model_dir,
 int main(int argc, char** argv) {
   if (argc < 4) {
     std::cerr << "[ERROR] usage: " << argv[0]
-              << " model_dir image_path label_file\n";
+              << " model_file image_path label_file\n";
     exit(1);
   }
-  printf("parameter: model_dir, image_path and label_file are necessary \n");
+  printf("parameter: model_file, image_path and label_file are necessary \n");
   printf("parameter: topk, input_width, input_height, are optional \n");
-  std::string model_dir = argv[1];
+  std::string model_file = argv[1];
   std::string img_path = argv[2];
   std::string label_file = argv[3];
   std::vector<std::string> labels;
@@ -190,6 +190,6 @@ int main(int argc, char** argv) {
     height = atoi(argv[6]);
   }
-  RunModel(model_dir, img_path, labels, topk, width, height);
+  RunModel(model_file, img_path, labels, topk, width, height);
   return 0;
 }
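Likewise, the classification demo above now takes the optimized model file as its first argument, e.g. `./classification_demo model.nb input.jpg labels.txt [topk input_width input_height]`; the binary name is illustrative, while the argument order follows the argv parsing shown in the hunk.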
@@ -162,10 +162,10 @@ std::vector<Object> detect_object(const float* data,
   return rect_out;
 }
-void RunModel(std::string model_dir, std::string img_path) {
+void RunModel(std::string model_file, std::string img_path) {
   // 1. Set MobileConfig
   MobileConfig config;
-  config.set_model_dir(model_dir);
+  config.set_model_from_file(model_file);
   // 2. Create PaddlePredictor by MobileConfig
   std::shared_ptr<PaddlePredictor> predictor =
@@ -199,11 +199,11 @@ void RunModel(std::string model_dir, std::string img_path) {
 int main(int argc, char** argv) {
   if (argc < 3) {
-    std::cerr << "[ERROR] usage: " << argv[0] << " model_dir image_path\n";
+    std::cerr << "[ERROR] usage: " << argv[0] << " model_file image_path\n";
     exit(1);
   }
-  std::string model_dir = argv[1];
+  std::string model_file = argv[1];
   std::string img_path = argv[2];
-  RunModel(model_dir, img_path);
+  RunModel(model_file, img_path);
   return 0;
 }
@@ -50,7 +50,7 @@ void test_img(std::vector<int> cluster_id,
               float rotate,
               FlipParam flip,
               LayoutType layout,
-              std::string model_dir,
+              std::string model_file,
               int test_iter = 1) {
   // init
   // paddle::lite::DeviceInfo::Init();
@@ -65,10 +65,10 @@ void test_img(std::vector<int> cluster_id,
       std::cout << "cluster: " << cls << ", threads: " << th << std::endl;
       // 1. Set MobileConfig
       MobileConfig config;
-      config.set_model_dir(model_dir);
+      config.set_model_from_file(model_file);
       config.set_power_mode((PowerMode)cls);
       config.set_threads(th);
-      std::cout << "model: " << model_dir;
+      std::cout << "model: " << model_file;
       // 2. Create PaddlePredictor by MobileConfig
       std::shared_ptr<PaddlePredictor> predictor =
@@ -359,9 +359,9 @@ int main(int argc, char** argv) {
   int flip = -1;
   float rotate = 90;
   int layout = 1;
-  std::string model_dir = "mobilenet_v1";
+  std::string model_file = "mobilenet_v1.nb";
   if (argc > 7) {
-    model_dir = argv[7];
+    model_file = argv[7];
   }
   if (argc > 8) {
     flip = atoi(argv[8]);
@@ -383,7 +383,7 @@ int main(int argc, char** argv) {
            rotate,
            (FlipParam)flip,
            (LayoutType)layout,
-           model_dir,
+           model_file,
            20);
   return 0;
 }
@@ -111,7 +111,7 @@ void pre_process(const cv::Mat& img, int width, int height, Tensor dstTensor) {
 #endif
 }
-void RunModel(std::string model_dir,
+void RunModel(std::string model_file,
               std::string img_path,
               std::vector<int> input_shape,
               PowerMode power_mode,
@@ -120,7 +120,7 @@ void RunModel(std::string model_dir,
               int warmup = 0) {
   // 1. Set MobileConfig
   MobileConfig config;
-  config.set_model_dir(model_dir);
+  config.set_model_from_file(model_file);
   config.set_power_mode(power_mode);
   config.set_threads(thread_num);
@@ -161,7 +161,7 @@ void RunModel(std::string model_dir,
   }
   std::cout << "================== Speed Report ==================="
             << std::endl;
-  std::cout << "Model: " << model_dir
+  std::cout << "Model: " << model_file
             << ", power_mode: " << static_cast<int>(power_mode)
             << ", threads num " << thread_num << ", warmup: " << warmup
             << ", repeats: " << test_iter << ", avg time: " << lps / test_iter
@@ -187,10 +187,10 @@ void RunModel(std::string model_dir,
 int main(int argc, char** argv) {
   if (argc < 7) {
     std::cerr << "[ERROR] usage: " << argv[0]
-              << " model_dir image_path input_shape\n";
+              << " model_file image_path input_shape\n";
     exit(1);
   }
-  std::string model_dir = argv[1];
+  std::string model_file = argv[1];
   std::string img_path = argv[2];
   std::vector<int> input_shape;
   input_shape.push_back(atoi(argv[3]));
@@ -213,7 +213,7 @@ int main(int argc, char** argv) {
   if (argc > 10) {
     warmup = atoi(argv[10]);
   }
-  RunModel(model_dir,
+  RunModel(model_file,
            img_path,
            input_shape,
            (PowerMode)power_mode,
...
@@ -182,10 +182,10 @@ std::vector<Object> detect_object(const float* data,
   return rect_out;
 }
-void RunModel(std::string model_dir, std::string img_path) {
+void RunModel(std::string model_file, std::string img_path) {
   // 1. Set MobileConfig
   MobileConfig config;
-  config.set_model_dir(model_dir);
+  config.set_model_from_file(model_file);
   // 2. Create PaddlePredictor by MobileConfig
   std::shared_ptr<PaddlePredictor> predictor =
@@ -228,11 +228,11 @@ void RunModel(std::string model_dir, std::string img_path) {
 int main(int argc, char** argv) {
   if (argc < 3) {
-    std::cerr << "[ERROR] usage: " << argv[0] << " model_dir image_path\n";
+    std::cerr << "[ERROR] usage: " << argv[0] << " model_file image_path\n";
     exit(1);
   }
-  std::string model_dir = argv[1];
+  std::string model_file = argv[1];
   std::string img_path = argv[2];
-  RunModel(model_dir, img_path);
+  RunModel(model_file, img_path);
   return 0;
 }
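All of the updated demos therefore expect a model that has already been converted to Paddle-Lite's naive-buffer format (a single .nb file). If you start from a Paddle inference model, it can be converted with the opt tool, for example `./opt --model_dir=./mobilenet_v1 --optimize_out_type=naive_buffer --optimize_out=./mobilenet_v1`, which writes mobilenet_v1.nb; the exact flags may vary between Paddle-Lite releases, so check the opt documentation for your version.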