Commit 0bdce1d4 authored by D dongshuilong

fix cpp_infer bugs

Parent 3c7f6e4d
@@ -57,7 +57,7 @@ double Classifier::Run(cv::Mat &img, std::vector<double> *times) {
   cv::Mat resize_img;
   img.copyTo(srcimg);
-  auto preprocess_start = std::chrono::steady_clock::now();
+  auto preprocess_start = std::chrono::system_clock::now();
   this->resize_op_.Run(img, resize_img, this->resize_short_size_);
   this->crop_op_.Run(resize_img, this->crop_size_);
@@ -92,9 +92,9 @@ double Classifier::Run(cv::Mat &img, std::vector<double> *times) {
       max_element(out_data.begin(), out_data.end()) - out_data.begin();
   auto postprocess_end = std::chrono::system_clock::now();
-  // std::chrono::duration<float> preprocess_diff = preprocess_end -
-  // preprocess_start;
-  // times->push_back(double(preprocess_diff.count() * 1000));
+  std::chrono::duration<float> preprocess_diff =
+      preprocess_end - preprocess_start;
+  times->push_back(double(preprocess_diff.count() * 1000));
   std::chrono::duration<float> inference_diff = infer_end - infer_start;
   double inference_cost_time = double(inference_diff.count() * 1000);
   times->push_back(inference_cost_time);
......
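Note on the change above: preprocess_start now uses std::chrono::system_clock, the same clock as postprocess_end, which is what allows the previously commented-out preprocess timing to be restored; in C++, time points taken from different chrono clocks cannot be subtracted from each other. A minimal standalone sketch of this timing pattern (illustrative only, not code from the repository; the sleep stands in for the real preprocessing work):

#include <chrono>
#include <iostream>
#include <thread>
#include <vector>

int main() {
  // Take both time points from the same clock; subtracting a
  // steady_clock::time_point from a system_clock::time_point does not compile.
  auto preprocess_start = std::chrono::system_clock::now();
  std::this_thread::sleep_for(std::chrono::milliseconds(5)); // stand-in for preprocessing
  auto preprocess_end = std::chrono::system_clock::now();

  std::vector<double> times;
  std::chrono::duration<float> preprocess_diff = preprocess_end - preprocess_start;
  times.push_back(double(preprocess_diff.count() * 1000)); // elapsed time in ms

  std::cout << "preprocess: " << times.back() << " ms" << std::endl;
  return 0;
}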
@@ -10,8 +10,8 @@ gpu_mem 2000
 # use_fp16 0
 # cls config
-cls_model_path inference/inference.pdmodel
-cls_params_path inference/inference.pdiparams
+cls_model_path ../inference/inference.pdmodel
+cls_params_path ../inference/inference.pdiparams
 resize_short_size 256
 crop_size 224
......
# model load config
gpu_id 0
gpu_mem 2000
# whole chain test will add following config
# use_gpu 0
# cpu_threads 10
# use_mkldnn 1
# use_tensorrt 0
# use_fp16 0
# cls config
cls_model_path inference/inference.pdmodel
cls_params_path inference/inference.pdiparams
resize_short_size 256
crop_size 224
# for log env info
benchmark 1
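For context, this whole-chain test config uses one "key value" pair per line, with lines starting with # treated as comments; keys such as cls_model_path and resize_short_size are read by the C++ inference demo. A minimal, hypothetical reader for this file format (illustrative sketch only, not the repository's actual config parser) could look like:

#include <fstream>
#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Parse "key value" lines from a config file; lines starting with '#' are comments.
std::map<std::string, std::string> LoadConfig(const std::string &path) {
  std::map<std::string, std::string> config;
  std::ifstream in(path);
  std::string line;
  while (std::getline(in, line)) {
    if (line.empty() || line[0] == '#') continue; // skip blank and comment lines
    std::istringstream ss(line);
    std::string key, value;
    if (ss >> key >> value) config[key] = value;
  }
  return config;
}

int main() {
  auto config = LoadConfig("cpp_config.txt");
  std::cout << "cls_model_path: " << config["cls_model_path"] << std::endl;
  std::cout << "resize_short_size: " << config["resize_short_size"] << std::endl;
  return 0;
}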
eval "$cpp_use_gpu_key $use_gpu"
eval "$cpp_use_gpu_key" "$use_gpu"
@@ -182,6 +182,7 @@ function func_cpp_inference(){
 echo "${cpp_cpu_threads_key} ${threads}" >> cpp_config.txt
 echo "${cpp_use_mkldnn_key} ${use_mkldnn}" >> cpp_config.txt
 echo "${cpp_use_tensorrt_key} 0" >> cpp_config.txt
+echo "${cpp_use_fp16_key} 0" >> cpp_config.txt
 command="${_script} cpp_config.txt ${_img_dir} > ${_save_log_path} 2>&1 "
 eval $command
 last_status=${PIPESTATUS[0]}
@@ -203,7 +204,8 @@ function func_cpp_inference(){
 echo "${cpp_use_gpu_key} ${use_gpu}" >> cpp_config.txt
 echo "${cpp_cpu_threads_key} ${threads}" >> cpp_config.txt
 echo "${cpp_use_mkldnn_key} ${use_mkldnn}" >> cpp_config.txt
-echo "${cpp_use_tensorrt_key} ${precision}" >> cpp_config.txt
+echo "${cpp_use_tensorrt_key} ${use_trt}" >> cpp_config.txt
+echo "${cpp_use_fp16_key} ${precision}" >> cpp_config.txt
 command="${_script} cpp_config.txt ${_img_dir} > ${_save_log_path} 2>&1 "
 eval $command
 last_status=${PIPESTATUS[0]}
......