Unverified commit 504f32c5, authored by W Walter, committed by GitHub

Merge pull request #1557 from RainFrost1/cpp_infer

Add label printing and top-k output to the classification C++ inference
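The diff below changes `Classifier::Run` to expose the full output vector plus a score-sorted index vector, teaches `ClsConfig` to read `PostProcess.Topk.topk` and `PostProcess.Topk.class_id_map_file`, and has `main.cpp` print the top-k class ids, scores, and labels. A minimal, self-contained sketch of that top-k idea (function and variable names here are illustrative only, not the repository's API):

```cpp
#include <algorithm>
#include <iostream>
#include <map>
#include <numeric>
#include <string>
#include <vector>

// Minimal sketch of the added post-processing: rank class indices by score
// and print the top-k entries with their labels.
void PrintTopK(const std::vector<float> &scores,
               const std::map<int, std::string> &id_map, int topk) {
  std::vector<int> idx(scores.size());
  std::iota(idx.begin(), idx.end(), 0);
  std::stable_sort(idx.begin(), idx.end(),
                   [&scores](int a, int b) { return scores[a] > scores[b]; });
  int max_len = std::min(topk, static_cast<int>(scores.size()));
  for (int i = 0; i < max_len; ++i) {
    std::cout << "Top" << i + 1 << ": class_id: " << idx[i]
              << ", score: " << scores[idx[i]];
    auto it = id_map.find(idx[i]);
    if (it != id_map.end())
      std::cout << ", label: " << it->second;
    std::cout << std::endl;
  }
}

int main() {
  // Toy scores and label map, for illustration only.
  PrintTopK({0.1f, 0.7f, 0.2f}, {{0, "cat"}, {1, "dog"}, {2, "bird"}}, 2);
  return 0;
}
```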
......@@ -58,7 +58,8 @@ namespace PaddleClas {
void LoadModel(const std::string &model_path, const std::string &params_path);
// Run predictor
double Run(cv::Mat &img, std::vector<double> *times);
void Run(cv::Mat &img, std::vector<float> &out_data, std::vector<int> &idx,
std::vector<double> &times);
private:
std::shared_ptr <Predictor> predictor_;
......
......@@ -80,6 +80,20 @@ namespace PaddleClas {
this->benchmark = this->config_file["Global"]["benchmark"].as<bool>();
else
this->benchmark = false;
if (this->config_file["PostProcess"]["Topk"]["topk"].IsDefined())
this->topk = this->config_file["PostProcess"]["Topk"]["topk"].as<int>();
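// When the key is absent, topk keeps its default value of 5.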
if (this->config_file["PostProcess"]["Topk"]["class_id_map_file"]
.IsDefined())
this->class_id_map_path =
this->config_file["PostProcess"]["Topk"]["class_id_map_file"]
.as<std::string>();
if (this->config_file["PostProcess"]["SavePreLabel"]["save_dir"]
.IsDefined())
this->label_save_dir =
this->config_file["PostProcess"]["SavePreLabel"]["save_dir"]
.as<std::string>();
ReadLabelMap();
}
YAML::Node config_file;
......@@ -105,9 +119,15 @@ namespace PaddleClas {
float scale = 0.00392157;
std::vector<float> mean = {0.485, 0.456, 0.406};
std::vector<float> std = {0.229, 0.224, 0.225};
int topk = 5;
std::string class_id_map_path;
std::map<int, std::string> id_map;
std::string label_save_dir;
void PrintConfigInfo();
void ReadLabelMap();
void ReadYamlConfig(const std::string &path);
};
} // namespace PaddleClas
......@@ -209,7 +209,7 @@ cp ../configs/inference_cls.yaml tools/
Modify the `inference_cls.yaml` file in the `tools` directory according to the image classification inference section of [Python inference](../../docs/zh_CN/inference_deployment/python_deploy.md). The parameters of the `yaml` file are described in detail in [Python inference](../../docs/zh_CN/inference_deployment/python_deploy.md).
Set `Global.infer_imgs`, `Global.inference_model_dir` and the other parameters according to where your files are actually stored.
Set `Global.infer_imgs`, `Global.inference_model_dir`, `PostProcess.Topk.topk`, `PostProcess.Topk.class_id_map_file` and the other parameters according to where your files are actually stored.
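As a quick sanity check, a small sketch (using yaml-cpp, which the C++ demo already links against) that loads the config and echoes the new `PostProcess.Topk` settings; the file path and the assumption that the `Global` keys are already set are illustrative:

```cpp
#include <iostream>
#include <yaml-cpp/yaml.h>

// Prints the settings the C++ predictor will actually use; adjust the path
// to wherever your inference_cls.yaml lives.
int main() {
  YAML::Node cfg = YAML::LoadFile("tools/inference_cls.yaml");
  std::cout << "infer_imgs: "
            << cfg["Global"]["infer_imgs"].as<std::string>() << std::endl;
  std::cout << "inference_model_dir: "
            << cfg["Global"]["inference_model_dir"].as<std::string>() << std::endl;
  // The Topk keys are optional; ClsConfig falls back to its defaults when absent.
  if (cfg["PostProcess"]["Topk"]["topk"].IsDefined())
    std::cout << "topk: "
              << cfg["PostProcess"]["Topk"]["topk"].as<int>() << std::endl;
  if (cfg["PostProcess"]["Topk"]["class_id_map_file"].IsDefined())
    std::cout << "class_id_map_file: "
              << cfg["PostProcess"]["Topk"]["class_id_map_file"].as<std::string>()
              << std::endl;
  return 0;
}
```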
#### 2.3.2 Run
......
......@@ -12,7 +12,9 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#include <algorithm>
#include <include/cls.h>
#include <numeric>
namespace PaddleClas {
......@@ -52,7 +54,8 @@ namespace PaddleClas {
this->predictor_ = CreatePredictor(config);
}
double Classifier::Run(cv::Mat &img, std::vector<double> *times) {
void Classifier::Run(cv::Mat &img, std::vector<float> &out_data,
std::vector<int> &idx, std::vector<double> &times) {
cv::Mat srcimg;
cv::Mat resize_img;
img.copyTo(srcimg);
......@@ -75,7 +78,6 @@ namespace PaddleClas {
input_t->CopyFromCpu(input.data());
this->predictor_->Run();
std::vector<float> out_data;
auto output_names = this->predictor_->GetOutputNames();
auto output_t = this->predictor_->GetOutputHandle(output_names[0]);
std::vector<int> output_shape = output_t->shape();
......@@ -83,30 +85,32 @@ namespace PaddleClas {
std::multiplies<int>());
out_data.resize(out_num);
idx.resize(out_num);
output_t->CopyToCpu(out_data.data());
auto infer_end = std::chrono::system_clock::now();
auto postprocess_start = std::chrono::system_clock::now();
int maxPosition =
max_element(out_data.begin(), out_data.end()) - out_data.begin();
// int maxPosition =
// max_element(out_data.begin(), out_data.end()) - out_data.begin();
iota(idx.begin(), idx.end(), 0);
stable_sort(idx.begin(), idx.end(), [&out_data](int i1, int i2) {
return out_data[i1] > out_data[i2];
});
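// idx now lists every class index in descending score order; the caller keeps only the top-k.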
auto postprocess_end = std::chrono::system_clock::now();
std::chrono::duration<float> preprocess_diff =
preprocess_end - preprocess_start;
times->push_back(double(preprocess_diff.count() * 1000));
times[0] = double(preprocess_diff.count() * 1000);
std::chrono::duration<float> inference_diff = infer_end - infer_start;
double inference_cost_time = double(inference_diff.count() * 1000);
times->push_back(inference_cost_time);
times[1] = inference_cost_time;
std::chrono::duration<float> postprocess_diff =
postprocess_end - postprocess_start;
times->push_back(double(postprocess_diff.count() * 1000));
times[2] = double(postprocess_diff.count() * 1000);
std::cout << "result: " << std::endl;
std::cout << "\tclass id: " << maxPosition << std::endl;
std::cout << std::fixed << std::setprecision(10)
<< "\tscore: " << double(out_data[maxPosition]) << std::endl;
return inference_cost_time;
/* std::cout << "result: " << std::endl; */
/* std::cout << "\tclass id: " << maxPosition << std::endl; */
/* std::cout << std::fixed << std::setprecision(10) */
/* << "\tscore: " << double(out_data[maxPosition]) << std::endl; */
}
} // namespace PaddleClas
......@@ -13,6 +13,7 @@
// limitations under the License.
#include <include/cls_config.h>
#include <ostream>
namespace PaddleClas {
......@@ -32,4 +33,20 @@ namespace PaddleClas {
exit(1);
}
}
void ClsConfig::ReadLabelMap() {
if (this->class_id_map_path.empty()) {
std::cout << "The Class Label file dose not input" << std::endl;
return;
}
std::ifstream in(this->class_id_map_path);
std::string line;
if (in) {
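// Each line is parsed as "<class_id> <label>", split on the first space.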
while (getline(in, line)) {
int split_flag = line.find_first_of(" ");
this->id_map[std::stoi(line.substr(0, split_flag))] =
line.substr(split_flag + 1, line.size());
}
}
}
}; // namespace PaddleClas
......@@ -22,6 +22,7 @@
#include <ostream>
#include <vector>
#include <algorithm>
#include <cstring>
#include <fstream>
#include <numeric>
......@@ -74,9 +75,13 @@ int main(int argc, char **argv) {
Classifier classifier(config);
double elapsed_time = 0.0;
std::vector<double> cls_times;
int warmup_iter = img_files_list.size() > 5 ? 5 : 0;
std::vector<double> cls_times = {0, 0, 0};
std::vector<double> cls_times_total = {0, 0, 0};
double infer_time;
std::vector<float> out_data;
std::vector<int> result_index;
int warmup_iter = 5;
bool label_output_equal_flag = true;
for (int idx = 0; idx < img_files_list.size(); ++idx) {
std::string img_path = img_files_list[idx];
cv::Mat srcimg = cv::imread(img_path, cv::IMREAD_COLOR);
......@@ -87,27 +92,44 @@ int main(int argc, char **argv) {
}
cv::cvtColor(srcimg, srcimg, cv::COLOR_BGR2RGB);
classifier.Run(srcimg, out_data, result_index, cls_times);
if (label_output_equal_flag and out_data.size() != config.id_map.size()) {
std::cout << "Warning: the label size is not equal to output size!"
<< std::endl;
label_output_equal_flag = false;
}
double run_time = classifier.Run(srcimg, &cls_times);
int max_len = std::min(config.topk, int(out_data.size()));
std::cout << "Current image path: " << img_path << std::endl;
infer_time = cls_times[0] + cls_times[1] + cls_times[2];
std::cout << "Current total inferen time cost: " << infer_time << " ms."
<< std::endl;
for (int i = 0; i < max_len; ++i) {
printf("\tTop%d: class_id: %d, score: %.4f, ", i + 1, result_index[i],
out_data[result_index[i]]);
if (label_output_equal_flag)
printf("label: %s\n", config.id_map[result_index[i]].c_str());
}
if (idx >= warmup_iter) {
elapsed_time += run_time;
std::cout << "Current image path: " << img_path << std::endl;
std::cout << "Current time cost: " << run_time << " s, "
<< "average time cost in all: "
<< elapsed_time / (idx + 1 - warmup_iter) << " s." << std::endl;
} else {
std::cout << "Current time cost: " << run_time << " s." << std::endl;
for (int i = 0; i < cls_times.size(); ++i)
cls_times_total[i] += cls_times[i];
}
}
if (img_files_list.size() > warmup_iter) {
infer_time = cls_times_total[0] + cls_times_total[1] + cls_times_total[2];
std::cout << "average time cost in all: "
<< infer_time / (img_files_list.size() - warmup_iter) << " ms."
<< std::endl;
}
std::string presion = "fp32";
if (config.use_fp16)
presion = "fp16";
if (config.benchmark) {
AutoLogger autolog("Classification", config.use_gpu, config.use_tensorrt,
config.use_mkldnn, config.cpu_threads, 1,
"1, 3, 224, 224", presion, cls_times,
"1, 3, 224, 224", presion, cls_times_total,
img_files_list.size());
autolog.report();
}
......
......@@ -20,20 +20,20 @@
namespace PaddleClas {
std::vector<std::string> Utility::ReadDict(const std::string &path) {
std::ifstream in(path);
std::string line;
std::vector<std::string> m_vec;
if (in) {
while (getline(in, line)) {
m_vec.push_back(line);
std::vector <std::string> Utility::ReadDict(const std::string &path) {
std::ifstream in(path);
std::string line;
std::vector <std::string> m_vec;
if (in) {
while (getline(in, line)) {
m_vec.push_back(line);
}
} else {
std::cout << "no such label file: " << path << ", exit the program..."
<< std::endl;
exit(1);
}
return m_vec;
}
} else {
std::cout << "no such label file: " << path << ", exit the program..."
<< std::endl;
exit(1);
}
return m_vec;
}
} // namespace PaddleClas
\ No newline at end of file
OPENCV_DIR=/work/project/project/cpp_infer/opencv-3.4.7/opencv3
LIB_DIR=/work/project/project/cpp_infer/paddle_inference/
OPENCV_DIR=/work/project/project/test/opencv-3.4.7/opencv3
LIB_DIR=/work/project/project/test/paddle_inference/
CUDA_LIB_DIR=/usr/local/cuda/lib64
CUDNN_LIB_DIR=/usr/lib/x86_64-linux-gnu/
......