Commit be8c86a1 authored by syyxsxx

add namespace InferenceEngine::

Parent: 4824363c
@@ -222,7 +222,7 @@ bool Model::predict(const cv::Mat& im, SegResult* result) {
   //
   infer_request.Infer();
-  OInferenceEngine::utputsDataMap out_map = network_.getOutputsInfo();
+  InferenceEngine::OutputsDataMap out_map = network_.getOutputsInfo();
   auto iter = out_map.begin();
   iter++;
   std::string output_name_score = iter->first;
......
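For context: `OutputsDataMap` lives in the `InferenceEngine` namespace of the OpenVINO Inference Engine API, and the earlier edit had pasted the qualifier into the middle of the type name (`O...utputsDataMap`), which does not compile. A minimal sketch of the corrected usage, assuming `network_` is an `InferenceEngine::CNNNetwork` (the loop and variable names are illustrative, not from this repo):

    #include <string>
    #include <inference_engine.hpp>

    // getOutputsInfo() returns InferenceEngine::OutputsDataMap, i.e. a
    // std::map<std::string, InferenceEngine::DataPtr> keyed by output name.
    InferenceEngine::OutputsDataMap out_map = network_.getOutputsInfo();
    for (const auto& output : out_map) {
      const std::string& name = output.first;         // output blob name
      InferenceEngine::DataPtr info = output.second;  // shape/precision info
    }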
@@ -49,7 +49,7 @@ class ImageBlob {
   // Resize scale
   float scale = 1.0;
   // Buffer for image data after preprocessing
-  std::unique_ptr<Tensor> input_tensor_;
+  std::unique_ptr<paddle::lite_api::Tensor> input_tensor_;
   void clear() {
     im_size_before_resize_.clear();
......
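This hunk fully qualifies the Paddle-Lite `Tensor` held by `ImageBlob`. A minimal sketch of how such a tensor is typically obtained and filled through the Paddle-Lite C++ API (the input index and shape are placeholders, not values from this repo):

    #include "paddle_api.h"  // paddle::lite_api::{PaddlePredictor, Tensor}

    // GetInput() hands back a std::unique_ptr<paddle::lite_api::Tensor>,
    // matching the member type declared in ImageBlob above.
    std::unique_ptr<paddle::lite_api::Tensor> input_tensor =
        predictor->GetInput(0);              // first input of the model
    input_tensor->Resize({1, 3, 512, 512});  // NCHW; placeholder shape
    float* input_data = input_tensor->mutable_data<float>();  // writable buffer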
@@ -26,7 +26,9 @@ void Model::create_predictor(const std::string& model_dir,
   config.set_model_from_file(model_dir);
   config.set_threads(thread_num);
   load_config(cfg_dir);
-  predictor_ = CreatePaddlePredictor<paddle::lite_api::MobileConfig>(config);
+  predictor_ =
+      paddle::lite_api::CreatePaddlePredictor<paddle::lite_api::MobileConfig>(
+          config);
 }
 
 bool Model::load_config(const std::string& cfg_dir) {
......
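The last hunk qualifies the free function `CreatePaddlePredictor`, which also lives in `paddle::lite_api`. A self-contained sketch of predictor creation with a `MobileConfig`, assuming an optimized `.nb` model file (the path and thread count are placeholders):

    #include <memory>
    #include "paddle_api.h"

    // MobileConfig, PaddlePredictor, and CreatePaddlePredictor all live in
    // paddle::lite_api, so each use needs the namespace qualifier (or a
    // using-declaration).
    paddle::lite_api::MobileConfig config;
    config.set_model_from_file("model.nb");  // placeholder path to an optimized model
    config.set_threads(1);
    std::shared_ptr<paddle::lite_api::PaddlePredictor> predictor =
        paddle::lite_api::CreatePaddlePredictor<paddle::lite_api::MobileConfig>(
            config);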