From 20c0c3ee2bcb12f5fe88ecdfdfae268fa0828c54 Mon Sep 17 00:00:00 2001
From: Zhaolong Xing
Date: Thu, 27 Jun 2019 15:51:50 +0800
Subject: [PATCH] Fix inference native_infer.md doc typo (#949)

* refine inference api
test=develop

* fix typo

* fix typo

* fix comments

* fix typo
---
 .../advanced_usage/deploy/inference/native_infer.md | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/doc/fluid/advanced_usage/deploy/inference/native_infer.md b/doc/fluid/advanced_usage/deploy/inference/native_infer.md
index b714464be..0b520ee19 100644
--- a/doc/fluid/advanced_usage/deploy/inference/native_infer.md
+++ b/doc/fluid/advanced_usage/deploy/inference/native_infer.md
@@ -53,7 +53,7 @@ void CreateConfig(NativeConfig *config, const std::string& model_dirname) {
 void RunNative(int batch_size, const std::string& model_dirname) {
   // 1. Create a NativeConfig
   NativeConfig config;
-  CreateConfig(&config);
+  CreateConfig(&config, model_dirname);
 
   // 2. Create a predictor from the config
   auto predictor = CreatePaddlePredictor(config);
@@ -61,7 +61,7 @@
   int channels = 3;
   int height = 224;
   int width = 224;
-  float data[batch_size * channels * height * width] = {0};
+  float *data = new float[batch_size * channels * height * width];
 
   // 3. Create the input tensor
   PaddleTensor tensor;
@@ -78,7 +78,7 @@
   predictor->Run(paddle_tensor_feeds, &outputs, batch_size);
 
   const size_t num_elements = outputs.front().data.length() / sizeof(float);
-  auto *data = static_cast<float*>(outputs.front().data.data());
+  auto *data_out = static_cast<float*>(outputs.front().data.data());
 }
 
 } // namespace paddle
@@ -95,6 +95,9 @@ AnalysisConfig creates a high-performance inference engine. By analyzing the com
 
 #### AnalysisPredictor usage example
 ```c++
+#include "paddle_inference_api.h"
+
+namespace paddle {
 void CreateConfig(AnalysisConfig* config, const std::string& model_dirname) {
   // Load the model from disk
   config->SetModel(model_dirname + "/model",
@@ -122,7 +125,7 @@ void CreateConfig(AnalysisConfig* config, const std::string& model_dirname) {
 void RunAnalysis(int batch_size, std::string model_dirname) {
   // 1. Create an AnalysisConfig
   AnalysisConfig config;
-  CreateConfig(&config);
+  CreateConfig(&config, model_dirname);
 
   // 2. Create a predictor from the config
   auto predictor = CreatePaddlePredictor(config);
--
GitLab
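
For reference, below is a minimal end-to-end sketch of the NativeConfig flow that the patched hunks come from, with the three fixes applied: the `model_dirname` argument passed through to `CreateConfig`, heap allocation instead of a runtime-sized stack array (a non-standard VLA that also redeclared `data` later in the same scope), and the output pointer renamed to `data_out`. This is a sketch under assumptions, not the documented sample: the fields set in `CreateConfig`, the input-tensor fields, and feeding inputs by position rather than by name are guesses based on the old `paddle_inference_api.h` interface, and error handling is omitted.

```c++
#include <string>
#include <vector>

#include "paddle_inference_api.h"

namespace paddle {

// Assumed config: load from a single model directory and run on CPU; the
// full doc may set prog_file/param_file or GPU fields instead.
void CreateConfig(NativeConfig *config, const std::string& model_dirname) {
  config->model_dir = model_dirname;
  config->use_gpu = false;
}

void RunNative(int batch_size, const std::string& model_dirname) {
  // 1. Create a NativeConfig (the patch adds the missing model_dirname argument)
  NativeConfig config;
  CreateConfig(&config, model_dirname);

  // 2. Create a predictor from the config
  auto predictor = CreatePaddlePredictor(config);

  int channels = 3;
  int height = 224;
  int width = 224;
  // Heap allocation as in the patch: batch_size is a runtime value, so the
  // pre-patch stack array was not legal standard C++. Value-initialized to 0.
  size_t input_num = static_cast<size_t>(batch_size) * channels * height * width;
  float *data = new float[input_num]();

  // 3. Create the input tensor; the exact fields set here are assumptions
  // based on the PaddleTensor struct, not quoted from the patched doc.
  PaddleTensor tensor;
  tensor.shape = {batch_size, channels, height, width};
  tensor.data = PaddleBuf(data, input_num * sizeof(float));
  tensor.dtype = PaddleDType::FLOAT32;
  std::vector<PaddleTensor> paddle_tensor_feeds(1, tensor);

  // 4. Run, then read the output through data_out; the patch renames this
  // pointer so it no longer collides with the input buffer `data`.
  std::vector<PaddleTensor> outputs;
  predictor->Run(paddle_tensor_feeds, &outputs, batch_size);

  const size_t num_elements = outputs.front().data.length() / sizeof(float);
  auto *data_out = static_cast<float*>(outputs.front().data.data());
  (void)num_elements;
  (void)data_out;

  delete[] data;  // not shown in the doc hunks; added to release the input buffer
}

}  // namespace paddle
```

The AnalysisConfig path patched in the later hunks follows the same shape; the `#include "paddle_inference_api.h"` and `namespace paddle {` lines added there simply make that sample self-contained in the same way.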