Commit 20c0c3ee authored by Zhaolong Xing, committed by xsrobin

Fix inference naive_infer.md doc typo (#949)

* refine inference api

test=develop

* fix typo

* fix typo

* fix comments

* fix typo

Parent 7757dc36
@@ -53,7 +53,7 @@ void CreateConfig(NativeConfig *config, const std::string& model_dirname) {
 void RunNative(int batch_size, const std::string& model_dirname) {
   // 1. Create the NativeConfig
   NativeConfig config;
-  CreateConfig(&config);
+  CreateConfig(&config, model_dirname);
   // 2. Create a predictor from the config
   auto predictor = CreatePaddlePredictor(config);
@@ -61,7 +61,7 @@ void RunNative(int batch_size, const std::string& model_dirname) {
   int channels = 3;
   int height = 224;
   int width = 224;
-  float data[batch_size * channels * height * width] = {0};
+  float *data = new float[batch_size * channels * height * width];
   // 3. Create the input tensor
   PaddleTensor tensor;
@@ -78,7 +78,7 @@ void RunNative(int batch_size, const std::string& model_dirname) {
   predictor->Run(paddle_tensor_feeds, &outputs, batch_size);
   const size_t num_elements = outputs.front().data.length() / sizeof(float);
-  auto *data = static_cast<float *>(outputs.front().data.data());
+  auto *data_out = static_cast<float *>(outputs.front().data.data());
 }
 }  // namespace paddle
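
The hunks above touch only fragments of the Native API walkthrough. For context, here is a minimal, self-contained sketch of the patched flow. It is a sketch, not the exact document text: the `model_dir` and `use_gpu` fields are assumed from the `NativeConfig` of this release, and a `std::vector` stands in for the raw `new float[...]` in the patch so the input buffer cannot leak.

```c++
#include <string>
#include <vector>
#include "paddle_inference_api.h"

namespace paddle {

void CreateConfig(NativeConfig *config, const std::string& model_dirname) {
  // Assumption: the directory holds the serialized program and parameters,
  // so pointing `model_dir` at it is enough; run on CPU for this sketch.
  config->model_dir = model_dirname;
  config->use_gpu = false;
}

void RunNative(int batch_size, const std::string& model_dirname) {
  // 1. Create the NativeConfig
  NativeConfig config;
  CreateConfig(&config, model_dirname);

  // 2. Create a predictor from the config
  auto predictor = CreatePaddlePredictor(config);

  // 3. Create the input tensor: a zero-filled 3x224x224 batch that
  //    stands in for real image data.
  int channels = 3, height = 224, width = 224;
  std::vector<float> input(batch_size * channels * height * width, 0.f);

  PaddleTensor tensor;
  tensor.shape = {batch_size, channels, height, width};
  tensor.data = PaddleBuf(input.data(), input.size() * sizeof(float));
  tensor.dtype = PaddleDType::FLOAT32;
  std::vector<PaddleTensor> paddle_tensor_feeds(1, tensor);

  // 4. Run and read back the output buffer
  std::vector<PaddleTensor> outputs;
  predictor->Run(paddle_tensor_feeds, &outputs, batch_size);
  const size_t num_elements = outputs.front().data.length() / sizeof(float);
  auto *data_out = static_cast<float *>(outputs.front().data.data());
}

}  // namespace paddle
```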
@@ -95,6 +95,9 @@ AnalysisConfig creates a high-performance inference engine. By analyzing the comput...
 #### AnalysisPredictor usage example
 ```c++
+#include "paddle_inference_api.h"
+namespace paddle {
 void CreateConfig(AnalysisConfig* config, const std::string& model_dirname) {
   // Load the model from disk
   config->SetModel(model_dirname + "/model",
@@ -122,7 +125,7 @@ void CreateConfig(AnalysisConfig* config, const std::string& model_dirname) {
 void RunAnalysis(int batch_size, std::string model_dirname) {
   // 1. Create the AnalysisConfig
   AnalysisConfig config;
-  CreateConfig(&config);
+  CreateConfig(&config, model_dirname);
   // 2. Create a predictor from the config
   auto predictor = CreatePaddlePredictor(config);
......
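
As with the native example, only fragments of `RunAnalysis` appear in the diff. Below is a minimal sketch of the patched Analysis flow under the same assumptions: `DisableGpu()` and `SwitchIrOptim()` are standard `AnalysisConfig` calls in this release, and the `"/model"` and `"/params"` file names simply follow the snippet above; the actual names depend on how the model was saved.

```c++
#include <string>
#include <vector>
#include "paddle_inference_api.h"

namespace paddle {

void CreateConfig(AnalysisConfig* config, const std::string& model_dirname) {
  // Load the combined program/params files from disk
  config->SetModel(model_dirname + "/model",
                   model_dirname + "/params");
  config->DisableGpu();     // run on CPU for this sketch
  config->SwitchIrOptim();  // enable the IR optimization passes
}

void RunAnalysis(int batch_size, std::string model_dirname) {
  // 1. Create the AnalysisConfig
  AnalysisConfig config;
  CreateConfig(&config, model_dirname);

  // 2. Create a predictor from the config
  auto predictor = CreatePaddlePredictor(config);

  // 3. Feed a zero-filled batch, mirroring the native example
  int channels = 3, height = 224, width = 224;
  std::vector<float> input(batch_size * channels * height * width, 0.f);

  PaddleTensor tensor;
  tensor.shape = {batch_size, channels, height, width};
  tensor.data = PaddleBuf(input.data(), input.size() * sizeof(float));
  tensor.dtype = PaddleDType::FLOAT32;

  // 4. Run and read back the output buffer
  std::vector<PaddleTensor> outputs;
  predictor->Run({tensor}, &outputs, batch_size);
  const size_t num_elements = outputs.front().data.length() / sizeof(float);
  auto *data_out = static_cast<float *>(outputs.front().data.data());
}

}  // namespace paddle
```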