diff --git a/_all_pages/develop/cpp_demo.md b/_all_pages/develop/cpp_demo.md
index 36de4ffc0d43d9173bd867a2a0aa78821a25791f..02dcd61bd5e93d84408129078aab3788f8aa662d 100644
--- a/_all_pages/develop/cpp_demo.md
+++ b/_all_pages/develop/cpp_demo.md
@@ -24,13 +24,21 @@ title: C++ Demo
 
 After compilation, the `./build.lite.android.armv8.gcc/inference_lite_lib.android.armv8/` folder contains:
 
-{% highlight shell %}
-cxx/include/
-cxx/lib/libpaddle_api_full_bundled.a
-cxx/lib/libpaddle_api_light_bundled.a
-demo/cxx/ # contains {include Makefile.def mobile_light}
-third_party/gflags/
-{% endhighlight %}
+- cxx
+    - include (header files)
+    - lib (library files)
+        - libpaddle_api_full_bundled.a
+        - libpaddle_api_light_bundled.a
+        - libpaddle_light_api_shared.so
+        - libpaddle_full_api_shared.so
+- demo
+    - cxx (C++ demos)
+        - mobile_light (light API demo)
+        - mobile_full (full API demo)
+        - Makefile.def
+        - include
+- third_party (third-party libraries)
+    - gflags
 
 ## Prepare the execution environment
 
@@ -130,7 +138,7 @@ MobileConfig config;
 
 // 2. Load model
 config.set_model_dir("path to your model directory"); // model dir
-//load model: Lite supports loading model from file or from memory (naive buffer from optimized model)
+/*load model: Lite supports loading model from file or from memory (naive buffer from optimized model)
 //Method One: Load model from memory:
 void set_model_buffer(const char* model_buffer, size_t model_buffer_size,
diff --git a/_all_pages/v2.0.0-beta1/cpp_demo.md b/_all_pages/v2.0.0-beta1/cpp_demo.md
index f6b477eb8f1577b38fbc990ebbcda1bb7992518a..53ef571ff078f8ca21ed005f098d39ded38ac1bd 100644
--- a/_all_pages/v2.0.0-beta1/cpp_demo.md
+++ b/_all_pages/v2.0.0-beta1/cpp_demo.md
@@ -36,13 +36,22 @@ title: C++ Demo
 
 After compilation, the `./build.lite.android.armv8.gcc/inference_lite_lib.android.armv8/` folder contains:
 
-{% highlight shell %}
-cxx/include/
-cxx/lib/libpaddle_api_full_bundled.a
-cxx/lib/libpaddle_api_light_bundled.a
-demo/cxx/ # contains {include Makefile.def mobile_light}
-third_party/gflags/
-{% endhighlight %}
+- cxx
+    - include (header files)
+    - lib (library files)
+        - libpaddle_api_full_bundled.a
+        - libpaddle_api_light_bundled.a
+        - libpaddle_light_api_shared.so
+        - libpaddle_full_api_shared.so
+- demo
+    - cxx (C++ demos)
+        - mobile_light (light API demo)
+        - mobile_full (full API demo)
+        - Makefile.def
+        - include
+- third_party (third-party libraries)
+    - gflags
+
 
 ## Prepare the execution environment
 
@@ -142,7 +151,7 @@ MobileConfig config;
 
 // 2. Load model
 config.set_model_dir("path to your model directory"); // model dir
-//load model: Lite supports loading model from file or from memory (naive buffer from optimized model)
+/*load model: Lite supports loading model from file or from memory (naive buffer from optimized model)
 //Method One: Load model from memory:
 void set_model_buffer(const char* model_buffer, size_t model_buffer_size,
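
For context on the `set_model_buffer` path that both hunks touch, here is a minimal sketch of loading an optimized (naive buffer) model from memory with the light API. It assumes the `paddle_api.h` header, the `paddle::lite_api` namespace, the `CreatePaddlePredictor<MobileConfig>` factory, and the trailing parameter-buffer arguments of `set_model_buffer` from the same Paddle-Lite release; the file names and input shape are placeholders, not values taken from this patch.

```cpp
// Sketch only: in-memory model loading via MobileConfig::set_model_buffer,
// as referenced by the "Method One: Load model from memory" snippet above.
// File names and the input shape below are illustrative placeholders.
#include <fstream>
#include <string>

#include "paddle_api.h"  // assumed light-API header of this release

using namespace paddle::lite_api;  // NOLINT

// Read a whole file into a string buffer.
std::string ReadFile(const std::string& path) {
  std::ifstream ifs(path, std::ios::binary);
  return std::string((std::istreambuf_iterator<char>(ifs)),
                     std::istreambuf_iterator<char>());
}

int main() {
  // 1. Load the optimized model and parameter files into memory.
  std::string model_buffer = ReadFile("model_file");   // placeholder path
  std::string param_buffer = ReadFile("params_file");  // placeholder path

  // 2. Hand the buffers to MobileConfig instead of calling set_model_dir().
  MobileConfig config;
  config.set_model_buffer(model_buffer.c_str(), model_buffer.size(),
                          param_buffer.c_str(), param_buffer.size());

  // 3. Create the predictor and run as usual.
  auto predictor = CreatePaddlePredictor<MobileConfig>(config);

  auto input = predictor->GetInput(0);
  input->Resize({1, 3, 224, 224});
  auto* data = input->mutable_data<float>();
  for (int i = 0; i < 1 * 3 * 224 * 224; ++i) data[i] = 1.0f;

  predictor->Run();

  auto output = predictor->GetOutput(0);
  // output->data<float>() now points at the inference results.
  return 0;
}
```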