From c90c162e51d8c718eca7c51827034d9ecf1c23aa Mon Sep 17 00:00:00 2001
From: Huihuang Zheng
Date: Mon, 1 Jul 2019 06:36:02 +0000
Subject: [PATCH] Fix Android demo which misses some folders.

---
 paddle/fluid/lite/CMakeLists.txt       | 28 ++++---------
 paddle/fluid/lite/demo/java/README.md  | 28 ++++++++-----------
 .../app/src/main/assets/README.txt     | 13 +++++----
 3 files changed, 24 insertions(+), 45 deletions(-)

diff --git a/paddle/fluid/lite/CMakeLists.txt b/paddle/fluid/lite/CMakeLists.txt
index 470e52e53a..b244df075d 100644
--- a/paddle/fluid/lite/CMakeLists.txt
+++ b/paddle/fluid/lite/CMakeLists.txt
@@ -252,7 +252,6 @@ add_custom_target(publish_inference_cxx_lib ${TARGET}
     COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
     COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/third_party"
     COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/models"
     COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
     COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_full_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
     COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/model_optimize_tool" "${INFER_LITE_PUBLISH_ROOT}/bin"
@@ -261,12 +260,10 @@ add_custom_target(publish_inference_cxx_lib ${TARGET}
     COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/gflags" "${INFER_LITE_PUBLISH_ROOT}/third_party"
     COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_full" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
     COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/Makefile.def" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
-    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/mobilenet_v1" "${INFER_LITE_PUBLISH_ROOT}/demo/models"
     )
 add_dependencies(publish_inference_cxx_lib model_optimize_tool)
 add_dependencies(publish_inference_cxx_lib paddle_code_generator)
 add_dependencies(publish_inference_cxx_lib bundle_full_api)
-add_dependencies(publish_inference_cxx_lib extern_lite_download_mobilenet_v1_tar_gz)
 add_dependencies(publish_inference_lite publish_inference_cxx_lib)


@@ -290,26 +287,11 @@ if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
     COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so" "${INFER_LITE_PUBLISH_ROOT}/java/so"
     COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/android/jni/src" "${INFER_LITE_PUBLISH_ROOT}/java"
     COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/java/android" "${INFER_LITE_PUBLISH_ROOT}/demo/java"
-    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/inception_v4_simple"
-            "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/assets"
-    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/lite_naive_model"
-            "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/assets"
-    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/mobilenet_v1"
-            "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/assets"
-    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/mobilenet_v2_relu"
-            "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/assets"
-    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/resnet50"
-            "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/assets"
-    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"
-            "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm7"
-    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"
-            "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm8"
-    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"
-            "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm64-v8a"
-    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"
-            "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/armeabi-v7a"
-    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"
-            "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/x86"
+    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm7"
+    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm8"
+    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm64-v8a"
+    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/armeabi-v7a"
+    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/x86"
     )
 add_dependencies(publish_java_inference_mobile_lib paddle_lite_jni)
 add_dependencies(publish_inference_lite publish_java_inference_mobile_lib)
diff --git a/paddle/fluid/lite/demo/java/README.md b/paddle/fluid/lite/demo/java/README.md
index f6e748a3c4..317d26ab0b 100644
--- a/paddle/fluid/lite/demo/java/README.md
+++ b/paddle/fluid/lite/demo/java/README.md
@@ -5,12 +5,7 @@
 1. An Android phone that can run Android apps
 2. A development machine with Android Studio installed

-## If you used our cmake build to generate the demo program
-
-You can load `${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor` directly into Android Studio,
-run it, and check the expected output shown at the end of this document.
-
-## Manual build (for testers who need to update the demo models or the .so library)
+## Manual build

 ### Build:
 First, inside the PaddleLite development Docker image, pull the latest PaddleLite code and build the inference library matching your phone's architecture,
@@ -32,7 +27,7 @@ cmake .. \
 -DWITH_TESTING=ON \
 -DARM_TARGET_OS=android -DARM_TARGET_ARCH_ABI=armv8 -DARM_TARGET_LANG=gcc

-make -j 4
+make publish_inference_lite -j 4
 ```

 After make finishes, check that `build.lite.android.arm8.gcc/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so` exists
@@ -43,15 +38,16 @@
 copy it into every architecture folder under `PaddlePredictor/app/src/main/jniLibs`; for example, the arm8 folder must contain this .so file:

 ### Copy the model files used by the demo into the Android app:
-Under the `build.lite.android.arm8.gcc/third_party/install` folder, copy the following model folders into
-the `PaddlePredictor/app/src/main/assets` folder.
-The model folders to copy:
-
-    inception_v4_simple
-    lite_naive_model
-    mobilenet_v1
-    mobilenet_v2_relu
-    resnet50
+Download our five model files and copy them into the `PaddlePredictor/app/src/main/assets` folder.
+The models to copy and their download URLs:
+
+    inception_v4_simple http://paddle-inference-dist.bj.bcebos.com/inception_v4_simple.tar.gz
+    lite_naive_model http://paddle-inference-dist.bj.bcebos.com/lite_naive_model.tar.gz
+    mobilenet_v1 http://paddle-inference-dist.bj.bcebos.com/mobilenet_v1.tar.gz
+    mobilenet_v2_relu http://paddle-inference-dist.bj.bcebos.com/mobilenet_v2_relu.tar.gz
+    resnet50 http://paddle-inference-dist.bj.bcebos.com/resnet50.tar.gz
+
+After downloading, the assets folder must contain the five model folders listed above.

 ## Results of running the Android app
 With the preparation above done, you can build, install, and run the Android demo app. When you run the PaddlePredictor app, it takes roughly 10 seconds,
diff --git a/paddle/fluid/lite/demo/java/android/PaddlePredictor/app/src/main/assets/README.txt b/paddle/fluid/lite/demo/java/android/PaddlePredictor/app/src/main/assets/README.txt
index 2022a37d2a..c46719ac18 100644
--- a/paddle/fluid/lite/demo/java/android/PaddlePredictor/app/src/main/assets/README.txt
+++ b/paddle/fluid/lite/demo/java/android/PaddlePredictor/app/src/main/assets/README.txt
@@ -1,7 +1,8 @@
-After build PaddleLite in your build folder, copy following models in this directory:
+After building PaddleLite in your build folder, download, unzip, and copy the
+following models into this directory:

-inception_v4_simple
-lite_naive_model
-mobilenet_v1
-mobilenet_v2_relu
-resnet50
+    inception_v4_simple http://paddle-inference-dist.bj.bcebos.com/inception_v4_simple.tar.gz
+    lite_naive_model http://paddle-inference-dist.bj.bcebos.com/lite_naive_model.tar.gz
+    mobilenet_v1 http://paddle-inference-dist.bj.bcebos.com/mobilenet_v1.tar.gz
+    mobilenet_v2_relu http://paddle-inference-dist.bj.bcebos.com/mobilenet_v2_relu.tar.gz
+    resnet50 http://paddle-inference-dist.bj.bcebos.com/resnet50.tar.gz
--
GitLab
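Since the demo models are no longer bundled by the build, they have to be fetched from the URLs listed in the two READMEs above. A minimal sketch of that download step, assuming `wget` and `tar` are available and that each archive unpacks into a folder named after the model; adjust `ASSETS` to your checkout.

```sh
# Sketch: download and unpack the five demo models into the app's assets folder.
ASSETS=PaddlePredictor/app/src/main/assets   # adjust to your checkout
mkdir -p "$ASSETS"

for model in inception_v4_simple lite_naive_model mobilenet_v1 mobilenet_v2_relu resnet50; do
    wget -q "http://paddle-inference-dist.bj.bcebos.com/${model}.tar.gz" -O "/tmp/${model}.tar.gz"
    tar -xzf "/tmp/${model}.tar.gz" -C "$ASSETS"
done
```

Afterwards, assets should contain the five model folders listed above, which is what the demo app expects at runtime.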