Commit c90c162e authored by Huihuang Zheng

Fix the Android demo, which is missing some folders.

Parent 90b458d2
@@ -252,7 +252,6 @@ add_custom_target(publish_inference_cxx_lib ${TARGET}
         COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
         COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/third_party"
         COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
-        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/models"
         COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
         COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_full_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
         COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/model_optimize_tool" "${INFER_LITE_PUBLISH_ROOT}/bin"
@@ -261,12 +260,10 @@ add_custom_target(publish_inference_cxx_lib ${TARGET}
         COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/gflags" "${INFER_LITE_PUBLISH_ROOT}/third_party"
         COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_full" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
         COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/Makefile.def" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
-        COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/mobilenet_v1" "${INFER_LITE_PUBLISH_ROOT}/demo/models"
         )
     add_dependencies(publish_inference_cxx_lib model_optimize_tool)
     add_dependencies(publish_inference_cxx_lib paddle_code_generator)
     add_dependencies(publish_inference_cxx_lib bundle_full_api)
-    add_dependencies(publish_inference_cxx_lib extern_lite_download_mobilenet_v1_tar_gz)
     add_dependencies(publish_inference_lite publish_inference_cxx_lib)
@@ -290,26 +287,11 @@ if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
         COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so" "${INFER_LITE_PUBLISH_ROOT}/java/so"
         COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/android/jni/src" "${INFER_LITE_PUBLISH_ROOT}/java"
         COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/java/android" "${INFER_LITE_PUBLISH_ROOT}/demo/java"
-        COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/inception_v4_simple"
-                "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/assets"
-        COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/lite_naive_model"
-                "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/assets"
-        COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/mobilenet_v1"
-                "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/assets"
-        COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/mobilenet_v2_relu"
-                "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/assets"
-        COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/resnet50"
-                "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/assets"
-        COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"
-                "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm7"
-        COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"
-                "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm8"
-        COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"
-                "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm64-v8a"
-        COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"
-                "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/armeabi-v7a"
-        COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"
-                "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/x86"
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm7"
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm8"
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm64-v8a"
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/armeabi-v7a"
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/x86"
         )
     add_dependencies(publish_java_inference_mobile_lib paddle_lite_jni)
     add_dependencies(publish_inference_lite publish_java_inference_mobile_lib)
......
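The hunk above stops copying `libpaddle_lite_jni.so` and the demo models into the published demo and now only creates empty `jniLibs` architecture folders; the updated README below asks you to copy the library in yourself. A minimal shell sketch of that manual step, assuming an armv8/gcc build tree; `BUILD_DIR` and `DEMO_DIR` are placeholder paths for illustration, not names taken from the commit:

```bash
#!/bin/bash
# Sketch: copy the freshly built JNI library into every jniLibs
# architecture folder that the new mkdir commands create.
set -e

BUILD_DIR=build.lite.android.arm8.gcc                 # your build folder (assumed)
DEMO_DIR=demo/java/android/PaddlePredictor            # under the published INFER_LITE_PUBLISH_ROOT (assumed)
JNI_SO="$BUILD_DIR/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so"

for abi in arm7 arm8 arm64-v8a armeabi-v7a x86; do
    cp "$JNI_SO" "$DEMO_DIR/app/src/main/jniLibs/$abi/"
done
```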
@@ -5,12 +5,7 @@
 1. An Android phone that can run Android apps
 2. A development machine with Android Studio
 
-## If you used our cmake to generate the demo program
-you can load `${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor` directly into Android Studio,
-run it, and check the program output shown at the end of this document.
-
-## Manual build (for testers who need to update the demo models or the .so library)
+## Manual build
 
 ### Build:
 First, in the PaddleLite development Docker image, pull the latest PaddleLite code and build the inference library for your phone's architecture,
@@ -32,7 +27,7 @@ cmake .. \
     -DWITH_TESTING=ON \
     -DARM_TARGET_OS=android -DARM_TARGET_ARCH_ABI=armv8 -DARM_TARGET_LANG=gcc
-make -j 4
+make publish_inference_lite -j 4
 ```
 After make finishes, check that `build.lite.android.arm8.gcc/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so` exists
@@ -43,15 +38,16 @@ After make finishes, check that `build.lite.android.arm8.gcc/paddle/fluid/lite/api/a
 and copy it into every architecture folder under `PaddlePredictor/app/src/main/jinLibs/`; for example, the arm8 folder must contain this .so file:
 
 ### Copy the model files used by the demo into the Android app:
-From the `build.lite.android.arm8.gcc/third_party/install` folder, copy the following model folders into
-the `PaddlePredictor/app/src/main/assets` folder.
-Model files to copy:
+Download our 5 model files and copy them into the `PaddlePredictor/app/src/main/assets` folder.
+Model files to copy and their download links:
 
-inception_v4_simple
-lite_naive_model
-mobilenet_v1
-mobilenet_v2_relu
-resnet50
+inception_v4_simple http://paddle-inference-dist.bj.bcebos.com/inception_v4_simple.tar.gz
+lite_naive_model http://paddle-inference-dist.bj.bcebos.com/lite_naive_model.tar.gz
+mobilenet_v1 http://paddle-inference-dist.bj.bcebos.com/mobilenet_v1.tar.gz
+mobilenet_v2_relu http://paddle-inference-dist.bj.bcebos.com/mobilenet_v2_relu.tar.gz
+resnet50 http://paddle-inference-dist.bj.bcebos.com/resnet50.tar.gz
+
+After downloading, the assets folder must contain the five model folders above.
 
 ## Results of running the Android app
 Once the above preparation is done, you can build, install, and run the Android demo. When you run PaddlePredictor, expect to wait about 10 seconds,
......
-After build PaddleLite in your build folder, copy following models in this directory:
+After building PaddleLite in your build folder, download, unzip and copy
+following models to this directory:
 
-inception_v4_simple
-lite_naive_model
-mobilenet_v1
-mobilenet_v2_relu
-resnet50
+inception_v4_simple http://paddle-inference-dist.bj.bcebos.com/inception_v4_simple.tar.gz
+lite_naive_model http://paddle-inference-dist.bj.bcebos.com/lite_naive_model.tar.gz
+mobilenet_v1 http://paddle-inference-dist.bj.bcebos.com/mobilenet_v1.tar.gz
+mobilenet_v2_relu http://paddle-inference-dist.bj.bcebos.com/mobilenet_v2_relu.tar.gz
+resnet50 http://paddle-inference-dist.bj.bcebos.com/resnet50.tar.gz
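Both README diffs above replace the pre-copied model folders with download links. A minimal shell sketch of the download-and-unpack step they describe, using the URLs listed above; the choice of wget and tar, the working directory, and the assumption that each archive unpacks into a folder named after the model are mine rather than part of the commit:

```bash
#!/bin/bash
# Sketch: fetch the five demo models and place them under the Android app's
# assets folder (path taken from the README above).
set -e

ASSETS=PaddlePredictor/app/src/main/assets   # run from the demo/java/android folder (assumed)
mkdir -p "$ASSETS"

for model in inception_v4_simple lite_naive_model mobilenet_v1 mobilenet_v2_relu resnet50; do
    wget "http://paddle-inference-dist.bj.bcebos.com/${model}.tar.gz"
    # Assumes each tar.gz unpacks into a folder named after the model.
    tar -xzf "${model}.tar.gz" -C "$ASSETS"
    rm "${model}.tar.gz"
done
```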