diff --git a/paddle/fluid/lite/CMakeLists.txt b/paddle/fluid/lite/CMakeLists.txt
index b244df075d89cd83f9f4d4cd35fa44dee217872b..d71c33bf3821e8981b709012c5ccb06a22df752f 100644
--- a/paddle/fluid/lite/CMakeLists.txt
+++ b/paddle/fluid/lite/CMakeLists.txt
@@ -222,13 +222,10 @@ add_subdirectory(api)
 add_subdirectory(gen_code)
 add_subdirectory(tools)
 
-
-# Deployment required
-lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz")
-
 if (WITH_TESTING)
     lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "lite_naive_model.tar.gz")
     if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
+        lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz")
         lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v2_relu.tar.gz")
         lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "resnet50.tar.gz")
         lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "inception_v4_simple.tar.gz")
@@ -238,62 +235,82 @@ if (WITH_TESTING)
     endif()
 endif()
 
-# for publish
-set(INFER_LITE_PUBLISH_ROOT "${CMAKE_BINARY_DIR}/inference_lite_lib.${ARM_TARGET_OS}.${ARM_TARGET_ARCH_ABI}")
-message(STATUS "publish inference lib to ${INFER_LITE_PUBLISH_ROOT}")
-
-# The final target for publish lite lib
-add_custom_target(publish_inference_lite)
-
-#cc_library(inference_cxx_lib DEPS cxx_api_lite)
-add_custom_target(publish_inference_cxx_lib ${TARGET}
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/bin"
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/third_party"
-    COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
-    COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
-    COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_full_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
-    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/model_optimize_tool" "${INFER_LITE_PUBLISH_ROOT}/bin"
-    COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/gen_code/paddle_code_generator" "${INFER_LITE_PUBLISH_ROOT}/bin"
-    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/glog" "${INFER_LITE_PUBLISH_ROOT}/third_party"
-    COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/gflags" "${INFER_LITE_PUBLISH_ROOT}/third_party"
-    COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_full" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
-    COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/Makefile.def" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
-    )
-add_dependencies(publish_inference_cxx_lib model_optimize_tool)
-add_dependencies(publish_inference_cxx_lib paddle_code_generator)
-add_dependencies(publish_inference_cxx_lib bundle_full_api)
-add_dependencies(publish_inference_lite publish_inference_cxx_lib)
-
-
-if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
-    #cc_library(inference_mobile_lib DEPS light_api_lite)
-    # copy cpp mobile_light demo/lib
-    add_custom_target(publish_inference_mobile_lib ${TARGET}
+if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND LITE_WITH_ARM)
+    # for publish
+    set(INFER_LITE_PUBLISH_ROOT "${CMAKE_BINARY_DIR}/inference_lite_lib.${ARM_TARGET_OS}.${ARM_TARGET_ARCH_ABI}")
+    if (LITE_WITH_OPENCL)
+        set(INFER_LITE_PUBLISH_ROOT "${INFER_LITE_PUBLISH_ROOT}.opencl")
+    endif(LITE_WITH_OPENCL)
+    message(STATUS "publish inference lib to ${INFER_LITE_PUBLISH_ROOT}")
+
+    # The final target for publish lite lib
+    add_custom_target(publish_inference_lite)
+
+    # add cxx lib
+    add_custom_target(publish_inference_cxx_lib ${TARGET}
         COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
-        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/bin"
+        COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
+        COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/cxx/include"
+        COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_full_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
         COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_light_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib"
+        COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/model_optimize_tool" "${INFER_LITE_PUBLISH_ROOT}/bin"
+        COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/gen_code/paddle_code_generator" "${INFER_LITE_PUBLISH_ROOT}/bin"
+        )
+    add_dependencies(publish_inference_cxx_lib model_optimize_tool)
+    add_dependencies(publish_inference_cxx_lib paddle_code_generator)
+    add_dependencies(publish_inference_cxx_lib bundle_full_api)
+    add_dependencies(publish_inference_cxx_lib bundle_light_api)
+    add_dependencies(publish_inference_lite publish_inference_cxx_lib)
+
+    if (LITE_WITH_JAVA)
+        # add java lib
+        add_custom_target(publish_inference_java_lib ${TARGET}
+            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/java/so"
+            COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so" "${INFER_LITE_PUBLISH_ROOT}/java/so"
+            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/android/jni/src" "${INFER_LITE_PUBLISH_ROOT}/java"
+            )
+        add_dependencies(publish_inference_java_lib paddle_lite_jni)
+        add_dependencies(publish_inference_lite publish_inference_java_lib)
+    endif()
+
+    if ((ARM_TARGET_OS STREQUAL "android") AND (NOT LITE_WITH_OPENCL) AND
+        ((ARM_TARGET_ARCH_ABI STREQUAL armv7) OR (ARM_TARGET_ARCH_ABI STREQUAL armv8)))
+        # copy
+        add_custom_target(publish_inference_android_cxx_demos ${TARGET}
+            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/third_party"
+            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
+            COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/glog" "${INFER_LITE_PUBLISH_ROOT}/third_party"
+            COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/gflags" "${INFER_LITE_PUBLISH_ROOT}/third_party"
+            COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/Makefile.def" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
+            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_full" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
+            COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.${ARM_TARGET_OS}.${ARM_TARGET_ARCH_ABI}" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx/mobile_full/Makefile"
         COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_light" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx"
+            COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.${ARM_TARGET_OS}.${ARM_TARGET_ARCH_ABI}" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx/mobile_light/Makefile"
+            )
+        add_dependencies(publish_inference_android_cxx_demos glog gflags)
+        add_dependencies(publish_inference_cxx_lib publish_inference_android_cxx_demos)
+
+        if (LITE_WITH_JAVA)
+            # copy java mobile_light demo/lib
+            add_custom_target(publish_inference_android_java_demo ${TARGET}
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java"
+                COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/java/android" "${INFER_LITE_PUBLISH_ROOT}/demo/java"
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm7"
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm8"
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm64-v8a"
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/armeabi-v7a"
+                COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/x86"
                 )
-    add_dependencies(publish_inference_mobile_lib paddle_api_light bundle_light_api)
-    add_dependencies(publish_inference_lite publish_inference_mobile_lib)
-
-    if (LITE_WITH_JAVA AND LITE_WITH_ARM)
-        # copy java mobile_light demo/lib
-        add_custom_target(publish_java_inference_mobile_lib ${TARGET}
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/java/so"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java"
-            COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so" "${INFER_LITE_PUBLISH_ROOT}/java/so"
-            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/android/jni/src" "${INFER_LITE_PUBLISH_ROOT}/java"
-            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/java/android" "${INFER_LITE_PUBLISH_ROOT}/demo/java"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm7"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm8"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm64-v8a"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/armeabi-v7a"
-            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/x86"
-            )
-        add_dependencies(publish_java_inference_mobile_lib paddle_lite_jni)
-        add_dependencies(publish_inference_lite publish_java_inference_mobile_lib)
+            add_dependencies(publish_inference_java_lib publish_inference_android_java_demo)
+        endif()
+    endif()
+
+    if (LITE_WITH_OPENCL)
+        add_custom_target(publish_inference_opencl ${TARGET}
+            COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/opencl"
+            COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/opencl/cl_kernel" "${INFER_LITE_PUBLISH_ROOT}/opencl"
+            )
+        add_dependencies(publish_inference_cxx_lib publish_inference_opencl)
     endif()
 endif()
"${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm8" + COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm64-v8a" + COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/armeabi-v7a" + COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/x86" ) - add_dependencies(publish_inference_mobile_lib paddle_api_light bundle_light_api) - add_dependencies(publish_inference_lite publish_inference_mobile_lib) - - if (LITE_WITH_JAVA AND LITE_WITH_ARM) - # copy java mobile_light demo/lib - add_custom_target(publish_java_inference_mobile_lib ${TARGET} - COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/java/so" - COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java" - COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so" "${INFER_LITE_PUBLISH_ROOT}/java/so" - COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/android/jni/src" "${INFER_LITE_PUBLISH_ROOT}/java" - COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/java/android" "${INFER_LITE_PUBLISH_ROOT}/demo/java" - COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm7" - COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm8" - COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/arm64-v8a" - COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/armeabi-v7a" - COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/java/android/PaddlePredictor/app/src/main/jniLibs/x86" - ) - add_dependencies(publish_java_inference_mobile_lib paddle_lite_jni) - add_dependencies(publish_inference_lite publish_java_inference_mobile_lib) + add_dependencies(publish_inference_java_lib publish_inference_android_java_demo) + endif() + endif() + + if (LITE_WITH_OPENCL) + add_custom_target(publish_inference_opencl ${TARGET} + COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/opencl" + COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/opencl/cl_kernel" "${INFER_LITE_PUBLISH_ROOT}/opencl" + ) + add_dependencies(publish_inference_cxx_lib publish_inference_opencl) endif() endif() diff --git a/paddle/fluid/lite/demo/cxx/README.md b/paddle/fluid/lite/demo/cxx/README.md new file mode 100644 index 0000000000000000000000000000000000000000..bc5dd806f921931cadc1d08cfc104a1f61879d3d --- /dev/null +++ b/paddle/fluid/lite/demo/cxx/README.md @@ -0,0 +1,43 @@ +# C++ Android Demo +1. 使用`paddle/fluid/lite/tools/Dockerfile.mobile`生成docker镜像 +2. 运行并进入docker镜像环境,执行`wget http://http://paddle-inference-dist.bj.bcebos.com/inference_lite_lib.android.armv8.tar.gz `下载所需demo环境。(armv7 demo可使用命令`wget http://http://paddle-inference-dist.bj.bcebos.com/inference_lite_lib.android.armv7.tar.gz` 进行下载)。 +3. 解压下载文件`tar zxvf inference_lite_lib.android.armv8.tar.gz ` +4. 
diff --git a/paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.android.armv7 b/paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.android.armv7
new file mode 100644
index 0000000000000000000000000000000000000000..6c9b7413f49956d09c1fe285f5e040e599f17073
--- /dev/null
+++ b/paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.android.armv7
@@ -0,0 +1,22 @@
+ARM_ABI = arm7
+export ARM_ABI
+
+include ../Makefile.def
+
+LITE_ROOT=../../../
+
+CXX_INCLUDES = $(INCLUDES) -I$(LITE_ROOT)/cxx/include
+
+CXX_LIBS = $(THIRD_PARTY_LIBS) $(LITE_ROOT)/cxx/lib/libpaddle_api_full_bundled.a $(SYSTEM_LIBS)
+
+mobilenetv1_full_api: mobilenetv1_full_api.o
+	$(CC) $(SYSROOT_LINK) $(CXXFLAGS_LINK) mobilenetv1_full_api.o -o mobilenetv1_full_api $(CXX_LIBS) $(LDFLAGS)
+
+mobilenetv1_full_api.o: mobilenetv1_full_api.cc
+	$(CC) $(SYSROOT_COMPLILE) $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o mobilenetv1_full_api.o -c mobilenetv1_full_api.cc
+
+
+.PHONY: clean
+clean:
+	rm mobilenetv1_full_api.o
+	rm mobilenetv1_full_api
diff --git a/paddle/fluid/lite/demo/cxx/mobile_full/Makefile b/paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.android.armv8
similarity index 100%
rename from paddle/fluid/lite/demo/cxx/mobile_full/Makefile
rename to paddle/fluid/lite/demo/cxx/makefiles/mobile_full/Makefile.android.armv8
diff --git a/paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.android.armv7 b/paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.android.armv7
new file mode 100644
index 0000000000000000000000000000000000000000..66a6d8f31dc7d35aa92e3c385fe4e68c2b926afc
--- /dev/null
+++ b/paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.android.armv7
@@ -0,0 +1,22 @@
+ARM_ABI = arm7
+export ARM_ABI
+
+include ../Makefile.def
+
+LITE_ROOT=../../../
+
+CXX_INCLUDES = $(INCLUDES) -I$(LITE_ROOT)/cxx/include
+
+CXX_LIBS = $(THIRD_PARTY_LIBS) $(LITE_ROOT)/cxx/lib/libpaddle_api_light_bundled.a $(SYSTEM_LIBS)
+
+mobilenetv1_light_api: mobilenetv1_light_api.o
+	$(CC) $(SYSROOT_LINK) $(CXXFLAGS_LINK) mobilenetv1_light_api.o -o mobilenetv1_light_api $(CXX_LIBS) $(LDFLAGS)
+
+mobilenetv1_light_api.o: mobilenetv1_light_api.cc
+	$(CC) $(SYSROOT_COMPLILE) $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o mobilenetv1_light_api.o -c mobilenetv1_light_api.cc
+
+
+.PHONY: clean
+clean:
+	rm mobilenetv1_light_api.o
+	rm mobilenetv1_light_api
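
The CMake rules earlier in this diff install these files as `demo/cxx/mobile_full/Makefile` and `demo/cxx/mobile_light/Makefile` inside the published package, so the build loop from an unpacked armv7 package is simply (a sketch; paths per the README above):

```shell
cd inference_lite_lib.android.armv7/demo/cxx/mobile_full
make          # compiles mobilenetv1_full_api.cc and links libpaddle_api_full_bundled.a
make clean    # removes mobilenetv1_full_api.o and the binary
```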
diff --git a/paddle/fluid/lite/demo/cxx/mobile_light/Makefile b/paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.android.armv8
similarity index 100%
rename from paddle/fluid/lite/demo/cxx/mobile_light/Makefile
rename to paddle/fluid/lite/demo/cxx/makefiles/mobile_light/Makefile.android.armv8
diff --git a/paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc b/paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc
index 9ce758a6e0d81c2ca0bfe30cc916be2c6fa7e2aa..e1c3c1a15e2c52c415c1815dcadbb61097d92933 100644
--- a/paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc
+++ b/paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc
@@ -36,9 +36,8 @@ void RunModel() {
   // 1. Set CxxConfig
   CxxConfig config;
   config.set_model_dir(FLAGS_model_dir);
-  config.set_preferred_place(Place{TARGET(kX86), PRECISION(kFloat)});
-  config.set_valid_places({Place{TARGET(kX86), PRECISION(kFloat)},
-                           Place{TARGET(kARM), PRECISION(kFloat)}});
+  config.set_preferred_place(Place{TARGET(kARM), PRECISION(kFloat)});
+  config.set_valid_places({Place{TARGET(kARM), PRECISION(kFloat)}});
 
   // 2. Create PaddlePredictor by CxxConfig
   std::shared_ptr<PaddlePredictor> predictor =
diff --git a/paddle/fluid/lite/tools/build.sh b/paddle/fluid/lite/tools/build.sh
index c46a8cf698e7030e69eb65029853f880ebc7a186..912ff3fb5d0b9db44672c893b7cf8c9a34563cbe 100755
--- a/paddle/fluid/lite/tools/build.sh
+++ b/paddle/fluid/lite/tools/build.sh
@@ -130,7 +130,7 @@ function build {
     make lite_compile_deps -j$NUM_CORES_FOR_COMPILE
 
     # test publish inference lib
-    make publish_inference_lite
+    # make publish_inference_lite
 }
 
 # It will eagerly test all lite related unittests.
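
With the publish step commented out of `build.sh`, the package is no longer produced as a side effect of `lite_compile_deps`; assuming an already-configured ARM build directory, it can still be generated manually (target name taken from the CMakeLists.txt changes above):

```shell
# Run from the CMake build directory after `make lite_compile_deps` succeeds.
make publish_inference_lite -j$(nproc)
# The package appears under build/inference_lite_lib.<os>.<abi>[.opencl]/
```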