From ec0ea5ca8f9915c6fae36f59a09d4a6a4004f269 Mon Sep 17 00:00:00 2001 From: sangoly Date: Fri, 28 Jun 2019 10:57:05 +0000 Subject: [PATCH] complete deployment & add cxx demo --- paddle/fluid/lite/CMakeLists.txt | 33 ++++++-- paddle/fluid/lite/api/CMakeLists.txt | 2 +- paddle/fluid/lite/demo/cxx/Makefile.def | 37 +++++++++ .../fluid/lite/demo/cxx/mobile_full/Makefile | 22 ++++++ .../cxx/mobile_full/mobilenetv1_full_api.cc | 75 +++++++++++++++++++ .../fluid/lite/demo/cxx/mobile_light/Makefile | 22 ++++++ .../cxx/mobile_light/mobilenetv1_light_api.cc | 68 +++++++++++++++++ 7 files changed, 251 insertions(+), 8 deletions(-) create mode 100644 paddle/fluid/lite/demo/cxx/Makefile.def create mode 100644 paddle/fluid/lite/demo/cxx/mobile_full/Makefile create mode 100644 paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc create mode 100644 paddle/fluid/lite/demo/cxx/mobile_light/Makefile create mode 100644 paddle/fluid/lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc diff --git a/paddle/fluid/lite/CMakeLists.txt b/paddle/fluid/lite/CMakeLists.txt index a484856991..dbdbca1e04 100644 --- a/paddle/fluid/lite/CMakeLists.txt +++ b/paddle/fluid/lite/CMakeLists.txt @@ -221,10 +221,12 @@ add_subdirectory(gen_code) add_subdirectory(tools) +# Deployment required +lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz") + if (WITH_TESTING) lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "lite_naive_model.tar.gz") if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK) - lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz") lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v2_relu.tar.gz") lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "resnet50.tar.gz") lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "inception_v4_simple.tar.gz") @@ -246,27 +248,44 @@ add_custom_target(publish_inference_cxx_lib ${TARGET} COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/lib" COMMAND mkdir -p 
"${INFER_LITE_PUBLISH_ROOT}/bin" COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/cxx/include" + COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/third_party" + COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/cxx" + COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/demo/models" COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/cxx/include" COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_full_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib" COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/model_optimize_tool" "${INFER_LITE_PUBLISH_ROOT}/bin" COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/gen_code/paddle_code_generator" "${INFER_LITE_PUBLISH_ROOT}/bin" + COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/glog" "${INFER_LITE_PUBLISH_ROOT}/third_party" + COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/gflags" "${INFER_LITE_PUBLISH_ROOT}/third_party" + COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_full" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx" + COMMAND cp "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/Makefile.def" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx" + COMMAND cp -r "${CMAKE_BINARY_DIR}/third_party/install/mobilenet_v1" "${INFER_LITE_PUBLISH_ROOT}/demo/models" ) add_dependencies(publish_inference_cxx_lib model_optimize_tool) add_dependencies(publish_inference_cxx_lib paddle_code_generator) add_dependencies(publish_inference_cxx_lib bundle_full_api) +add_dependencies(publish_inference_cxx_lib extern_lite_download_mobilenet_v1_tar_gz) add_dependencies(publish_inference_lite publish_inference_cxx_lib) if (LITE_WITH_LIGHT_WEIGHT_FRAMEWORK) #cc_library(inference_mobile_lib DEPS light_api_lite) - + # copy cpp mobile_light demo/lib add_custom_target(publish_inference_mobile_lib ${TARGET} - COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/mobile/lib" - COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/mobile/bin" - COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/mobile/include" - COMMAND cp 
"${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/paddle_*.h" "${INFER_LITE_PUBLISH_ROOT}/mobile/include" - COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_light_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/mobile/lib" + COMMAND cp "${CMAKE_BINARY_DIR}/libpaddle_api_light_bundled.a" "${INFER_LITE_PUBLISH_ROOT}/cxx/lib" + COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/demo/cxx/mobile_light" "${INFER_LITE_PUBLISH_ROOT}/demo/cxx" ) add_dependencies(publish_inference_mobile_lib paddle_api_light bundle_light_api) add_dependencies(publish_inference_lite publish_inference_mobile_lib) + + if (LITE_WITH_JAVA AND LITE_WITH_ARM) + # copy java mobile_light demo/lib + add_custom_target(publish_java_inference_mobile_lib ${TARGET} + COMMAND mkdir -p "${INFER_LITE_PUBLISH_ROOT}/java/so" + COMMAND cp "${CMAKE_BINARY_DIR}/paddle/fluid/lite/api/android/jni/libpaddle_lite_jni.so" "${INFER_LITE_PUBLISH_ROOT}/java/so" + COMMAND cp -r "${CMAKE_SOURCE_DIR}/paddle/fluid/lite/api/android/jni/src" "${INFER_LITE_PUBLISH_ROOT}/java" + ) + add_dependencies(publish_java_inference_mobile_lib paddle_lite_jni) + add_dependencies(publish_inference_lite publish_java_inference_mobile_lib) + endif() endif() diff --git a/paddle/fluid/lite/api/CMakeLists.txt b/paddle/fluid/lite/api/CMakeLists.txt index bfb238d4d7..3d106a1d86 100644 --- a/paddle/fluid/lite/api/CMakeLists.txt +++ b/paddle/fluid/lite/api/CMakeLists.txt @@ -113,7 +113,7 @@ lite_cc_library(paddle_api_full SRCS cxx_api_impl.cc DEPS cxx_api_lite paddle_ap ARM_DEPS ${arm_kernels} CL_DEPS ${opencl_kernels}) # The final inference library for just MobileConfig. 
-lite_cc_library(paddle_api_light SRCS light_api_impl.cc DEPS light_api_lite paddle_api_lite) +lite_cc_library(paddle_api_light SRCS light_api_impl.cc DEPS light_api_lite paddle_api_lite mir_passes) bundle_static_library(paddle_api_full paddle_api_full_bundled bundle_full_api) bundle_static_library(paddle_api_light paddle_api_light_bundled bundle_light_api) diff --git a/paddle/fluid/lite/demo/cxx/Makefile.def b/paddle/fluid/lite/demo/cxx/Makefile.def new file mode 100644 index 0000000000..a5a0b4e221 --- /dev/null +++ b/paddle/fluid/lite/demo/cxx/Makefile.def @@ -0,0 +1,37 @@ +CXX_DEFINES = -DARM_WITH_OMP -DHPPL_STUB_FUNC -DLITE_WITH_ARM -DLITE_WITH_LIGHT_WEIGHT_FRAMEWORK \ + -DLITE_WITH_LINUX -DPADDLE_DISABLE_PROFILER -DPADDLE_NO_PYTHON -DPADDLE_WITH_TESTING +LDFLAGS = -latomic -pthread -ldl + +SYSROOT_COMPLILE = --sysroot=/opt/android-ndk-r17c/sysroot + +THIRD_PARTY_LIBS = ../../../third_party/glog/lib/libglog.a \ + ../../../third_party/gflags/lib/libgflags.a + +SYSTEM_INCLUDES = -I/opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/include \ + -I/opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++abi/include \ + -I/opt/android-ndk-r17c/sources/android/support/include \ + -I/opt/android-ndk-r17c/sysroot/usr/include \ + +THIRD_PARTY_INCLUDES = -I../../../third_party/gflags/include \ + -I../../../third_party/glog/include + +ifeq ($(ARM_ABI), arm8) + CC = /opt/android-ndk-r17c/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/aarch64-linux-android-g++ + CXX_FLAGS = -funwind-tables -no-canonical-prefixes -D__ANDROID_API__=22 -fexceptions -frtti -std=c++11 -fopenmp -O3 -DNDEBUG -fPIE + CXXFLAGS_LINK = $(CXX_FLAGS) -pie -Wl,--gc-sections + SYSROOT_LINK = --sysroot=/opt/android-ndk-r17c/platforms/android-24/arch-arm64 + SYSTEM_LIBS = /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/arm64-v8a/libc++_static.a \ + /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/arm64-v8a/libc++abi.a + INCLUDES = $(SYSTEM_INCLUDES) 
-I/opt/android-ndk-r17c/sysroot/usr/include/aarch64-linux-android $(THIRD_PARTY_INCLUDES) +else + CC = /opt/android-ndk-r17c/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/arm-linux-androideabi-g++ + CXX_FLAGS = -march=armv7-a -mthumb -mfpu=neon -mfloat-abi=softfp -funwind-tables -no-canonical-prefixes \ + -D__ANDROID_API__=22 -fexceptions -frtti -std=c++11 -fopenmp -O3 -DNDEBUG -fPIE + CXXFLAGS_LINK = $(CXX_FLAGS) -pie -Wl,--fix-cortex-a8 -Wl,--gc-sections -Wl,-z,nocopyreloc + SYSROOT_LINK = --sysroot=/opt/android-ndk-r17c/platforms/android-22/arch-arm + SYSTEM_LIBS = /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a/libc++_static.a \ + /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a/libc++abi.a \ + /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a/libandroid_support.a \ + /opt/android-ndk-r17c/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a/libunwind.a + INCLUDES = $(SYSTEM_INCLUDES) -I/opt/android-ndk-r17c/sysroot/usr/include/arm-linux-androideabi $(THIRD_PARTY_INCLUDES) +endif diff --git a/paddle/fluid/lite/demo/cxx/mobile_full/Makefile b/paddle/fluid/lite/demo/cxx/mobile_full/Makefile new file mode 100644 index 0000000000..7735f74d10 --- /dev/null +++ b/paddle/fluid/lite/demo/cxx/mobile_full/Makefile @@ -0,0 +1,22 @@ +ARM_ABI = arm8 +export ARM_ABI + +include ../Makefile.def + +LITE_ROOT=../../../ + +CXX_INCLUDES = $(INCLUDES) -I$(LITE_ROOT)/cxx/include + +CXX_LIBS = $(THIRD_PARTY_LIBS) $(LITE_ROOT)/cxx/lib/libpaddle_api_full_bundled.a $(SYSTEM_LIBS) + +mobilenetv1_full_api: mobilenetv1_full_api.o + $(CC) $(SYSROOT_LINK) $(CXXFLAGS_LINK) mobilenetv1_full_api.o -o mobilenetv1_full_api $(CXX_LIBS) $(LDFLAGS) + +mobilenetv1_full_api.o: mobilenetv1_full_api.cc + $(CC) $(SYSROOT_COMPLILE) $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o mobilenetv1_full_api.o -c mobilenetv1_full_api.cc + + +.PHONY: clean +clean: + rm mobilenetv1_full_api.o + rm mobilenetv1_full_api diff --git 
a/paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc b/paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc new file mode 100644 index 0000000000..9ce758a6e0 --- /dev/null +++ b/paddle/fluid/lite/demo/cxx/mobile_full/mobilenetv1_full_api.cc @@ -0,0 +1,75 @@ +// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include +#include +#include "paddle_api.h" // NOLINT +#include "paddle_use_kernels.h" // NOLINT +#include "paddle_use_ops.h" // NOLINT +#include "paddle_use_passes.h" // NOLINT + +using namespace paddle::lite_api; // NOLINT + +DEFINE_string(model_dir, "", "Model dir path."); +DEFINE_string(optimized_model_dir, "", "Optimized model dir."); + +int64_t ShapeProduction(const shape_t& shape) { + int64_t res = 1; + for (auto i : shape) res *= i; + return res; +} + +void RunModel() { + // 1. Set CxxConfig + CxxConfig config; + config.set_model_dir(FLAGS_model_dir); + config.set_preferred_place(Place{TARGET(kX86), PRECISION(kFloat)}); + config.set_valid_places({Place{TARGET(kX86), PRECISION(kFloat)}, + Place{TARGET(kARM), PRECISION(kFloat)}}); + + // 2. Create PaddlePredictor by CxxConfig + std::shared_ptr predictor = + CreatePaddlePredictor(config); + + // 3. 
Prepare input data + std::unique_ptr input_tensor(std::move(predictor->GetInput(0))); + input_tensor->Resize(shape_t({1, 3, 224, 224})); + auto* data = input_tensor->mutable_data(); + for (int i = 0; i < ShapeProduction(input_tensor->shape()); ++i) { + data[i] = 1; + } + + // 4. Run predictor + predictor->Run(); + + // 5. Get output + std::unique_ptr output_tensor( + std::move(predictor->GetOutput(0))); + LOG(INFO) << "Ouput dim: " << output_tensor->shape()[1] << std::endl; + for (int i = 0; i < ShapeProduction(output_tensor->shape()); i += 100) { + LOG(INFO) << "Output[" << i << "]: " << output_tensor->data()[i] + << std::endl; + } + + // 6. Save optimized model + predictor->SaveOptimizedModel(FLAGS_optimized_model_dir); +} + +int main(int argc, char** argv) { + google::ParseCommandLineFlags(&argc, &argv, true); + RunModel(); + return 0; +} diff --git a/paddle/fluid/lite/demo/cxx/mobile_light/Makefile b/paddle/fluid/lite/demo/cxx/mobile_light/Makefile new file mode 100644 index 0000000000..91b281c49c --- /dev/null +++ b/paddle/fluid/lite/demo/cxx/mobile_light/Makefile @@ -0,0 +1,22 @@ +ARM_ABI = arm8 +export ARM_ABI + +include ../Makefile.def + +LITE_ROOT=../../../ + +CXX_INCLUDES = $(INCLUDES) -I$(LITE_ROOT)/cxx/include + +CXX_LIBS = $(THIRD_PARTY_LIBS) $(LITE_ROOT)/cxx/lib/libpaddle_api_light_bundled.a $(SYSTEM_LIBS) + +mobilenetv1_light_api: mobilenetv1_light_api.o + $(CC) $(SYSROOT_LINK) $(CXXFLAGS_LINK) mobilenetv1_light_api.o -o mobilenetv1_light_api $(CXX_LIBS) $(LDFLAGS) + +mobilenetv1_light_api.o: mobilenetv1_light_api.cc + $(CC) $(SYSROOT_COMPLILE) $(CXX_DEFINES) $(CXX_INCLUDES) $(CXX_FLAGS) -o mobilenetv1_light_api.o -c mobilenetv1_light_api.cc + + +.PHONY: clean +clean: + rm mobilenetv1_light_api.o + rm mobilenetv1_light_api diff --git a/paddle/fluid/lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc b/paddle/fluid/lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc new file mode 100644 index 0000000000..aa71b7cc2f --- /dev/null +++ 
b/paddle/fluid/lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc @@ -0,0 +1,68 @@ +// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include +#include +#include "paddle_api.h" // NOLINT +#include "paddle_use_kernels.h" // NOLINT +#include "paddle_use_ops.h" // NOLINT +#include "paddle_use_passes.h" // NOLINT + +using namespace paddle::lite_api; // NOLINT + +DEFINE_string(model_dir, "", "Model dir path."); + +int64_t ShapeProduction(const shape_t& shape) { + int64_t res = 1; + for (auto i : shape) res *= i; + return res; +} + +void RunModel() { + // 1. Set MobileConfig + MobileConfig config; + config.set_model_dir(FLAGS_model_dir); + + // 2. Create PaddlePredictor by MobileConfig + std::shared_ptr predictor = + CreatePaddlePredictor(config); + + // 3. Prepare input data + std::unique_ptr input_tensor(std::move(predictor->GetInput(0))); + input_tensor->Resize({1, 3, 224, 224}); + auto* data = input_tensor->mutable_data(); + for (int i = 0; i < ShapeProduction(input_tensor->shape()); ++i) { + data[i] = 1; + } + + // 4. Run predictor + predictor->Run(); + + // 5. 
Get output + std::unique_ptr output_tensor( + std::move(predictor->GetOutput(0))); + LOG(INFO) << "Ouput dim: " << output_tensor->shape()[1] << std::endl; + for (int i = 0; i < ShapeProduction(output_tensor->shape()); i += 100) { + LOG(INFO) << "Output[" << i << "]: " << output_tensor->data()[i] + << std::endl; + } +} + +int main(int argc, char** argv) { + google::ParseCommandLineFlags(&argc, &argv, true); + RunModel(); + return 0; +} -- GitLab