diff --git a/paddle/fluid/lite/CMakeLists.txt b/paddle/fluid/lite/CMakeLists.txt index 542a70a5fe1aa6abc303d4c24eff67c0035184b8..7b6dd0703d410ad228a11e60dda7ceea9f5a7983 100644 --- a/paddle/fluid/lite/CMakeLists.txt +++ b/paddle/fluid/lite/CMakeLists.txt @@ -11,7 +11,7 @@ message(STATUS "LITE_WITH_PROFILE:\t${LITE_WITH_PROFILE}") set(LITE_MODEL_DIR "${THIRD_PARTY_PATH}/install") -set(LITE_ON_MOBILE LITE_WITH_LIGHT_WEIGHT_FRAMEWORK) +set(LITE_ON_MOBILE ${LITE_WITH_LIGHT_WEIGHT_FRAMEWORK}) set(LITE_URL "http://paddle-inference-dist.bj.bcebos.com" CACHE STRING "inference download url") @@ -189,4 +189,7 @@ add_subdirectory(gen_code) if (WITH_TESTING) lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "lite_naive_model.tar.gz") + if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK) + lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v2_relu.tar.gz") + endif() endif() diff --git a/paddle/fluid/lite/api/CMakeLists.txt b/paddle/fluid/lite/api/CMakeLists.txt index 5c70a42d1738e6305a8b3f630820715300c06518..52961d0cc49187fa79e55942a1abaceed9dc2d19 100644 --- a/paddle/fluid/lite/api/CMakeLists.txt +++ b/paddle/fluid/lite/api/CMakeLists.txt @@ -33,16 +33,26 @@ include(ExternalProject) set(LITE_DEMO_INSTALL_DIR "${THIRD_PARTY_PATH}/inference_demo" CACHE STRING "A path setting inference demo download directories.") -if((NOT LITE_WITH_LIGHT_WEIGHT_FRAMEWORK) AND WITH_TESTING) +if(WITH_TESTING) + set(eval_model_dir "") + set(test_cxx_api_deps cxx_api_lite mir_passes ${ops_lite} ${host_kernels} ${x86_kernels}) + + if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK) + set(eval_model_dir ${LITE_MODEL_DIR}/mobilenet_v2_relu) + set(test_cxx_api_deps ${test_cxx_api_deps} ${arm_kernels}) + endif() lite_cc_test(test_cxx_api_lite SRCS cxx_api_test.cc - DEPS cxx_api_lite mir_passes - ${ops_lite} ${host_kernels} ${x86_kernels} + DEPS ${test_cxx_api_deps} ARGS --model_dir=${LITE_MODEL_DIR}/lite_naive_model - --optimized_model=${LITE_MODEL_DIR}/lite_naive_model_opt SERIAL) + 
--optimized_model=${LITE_MODEL_DIR}/lite_naive_model_opt + --eval_model_dir=${eval_model_dir} SERIAL) + add_dependencies(test_cxx_api_lite extern_lite_download_lite_naive_model_tar_gz) + if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK) + add_dependencies(test_cxx_api_lite extern_lite_download_mobilenet_v2_relu_tar_gz) + endif() endif() - # These tests needs CLI arguments, and is not supported in ARM CI. # TODO(Superjomn) support latter. if(NOT LITE_ON_MOBILE) diff --git a/paddle/fluid/lite/api/cxx_api_test.cc b/paddle/fluid/lite/api/cxx_api_test.cc index 11ae1f9ca804402cd3d7a9bb3e0c44217f61cff5..1b337c06a981447fd8b8f87905ce5d3d10c56d8c 100644 --- a/paddle/fluid/lite/api/cxx_api_test.cc +++ b/paddle/fluid/lite/api/cxx_api_test.cc @@ -19,7 +19,6 @@ #include "paddle/fluid/lite/api/lite_api_test_helper.h" #include "paddle/fluid/lite/core/compatible_tensor.h" #include "paddle/fluid/lite/core/mir/use_passes.h" -#include "paddle/fluid/lite/core/mir/use_passes.h" #include "paddle/fluid/lite/core/op_registry.h" #include "paddle/fluid/lite/kernels/use_kernels.h" #include "paddle/fluid/lite/operators/use_ops.h" @@ -28,9 +27,13 @@ DEFINE_string(startup_program_path, "", ""); DEFINE_string(main_program_path, "", ""); +// for eval +DEFINE_string(eval_model_dir, "", ""); + namespace paddle { namespace lite { +#ifndef LITE_WITH_LIGHT_WEIGHT_FRAMEWORK TEST(CXXApi, test) { const lite::Tensor* out = RunHvyModel(); LOG(INFO) << out << " memory size " << out->data_size(); @@ -41,7 +44,6 @@ TEST(CXXApi, test) { // LOG(INFO) << "out " << *out; } -#ifndef LITE_WITH_LIGHT_WEIGHT_FRAMEWORK TEST(CXXApi, save_model) { lite::ExecutorLite predictor; std::vector<Place> valid_places({Place{TARGET(kHost), PRECISION(kFloat)}, @@ -52,9 +54,7 @@ TEST(CXXApi, save_model) { LOG(INFO) << "Save optimized model to " << FLAGS_optimized_model; predictor.SaveModel(FLAGS_optimized_model); } -#endif // LITE_WITH_LIGHT_WEIGHT_FRAMEWORK -#ifndef LITE_WITH_LIGHT_WEIGHT_FRAMEWORK /*TEST(CXXTrainer, train) { Place 
prefer_place({TARGET(kHost), PRECISION(kFloat), DATALAYOUT(kNCHW)}); std::vector<Place> valid_places({prefer_place}); @@ -88,5 +88,37 @@ TEST(CXXApi, save_model) { }*/ #endif // LITE_WITH_LIGHT_WEIGHT_FRAMEWORK +#ifdef LITE_WITH_ARM +TEST(CXXApi, eval) { + DeviceInfo::Init(); + lite::ExecutorLite predictor; + std::vector<Place> valid_places({Place{TARGET(kHost), PRECISION(kFloat)}, + Place{TARGET(kARM), PRECISION(kFloat)}}); + + predictor.Build(FLAGS_eval_model_dir, Place{TARGET(kARM), PRECISION(kFloat)}, + valid_places); + + auto* input_tensor = predictor.GetInput(0); + input_tensor->Resize(DDim(std::vector<int64_t>({1, 3, 224, 224}))); + auto* data = input_tensor->mutable_data<float>(); + for (int i = 0; i < input_tensor->dims().production(); i++) { + data[i] = 1; + } + + predictor.Run(); + + auto* out = predictor.GetOutput(0); + std::vector<float> results({0.00097802, 0.00099822, 0.00103093, 0.00100121, + 0.00098268, 0.00104065, 0.00099962, 0.00095181, + 0.00099694, 0.00099406}); + for (int i = 0; i < results.size(); ++i) { + EXPECT_NEAR(out->data<float>()[i], results[i], 1e-5); + } + ASSERT_EQ(out->dims().size(), 2); + ASSERT_EQ(out->dims()[0], 1); + ASSERT_EQ(out->dims()[1], 1000); +} +#endif + } // namespace lite } // namespace paddle diff --git a/paddle/fluid/lite/kernels/use_kernels.h b/paddle/fluid/lite/kernels/use_kernels.h index f7d17374010ff6564bfe3ffd085f130bd61d2a16..2c06092e3856467c031abaf36c63bd61aef65bae 100644 --- a/paddle/fluid/lite/kernels/use_kernels.h +++ b/paddle/fluid/lite/kernels/use_kernels.h @@ -36,6 +36,19 @@ USE_LITE_KERNEL(depthwise_conv2d, kX86, kFloat, kNCHW, def); USE_LITE_KERNEL(pool2d, kX86, kFloat, kNCHW, def); #endif +#ifdef LITE_WITH_ARM +USE_LITE_KERNEL(fc, kARM, kFloat, kNCHW, def); +USE_LITE_KERNEL(mul, kARM, kFloat, kNCHW, def); +USE_LITE_KERNEL(scale, kARM, kFloat, kNCHW, def); +USE_LITE_KERNEL(conv2d, kARM, kFloat, kNCHW, def); +USE_LITE_KERNEL(batch_norm, kARM, kFloat, kNCHW, def); +USE_LITE_KERNEL(relu, kARM, kFloat, kNCHW, def); 
+USE_LITE_KERNEL(depthwise_conv2d, kARM, kFloat, kNCHW, def); +USE_LITE_KERNEL(pool2d, kARM, kFloat, kNCHW, def); +USE_LITE_KERNEL(elementwise_add, kARM, kFloat, kNCHW, def); +USE_LITE_KERNEL(softmax, kARM, kFloat, kNCHW, def); +#endif + #ifdef LITE_WITH_CUDA USE_LITE_KERNEL(mul, kCUDA, kFloat, kNCHW, def); USE_LITE_KERNEL(io_copy, kCUDA, kAny, kAny, host_to_device); diff --git a/paddle/fluid/lite/operators/use_ops.h b/paddle/fluid/lite/operators/use_ops.h index b747a4659b183faf73c0242eff005b1af2ec6180..8f7599042b5538a9bff248a84c5f3f3980c9500b 100644 --- a/paddle/fluid/lite/operators/use_ops.h +++ b/paddle/fluid/lite/operators/use_ops.h @@ -33,3 +33,4 @@ USE_LITE_OP(concat) USE_LITE_OP(conv2d) USE_LITE_OP(depthwise_conv2d) USE_LITE_OP(pool2d) +USE_LITE_OP(batch_norm) diff --git a/paddle/fluid/lite/tools/build.sh b/paddle/fluid/lite/tools/build.sh index 44c546e1de35447f258d051b5ddb73e2ed1b86cf..29fa9d9ad0b6c0b46e41ad12cee615bee4928bcc 100755 --- a/paddle/fluid/lite/tools/build.sh +++ b/paddle/fluid/lite/tools/build.sh @@ -114,14 +114,47 @@ function test_arm_android { echo "test name: ${test_name}" adb_work_dir="/data/local/tmp" - skip_list="test_model_parser_lite" # add more with space - [[ $skip_list =~ (^|[[:space:]])$test_name($|[[:space:]]) ]] && continue || echo 'skip $test_name' + + skip_list=("test_model_parser_lite" "test_cxx_api_lite") + for skip_name in ${skip_list[@]} ; do + [[ $skip_name =~ (^|[[:space:]])$test_name($|[[:space:]]) ]] && echo "skip $test_name" && return + done + testpath=$(find ./paddle/fluid -name ${test_name}) adb -s emulator-${port} push ${testpath} ${adb_work_dir} adb -s emulator-${port} shell chmod +x "${adb_work_dir}/${test_name}" adb -s emulator-${port} shell "./${adb_work_dir}/${test_name}" } +function test_arm_model { + local test_name=$1 + local port=$2 + local model_dir=$3 + + if [[ "${test_name}x" == "x" ]]; then + echo "test_name can not be empty" + exit 1 + fi + if [[ "${port}x" == "x" ]]; then + echo "Port can not be empty" 
+ exit 1 + fi + if [[ "${model_dir}x" == "x" ]]; then + echo "Model dir can not be empty" + exit 1 + fi + + echo "test name: ${test_name}" + adb_work_dir="/data/local/tmp" + + testpath=$(find ./paddle/fluid -name ${test_name}) + adb -s emulator-${port} push ${model_dir} ${adb_work_dir} + adb -s emulator-${port} push ${testpath} ${adb_work_dir} + adb -s emulator-${port} shell chmod +x "${adb_work_dir}/${test_name}" + local adb_model_path="./${adb_work_dir}/`basename ${model_dir}`" + adb -s emulator-${port} shell "./${adb_work_dir}/${test_name} --eval_model_dir=$adb_model_path" +} + # Build the code and run lite arm tests. This is executed in the CI system. function build_test_arm { # 1. Build goes first @@ -196,6 +229,8 @@ function build_test_arm { for _test in $(cat $TESTS_FILE); do test_arm_android $_test $port done + # TODO(sangoly): refine this + test_arm_model "test_cxx_api_lite" $port "./third_party/install/mobilenet_v2_relu" done done