diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 8228ce27c59b9649d1a39838104ced57e07a7179..9276ed4dee42e43db7da79abc47c615031a18e11 100755
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -87,7 +87,6 @@ build:mobile_android:
   dependencies:
     - build:server
 
-
 build:mobile_armlinux:
   tags:
     - lite
@@ -108,16 +107,15 @@ build:mobile_armlinux:
   dependencies:
     - build:server
 
-
-build:mobile_model_resnet50:
+build:mobile_model_mobilenetv1:
   tags:
     - lite
   stage: build_mobile
   image: $MOBILE_LITE_DOCKER_IMAGE
 
   script:
-    - export CCACHE_DIR=$CI_PROJECT_DIR/build_mobile_model_resnet50
-    - ./paddle/fluid/lite/tools/build.sh build_test_arm_model_resnet50
+    - export CCACHE_DIR=$CI_PROJECT_DIR/build_mobile_model_mobilenetv1
+    - ./paddle/fluid/lite/tools/build.sh build_test_arm_model_mobilenetv1
 
   dependencies:
     - build:server
@@ -127,18 +125,17 @@ build:mobile_model_resnet50:
     paths:
       - build.lite.android.armv8.gcc
      - ~/.ccache
-      - $CI_PROJECT_DIR/build_mobile_model_resnet50
-
+      - $CI_PROJECT_DIR/build_mobile_model_mobilenetv1
 
-build:mobile_model_mobilenetv1:
+build:mobile_model_mobilenetv2:
   tags:
     - lite
   stage: build_mobile
   image: $MOBILE_LITE_DOCKER_IMAGE
 
   script:
-    - export CCACHE_DIR=$CI_PROJECT_DIR/build_mobile_model_mobilenetv1
-    - ./paddle/fluid/lite/tools/build.sh build_test_arm_model_mobilenetv1
+    - export CCACHE_DIR=$CI_PROJECT_DIR/build_mobile_model_mobilenetv2
+    - ./paddle/fluid/lite/tools/build.sh build_test_arm_model_mobilenetv2
 
   dependencies:
     - build:server
@@ -148,17 +145,17 @@ build:mobile_model_mobilenetv1:
     paths:
       - build.lite.android.armv8.gcc
      - ~/.ccache
-      - $CI_PROJECT_DIR/build_mobile_model_mobilenetv1
+      - $CI_PROJECT_DIR/build_mobile_model_mobilenetv2
 
-build:mobile_model_mobilenetv2:
+build:mobile_model_resnet50:
   tags:
     - lite
   stage: build_mobile
   image: $MOBILE_LITE_DOCKER_IMAGE
 
   script:
-    - export CCACHE_DIR=$CI_PROJECT_DIR/build_mobile_model_mobilenetv2
-    - ./paddle/fluid/lite/tools/build.sh build_test_arm_model_mobilenetv2
+    - export CCACHE_DIR=$CI_PROJECT_DIR/build_mobile_model_resnet50
+    - ./paddle/fluid/lite/tools/build.sh build_test_arm_model_resnet50
 
   dependencies:
     - build:server
@@ -168,30 +165,24 @@ build:mobile_model_mobilenetv2:
     paths:
       - build.lite.android.armv8.gcc
      - ~/.ccache
-      - $CI_PROJECT_DIR/build_mobile_model_mobilenetv2
+      - $CI_PROJECT_DIR/build_mobile_model_resnet50
+
+build:mobile_model_inceptionv4:
+  tags:
+    - lite
+  stage: build_mobile
+  image: $MOBILE_LITE_DOCKER_IMAGE
 
-#build:mobile_model_inceptionv4:
-#  tags:
-#    - lite
-#  stage: build_mobile
-#  image: $MOBILE_LITE_DOCKER_IMAGE
-#  cache:
-#    key: mobile_thirdparty
-#    paths:
-#      - $MOBILE_LITE_CACHE0
-#      - $MOBILE_LITE_CACHE1
-#      - ~/.ccache
-#  script:
-#    - export CCACHE_DIR=$CI_PROJECT_DIR/build_mobile_model_inceptionv4
-#    - ./paddle/fluid/lite/tools/build.sh build_test_arm_model_inceptionv4
-#
-#  dependencies:
-#    - build:server
-#
-#  cache:
-#    key: mobile_thirdparty
-#    paths:
-#      - $MOBILE_LITE_CACHE0
-#      - $MOBILE_LITE_CACHE1
-#      - ~/.ccache
-#      - $CI_PROJECT_DIR/build_mobile_model_inceptionv4
+  script:
+    - export CCACHE_DIR=$CI_PROJECT_DIR/build_mobile_model_inceptionv4
+    - ./paddle/fluid/lite/tools/build.sh build_test_arm_model_inceptionv4
+
+  dependencies:
+    - build:server
+
+  cache:
+    key: mobile_thirdparty
+    paths:
+      - build.lite.android.armv8.gcc
+      - ~/.ccache
+      - $CI_PROJECT_DIR/build_mobile_model_inceptionv4
diff --git a/paddle/fluid/lite/CMakeLists.txt b/paddle/fluid/lite/CMakeLists.txt
index 0819c81773a77aac9d7e2c71c9ca416f0786a952..05a1f339cd5f29b0807c5101d0b398d96950819a 100644
--- a/paddle/fluid/lite/CMakeLists.txt
+++ b/paddle/fluid/lite/CMakeLists.txt
@@ -200,9 +200,9 @@ if (WITH_TESTING)
   lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "lite_naive_model.tar.gz")
   if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
     lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz")
-    lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v2.tar.gz")
+    lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v2_relu.tar.gz")
     lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "resnet50.tar.gz")
-    lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "inception_v4.tar.gz")
+    lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "inception_v4_simple.tar.gz")
   endif()
   if(NOT LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
     lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "GoogleNet_inference.tar.gz")
diff --git a/paddle/fluid/lite/api/CMakeLists.txt b/paddle/fluid/lite/api/CMakeLists.txt
index ab04f33f161ebfc8ae92f53ae4d65f7f4ccf3bdb..044bb902df1555dd3e08ec9880b16a2d70c2d40e 100644
--- a/paddle/fluid/lite/api/CMakeLists.txt
+++ b/paddle/fluid/lite/api/CMakeLists.txt
@@ -74,7 +74,7 @@ if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND WITH_TESTING)
   lite_cc_test(test_mobilenetv2_lite SRCS mobilenetv2_test.cc
     DEPS ${lite_model_test_DEPS}
     ARGS --model_dir=${LITE_MODEL_DIR}/mobilenet_v2 SERIAL)
-  add_dependencies(test_mobilenetv2_lite extern_lite_download_mobilenet_v2_tar_gz)
+  add_dependencies(test_mobilenetv2_lite extern_lite_download_mobilenet_v2_relu_tar_gz)
 
   lite_cc_test(test_resnet50_lite SRCS resnet50_test.cc
     DEPS ${lite_model_test_DEPS}
@@ -84,7 +84,7 @@ if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND WITH_TESTING)
   lite_cc_test(test_inceptionv4_lite SRCS inceptionv4_test.cc
     DEPS ${lite_model_test_DEPS}
     ARGS --model_dir=${LITE_MODEL_DIR}/inception_v4 SERIAL)
-  add_dependencies(test_inceptionv4_lite extern_lite_download_inception_v4_tar_gz)
+  add_dependencies(test_inceptionv4_lite extern_lite_download_inception_v4_simple_tar_gz)
 endif()
 
 # These tests needs CLI arguments, and is not supported in ARM CI.
diff --git a/paddle/fluid/lite/api/inceptionv4_test.cc b/paddle/fluid/lite/api/inceptionv4_test.cc
index c955a6179b3f3a43393aae6ade79a756cb6eef9e..45fb79851f8356d460777428d6d91ffbee6f2976 100644
--- a/paddle/fluid/lite/api/inceptionv4_test.cc
+++ b/paddle/fluid/lite/api/inceptionv4_test.cc
@@ -39,7 +39,8 @@ TEST(InceptionV4, test) {
   auto* input_tensor = predictor.GetInput(0);
   input_tensor->Resize(DDim(std::vector<int64_t>({1, 3, 224, 224})));
   auto* data = input_tensor->mutable_data<float>();
-  for (int i = 0; i < input_tensor->dims().production(); i++) {
+  auto item_size = input_tensor->dims().production();
+  for (int i = 0; i < item_size; i++) {
     data[i] = 1;
   }
 
@@ -58,16 +59,30 @@ TEST(InceptionV4, test) {
             << ", spend " << (GetCurrentUS() - start) / FLAGS_repeats / 1000.0
             << " ms in average.";
 
+  // std::vector<float> results({0.00078033, 0.00083865, 0.00060029, 0.00057083,
+  //                             0.00070094, 0.00080584, 0.00044525, 0.00074907,
+  //                             0.00059774, 0.00063654});
+  //
+  std::vector<std::vector<float>> results;
+  // i = 1
+  results.emplace_back(std::vector<float>(
+      {0.0011684548, 0.0010390386, 0.0011301535, 0.0010133048,
+       0.0010259597, 0.0010982729, 0.00093195855, 0.0009141837,
+       0.00096620916, 0.00089982944, 0.0010064574, 0.0010474789,
+       0.0009782845, 0.0009230255, 0.0010548076, 0.0010974824,
+       0.0010612885, 0.00089107914, 0.0010112736, 0.00097655767}));
   auto* out = predictor.GetOutput(0);
-  std::vector<float> results({0.00078033, 0.00083865, 0.00060029, 0.00057083,
-                              0.00070094, 0.00080584, 0.00044525, 0.00074907,
-                              0.00059774, 0.00063654});
-  for (int i = 0; i < results.size(); ++i) {
-    EXPECT_NEAR(out->data<float>()[i], results[i], 1e-5);
-  }
   ASSERT_EQ(out->dims().size(), 2);
   ASSERT_EQ(out->dims()[0], 1);
   ASSERT_EQ(out->dims()[1], 1000);
+
+  int step = 50;
+  for (int i = 0; i < results.size(); ++i) {
+    for (int j = 0; j < results[i].size(); ++j) {
+      EXPECT_NEAR(out->data<float>()[j * step + (out->dims()[1] * i)],
+                  results[i][j], 1e-6);
+    }
+  }
 }
 
 #endif
diff --git a/paddle/fluid/lite/api/mobilenetv1_test.cc b/paddle/fluid/lite/api/mobilenetv1_test.cc
index 33a4e8daaedfdf57d490f51670406c0e01c38eeb..38863ff6a2ccd80da926aa7e2c6a6fcb97f2473e 100644
--- a/paddle/fluid/lite/api/mobilenetv1_test.cc
+++ b/paddle/fluid/lite/api/mobilenetv1_test.cc
@@ -36,7 +36,8 @@ void TestModel(const std::vector<Place>& valid_places,
   auto* input_tensor = predictor.GetInput(0);
   input_tensor->Resize(DDim(std::vector<int64_t>({1, 3, 224, 224})));
   auto* data = input_tensor->mutable_data<float>();
-  for (int i = 0; i < input_tensor->dims().production(); i++) {
+  auto item_size = input_tensor->dims().production();
+  for (int i = 0; i < item_size; i++) {
     data[i] = 1;
   }
 
@@ -55,17 +56,26 @@ void TestModel(const std::vector<Place>& valid_places,
             << ", spend " << (GetCurrentUS() - start) / FLAGS_repeats / 1000.0
             << " ms in average.";
 
+  std::vector<std::vector<float>> results;
+  // i = 1
+  results.emplace_back(std::vector<float>(
+      {0.00019130898, 9.467885e-05, 0.00015971427, 0.0003650665,
+       0.00026431272, 0.00060884043, 0.0002107942, 0.0015819625,
+       0.0010323516, 0.00010079765, 0.00011006987, 0.0017364529,
+       0.0048292773, 0.0013995157, 0.0018453331, 0.0002428986,
+       0.00020211363, 0.00013668182, 0.0005855956, 0.00025901722}));
   auto* out = predictor.GetOutput(0);
-  std::vector<float> results({1.91308980e-04, 5.92055148e-04, 1.12303176e-04,
-                              6.27335685e-05, 1.27507330e-04, 1.32147351e-03,
-                              3.13812525e-05, 6.52209565e-05, 4.78087313e-05,
-                              2.58822285e-04});
-  for (int i = 0; i < results.size(); ++i) {
-    EXPECT_NEAR(out->data<float>()[i], results[i], 1e-6);
-  }
   ASSERT_EQ(out->dims().size(), 2);
   ASSERT_EQ(out->dims()[0], 1);
   ASSERT_EQ(out->dims()[1], 1000);
+
+  int step = 50;
+  for (int i = 0; i < results.size(); ++i) {
+    for (int j = 0; j < results[i].size(); ++j) {
+      EXPECT_NEAR(out->data<float>()[j * step + (out->dims()[1] * i)],
+                  results[i][j], 1e-6);
+    }
+  }
 }
 
 TEST(MobileNetV1, test_arm) {
diff --git a/paddle/fluid/lite/api/mobilenetv2_test.cc b/paddle/fluid/lite/api/mobilenetv2_test.cc
index cdcef149eba3bdcfb08550cde5817520f7f52ed4..deb0a244b72dd3e1723024b9e069ed8ec28b674b 100644
--- a/paddle/fluid/lite/api/mobilenetv2_test.cc
+++ b/paddle/fluid/lite/api/mobilenetv2_test.cc
@@ -39,7 +39,8 @@ TEST(MobileNetV2, test) {
   auto* input_tensor = predictor.GetInput(0);
   input_tensor->Resize(DDim(std::vector<int64_t>({1, 3, 224, 224})));
   auto* data = input_tensor->mutable_data<float>();
-  for (int i = 0; i < input_tensor->dims().production(); i++) {
+  auto item_size = input_tensor->dims().production();
+  for (int i = 0; i < item_size; i++) {
     data[i] = 1;
   }
 
@@ -58,16 +59,26 @@ TEST(MobileNetV2, test) {
             << ", spend " << (GetCurrentUS() - start) / FLAGS_repeats / 1000.0
             << " ms in average.";
 
+  std::vector<std::vector<float>> results;
+  // i = 1
+  results.emplace_back(std::vector<float>(
+      {0.00017082224, 5.699624e-05, 0.000260885, 0.00016412718,
+       0.00034818667, 0.00015230637, 0.00032959113, 0.0014772735,
+       0.0009059976, 9.5378724e-05, 5.386537e-05, 0.0006427285,
+       0.0070957416, 0.0016094646, 0.0018807327, 0.00010506048,
+       6.823785e-05, 0.00012269315, 0.0007806194, 0.00022354358}));
   auto* out = predictor.GetOutput(0);
-  std::vector<float> results({0.00097802, 0.00099822, 0.00103093, 0.00100121,
-                              0.00098268, 0.00104065, 0.00099962, 0.00095181,
-                              0.00099694, 0.00099406});
-  for (int i = 0; i < results.size(); ++i) {
-    EXPECT_NEAR(out->data<float>()[i], results[i], 1e-5);
-  }
   ASSERT_EQ(out->dims().size(), 2);
   ASSERT_EQ(out->dims()[0], 1);
   ASSERT_EQ(out->dims()[1], 1000);
+
+  int step = 50;
+  for (int i = 0; i < results.size(); ++i) {
+    for (int j = 0; j < results[i].size(); ++j) {
+      EXPECT_NEAR(out->data<float>()[j * step + (out->dims()[1] * i)],
+                  results[i][j], 1e-6);
+    }
+  }
 }
 
 #endif
diff --git a/paddle/fluid/lite/api/resnet50_test.cc b/paddle/fluid/lite/api/resnet50_test.cc
index 55247ae22a2ebc5a45e79f0fbf330f40be4ad4d7..a20e5ca3d5be3e713fa52d70aba90409ad3a817b 100644
--- a/paddle/fluid/lite/api/resnet50_test.cc
+++ b/paddle/fluid/lite/api/resnet50_test.cc
@@ -39,7 +39,8 @@ TEST(ResNet50, test) {
   auto* input_tensor = predictor.GetInput(0);
   input_tensor->Resize(DDim(std::vector<int64_t>({1, 3, 224, 224})));
   auto* data = input_tensor->mutable_data<float>();
-  for (int i = 0; i < input_tensor->dims().production(); i++) {
+  auto item_size = input_tensor->dims().production();
+  for (int i = 0; i < item_size; i++) {
     data[i] = 1;
   }
 
@@ -58,17 +59,26 @@ TEST(ResNet50, test) {
             << ", spend " << (GetCurrentUS() - start) / FLAGS_repeats / 1000.0
             << " ms in average.";
 
+  std::vector<std::vector<float>> results;
+  // i = 1
+  results.emplace_back(std::vector<float>(
+      {0.00024139918, 0.00020566184, 0.00022418296, 0.00041731037,
+       0.0005366107, 0.00016948722, 0.00028638865, 0.0009257241,
+       0.00072681636, 8.531815e-05, 0.0002129998, 0.0021168243,
+       0.006387163, 0.0037145028, 0.0012812682, 0.00045948103,
+       0.00013535398, 0.0002483765, 0.00076759676, 0.0002773295}));
   auto* out = predictor.GetOutput(0);
-  std::vector<float> results({2.41399175e-04, 4.13724629e-04, 2.64324830e-04,
-                              9.68795503e-05, 2.01968738e-04, 8.14945495e-04,
-                              7.45922662e-05, 1.76479152e-04, 7.47223166e-05,
-                              6.06825110e-04});
-  for (int i = 0; i < results.size(); ++i) {
-    EXPECT_NEAR(out->data<float>()[i], results[i], 1e-5);
-  }
   ASSERT_EQ(out->dims().size(), 2);
   ASSERT_EQ(out->dims()[0], 1);
   ASSERT_EQ(out->dims()[1], 1000);
+
+  int step = 50;
+  for (int i = 0; i < results.size(); ++i) {
+    for (int j = 0; j < results[i].size(); ++j) {
+      EXPECT_NEAR(out->data<float>()[j * step + (out->dims()[1] * i)],
+                  results[i][j], 1e-6);
+    }
+  }
 }
 
 #endif
diff --git a/paddle/fluid/lite/tools/build.sh b/paddle/fluid/lite/tools/build.sh
index 7ca891686cef36c439655e7a01116126a8c567b9..7a2ad12555c7415aa146d28ea1bc2992bd0ff96f 100755
--- a/paddle/fluid/lite/tools/build.sh
+++ b/paddle/fluid/lite/tools/build.sh
@@ -512,7 +512,7 @@ function main {
             shift
             ;;
         build_test_arm_model_mobilenetv2)
-            build_test_arm_subtask_model test_mobilenetv2_lite mobilenet_v2
+            build_test_arm_subtask_model test_mobilenetv2_lite mobilenet_v2_relu
             shift
             ;;
         build_test_arm_model_resnet50)
@@ -520,7 +520,7 @@ function main {
             shift
             ;;
        build_test_arm_model_inceptionv4)
-            build_test_arm_subtask_model test_inceptionv4_lite inception_v4
+            build_test_arm_subtask_model test_inceptionv4_lite inception_v4_simple
             shift
             ;;
         check_style)