diff --git a/lite/CMakeLists.txt b/lite/CMakeLists.txt
index ce83c413169671b0e480bea85d74976ec76597d4..d69f6d6d9e77668c5789baff3f2f1051afe5df46 100755
--- a/lite/CMakeLists.txt
+++ b/lite/CMakeLists.txt
@@ -40,7 +40,8 @@ endif()
 if (WITH_TESTING)
     lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "lite_naive_model.tar.gz")
     if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK)
-        lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz")
+        lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1.tar.gz")
+        lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v1_int16.tar.gz")
         lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "mobilenet_v2_relu.tar.gz")
         lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "resnet50.tar.gz")
         lite_download_and_uncompress(${LITE_MODEL_DIR} ${LITE_URL} "inception_v4_simple.tar.gz")
diff --git a/lite/api/CMakeLists.txt b/lite/api/CMakeLists.txt
index b3c243b63c0ab2994f9bfe44ef8a6b3a25e715d3..3e8fd5fd637c02842e068801278fab94ac7d5d4f 100644
--- a/lite/api/CMakeLists.txt
+++ b/lite/api/CMakeLists.txt
@@ -291,6 +291,14 @@ if(LITE_WITH_LIGHT_WEIGHT_FRAMEWORK AND WITH_TESTING)
        set(LINK_FLAGS "-Wl,--version-script ${PADDLE_SOURCE_DIR}/lite/core/lite.map")
        set_target_properties(test_mobilenetv1 PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
     endif()
+
+    lite_cc_test(test_mobilenetv1_int16 SRCS mobilenetv1_int16_test.cc
+       DEPS ${lite_model_test_DEPS} ${light_lib_DEPS}
+       CL_DEPS ${opencl_kernels}
+       NPU_DEPS ${npu_kernels} ${npu_bridges}
+       ARGS --cl_path=${CMAKE_SOURCE_DIR}/lite/backends/opencl
+            --model_dir=${LITE_MODEL_DIR}/mobilenet_v1_int16 SERIAL)
+    add_dependencies(test_mobilenetv1_int16 extern_lite_download_mobilenet_v1_int16_tar_gz)
 
     lite_cc_test(test_mobilenetv2 SRCS mobilenetv2_test.cc
        DEPS ${lite_model_test_DEPS}
diff --git a/lite/api/mobilenetv1_int16_test.cc b/lite/api/mobilenetv1_int16_test.cc
new file mode 100644
index 0000000000000000000000000000000000000000..266052044ef6543a0f00ad50bc9b89b70656bbe6
--- /dev/null
+++ b/lite/api/mobilenetv1_int16_test.cc
@@ -0,0 +1,83 @@
+// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <gflags/gflags.h>
+#include <gtest/gtest.h>
+#include <vector>
+#include "lite/api/cxx_api.h"
+#include "lite/api/light_api.h"
+#include "lite/api/paddle_use_kernels.h"
+#include "lite/api/paddle_use_ops.h"
+#include "lite/api/paddle_use_passes.h"
+#include "lite/api/test_helper.h"
+#include "lite/core/op_registry.h"
+
+DEFINE_string(optimized_model,
+              "/data/local/tmp/int16_model",
+              "optimized_model");
+DEFINE_int32(N, 1, "input_batch");
+DEFINE_int32(C, 3, "input_channel");
+DEFINE_int32(H, 224, "input_height");
+DEFINE_int32(W, 224, "input_width");
+
+namespace paddle {
+namespace lite {
+
+void TestModel(const std::vector<Place>& valid_places,
+               const std::string& model_dir) {
+  DeviceInfo::Init();
+  DeviceInfo::Global().SetRunMode(lite_api::LITE_POWER_NO_BIND, FLAGS_threads);
+
+  LOG(INFO) << "Optimize model.";
+  lite::Predictor cxx_predictor;
+  cxx_predictor.Build(model_dir, "", "", valid_places);
+  cxx_predictor.SaveModel(FLAGS_optimized_model,
+                          paddle::lite_api::LiteModelType::kNaiveBuffer);
+
+  LOG(INFO) << "Load optimized model.";
+  lite::LightPredictor predictor(FLAGS_optimized_model + ".nb", false);
+
+  auto* input_tensor = predictor.GetInput(0);
+  input_tensor->Resize(DDim(
+      std::vector<int64_t>({FLAGS_N, FLAGS_C, FLAGS_H, FLAGS_W})));
+  auto* data = input_tensor->mutable_data<float>();
+  auto item_size = FLAGS_N * FLAGS_C * FLAGS_H * FLAGS_W;
+  for (int i = 0; i < item_size; i++) {
+    data[i] = 1.;
+  }
+
+  LOG(INFO) << "Predictor run.";
+  predictor.Run();
+
+  auto* out = predictor.GetOutput(0);
+  const auto* pdata = out->data<float>();
+
+  std::vector<float> ref = {
+      0.000191383, 0.000592063, 0.000112282, 6.27426e-05, 0.000127522};
+  double eps = 1e-5;
+  for (int i = 0; i < ref.size(); ++i) {
+    EXPECT_NEAR(pdata[i], ref[i], eps);
+  }
+}
+
+TEST(MobileNetV1_Int16, test_arm) {
+  std::vector<Place> valid_places({
+      Place{TARGET(kARM), PRECISION(kFloat)},
+  });
+  std::string model_dir = FLAGS_model_dir;
+  TestModel(valid_places, model_dir);
+}
+
+}  // namespace lite
+}  // namespace paddle
diff --git a/lite/tools/ci_build.sh b/lite/tools/ci_build.sh
index 11c1a9edc6d9d770748a39216705df65590f56a3..166137bf02b034219f2d6afc6c486ed553cdfe7a 100755
--- a/lite/tools/ci_build.sh
+++ b/lite/tools/ci_build.sh
@@ -466,7 +466,7 @@ function test_arm_android {
     echo "test name: ${test_name}"
     adb_work_dir="/data/local/tmp"
 
-    skip_list=("test_model_parser" "test_mobilenetv1" "test_mobilenetv2" "test_resnet50" "test_inceptionv4" "test_light_api" "test_apis" "test_paddle_api" "test_cxx_api" "test_gen_code" "test_mobilenetv1_int8" "test_subgraph_pass" "test_grid_sampler_image_opencl" "test_lrn_image_opencl" "test_pad2d_image_opencl" "test_transformer_with_mask_fp32_arm")
+    skip_list=("test_model_parser" "test_mobilenetv1" "test_mobilenetv2" "test_resnet50" "test_inceptionv4" "test_light_api" "test_apis" "test_paddle_api" "test_cxx_api" "test_gen_code" "test_mobilenetv1_int8" "test_subgraph_pass" "test_grid_sampler_image_opencl" "test_lrn_image_opencl" "test_pad2d_image_opencl" "test_transformer_with_mask_fp32_arm" "test_mobilenetv1_int16")
     for skip_name in ${skip_list[@]} ; do
         [[ $skip_name =~ (^|[[:space:]])$test_name($|[[:space:]]) ]] && echo "skip $test_name" && return
     done
@@ -1251,6 +1251,7 @@ function main {
             build_test_arm_subtask_android
             build_test_arm_subtask_model test_mobilenetv1 mobilenet_v1
             build_test_arm_subtask_model test_mobilenetv1_int8 MobileNetV1_quant
+            build_test_arm_subtask_model test_mobilenetv1_int16 mobilenet_v1_int16
             build_test_arm_subtask_model test_mobilenetv2 mobilenet_v2_relu
             build_test_arm_subtask_model test_resnet50 resnet50
             build_test_arm_subtask_model test_inceptionv4 inception_v4_simple