未验证 提交 cf2d6ef8 编写于 作者: O openharmony_ci 提交者: Gitee

!8917 MindSpore新增离线模型用例

Merge pull request !8917 from zhuguodong/master
......@@ -20,20 +20,24 @@
"push": [
"ActsMindSporeTest->/data/local/tmp/ActsMindSporeTest",
"resource/ai/mindspore/aiy_vision_classifier_plants_V1_3/aiy_vision_classifier_plants_V1_3.ms -> /data/test",
"resource/ai/mindspore/aiy_vision_classifier_plants_V1_3/aiy_vision_classifier_plants_V1_3_offline_model.ms -> /data/test",
"resource/ai/mindspore/aiy_vision_classifier_plants_V1_3/aiy_vision_classifier_plants_V1_3.input -> /data/test",
"resource/ai/mindspore/aiy_vision_classifier_plants_V1_3/aiy_vision_classifier_plants_V1_30.output -> /data/test",
"resource/ai/mindspore/ml_face_is_face/ml_face_isface.ms -> /data/test",
"resource/ai/mindspore/ml_face_is_face/ml_face_isface_r13.ms -> /data/test",
"resource/ai/mindspore/ml_face_is_face/ml_face_isface_offline_model.ms -> /data/test",
"resource/ai/mindspore/ml_face_is_face/ml_face_isface.input -> /data/test",
"resource/ai/mindspore/ml_face_is_face/ml_face_isface_0.input -> /data/test",
"resource/ai/mindspore/ml_face_is_face/ml_face_isface0.output -> /data/test",
"resource/ai/mindspore/ml_Hand_deploy/ml_Hand_deploy.ms -> /data/test",
"resource/ai/mindspore/ml_Hand_deploy/ml_Hand_deploy_0.input -> /data/test",
"resource/ai/mindspore/ml_Hand_deploy/ml_Hand_deploy0.output -> /data/test",
"resource/ai/mindspore/ml_ocr_cn/ml_ocr_cn_0.input -> /data/test",
"resource/ai/mindspore/ml_ocr_cn/ml_ocr_cn.ms -> /data/test",
"resource/ai/mindspore/ml_ocr_cn/ml_headpose_pb2tflite_offline_model.ms -> /data/test",
"resource/ai/mindspore/ml_ocr_cn/ml_ocr_cn_0.input -> /data/test",
"resource/ai/mindspore/ml_ocr_cn/ml_ocr_cn0.output -> /data/test",
"resource/ai/mindspore/ml_headpose_pb2tflite/ml_headpose_pb2tflite.ms -> /data/test",
"resource/ai/mindspore/ml_headpose_pb2tflite/ml_ocr_cn_offline_model.ms -> /data/test",
"resource/ai/mindspore/ml_headpose_pb2tflite/ml_headpose_pb2tflite_0.input -> /data/test",
"resource/ai/mindspore/ml_headpose_pb2tflite/ml_headpose_pb2tflite_1.input -> /data/test",
"resource/ai/mindspore/ml_headpose_pb2tflite/ml_headpose_pb2tflite_2.input -> /data/test",
......
......@@ -60,6 +60,17 @@ void AddContextDeviceCPU(OH_AI_ContextHandle context) {
OH_AI_ContextAddDeviceInfo(context, cpu_device_info);
}
// Probe whether any NNRT (Neural Network Runtime) device is present.
// Returns true when the runtime hands back a device-description list,
// false when the list cannot be obtained.
bool IsNNRTAvailable() {
    size_t device_num = 0;
    auto device_descs = OH_AI_GetAllNNRTDeviceDescs(&device_num);
    const bool available = (device_descs != nullptr);
    if (available) {
        // The description array is caller-owned; release it before returning.
        OH_AI_DestroyAllNNRTDeviceDescs(&device_descs);
    }
    return available;
}
// add nnrt device info
void AddContextDeviceNNRT(OH_AI_ContextHandle context) {
size_t num = 0;
......@@ -95,9 +106,11 @@ void AddContextDeviceNNRT(OH_AI_ContextHandle context) {
void AddContextDeviceNNRTByType(OH_AI_ContextHandle context) {
size_t num = 0;
auto desc = OH_AI_GetAllNNRTDeviceDescs(&num);
// 返回值desc是NNRTDeviceDesc结构体数组首地址
if (desc == nullptr) {
return;
}
// 目前nnrt仅提供了rk3568的驱动,只有cpu一个设备,故不用判断
std::cout << "found " << num << " nnrt devices" << std::endl;
auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
......@@ -120,9 +133,11 @@ void AddContextDeviceNNRTByType(OH_AI_ContextHandle context) {
void AddContextDeviceNNRTByName(OH_AI_ContextHandle context) {
size_t num = 0;
auto desc = OH_AI_GetAllNNRTDeviceDescs(&num);
// 返回值desc是NNRTDeviceDesc结构体数组首地址
if (desc == nullptr) {
return;
}
// 目前nnrt仅提供了rk3568的驱动,只有cpu一个设备,故不用判断
std::cout << "found " << num << " nnrt devices" << std::endl;
auto id = OH_AI_GetDeviceIdFromNNRTDeviceDesc(desc);
auto name = OH_AI_GetNameFromNNRTDeviceDesc(desc);
......@@ -223,6 +238,7 @@ void ModelPredict(OH_AI_ModelHandle model, OH_AI_ContextHandle context, string m
printf("==========Resizes return code:%d\n", resize_ret);
ASSERT_EQ(resize_ret, OH_AI_STATUS_SUCCESS);
}
FillInputsData(inputs, model_name, is_transpose);
OH_AI_TensorHandleArray outputs;
OH_AI_Status predict_ret = OH_AI_STATUS_SUCCESS;
......@@ -235,9 +251,13 @@ void ModelPredict(OH_AI_ModelHandle model, OH_AI_ContextHandle context, string m
printf("==========Model Predict==========\n");
predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
}
printf("==========Model Predict End==========\n");
ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
printf("=========CompareResult===========\n");
CompareResult(outputs, model_name);
printf("=========OH_AI_ModelDestroy===========\n");
OH_AI_ModelDestroy(&model);
printf("=========OH_AI_ModelDestroy End===========\n");
}
// predict on cpu
......@@ -1510,6 +1530,86 @@ HWTEST(MSLiteTest, OHOS_Input_0001, Function | MediumTest | Level1) {
ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
}
// Normal scenario: model with multiple inputs (ml_headpose_pb2tflite) on CPU.
HWTEST(MSLiteTest, OHOS_Input_0002, Function | MediumTest | Level1) {
    printf("==========Init Context==========\n");
    OH_AI_ContextHandle ctx = OH_AI_ContextCreate();
    ASSERT_NE(ctx, nullptr);
    AddContextDeviceCPU(ctx);
    printf("==========Create model==========\n");
    OH_AI_ModelHandle handle = OH_AI_ModelCreate();
    ASSERT_NE(handle, nullptr);
    printf("==========Build model==========\n");
    OH_AI_Status build_status = OH_AI_ModelBuildFromFile(handle, "/data/test/ml_headpose_pb2tflite.ms",
                                                         OH_AI_MODELTYPE_MINDIR, ctx);
    printf("==========build model return code:%d\n", build_status);
    ASSERT_EQ(build_status, OH_AI_STATUS_SUCCESS);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray in_tensors = OH_AI_ModelGetInputs(handle);
    ASSERT_NE(in_tensors.handle_list, nullptr);
    // Helper fills every input tensor from the model's *.input resource files.
    FillInputsData(in_tensors, "ml_headpose_pb2tflite", false);
    printf("==========Model Predict==========\n");
    OH_AI_TensorHandleArray out_tensors;
    OH_AI_Status predict_status = OH_AI_ModelPredict(handle, in_tensors, &out_tensors, nullptr, nullptr);
    ASSERT_EQ(predict_status, OH_AI_STATUS_SUCCESS);
    // Looser 2% tolerances than the default comparison.
    CompareResult(out_tensors, "ml_headpose_pb2tflite", 0.02, 0.02);
    OH_AI_ModelDestroy(&handle);
}
// Normal scenario: model whose input tensor is uint8 (aiy_vision_classifier_plants_V1_3).
HWTEST(MSLiteTest, OHOS_Input_0003, Function | MediumTest | Level1) {
    printf("==========ReadFile==========\n");
    size_t size1 = 0;
    const char *imagePath = "/data/test/aiy_vision_classifier_plants_V1_3_0.input";
    // Heap buffer released with delete[] below.
    // NOTE(review): if any ASSERT below fails, gtest returns early and imageBuf leaks;
    // tolerable in a test, but a scoped guard would be safer.
    char *imageBuf = ReadFile(imagePath, &size1);
    ASSERT_NE(imageBuf, nullptr);
    printf("==========Init Context==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========Create model==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========Build model==========\n");
    OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/aiy_vision_classifier_plants_V1_3.ms", OH_AI_MODELTYPE_MINDIR,
                                                context);
    printf("==========build model return code:%d\n", ret);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========GetInputs==========\n");
    OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
    ASSERT_NE(inputs.handle_list, nullptr);
    for (size_t i = 0; i < inputs.handle_num; ++i) {
        OH_AI_TensorHandle tensor = inputs.handle_list[i];
        int64_t element_num = OH_AI_TensorGetElementNum(tensor);
        printf("Tensor name: %s, elements num: %" PRId64 ".\n", OH_AI_TensorGetName(tensor), element_num);
        void *input_data = OH_AI_TensorGetMutableData(tensor);
        ASSERT_NE(input_data, nullptr);
        // Assumes the input file size equals the tensor's data size — TODO confirm with
        // OH_AI_TensorGetDataSize(); a mismatch would over- or under-fill the tensor.
        memcpy(input_data, imageBuf, size1);
    }
    printf("==========Model Predict==========\n");
    OH_AI_TensorHandleArray outputs;
    ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========GetOutput==========\n");
    for (size_t i = 0; i < outputs.handle_num; ++i) {
        OH_AI_TensorHandle tensor = outputs.handle_list[i];
        int64_t element_num = OH_AI_TensorGetElementNum(tensor);
        printf("Tensor name: %s, elements num: %" PRId64 ".\n", OH_AI_TensorGetName(tensor), element_num);
        uint8_t *output_data = reinterpret_cast<uint8_t *>(OH_AI_TensorGetMutableData(tensor));
        printf("output data is:");
        // Print at most the leading values; index is int64_t to match element_num's
        // width (the original mixed int with int64_t).
        for (int64_t j = 0; j < element_num && j <= 20; ++j) {
            printf("%d ", output_data[j]);
        }
        printf("\n");
        printf("==========compFp32WithTData==========\n");
        string expectedDataFile = "/data/test/aiy_vision_classifier_plants_V1_3" + std::to_string(i) + ".output";
        bool result = compUint8WithTData(output_data, expectedDataFile, 0.01, 0.01, false);
        EXPECT_EQ(result, true);
    }
    delete[] imageBuf;
    OH_AI_ModelDestroy(&model);
}
// 正常场景:量化模型
HWTEST(MSLiteTest, OHOS_Input_0004, Function | MediumTest | Level1) {
......@@ -1530,6 +1630,26 @@ HWTEST(MSLiteTest, OHOS_Multiple_0001, Function | MediumTest | Level1) {
}
}
// Abnormal scenario: one Model instance, OH_AI_ModelBuildFromFile called twice.
// Both calls are asserted to return success (per the original expectations).
HWTEST(MSLiteTest, OHOS_Multiple_0002, Function | MediumTest | Level1) {
    printf("==========Init Context==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceCPU(context);
    printf("==========Create model==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========Build model==========\n");
    // Use OH_AI_Status (not int) for API return codes, consistent with the sibling tests.
    OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
    printf("==========build model return code:%d\n", ret);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========Build model==========\n");
    OH_AI_Status ret2 = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface.ms", OH_AI_MODELTYPE_MINDIR, context);
    printf("==========build model return code:%d\n", ret2);
    ASSERT_EQ(ret2, OH_AI_STATUS_SUCCESS);
    OH_AI_ModelDestroy(&model);
}
// 正常场景:Model创建一次,Build一次,Predict多次
HWTEST(MSLiteTest, OHOS_Multiple_0003, Function | MediumTest | Level1) {
printf("==========Init Context==========\n");
......@@ -1613,7 +1733,207 @@ HWTEST(MSLiteTest, OHOS_Compatible_0001, Function | MediumTest | Level1) {
OH_AI_ModelDestroy(&model);
}
// delegate异构:选取第一个NNRT设备
// Normal scenario: offline model on the NNRT backend, single-input model.
HWTEST(MSLiteTest, OHOS_OfflineModel_0001, Function | MediumTest | Level1) {
// Skip (not fail) on targets without an NNRT device.
if (!IsNNRTAvailable()) {
printf("NNRt is not available, skip this test");
return;
}
printf("==========Init Context==========\n");
OH_AI_ContextHandle context = OH_AI_ContextCreate();
ASSERT_NE(context, nullptr);
AddContextDeviceNNRT(context);
printf("==========Create model==========\n");
OH_AI_ModelHandle model = OH_AI_ModelCreate();
ASSERT_NE(model, nullptr);
printf("==========Build model==========\n");
// Build from the pre-converted (offline) model file.
OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_offline_model.ms",
OH_AI_MODELTYPE_MINDIR, context);
printf("==========build model return code:%d\n", ret);
ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
printf("==========GetInputs==========\n");
OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
ASSERT_NE(inputs.handle_list, nullptr);
// Fill inputs from the ml_face_isface resource files (transpose enabled).
FillInputsData(inputs, "ml_face_isface", true);
printf("==========Model Predict==========\n");
OH_AI_TensorHandleArray outputs;
ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
// Compare against the recorded *.output reference data.
CompareResult(outputs, "ml_face_isface");
OH_AI_ModelDestroy(&model);
}
// Normal scenario: offline model on the NNRT backend, multi-input model.
HWTEST(MSLiteTest, OHOS_OfflineModel_0002, Function | MediumTest | Level1) {
// Skip (not fail) on targets without an NNRT device.
if (!IsNNRTAvailable()) {
printf("NNRt is not available, skip this test");
return;
}
printf("==========Init Context==========\n");
OH_AI_ContextHandle context = OH_AI_ContextCreate();
ASSERT_NE(context, nullptr);
AddContextDeviceNNRT(context);
printf("==========Create model==========\n");
OH_AI_ModelHandle model = OH_AI_ModelCreate();
ASSERT_NE(model, nullptr);
printf("==========Build model==========\n");
// Build from the pre-converted (offline) multi-input model file.
OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_headpose_pb2tflite_offline_model.ms",
OH_AI_MODELTYPE_MINDIR, context);
printf("==========build model return code:%d\n", ret);
ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
printf("==========GetInputs==========\n");
OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
ASSERT_NE(inputs.handle_list, nullptr);
FillInputsData(inputs, "ml_headpose_pb2tflite", false);
printf("==========Model Predict==========\n");
OH_AI_TensorHandleArray outputs;
ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
// Looser 2% tolerances than the default comparison.
CompareResult(outputs, "ml_headpose_pb2tflite", 0.02, 0.02);
OH_AI_ModelDestroy(&model);
}
// Normal scenario: offline model on the NNRT backend — create Model once,
// Build once, then call Predict repeatedly to check for stability across runs.
HWTEST(MSLiteTest, OHOS_OfflineModel_0004, Function | MediumTest | Level1) {
// Skip (not fail) on targets without an NNRT device.
if (!IsNNRTAvailable()) {
printf("NNRt is not available, skip this test");
return;
}
printf("==========Init Context==========\n");
OH_AI_ContextHandle context = OH_AI_ContextCreate();
ASSERT_NE(context, nullptr);
AddContextDeviceNNRT(context);
printf("==========Create model==========\n");
OH_AI_ModelHandle model = OH_AI_ModelCreate();
ASSERT_NE(model, nullptr);
printf("==========Build model==========\n");
OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_offline_model.ms", OH_AI_MODELTYPE_MINDIR, context);
printf("==========build model return code:%d\n", ret);
ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
printf("==========GetInputs==========\n");
OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
ASSERT_NE(inputs.handle_list, nullptr);
FillInputsData(inputs, "ml_face_isface", true);
OH_AI_TensorHandleArray outputs;
// 50 repeated predictions with the same inputs; every run must succeed.
for (size_t i = 0; i < 50; ++i) {
printf("==========Model Predict==========\n");
OH_AI_Status predict_ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
ASSERT_EQ(predict_ret, OH_AI_STATUS_SUCCESS);
}
// Only the outputs of the final iteration are compared to the reference data.
CompareResult(outputs, "ml_face_isface");
OH_AI_ModelDestroy(&model);
}
// Normal scenario: offline model on the NNRT backend — create Model once,
// call OH_AI_ModelBuildFromFile twice. Both calls are asserted to succeed.
HWTEST(MSLiteTest, OHOS_OfflineModel_0005, Function | MediumTest | Level1) {
    // Skip (not fail) on targets without an NNRT device.
    if (!IsNNRTAvailable()) {
        printf("NNRt is not available, skip this test");
        return;
    }
    printf("==========Init Context==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceNNRT(context);
    printf("==========Create model==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========Build model==========\n");
    // Use OH_AI_Status (not int) for API return codes, consistent with the sibling tests.
    OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_offline_model.ms", OH_AI_MODELTYPE_MINDIR, context);
    printf("==========build model return code:%d\n", ret);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========Build model==========\n");
    OH_AI_Status ret2 = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_offline_model.ms", OH_AI_MODELTYPE_MINDIR, context);
    printf("==========build model return code:%d\n", ret2);
    ASSERT_EQ(ret2, OH_AI_STATUS_SUCCESS);
    OH_AI_ModelDestroy(&model);
}
// Abnormal scenario: offline model on the NNRT backend, ModelPredict with empty inputs.
// Predict is expected to fail with OH_AI_STATUS_LITE_ERROR, not crash.
HWTEST(MSLiteTest, OHOS_OfflineModel_0006, Function | MediumTest | Level1) {
    // Skip (not fail) on targets without an NNRT device.
    if (!IsNNRTAvailable()) {
        printf("NNRt is not available, skip this test");
        return;
    }
    printf("==========Init Context==========\n");
    OH_AI_ContextHandle context = OH_AI_ContextCreate();
    ASSERT_NE(context, nullptr);
    AddContextDeviceNNRT(context);
    printf("==========Create model==========\n");
    OH_AI_ModelHandle model = OH_AI_ModelCreate();
    ASSERT_NE(model, nullptr);
    printf("==========Build model==========\n");
    OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_face_isface_offline_model.ms", OH_AI_MODELTYPE_MINDIR, context);
    printf("==========build model return code:%d\n", ret);
    ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
    printf("==========Model Predict==========\n");
    // Value-initialize: the original left this uninitialized, so Predict received
    // garbage handle_num/handle_list (undefined behavior) instead of an empty array.
    OH_AI_TensorHandleArray inputs = {};
    OH_AI_TensorHandleArray outputs;
    ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
    printf("==========Model Predict return code:%d\n", ret);
    ASSERT_EQ(ret, OH_AI_STATUS_LITE_ERROR);
    OH_AI_ModelDestroy(&model);
}
// Abnormal scenario: a regular (non-offline) ms model on the NNRT backend,
// i.e. the model was NOT converted for NNRT beforehand.
HWTEST(MSLiteTest, OHOS_OfflineModel_0007, Function | MediumTest | Level1) {
// Skip (not fail) on targets without an NNRT device.
if (!IsNNRTAvailable()) {
printf("NNRt is not available, skip this test");
return;
}
printf("==========Init Context==========\n");
OH_AI_ContextHandle context = OH_AI_ContextCreate();
ASSERT_NE(context, nullptr);
AddContextDeviceNNRT(context);
AddContextDeviceCPU(context); // NNRT operator coverage is limited; add CPU for heterogeneous fallback
printf("==========Create model==========\n");
OH_AI_ModelHandle model = OH_AI_ModelCreate();
ASSERT_NE(model, nullptr);
// Shared helper builds, fills inputs, runs Predict, compares outputs and destroys the model.
ModelPredict(model, context, "ml_face_isface", {}, false, true, false);
}
// Normal scenario: coverage for the ml_ocr_cn offline model on the NNRT backend.
HWTEST(MSLiteTest, OHOS_OfflineModel_0008, Function | MediumTest | Level1) {
// Skip (not fail) on targets without an NNRT device.
if (!IsNNRTAvailable()) {
printf("NNRt is not available, skip this test");
return;
}
printf("==========Init Context==========\n");
OH_AI_ContextHandle context = OH_AI_ContextCreate();
ASSERT_NE(context, nullptr);
AddContextDeviceNNRT(context);
printf("==========Create model==========\n");
OH_AI_ModelHandle model = OH_AI_ModelCreate();
ASSERT_NE(model, nullptr);
printf("==========Build model==========\n");
// Build from the pre-converted (offline) model file.
OH_AI_Status ret = OH_AI_ModelBuildFromFile(model, "/data/test/ml_ocr_cn_offline_model.ms",
OH_AI_MODELTYPE_MINDIR, context);
printf("==========build model return code:%d\n", ret);
ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
printf("==========GetInputs==========\n");
OH_AI_TensorHandleArray inputs = OH_AI_ModelGetInputs(model);
ASSERT_NE(inputs.handle_list, nullptr);
FillInputsData(inputs, "ml_ocr_cn", false);
printf("==========Model Predict==========\n");
OH_AI_TensorHandleArray outputs;
ret = OH_AI_ModelPredict(model, inputs, &outputs, nullptr, nullptr);
ASSERT_EQ(ret, OH_AI_STATUS_SUCCESS);
// Compare against the recorded *.output reference data.
CompareResult(outputs, "ml_ocr_cn");
OH_AI_ModelDestroy(&model);
}
// 正常场景:delegate异构,使用低级接口创建nnrt device info,选取第一个NNRT设备
HWTEST(MSLiteTest, OHOS_NNRT_0001, Function | MediumTest | Level1) {
printf("==========Init Context==========\n");
OH_AI_ContextHandle context = OH_AI_ContextCreate();
......@@ -1640,7 +1960,7 @@ HWTEST(MSLiteTest, OHOS_NNRT_0001, Function | MediumTest | Level1) {
OH_AI_ModelDestroy(&model);
}
// delegate异构:根据类型确定NNRT设备
// 正常场景:delegate异构,使用高级接口创建nnrt device info,根据类型确定NNRT设备
HWTEST(MSLiteTest, OHOS_NNRT_0002, Function | MediumTest | Level1) {
printf("==========Init Context==========\n");
OH_AI_ContextHandle context = OH_AI_ContextCreate();
......@@ -1667,7 +1987,7 @@ HWTEST(MSLiteTest, OHOS_NNRT_0002, Function | MediumTest | Level1) {
OH_AI_ModelDestroy(&model);
}
// delegate异构:根据名称确定NNRT设备
// 正常场景:delegate异构,使用高级接口创建nnrt device info,根据名称确定NNRT设备
HWTEST(MSLiteTest, OHOS_NNRT_0003, Function | MediumTest | Level1) {
printf("==========Init Context==========\n");
OH_AI_ContextHandle context = OH_AI_ContextCreate();
......@@ -1694,7 +2014,7 @@ HWTEST(MSLiteTest, OHOS_NNRT_0003, Function | MediumTest | Level1) {
OH_AI_ModelDestroy(&model);
}
// delegate异构:多输入单输出
// 正常场景:delegate异构,使用高低级接口创建nnrt device info,多输入单输出
HWTEST(MSLiteTest, OHOS_NNRT_0004, Function | MediumTest | Level1) {
printf("==========Init Context==========\n");
OH_AI_ContextHandle context = OH_AI_ContextCreate();
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册