diff --git a/ai/neural_network_runtime/v1_0/common/mock_idevice.cpp b/ai/neural_network_runtime/v1_0/common/mock_idevice.cpp index 447f0ec247b084baad988724521c7ef988de931a..ff6c8fe0c1b19a81672aecf72c35b1e464e28f49 100644 --- a/ai/neural_network_runtime/v1_0/common/mock_idevice.cpp +++ b/ai/neural_network_runtime/v1_0/common/mock_idevice.cpp @@ -34,6 +34,11 @@ sptr<INnrtDevice> INnrtDevice::Get(const std::string &serviceName, bool isStub) return mockIDevice; } +MockIDevice::MockIDevice() +{ + m_bufferFd = 0; +} + MockIDevice::~MockIDevice() { for (auto ash : m_ashmems) { diff --git a/ai/neural_network_runtime/v1_0/common/mock_idevice.h b/ai/neural_network_runtime/v1_0/common/mock_idevice.h index d2e6530235d2c1750494ea153e39d27b2bc3953c..5c79682140b913cae675cb009aeaca9d7f13b629 100644 --- a/ai/neural_network_runtime/v1_0/common/mock_idevice.h +++ b/ai/neural_network_runtime/v1_0/common/mock_idevice.h @@ -89,7 +89,7 @@ public: static MockIDevice *GetInstance(); - MockIDevice() = default; + MockIDevice(); virtual ~MockIDevice(); private: diff --git a/ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp b/ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp index d26905772d1610dddb871432432e6c17b212611b..7d059674b97d241bbc21b453aabb89cd3eca3fcb 100644 --- a/ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp +++ b/ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp @@ -278,7 +278,7 @@ int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, LOGE("[NNRtTest] OH_NNExecutor_SetInputWithMemory failed! 
ret=%d\n", ret); return ret; } - memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length); + memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length); OHNNMemory[inputIndex] = inputMemory; inputIndex += 1; } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != diff --git a/ai/neural_network_runtime/v1_0/common/nnrt_utils.h b/ai/neural_network_runtime/v1_0/common/nnrt_utils.h index bed67c12945e7e4b364c5f751de2ba47655993b8..826fe48e56c8acd0d223c7fe00e551a8cb9b43bf 100644 --- a/ai/neural_network_runtime/v1_0/common/nnrt_utils.h +++ b/ai/neural_network_runtime/v1_0/common/nnrt_utils.h @@ -67,7 +67,7 @@ struct OHNNCompileParam { bool enableFp16 = false; }; -int BuildSingleOpGraph(OH_NNModel *modelptr, const OHNNGraphArgs &args); +int BuildSingleOpGraph(OH_NNModel *model, const OHNNGraphArgs &graphArgs); int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, OH_NN_Memory *OHNNMemory[], float* expect); diff --git a/ai/neural_network_runtime/v1_0/interface/src/CompileTest.cpp b/ai/neural_network_runtime/v1_0/interface/src/CompileTest.cpp index 2ef9177e23fa95d5472786bcec7c2fea415a5ea3..861685d3a8d0b530ad3a71d218b7035db8207772 100644 --- a/ai/neural_network_runtime/v1_0/interface/src/CompileTest.cpp +++ b/ai/neural_network_runtime/v1_0/interface/src/CompileTest.cpp @@ -68,7 +68,7 @@ public: } protected: - OHNNCompileParam compileParam; + OHNNCompileParam m_compileParam; AddModel addModel; OHNNGraphArgs graphArgs = addModel.graphArgs; }; @@ -835,8 +835,8 @@ HWTEST_F(CompileTest, SUB_AI_NNR_Func_North_Compilation_Combine_0100, Function | OH_NNCompilation *compilation2 = OH_NNCompilation_Construct(model2); ASSERT_NE(nullptr, compilation2); - std::thread th1(CompileModel, compilation1, compileParam); - std::thread th2(CompileModel, compilation2, compileParam); + std::thread th1(CompileModel, compilation1, m_compileParam); + std::thread 
th2(CompileModel, compilation2, m_compileParam); th1.join(); th2.join(); Free(model1, compilation1); diff --git a/ai/neural_network_runtime/v1_0/interface/src/ExecutorTest.cpp b/ai/neural_network_runtime/v1_0/interface/src/ExecutorTest.cpp index 146a497320e1670a63c2aa26af023ec54c7c9f7e..4ff81e23c7c4448f13c653f5d1ea5eb67d0aa5df 100644 --- a/ai/neural_network_runtime/v1_0/interface/src/ExecutorTest.cpp +++ b/ai/neural_network_runtime/v1_0/interface/src/ExecutorTest.cpp @@ -1135,7 +1135,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0400, Function | ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); OHNNMemory[inputIndex] = inputMemory; inputIndex += 1; } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != @@ -1205,7 +1205,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0500, Function | ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor)); // check result EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(outputMemory->data)), - (float*) addModel.expectValue)); + static_cast<float*>(addModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, 0, &outputMemory); ASSERT_EQ(outputMemory, nullptr); diff --git a/ai/neural_network_runtime/v1_0/interface/src/MemoryTest.cpp b/ai/neural_network_runtime/v1_0/interface/src/MemoryTest.cpp index 151c329e2be809df7f45e6c0ac565419df706062..4937bf06a73053242113c21d37c2ff8e139d8643 100644 --- a/ai/neural_network_runtime/v1_0/interface/src/MemoryTest.cpp +++ b/ai/neural_network_runtime/v1_0/interface/src/MemoryTest.cpp @@ -776,7 +776,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0100, Function | ASSERT_NE(nullptr, inputMemory); ASSERT_EQ(OH_NN_SUCCESS, 
OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != graphArgs.outputIndices.end()) { @@ -815,7 +815,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0200, Function | OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, operandTem.length); ASSERT_NE(nullptr, inputMemory); ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); } } ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor)); @@ -847,7 +847,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0300, Function | auto outputIndex = graphArgs.inputIndices.size() + j; // check memory output EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)), - (float*) addModel.expectValue)); + static_cast<float*>(addModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]); ASSERT_EQ(OHNNMemory[outputIndex], nullptr); } @@ -891,7 +891,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0400, Function | auto outputIndex = graphArgs.inputIndices.size() + j; // check memory output EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)), - (float*) avgModel.expectValue)); + static_cast<float*>(avgModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]); ASSERT_EQ(OHNNMemory[outputIndex], nullptr); } diff --git 
a/ai/neural_network_runtime/v2_0/common/mock_idevice.cpp b/ai/neural_network_runtime/v2_0/common/mock_idevice.cpp index e4ca3ac272be05d94f4a4872151ce07e7cc28f1e..aef259cac0f15926e2252af3205ed7462d6c073a 100644 --- a/ai/neural_network_runtime/v2_0/common/mock_idevice.cpp +++ b/ai/neural_network_runtime/v2_0/common/mock_idevice.cpp @@ -42,6 +42,11 @@ MockIDevice::~MockIDevice() } } +MockIDevice::MockIDevice() +{ + m_bufferFd = 0; +} + MockIPreparedModel::~MockIPreparedModel() { for (auto ash : m_ashmems) { diff --git a/ai/neural_network_runtime/v2_0/common/mock_idevice.h b/ai/neural_network_runtime/v2_0/common/mock_idevice.h index 8bfae99576e2568caf6cc182e275dff740e92ebb..b1cfaa821ee4f67d8985567bdf70df2166e6d864 100644 --- a/ai/neural_network_runtime/v2_0/common/mock_idevice.h +++ b/ai/neural_network_runtime/v2_0/common/mock_idevice.h @@ -92,7 +92,7 @@ public: static MockIDevice *GetInstance(); - MockIDevice() = default; + MockIDevice(); virtual ~MockIDevice(); private: diff --git a/ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp b/ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp index 36be2f909a46380fa13950ca78e854ec41719e3f..897dade5b7cfa772d977e16f4640af554f9fe828 100644 --- a/ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp +++ b/ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp @@ -278,7 +278,7 @@ int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, LOGE("[NNRtTest] OH_NNExecutor_SetInputWithMemory failed! 
ret=%d\n", ret); return ret; } - memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length); + memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length); OHNNMemory[inputIndex] = inputMemory; inputIndex += 1; } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != diff --git a/ai/neural_network_runtime/v2_0/common/nnrt_utils.h b/ai/neural_network_runtime/v2_0/common/nnrt_utils.h index c7d51260d0bd4cfb4331ec7be10718dafd851fb1..e16feb4e2a66e09a6d349b046e6ee92d130207e0 100644 --- a/ai/neural_network_runtime/v2_0/common/nnrt_utils.h +++ b/ai/neural_network_runtime/v2_0/common/nnrt_utils.h @@ -67,7 +67,7 @@ struct OHNNCompileParam { bool enableFp16 = false; }; -int BuildSingleOpGraph(OH_NNModel *modelptr, const OHNNGraphArgs &args); +int BuildSingleOpGraph(OH_NNModel *model, const OHNNGraphArgs &graphArgs); int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, OH_NN_Memory *OHNNMemory[], float* expect); diff --git a/ai/neural_network_runtime/v2_0/interface/src/CompileTest.cpp b/ai/neural_network_runtime/v2_0/interface/src/CompileTest.cpp index dd7e3ddd274484e1aeb5aff2de473bedba3b1687..95d27d42753a4395a6f3eaa87b454997756caf8a 100644 --- a/ai/neural_network_runtime/v2_0/interface/src/CompileTest.cpp +++ b/ai/neural_network_runtime/v2_0/interface/src/CompileTest.cpp @@ -68,7 +68,7 @@ public: } protected: - OHNNCompileParam compileParam; + OHNNCompileParam m_compileParam; AddModel addModel; OHNNGraphArgs graphArgs = addModel.graphArgs; }; @@ -835,8 +835,8 @@ HWTEST_F(CompileTest, SUB_AI_NNR_Func_North_Compilation_Combine_0100, Function | OH_NNCompilation *compilation2 = OH_NNCompilation_Construct(model2); ASSERT_NE(nullptr, compilation2); - std::thread th1(CompileModel, compilation1, compileParam); - std::thread th2(CompileModel, compilation2, compileParam); + std::thread th1(CompileModel, compilation1, m_compileParam); + std::thread 
th2(CompileModel, compilation2, m_compileParam); th1.join(); th2.join(); Free(model1, compilation1); diff --git a/ai/neural_network_runtime/v2_0/interface/src/ExecutorTest.cpp b/ai/neural_network_runtime/v2_0/interface/src/ExecutorTest.cpp index 6d6d69e030d5410a7763b4fe104937b22a41b9ec..d0def3146496e799d3c8fc2e72f1278a2c4fa40a 100644 --- a/ai/neural_network_runtime/v2_0/interface/src/ExecutorTest.cpp +++ b/ai/neural_network_runtime/v2_0/interface/src/ExecutorTest.cpp @@ -1240,7 +1240,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0400, Function | ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); OHNNMemory[inputIndex] = inputMemory; inputIndex += 1; } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != @@ -1310,7 +1310,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0500, Function | ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor)); // check result EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(outputMemory->data)), - (float*) addModel.expectValue)); + static_cast<float*>(addModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, 0, &outputMemory); ASSERT_EQ(outputMemory, nullptr); diff --git a/ai/neural_network_runtime/v2_0/interface/src/MemoryTest.cpp b/ai/neural_network_runtime/v2_0/interface/src/MemoryTest.cpp index ae19874c531de51c565b1eceef9607c589056143..a4b14153d440daea60d8b78ad660589eb03c7822 100644 --- a/ai/neural_network_runtime/v2_0/interface/src/MemoryTest.cpp +++ b/ai/neural_network_runtime/v2_0/interface/src/MemoryTest.cpp @@ -776,7 +776,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0100, Function | ASSERT_NE(nullptr, inputMemory); ASSERT_EQ(OH_NN_SUCCESS, 
OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != graphArgs.outputIndices.end()) { @@ -815,7 +815,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0200, Function | OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, operandTem.length); ASSERT_NE(nullptr, inputMemory); ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); } } ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor)); @@ -847,7 +847,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0300, Function | auto outputIndex = graphArgs.inputIndices.size() + j; // check memory output EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)), - (float*) addModel.expectValue)); + static_cast<float*>(addModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]); ASSERT_EQ(OHNNMemory[outputIndex], nullptr); } @@ -891,7 +891,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0400, Function | auto outputIndex = graphArgs.inputIndices.size() + j; // check memory output EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)), - (float*) avgModel.expectValue)); + static_cast<float*>(avgModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]); ASSERT_EQ(OHNNMemory[outputIndex], nullptr); }