From fbb10dfa08f9a23a9bec1f05d7801e6f7f72c16a Mon Sep 17 00:00:00 2001 From: scholar-lc <1936194231@qq.com> Date: Sun, 25 Jun 2023 17:01:41 +0800 Subject: [PATCH] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=20acts=20=E4=BB=93=E5=AE=89?= =?UTF-8?q?=E5=85=A8=E5=91=8A=E8=AD=A6=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: scholar-lc <1936194231@qq.com> --- ai/neural_network_runtime/v1_0/common/mock_idevice.cpp | 5 +++++ ai/neural_network_runtime/v1_0/common/mock_idevice.h | 2 +- ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp | 2 +- ai/neural_network_runtime/v1_0/common/nnrt_utils.h | 2 +- .../v1_0/interface/src/CompileTest.cpp | 6 +++--- .../v1_0/interface/src/ExecutorTest.cpp | 4 ++-- .../v1_0/interface/src/MemoryTest.cpp | 8 ++++---- ai/neural_network_runtime/v2_0/common/mock_idevice.cpp | 5 +++++ ai/neural_network_runtime/v2_0/common/mock_idevice.h | 2 +- ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp | 2 +- ai/neural_network_runtime/v2_0/common/nnrt_utils.h | 2 +- .../v2_0/interface/src/CompileTest.cpp | 6 +++--- .../v2_0/interface/src/ExecutorTest.cpp | 4 ++-- .../v2_0/interface/src/MemoryTest.cpp | 8 ++++---- 14 files changed, 34 insertions(+), 24 deletions(-) diff --git a/ai/neural_network_runtime/v1_0/common/mock_idevice.cpp b/ai/neural_network_runtime/v1_0/common/mock_idevice.cpp index 447f0ec24..ff6c8fe0c 100644 --- a/ai/neural_network_runtime/v1_0/common/mock_idevice.cpp +++ b/ai/neural_network_runtime/v1_0/common/mock_idevice.cpp @@ -34,6 +34,11 @@ sptr<INnrtDevice> INnrtDevice::Get(const std::string &serviceName, bool isStub) return mockIDevice; } +MockIDevice::MockIDevice() +{ + m_bufferFd = 0; +} + MockIDevice::~MockIDevice() { for (auto ash : m_ashmems) { diff --git a/ai/neural_network_runtime/v1_0/common/mock_idevice.h b/ai/neural_network_runtime/v1_0/common/mock_idevice.h index d2e653023..5c7968214 100644 --- a/ai/neural_network_runtime/v1_0/common/mock_idevice.h +++ 
b/ai/neural_network_runtime/v1_0/common/mock_idevice.h @@ -89,7 +89,7 @@ public: static MockIDevice *GetInstance(); - MockIDevice() = default; + MockIDevice(); virtual ~MockIDevice(); private: diff --git a/ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp b/ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp index d26905772..7d059674b 100644 --- a/ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp +++ b/ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp @@ -278,7 +278,7 @@ int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, LOGE("[NNRtTest] OH_NNExecutor_SetInputWithMemory failed! ret=%d\n", ret); return ret; } - memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length); + memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length); OHNNMemory[inputIndex] = inputMemory; inputIndex += 1; } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != diff --git a/ai/neural_network_runtime/v1_0/common/nnrt_utils.h b/ai/neural_network_runtime/v1_0/common/nnrt_utils.h index bed67c129..826fe48e5 100644 --- a/ai/neural_network_runtime/v1_0/common/nnrt_utils.h +++ b/ai/neural_network_runtime/v1_0/common/nnrt_utils.h @@ -67,7 +67,7 @@ struct OHNNCompileParam { bool enableFp16 = false; }; -int BuildSingleOpGraph(OH_NNModel *modelptr, const OHNNGraphArgs &args); +int BuildSingleOpGraph(OH_NNModel *model, const OHNNGraphArgs &graphArgs); int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, OH_NN_Memory *OHNNMemory[], float* expect); diff --git a/ai/neural_network_runtime/v1_0/interface/src/CompileTest.cpp b/ai/neural_network_runtime/v1_0/interface/src/CompileTest.cpp index 2ef9177e2..861685d3a 100644 --- a/ai/neural_network_runtime/v1_0/interface/src/CompileTest.cpp +++ b/ai/neural_network_runtime/v1_0/interface/src/CompileTest.cpp @@ -68,7 +68,7 @@ public: } protected: - OHNNCompileParam compileParam; + 
OHNNCompileParam m_compileParam; AddModel addModel; OHNNGraphArgs graphArgs = addModel.graphArgs; }; @@ -835,8 +835,8 @@ HWTEST_F(CompileTest, SUB_AI_NNR_Func_North_Compilation_Combine_0100, Function | OH_NNCompilation *compilation2 = OH_NNCompilation_Construct(model2); ASSERT_NE(nullptr, compilation2); - std::thread th1(CompileModel, compilation1, compileParam); - std::thread th2(CompileModel, compilation2, compileParam); + std::thread th1(CompileModel, compilation1, m_compileParam); + std::thread th2(CompileModel, compilation2, m_compileParam); th1.join(); th2.join(); Free(model1, compilation1); diff --git a/ai/neural_network_runtime/v1_0/interface/src/ExecutorTest.cpp b/ai/neural_network_runtime/v1_0/interface/src/ExecutorTest.cpp index 146a49732..4ff81e23c 100644 --- a/ai/neural_network_runtime/v1_0/interface/src/ExecutorTest.cpp +++ b/ai/neural_network_runtime/v1_0/interface/src/ExecutorTest.cpp @@ -1135,7 +1135,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0400, Function | ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); OHNNMemory[inputIndex] = inputMemory; inputIndex += 1; } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != @@ -1205,7 +1205,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0500, Function | ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor)); // check result EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(outputMemory->data)), - (float*) addModel.expectValue)); + static_cast<float*>(addModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, 0, &outputMemory); ASSERT_EQ(outputMemory, nullptr); diff --git a/ai/neural_network_runtime/v1_0/interface/src/MemoryTest.cpp 
b/ai/neural_network_runtime/v1_0/interface/src/MemoryTest.cpp index 151c329e2..4937bf06a 100644 --- a/ai/neural_network_runtime/v1_0/interface/src/MemoryTest.cpp +++ b/ai/neural_network_runtime/v1_0/interface/src/MemoryTest.cpp @@ -776,7 +776,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0100, Function | ASSERT_NE(nullptr, inputMemory); ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != graphArgs.outputIndices.end()) { @@ -815,7 +815,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0200, Function | OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, operandTem.length); ASSERT_NE(nullptr, inputMemory); ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); } } ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor)); @@ -847,7 +847,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0300, Function | auto outputIndex = graphArgs.inputIndices.size() + j; // check memory output EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)), - (float*) addModel.expectValue)); + static_cast<float*>(addModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]); ASSERT_EQ(OHNNMemory[outputIndex], nullptr); } @@ -891,7 +891,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0400, 
Function | auto outputIndex = graphArgs.inputIndices.size() + j; // check memory output EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)), - (float*) avgModel.expectValue)); + static_cast<float*>(avgModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]); ASSERT_EQ(OHNNMemory[outputIndex], nullptr); } diff --git a/ai/neural_network_runtime/v2_0/common/mock_idevice.cpp b/ai/neural_network_runtime/v2_0/common/mock_idevice.cpp index e4ca3ac27..aef259cac 100644 --- a/ai/neural_network_runtime/v2_0/common/mock_idevice.cpp +++ b/ai/neural_network_runtime/v2_0/common/mock_idevice.cpp @@ -42,6 +42,11 @@ MockIDevice::~MockIDevice() } } +MockIDevice::MockIDevice() +{ + m_bufferFd = 0; +} + MockIPreparedModel::~MockIPreparedModel() { for (auto ash : m_ashmems) { diff --git a/ai/neural_network_runtime/v2_0/common/mock_idevice.h b/ai/neural_network_runtime/v2_0/common/mock_idevice.h index 8bfae9957..b1cfaa821 100644 --- a/ai/neural_network_runtime/v2_0/common/mock_idevice.h +++ b/ai/neural_network_runtime/v2_0/common/mock_idevice.h @@ -92,7 +92,7 @@ public: static MockIDevice *GetInstance(); - MockIDevice() = default; + MockIDevice(); virtual ~MockIDevice(); private: diff --git a/ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp b/ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp index 36be2f909..897dade5b 100644 --- a/ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp +++ b/ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp @@ -278,7 +278,7 @@ int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, LOGE("[NNRtTest] OH_NNExecutor_SetInputWithMemory failed! 
ret=%d\n", ret); return ret; } - memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length); + memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length); OHNNMemory[inputIndex] = inputMemory; inputIndex += 1; } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != diff --git a/ai/neural_network_runtime/v2_0/common/nnrt_utils.h b/ai/neural_network_runtime/v2_0/common/nnrt_utils.h index c7d51260d..e16feb4e2 100644 --- a/ai/neural_network_runtime/v2_0/common/nnrt_utils.h +++ b/ai/neural_network_runtime/v2_0/common/nnrt_utils.h @@ -67,7 +67,7 @@ struct OHNNCompileParam { bool enableFp16 = false; }; -int BuildSingleOpGraph(OH_NNModel *modelptr, const OHNNGraphArgs &args); +int BuildSingleOpGraph(OH_NNModel *model, const OHNNGraphArgs &graphArgs); int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, OH_NN_Memory *OHNNMemory[], float* expect); diff --git a/ai/neural_network_runtime/v2_0/interface/src/CompileTest.cpp b/ai/neural_network_runtime/v2_0/interface/src/CompileTest.cpp index dd7e3ddd2..95d27d427 100644 --- a/ai/neural_network_runtime/v2_0/interface/src/CompileTest.cpp +++ b/ai/neural_network_runtime/v2_0/interface/src/CompileTest.cpp @@ -68,7 +68,7 @@ public: } protected: - OHNNCompileParam compileParam; + OHNNCompileParam m_compileParam; AddModel addModel; OHNNGraphArgs graphArgs = addModel.graphArgs; }; @@ -835,8 +835,8 @@ HWTEST_F(CompileTest, SUB_AI_NNR_Func_North_Compilation_Combine_0100, Function | OH_NNCompilation *compilation2 = OH_NNCompilation_Construct(model2); ASSERT_NE(nullptr, compilation2); - std::thread th1(CompileModel, compilation1, compileParam); - std::thread th2(CompileModel, compilation2, compileParam); + std::thread th1(CompileModel, compilation1, m_compileParam); + std::thread th2(CompileModel, compilation2, m_compileParam); th1.join(); th2.join(); Free(model1, compilation1); diff --git 
a/ai/neural_network_runtime/v2_0/interface/src/ExecutorTest.cpp b/ai/neural_network_runtime/v2_0/interface/src/ExecutorTest.cpp index 6d6d69e03..d0def3146 100644 --- a/ai/neural_network_runtime/v2_0/interface/src/ExecutorTest.cpp +++ b/ai/neural_network_runtime/v2_0/interface/src/ExecutorTest.cpp @@ -1240,7 -1240,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0400, Function | ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); OHNNMemory[inputIndex] = inputMemory; inputIndex += 1; } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != @@ -1310,7 +1310,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0500, Function | ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor)); // check result EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(outputMemory->data)), - (float*) addModel.expectValue)); + static_cast<float*>(addModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, 0, &outputMemory); ASSERT_EQ(outputMemory, nullptr); diff --git a/ai/neural_network_runtime/v2_0/interface/src/MemoryTest.cpp b/ai/neural_network_runtime/v2_0/interface/src/MemoryTest.cpp index ae19874c5..a4b14153d 100644 --- a/ai/neural_network_runtime/v2_0/interface/src/MemoryTest.cpp +++ b/ai/neural_network_runtime/v2_0/interface/src/MemoryTest.cpp @@ -776,7 +776,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0100, Function | ASSERT_NE(nullptr, inputMemory); ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, 
operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) != graphArgs.outputIndices.end()) { @@ -815,7 +815,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0200, Function | OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, operandTem.length); ASSERT_NE(nullptr, inputMemory); ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory)); - ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length)); + ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void *>(operandTem.data), operandTem.length)); } } ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor)); @@ -847,7 +847,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0300, Function | auto outputIndex = graphArgs.inputIndices.size() + j; // check memory output EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)), - (float*) addModel.expectValue)); + static_cast<float*>(addModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]); ASSERT_EQ(OHNNMemory[outputIndex], nullptr); } @@ -891,7 +891,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0400, Function | auto outputIndex = graphArgs.inputIndices.size() + j; // check memory output EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)), - (float*) avgModel.expectValue)); + static_cast<float*>(avgModel.expectValue))); OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]); ASSERT_EQ(OHNNMemory[outputIndex], nullptr); } -- GitLab