From 2620890a834d91e15707b536c02d61df85d2bfe1 Mon Sep 17 00:00:00 2001
From: tangshihua
Date: Tue, 3 Jan 2023 17:34:29 +0800
Subject: [PATCH] add mutex lock

Signed-off-by: tangshihua
---
 ai/neural_network_runtime/common/mock_idevice.cpp           | 3 +++
 ai/neural_network_runtime/common/mock_idevice.h             | 2 +-
 ai/neural_network_runtime/common/nnrt_utils.cpp             | 4 +++-
 ai/neural_network_runtime/stability/src/MultiThreadTest.cpp | 3 +--
 4 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/ai/neural_network_runtime/common/mock_idevice.cpp b/ai/neural_network_runtime/common/mock_idevice.cpp
index 650c77aaf..04ea181ed 100644
--- a/ai/neural_network_runtime/common/mock_idevice.cpp
+++ b/ai/neural_network_runtime/common/mock_idevice.cpp
@@ -147,6 +147,7 @@ int32_t MockIDevice::IsModelCacheSupported(bool& isSupported)
 
 int32_t MockIDevice::AllocateBuffer(uint32_t length, SharedBuffer &buffer)
 {
+    std::lock_guard<std::mutex> lock(m_mtx);
     sptr<Ashmem> ashptr = Ashmem::CreateAshmem("allocateBuffer", length);
     if (ashptr == nullptr) {
         LOGE("[NNRtTest] Create shared memory failed.");
@@ -170,6 +171,7 @@ int32_t MockIDevice::AllocateBuffer(uint32_t length, SharedBuffer &buffer)
 
 int32_t MockIDevice::ReleaseBuffer(const SharedBuffer &buffer)
 {
+    std::lock_guard<std::mutex> lock(m_mtx);
     auto ash = m_ashmems[buffer.fd];
     ash->UnmapAshmem();
     return HDF_SUCCESS;
@@ -177,6 +179,7 @@ int32_t MockIDevice::ReleaseBuffer(const SharedBuffer &buffer)
 
 int32_t MockIDevice::MemoryCopy(float* data, uint32_t length)
 {
+    std::lock_guard<std::mutex> lock(m_mtx);
     auto memManager = NeuralNetworkRuntime::MemoryManager::GetInstance();
     auto memAddress = memManager->MapMemory(m_bufferFd, length);
     if (memAddress == nullptr) {
diff --git a/ai/neural_network_runtime/common/mock_idevice.h b/ai/neural_network_runtime/common/mock_idevice.h
index 3c2d8e4d1..4f5887cf2 100644
--- a/ai/neural_network_runtime/common/mock_idevice.h
+++ b/ai/neural_network_runtime/common/mock_idevice.h
@@ -97,7 +97,7 @@ private:
     bool m_priority = true;
     bool m_cache = true;
     bool
m_dynamic = true;
-    std::vector<bool> m_operations{true};
+    std::mutex m_mtx;
 };
 
 class MockIPreparedModel : public IPreparedModel {
diff --git a/ai/neural_network_runtime/common/nnrt_utils.cpp b/ai/neural_network_runtime/common/nnrt_utils.cpp
index 8692dffc3..01f10ffed 100644
--- a/ai/neural_network_runtime/common/nnrt_utils.cpp
+++ b/ai/neural_network_runtime/common/nnrt_utils.cpp
@@ -222,7 +222,9 @@ int ExecuteGraphMock(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs,
             LOGE("[NNRtTest] OH_NNExecutor_SetOutput failed! ret=%d\n", ret);
             return ret;
         }
-        ret = device->MemoryCopy(expect, operandTem.length);
+        if (expect != nullptr) {
+            ret = device->MemoryCopy(expect, operandTem.length);
+        }
         if (ret != OH_NN_SUCCESS) {
             LOGE("[NNRtTest] device set expect output failed! ret=%d\n", ret);
             return ret;
diff --git a/ai/neural_network_runtime/stability/src/MultiThreadTest.cpp b/ai/neural_network_runtime/stability/src/MultiThreadTest.cpp
index dee5ea9a9..a609bf446 100644
--- a/ai/neural_network_runtime/stability/src/MultiThreadTest.cpp
+++ b/ai/neural_network_runtime/stability/src/MultiThreadTest.cpp
@@ -48,8 +48,7 @@ void CompileModel(OH_NNCompilation *compilation, const OHNNCompileParam &compileParam)
 
 void ExecuteModel(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs)
 {
-    float addExpectValue[4] = {0, 1, 2, 3};
-    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addExpectValue));
+    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, nullptr));
 }
-- 
GitLab