OpenHarmony / Xts Acts
Unverified commit a22633e9
Authored on Nov 22, 2022 by openharmony_ci; committed by Gitee on Nov 22, 2022

Revert "Pull Request !6553: AI subsystem NNRt northbound interface test cases"

Parent: 44e50725
Changes: 19
Showing 19 changed files with 0 additions and 5563 deletions (+0 −5563)
ai/BUILD.gn                                                   +0 −17
ai/neural_network_runtime/BUILD.gn                            +0 −20
ai/neural_network_runtime/common/const.h                      +0 −44
ai/neural_network_runtime/common/mock_idevice.cpp             +0 −255
ai/neural_network_runtime/common/mock_idevice.h               +0 −116
ai/neural_network_runtime/common/model.h                      +0 −111
ai/neural_network_runtime/common/nnrt_utils.cpp               +0 −449
ai/neural_network_runtime/common/nnrt_utils.h                 +0 −99
ai/neural_network_runtime/interface/BUILD.gn                  +0 −55
ai/neural_network_runtime/interface/Test.json                 +0 −18
ai/neural_network_runtime/interface/src/CompileTest.cpp       +0 −862
ai/neural_network_runtime/interface/src/DeviceTest.cpp        +0 −199
ai/neural_network_runtime/interface/src/ExecutorTest.cpp      +0 −1213
ai/neural_network_runtime/interface/src/MemoryTest.cpp        +0 −899
ai/neural_network_runtime/interface/src/ModelTest.cpp         +0 −1003
ai/neural_network_runtime/stability/BUILD.gn                  +0 −51
ai/neural_network_runtime/stability/Test.json                 +0 −18
ai/neural_network_runtime/stability/src/MultiThreadTest.cpp   +0 −133
test_packages.gni                                             +0 −1

ai/BUILD.gn (deleted, 100644 → 0)

# Copyright (c) 2021-2022 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
group("ai") {
testonly = true
deps = [ "neural_network_runtime:neural_network_runtime" ]
}

ai/neural_network_runtime/BUILD.gn (deleted, 100644 → 0)

# Copyright (c) 2021-2022 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
group("neural_network_runtime") {
testonly = true
deps = [
"interface:ActsAiNnrtFunctionTest",
"stability:ActsAiNnrtStabilityTest",
]
}

ai/neural_network_runtime/common/const.h (deleted, 100644 → 0)

/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CONST_H
#define CONST_H
#include <string>
#include <vector>
namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Test {
const uint32_t ADD_DATA_LENGTH = 4 * sizeof(float);
const uint32_t AVG_INPUT_LENGTH = 9 * sizeof(float);
const std::vector<int32_t> TENSOR_SHAPE = {1, 2, 2, 1};
const std::vector<int32_t> PARAM_INDEX = {2};
const std::vector<int32_t> INPUT_INDEX = {0, 1};
const std::vector<int32_t> OUTPUT_INDEX = {3};
const int32_t ELEMENT_COUNT = 4;

const std::string CACHE_DIR = "./cache";
const std::string CACHE_PATH = CACHE_DIR + "/0.nncache";
const std::string CACHE_INFO_PATH = CACHE_DIR + "/cache_info.nncache";
const uint32_t NO_DEVICE_COUNT = 0;
const int STRESS_COUNT = 100000;
const int PRINT_FREQ = 500;
} // namespace Test
} // namespace NeuralNetworkRuntime
} // namespace OHOS
#endif // CONST_H

ai/neural_network_runtime/common/mock_idevice.cpp (deleted, 100644 → 0)

/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "const.h"
#include "mock_idevice.h"
namespace OHOS {
namespace HDI {
namespace Nnrt {
namespace V1_0 {

sptr<INnrtDevice> INnrtDevice::Get(bool isStub)
{
    return INnrtDevice::Get("mock_device_service", isStub);
}

sptr<INnrtDevice> INnrtDevice::Get(const std::string &serviceName, bool isStub)
{
    if (isStub) {
        return nullptr;
    }
    sptr<INnrtDevice> mockIDevice = sptr<MockIDevice>(MockIDevice::GetInstance());
    return mockIDevice;
}

MockIDevice::~MockIDevice()
{
    for (auto ash : m_ashmems) {
        ash.second->UnmapAshmem();
        ash.second->CloseAshmem();
    }
}

MockIDevice *MockIDevice::GetInstance()
{
    static MockIDevice iDevice;
    return &iDevice;
}

void MockIDevice::SetFP16Supported(bool isSupported)
{
    m_fp16 = isSupported;
}

void MockIDevice::SetPerformanceSupported(bool isSupported)
{
    m_performance = isSupported;
}

void MockIDevice::SetPrioritySupported(bool isSupported)
{
    m_priority = isSupported;
}

void MockIDevice::SetModelCacheSupported(bool isSupported)
{
    m_cache = isSupported;
}

void MockIDevice::SetOperationsSupported(std::vector<bool> isSupported)
{
    m_operations = isSupported;
}

void MockIDevice::SetDynamicInputSupported(bool isSupported)
{
    m_dynamic = isSupported;
}

int32_t MockIDevice::GetDeviceName(std::string &name)
{
    name = "Device-CPU";
    return HDF_SUCCESS;
}

int32_t MockIDevice::GetVendorName(std::string &name)
{
    name = "TestVendor";
    return HDF_SUCCESS;
}

int32_t MockIDevice::GetDeviceType(DeviceType &deviceType)
{
    deviceType = DeviceType::CPU;
    return HDF_SUCCESS;
}

int32_t MockIDevice::GetDeviceStatus(DeviceStatus &status)
{
    status = DeviceStatus::AVAILABLE;
    return HDF_SUCCESS;
}

int32_t MockIDevice::GetVersion(uint32_t &majorVersion, uint32_t &minorVersion)
{
    majorVersion = 1;
    minorVersion = 0;
    return HDF_SUCCESS;
}

int32_t MockIDevice::GetSupportedOperation(const Model &model, std::vector<bool> &ops)
{
    ops = m_operations;
    return HDF_SUCCESS;
}

int32_t MockIDevice::IsFloat16PrecisionSupported(bool &isSupported)
{
    isSupported = m_fp16;
    return HDF_SUCCESS;
}

int32_t MockIDevice::IsPerformanceModeSupported(bool &isSupported)
{
    isSupported = m_performance;
    return HDF_SUCCESS;
}

int32_t MockIDevice::IsPrioritySupported(bool &isSupported)
{
    isSupported = m_priority;
    return HDF_SUCCESS;
}

int32_t MockIDevice::IsDynamicInputSupported(bool &isSupported)
{
    isSupported = m_dynamic;
    return HDF_SUCCESS;
}

int32_t MockIDevice::IsModelCacheSupported(bool &isSupported)
{
    isSupported = m_cache;
    return HDF_SUCCESS;
}

int32_t MockIDevice::AllocateBuffer(uint32_t length, SharedBuffer &buffer)
{
    sptr<Ashmem> ashptr = Ashmem::CreateAshmem("allocateBuffer", length);
    if (ashptr == nullptr) {
        LOGE("[NNRtTest] Create shared memory failed.");
        return HDF_FAILURE;
    }
    if (!ashptr->MapReadAndWriteAshmem()) {
        LOGE("[NNRtTest] Map allocate buffer failed.");
        return HDF_FAILURE;
    }
    buffer.fd = ashptr->GetAshmemFd();
    buffer.bufferSize = ashptr->GetAshmemSize();
    buffer.offset = 0;
    buffer.dataSize = length;
    m_ashmems[buffer.fd] = ashptr;
    m_bufferFd = buffer.fd;
    return HDF_SUCCESS;
}

int32_t MockIDevice::ReleaseBuffer(const SharedBuffer &buffer)
{
    auto ash = m_ashmems[buffer.fd];
    ash->UnmapAshmem();
    return HDF_SUCCESS;
}

int32_t MockIDevice::MemoryCopy(float *data, uint32_t length)
{
    auto memManager = NeuralNetworkRuntime::MemoryManager::GetInstance();
    auto memAddress = memManager->MapMemory(m_bufferFd, length);
    if (memAddress == nullptr) {
        LOGE("[NNRtTest] Map fd to address failed.");
        return HDF_FAILURE;
    }
    auto ret = memcpy_s(memAddress, length, data, length);
    if (ret != EOK) {
        LOGE("[NNRtTest] MockIDevice memory copy failed.");
        return HDF_FAILURE;
    }
    return HDF_SUCCESS;
}

int32_t MockIDevice::PrepareModel(const Model &model, const ModelConfig &config, sptr<IPreparedModel> &preparedModel)
{
    preparedModel = new (std::nothrow) V1_0::MockIPreparedModel();
    return HDF_SUCCESS;
}

int32_t MockIDevice::PrepareModelFromModelCache(const std::vector<SharedBuffer> &modelCache, const ModelConfig &config,
    sptr<IPreparedModel> &preparedModel)
{
    preparedModel = new (std::nothrow) V1_0::MockIPreparedModel();
    return HDF_SUCCESS;
}

int32_t MockIPreparedModel::ExportModelCache(std::vector<SharedBuffer> &modelCache)
{
    if (!modelCache.empty()) {
        LOGE("[NNRtTest] The parameters of ExportModelCache should be an empty vector.");
        return HDF_ERR_INVALID_PARAM;
    }
    uint8_t buffer[4] = {0, 1, 2, 3};
    uint32_t size = sizeof(buffer);
    sptr<Ashmem> cache = Ashmem::CreateAshmem("cache", size);
    if (cache == nullptr) {
        LOGE("[NNRtTest] Create shared memory failed.");
        return HDF_ERR_MALLOC_FAIL;
    }
    bool ret = cache->MapReadAndWriteAshmem();
    if (!ret) {
        LOGE("[NNRtTest] Map fd to write cache failed.");
        return HDF_FAILURE;
    }
    ret = cache->WriteToAshmem(buffer, size, 0);
    cache->UnmapAshmem();
    if (!ret) {
        LOGE("[NNRtTest] Write cache failed.");
        return HDF_FAILURE;
    }
    // SharedBuffer: fd, bufferSize, offset, dataSize
    modelCache.emplace_back(SharedBuffer {cache->GetAshmemFd(), cache->GetAshmemSize(), 0, cache->GetAshmemSize()});
    return HDF_SUCCESS;
}

int32_t MockIPreparedModel::GetVersion(uint32_t &majorVersion, uint32_t &minorVersion)
{
    majorVersion = 1;
    minorVersion = 0;
    return HDF_SUCCESS;
}

int32_t MockIPreparedModel::Run(const std::vector<IOTensor> &inputs, const std::vector<IOTensor> &outputs,
    std::vector<std::vector<int32_t>> &outputsDims, std::vector<bool> &isOutputBufferEnough)
{
    outputsDims = {{1, 2, 2, 1}};
    isOutputBufferEnough = {true};
    return HDF_SUCCESS;
}

} // namespace V1_0
} // namespace Nnrt
} // namespace HDI
} // namespace OHOS
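
MockIDevice above is a process-local singleton that stands in for the real NNRt HDI driver, and its Set*Supported switches let individual test cases shrink the capabilities the fake device reports before an API is exercised. As a rough usage sketch (the same pattern appears verbatim in CompileTest.cpp later in this diff; the surrounding model and compilation setup is omitted here):

// Sketch: disable model-cache support on the mock device for one negative case,
// then restore the default so later test cases are unaffected.
OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
device->SetModelCacheSupported(false);
// ... build a model and compilation, then expect OH_NN_OPERATION_FORBIDDEN
// from OH_NNCompilation_SetCache(compilation, "./cache", 10) ...
device->SetModelCacheSupported(true);
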

ai/neural_network_runtime/common/mock_idevice.h (deleted, 100644 → 0)

/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MOCK_IDEVICE_H
#define MOCK_IDEVICE_H
#include <iostream>
#include <hdi_support.h>
#include <string_ex.h>
#include <hdf_base.h>
#include <gtest/gtest.h>
#include <gmock/gmock.h>
#include "mindir_lite_graph.h"
#include "mindir.h"
#include "securec.h"
#include "refbase.h"
#include "common/log.h"
#include "frameworks/native/hdi_interfaces.h"
#include "frameworks/native/memory_manager.h"
#include "ashmem.h"
namespace OHOS {
namespace HDI {
namespace Nnrt {
namespace V1_0 {

class MockIDevice : public INnrtDevice {
public:
    int32_t GetSupportedOperation(const Model &model, std::vector<bool> &ops) override;

    int32_t IsFloat16PrecisionSupported(bool &isSupported) override;

    int32_t IsPerformanceModeSupported(bool &isSupported) override;

    int32_t IsPrioritySupported(bool &isSupported) override;

    int32_t IsDynamicInputSupported(bool &isSupported) override;

    int32_t IsModelCacheSupported(bool &isSupported) override;

    int32_t AllocateBuffer(uint32_t length, SharedBuffer &buffer) override;

    int32_t ReleaseBuffer(const SharedBuffer &buffer) override;

    int32_t GetDeviceName(std::string &name) override;

    int32_t GetVendorName(std::string &name) override;

    int32_t GetDeviceType(DeviceType &deviceType) override;

    int32_t GetDeviceStatus(DeviceStatus &status) override;

    int32_t GetVersion(uint32_t &majorVersion, uint32_t &minorVersion) override;

    int32_t PrepareModel(const Model &model, const ModelConfig &config, sptr<IPreparedModel> &preparedModel) override;

    int32_t PrepareModelFromModelCache(const std::vector<SharedBuffer> &modelCache, const ModelConfig &config,
        sptr<IPreparedModel> &preparedModel) override;

    int32_t MemoryCopy(float *data, uint32_t length);

    void SetFP16Supported(bool isSupported);

    void SetPerformanceSupported(bool isSupported);

    void SetPrioritySupported(bool isSupported);

    void SetModelCacheSupported(bool isSupported);

    void SetOperationsSupported(std::vector<bool> isSupported);

    void SetDynamicInputSupported(bool isSupported);

    static MockIDevice *GetInstance();

    MockIDevice() = default;
    virtual ~MockIDevice();

private:
    std::unordered_map<int, sptr<Ashmem>> m_ashmems;
    int m_bufferFd;
    bool m_fp16 = true;
    bool m_performance = true;
    bool m_priority = true;
    bool m_cache = true;
    bool m_dynamic = true;
    std::vector<bool> m_operations {true};
};

class MockIPreparedModel : public IPreparedModel {
public:
    int32_t ExportModelCache(std::vector<SharedBuffer> &modelCache) override;
    int32_t Run(const std::vector<IOTensor> &inputs, const std::vector<IOTensor> &outputs,
        std::vector<std::vector<int32_t>> &outputsDims, std::vector<bool> &isOutputBufferEnough) override;
    int32_t GetVersion(uint32_t &majorVersion, uint32_t &minorVersion) override;

    MockIPreparedModel() = default;
};

} // namespace V1_0
} // namespace Nnrt
} // namespace HDI
} // namespace OHOS
#endif // MOCK_IDEVICE_H

ai/neural_network_runtime/common/model.h (deleted, 100644 → 0)

/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MODEL_H
#define MODEL_H
#include "const.h"
#include "nnrt_utils.h"
namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Test {

struct AddModel {
    // ADD MODEL
    float inputValue0[4] = {0, 1, 2, 3};
    float inputValue1[4] = {0, 1, 2, 3};
    int8_t activationValue = OH_NN_FUSED_NONE;
    float outputValue[4] = {0};
    float expectValue[4] = {0, 2, 4, 6};

    OHNNOperandTest input0 = {OH_NN_FLOAT32, OH_NN_TENSOR, TENSOR_SHAPE, inputValue0, ADD_DATA_LENGTH};
    OHNNOperandTest input1 = {OH_NN_FLOAT32, OH_NN_TENSOR, TENSOR_SHAPE, inputValue1, ADD_DATA_LENGTH};
    OHNNOperandTest activation = {OH_NN_INT8, OH_NN_ADD_ACTIVATIONTYPE, {}, &activationValue, sizeof(int8_t)};
    OHNNOperandTest output = {OH_NN_FLOAT32, OH_NN_TENSOR, TENSOR_SHAPE, outputValue, ADD_DATA_LENGTH};
    OHNNGraphArgs graphArgs = {.operationType = OH_NN_OPS_ADD,
                               .operands = {input0, input1, activation, output},
                               .paramIndices = {2},
                               .inputIndices = {0, 1},
                               .outputIndices = {3}};
};

struct AvgPoolDynamicModel {
    // AVG POOL MODEL
    float inputValue[9] = {0, 1, 2, 3, 4, 5, 6, 7, 8};
    uint64_t kernelValue[2] = {2, 2};
    uint64_t strideValue[2] = {1, 1};
    int8_t padValue = 1;
    int8_t activationValue = OH_NN_FUSED_NONE;
    float outputValue[4] = {0};
    float expectValue[4] = {2, 3, 5, 6};

    OHNNOperandTest dynamicInput = {OH_NN_FLOAT32, OH_NN_TENSOR, {-1, -1, -1, -1}, inputValue, AVG_INPUT_LENGTH};
    OHNNOperandTest kernel = {OH_NN_INT64, OH_NN_AVG_POOL_KERNEL_SIZE, {2}, kernelValue, sizeof(kernelValue)};
    OHNNOperandTest strides = {OH_NN_INT64, OH_NN_AVG_POOL_STRIDE, {2}, strideValue, sizeof(strideValue)};
    OHNNOperandTest padMode = {OH_NN_INT8, OH_NN_AVG_POOL_PAD_MODE, {}, &padValue, sizeof(padValue)};
    OHNNOperandTest activation = {OH_NN_INT8, OH_NN_AVG_POOL_ACTIVATION_TYPE, {}, &activationValue, sizeof(int8_t)};
    OHNNOperandTest output = {OH_NN_FLOAT32, OH_NN_TENSOR, {-1, -1, -1, -1}, outputValue, sizeof(outputValue)};
    OHNNGraphArgs graphArgs = {.operationType = OH_NN_OPS_AVG_POOL,
                               .operands = {dynamicInput, kernel, strides, padMode, activation, output},
                               .paramIndices = {1, 2, 3, 4},
                               .inputIndices = {0},
                               .outputIndices = {5}};
};

struct TopKModel {
    // TopK Model
    float valueX[6] = {0, 1, 2, 3, 4, 5};
    int8_t valueK = 2;
    bool valueSorted = true;
    float valueOutput1[2];
    int32_t valueOutput2[2];

    OHNNOperandTest x = {OH_NN_FLOAT32, OH_NN_TENSOR, {1, 6}, valueX, 6 * sizeof(float)};
    OHNNOperandTest k = {OH_NN_INT8, OH_NN_TENSOR, {}, &valueK, sizeof(int8_t)};
    OHNNOperandTest sorted = {OH_NN_BOOL, OH_NN_TOP_K_SORTED, {}, &valueSorted, sizeof(bool)};
    OHNNOperandTest output1 = {OH_NN_FLOAT32, OH_NN_TENSOR, {1, 2}, valueOutput1, 2 * sizeof(float)};
    OHNNOperandTest output2 = {OH_NN_INT32, OH_NN_TENSOR, {1, 2}, valueOutput2, 2 * sizeof(int32_t)};
    OHNNGraphArgs graphArgs = {.operationType = OH_NN_OPS_TOP_K,
                               .operands = {x, k, sorted, output1, output2},
                               .paramIndices = {2},
                               .inputIndices = {0, 1},
                               .outputIndices = {3, 4}};
};

class AddTopKModel {
    // Build two ops Model
private:
    AddModel addModel;
    TopKModel topKModel;

public:
    OHNNGraphArgsMulti graphArgs = {
        .operationTypes = {OH_NN_OPS_ADD, OH_NN_OPS_TOP_K},
        .operands = {{addModel.input0, addModel.input1, addModel.activation, addModel.output},
                     {topKModel.k, topKModel.sorted, topKModel.output1, topKModel.output2}},
        .paramIndices = {{2}, {5}},
        .inputIndices = {{0, 1}, {3, 4}},
        .outputIndices = {{3}, {6, 7}},
        .graphInput = {0, 1, 4},
        .graphOutput = {6, 7}};
};

} // namespace Test
} // namespace NeuralNetworkRuntime
} // namespace OHOS
#endif // MODEL_H

ai/neural_network_runtime/common/nnrt_utils.cpp (deleted, 100644 → 0)

/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "nnrt_utils.h"
#include "const.h"
#include <fstream>
namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Test {

OH_NN_UInt32Array TransformUInt32Array(const std::vector<uint32_t> &vector)
{
    uint32_t *data = (vector.empty()) ? nullptr : const_cast<uint32_t *>(vector.data());
    return {data, vector.size()};
}

int BuildMultiOpGraph(OH_NNModel *model, const OHNNGraphArgsMulti &graphArgs)
{
    int ret = 0;
    int opCnt = 0;
    for (int j = 0; j < graphArgs.operationTypes.size(); j++) {
        for (int i = 0; i < graphArgs.operands[j].size(); i++) {
            const OHNNOperandTest &operandTem = graphArgs.operands[j][i];
            auto quantParam = operandTem.quantParam;
            OH_NN_Tensor operand = {operandTem.dataType, (uint32_t) operandTem.shape.size(),
                operandTem.shape.data(), quantParam, operandTem.type};
            ret = OH_NNModel_AddTensor(model, &operand);
            if (ret != OH_NN_SUCCESS) {
                LOGE("[NNRtTest] OH_NNModel_AddTensor failed! ret=%d\n", ret);
                return ret;
            }
            if (std::find(graphArgs.paramIndices[j].begin(), graphArgs.paramIndices[j].end(), opCnt) !=
                graphArgs.paramIndices[j].end()) {
                ret = OH_NNModel_SetTensorData(model, opCnt, operandTem.data, operandTem.length);
            }
            if (ret != OH_NN_SUCCESS) {
                LOGE("[NNRtTest] OH_NNModel_SetTensorData failed! ret=%d\n", ret);
                return ret;
            }
            opCnt += 1;
        }
        auto paramIndices = TransformUInt32Array(graphArgs.paramIndices[j]);
        auto inputIndices = TransformUInt32Array(graphArgs.inputIndices[j]);
        auto outputIndices = TransformUInt32Array(graphArgs.outputIndices[j]);
        ret = OH_NNModel_AddOperation(model, graphArgs.operationTypes[j], &paramIndices, &inputIndices,
            &outputIndices);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNRtTest] OH_NNModel_AddOperation failed! ret=%d\n", ret);
            return ret;
        }
    }
    auto graphInputs = TransformUInt32Array(graphArgs.graphInput);
    auto graphOutputs = TransformUInt32Array(graphArgs.graphOutput);
    ret = OH_NNModel_SpecifyInputsAndOutputs(model, &graphInputs, &graphOutputs);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNRtTest] OH_NNModel_SpecifyInputsAndOutputs failed! ret=%d\n", ret);
        return ret;
    }
    ret = OH_NNModel_Finish(model);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNRtTest] OH_NNModel_Finish failed! ret=%d\n", ret);
        return ret;
    }
    return ret;
}

int BuildSingleOpGraph(OH_NNModel *model, const OHNNGraphArgs &graphArgs)
{
    int ret = 0;
    for (int i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t) operandTem.shape.size(),
            operandTem.shape.data(), quantParam, operandTem.type};
        ret = OH_NNModel_AddTensor(model, &operand);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNRtTest] OH_NNModel_AddTensor failed! ret=%d\n", ret);
            return ret;
        }
        if (std::find(graphArgs.paramIndices.begin(), graphArgs.paramIndices.end(), i) !=
            graphArgs.paramIndices.end()) {
            ret = OH_NNModel_SetTensorData(model, i, operandTem.data, operandTem.length);
            if (ret != OH_NN_SUCCESS) {
                LOGE("[NNRtTest] OH_NNModel_SetTensorData failed! ret=%d\n", ret);
                return ret;
            }
        }
    }
    auto paramIndices = TransformUInt32Array(graphArgs.paramIndices);
    auto inputIndices = TransformUInt32Array(graphArgs.inputIndices);
    auto outputIndices = TransformUInt32Array(graphArgs.outputIndices);
    if (graphArgs.addOperation) {
        ret = OH_NNModel_AddOperation(model, graphArgs.operationType, &paramIndices, &inputIndices,
            &outputIndices);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNRtTest] OH_NNModel_AddOperation failed! ret=%d\n", ret);
            return ret;
        }
    }
    if (graphArgs.specifyIO) {
        ret = OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNRtTest] OH_NNModel_SpecifyInputsAndOutputs failed! ret=%d\n", ret);
            return ret;
        }
    }
    if (graphArgs.build) {
        ret = OH_NNModel_Finish(model);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNRtTest] OH_NNModel_Finish failed! ret=%d\n", ret);
            return ret;
        }
    }
    return ret;
}

int SetDevice(OH_NNCompilation *compilation)
{
    int ret = 0;
    const size_t *devicesID {nullptr};
    uint32_t devicesCount {0};
    ret = OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNRtTest] OH_NNDevice_GetAllDevicesID failed! ret=%d\n", ret);
        return ret;
    }
    if (devicesCount <= NO_DEVICE_COUNT) {
        return OH_NN_FAILED;
    }
    size_t targetDevice = devicesID[0]; // Use the first device in system test.
    ret = OH_NNCompilation_SetDevice(compilation, targetDevice);
    return ret;
}

int CompileGraphMock(OH_NNCompilation *compilation, const OHNNCompileParam &compileParam)
{
    int ret = 0;
    ret = SetDevice(compilation);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[NNRtTest] OH_NNCompilation_SetDevice failed! ret=%d\n", ret);
        return ret;
    }
    // set cache
    if (!compileParam.cacheDir.empty()) {
        ret = OH_NNCompilation_SetCache(compilation, compileParam.cacheDir.c_str(), compileParam.cacheVersion);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNRtTest] OH_NNCompilation_SetCache failed! ret=%d\n", ret);
            return ret;
        }
    }
    // set performance
    if (compileParam.performanceMode != OH_NN_PERFORMANCE_NONE) {
        ret = OH_NNCompilation_SetPerformanceMode(compilation, compileParam.performanceMode);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNRtTest] OH_NNCompilation_SetPerformanceMode failed! ret=%d\n", ret);
            return ret;
        }
    }
    // set priority
    if (compileParam.priority != OH_NN_PRIORITY_NONE) {
        ret = OH_NNCompilation_SetPriority(compilation, compileParam.priority);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNRtTest] OH_NNCompilation_SetPriority failed! ret=%d\n", ret);
            return ret;
        }
    }
    // enable fp16
    if (compileParam.enableFp16) {
        ret = OH_NNCompilation_EnableFloat16(compilation, compileParam.enableFp16);
        if (ret != OH_NN_SUCCESS) {
            LOGE("[NNRtTest] OH_NNCompilation_EnableFloat16 failed! ret=%d\n", ret);
            return ret;
        }
    }
    // build
    ret = OH_NNCompilation_Build(compilation);
    return ret;
}

int ExecuteGraphMock(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, float *expect)
{
    OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
    int ret = 0;
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (auto i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t) operandTem.shape.size(),
            operandTem.shape.data(), quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ret = OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length);
            if (ret != OH_NN_SUCCESS) {
                LOGE("[NNRtTest] OH_NNExecutor_SetInput failed! ret=%d\n", ret);
                return ret;
            }
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
            graphArgs.outputIndices.end()) {
            ret = OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, operandTem.length);
            if (ret != OH_NN_SUCCESS) {
                LOGE("[NNRtTest] OH_NNExecutor_SetOutput failed! ret=%d\n", ret);
                return ret;
            }
            ret = device->MemoryCopy(expect, operandTem.length);
            if (ret != OH_NN_SUCCESS) {
                LOGE("[NNRtTest] device set expect output failed! ret=%d\n", ret);
                return ret;
            }
            outputIndex += 1;
        }
    }
    ret = OH_NNExecutor_Run(executor);
    return ret;
}

int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, OH_NN_Memory *OHNNMemory[],
    float *expect)
{
    OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
    int ret = 0;
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (auto i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t) operandTem.shape.size(),
            operandTem.shape.data(), quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, operandTem.length);
            ret = OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory);
            if (ret != OH_NN_SUCCESS) {
                LOGE("[NNRtTest] OH_NNExecutor_SetInputWithMemory failed! ret=%d\n", ret);
                return ret;
            }
            memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length);
            OHNNMemory[inputIndex] = inputMemory;
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
            graphArgs.outputIndices.end()) {
            OH_NN_Memory *outputMemory = OH_NNExecutor_AllocateOutputMemory(executor, outputIndex, operandTem.length);
            ret = OH_NNExecutor_SetOutputWithMemory(executor, outputIndex, outputMemory);
            if (ret != OH_NN_SUCCESS) {
                LOGE("[NNRtTest] OH_NNExecutor_SetOutputWithMemory failed! ret=%d\n", ret);
                return ret;
            }
            ret = device->MemoryCopy(expect, operandTem.length);
            if (ret != OH_NN_SUCCESS) {
                LOGE("[NNRtTest] device set expect output failed! ret=%d\n", ret);
                return ret;
            }
            OHNNMemory[inputIndex + outputIndex] = outputMemory;
            outputIndex += 1;
        }
    }
    ret = OH_NNExecutor_Run(executor);
    return ret;
}

void Free(OH_NNModel *model, OH_NNCompilation *compilation, OH_NNExecutor *executor)
{
    if (model != nullptr) {
        OH_NNModel_Destroy(&model);
        ASSERT_EQ(nullptr, model);
    }
    if (compilation != nullptr) {
        OH_NNCompilation_Destroy(&compilation);
        ASSERT_EQ(nullptr, compilation);
    }
    if (executor != nullptr) {
        OH_NNExecutor_Destroy(&executor);
        ASSERT_EQ(nullptr, executor);
    }
}

PathType CheckPath(const std::string &path)
{
    if (path.empty()) {
        LOGI("CheckPath: path is null");
        return PathType::NOT_FOUND;
    }
    struct stat buf {};
    if (stat(path.c_str(), &buf) == 0) {
        if (buf.st_mode & S_IFDIR) {
            return PathType::DIR;
        } else if (buf.st_mode & S_IFREG) {
            return PathType::FILE;
        } else {
            return PathType::UNKNOWN;
        }
    }
    LOGI("%s not found", path.c_str());
    return PathType::NOT_FOUND;
}

bool DeleteFile(const std::string &path)
{
    if (path.empty()) {
        LOGI("DeleteFile: path is null");
        return false;
    }
    if (CheckPath(path) == PathType::NOT_FOUND) {
        LOGI("not found: %s", path.c_str());
        return true;
    }
    if (remove(path.c_str()) == 0) {
        LOGI("deleted: %s", path.c_str());
        return true;
    }
    LOGI("delete failed: %s", path.c_str());
    return false;
}

void CopyFile(const std::string &srcPath, const std::string &dstPath)
{
    std::ifstream src(srcPath, std::ios::binary);
    std::ofstream dst(dstPath, std::ios::binary);
    dst << src.rdbuf();
}

std::string ConcatPath(const std::string &str1, const std::string &str2)
{
    // boundary
    if (str2.empty()) {
        return str1;
    }
    if (str1.empty()) {
        return str2;
    }
    // concat
    char end = str1[str1.size() - 1];
    if (end == '\\' or end == '/') {
        return str1 + str2;
    } else {
        return str1 + '/' + str2;
    }
}

void DeleteFolder(const std::string &path)
{
    if (path.empty()) {
        LOGI("DeletePath: path is null");
        return;
    }
    DIR *dir = opendir(path.c_str());
    // check is dir ?
    if (dir == nullptr) {
        LOGE("[NNRtTest] Can not open dir. Check path or permission! path: %s", path.c_str());
        return;
    }
    struct dirent *file;
    // read all the files in dir
    std::vector<std::string> pathList;
    while ((file = readdir(dir)) != nullptr) {
        // skip "." and ".."
        if (strcmp(file->d_name, ".") == 0 || strcmp(file->d_name, "..") == 0) {
            continue;
        }
        if (file->d_type == DT_DIR) {
            std::string filePath = path + "/" + file->d_name;
            DeleteFolder(filePath); // recurse into subdirectories
        } else {
            pathList.emplace_back(ConcatPath(path, file->d_name));
        }
    }
    closedir(dir);
    pathList.emplace_back(path);
    LOGI("[Common] Delete folder %s", path.c_str());
    for (auto &i : pathList) {
        DeleteFile(i);
    }
}

bool CreateFolder(const std::string &path)
{
    if (path.empty()) {
        LOGI("CreateFolder: path is empty");
        return false;
    }
    LOGI("CreateFolder:%s", path.c_str());
    mode_t mode = 0700;
    for (int i = 1; i < path.size() - 1; i++) {
        if (path[i] != '/') {
            continue;
        }
        PathType ret = CheckPath(path.substr(0, i));
        switch (ret) {
            case PathType::DIR:
                continue;
            case PathType::NOT_FOUND:
                LOGI("mkdir: %s", path.substr(0, i).c_str());
                mkdir(path.substr(0, i).c_str(), mode);
                break;
            default:
                LOGI("error: %s", path.substr(0, i).c_str());
                return false;
        }
    }
    mkdir(path.c_str(), mode);
    return CheckPath(path) == PathType::DIR;
}

bool CheckOutput(const float *output, const float *expect)
{
    if (output == nullptr || expect == nullptr) {
        LOGE("[NNRtTest] output or expect is nullptr\n");
        return false;
    }
    for (int i = 0; i < ELEMENT_COUNT; i++) {
        if (std::abs(float(output[i]) - float(expect[i])) > 1e-8) {
            for (int j = 0; j < ELEMENT_COUNT; j++) {
                LOGE("[NNRtTest] output %d not match: expect:%f, actual:%f\n", j, float(expect[j]), float(output[j]));
            }
            return false;
        }
    }
    return true;
}

} // namespace Test
} // namespace NeuralNetworkRuntime
} // namespace OHOS

ai/neural_network_runtime/common/nnrt_utils.h (deleted, 100644 → 0)

/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NNRT_UTILS_H
#define NNRT_UTILS_H
#include <dirent.h>
#include <sys/stat.h>
#include <gtest/gtest.h>
#include "interfaces/kits/c/neural_network_runtime.h"
#include "common/log.h"
#include "mock_idevice.h"
#include "const.h"
namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Test {
namespace V1_0 = OHOS::HDI::Nnrt::V1_0;

struct OHNNOperandTest {
    OH_NN_DataType dataType;
    OH_NN_TensorType type;
    std::vector<int32_t> shape;
    void *data {nullptr};
    int32_t length {0};
    const OH_NN_QuantParam *quantParam = nullptr;
};

struct OHNNGraphArgs {
    OH_NN_OperationType operationType;
    std::vector<OHNNOperandTest> operands;
    std::vector<uint32_t> paramIndices;
    std::vector<uint32_t> inputIndices;
    std::vector<uint32_t> outputIndices;
    bool build = true;
    bool specifyIO = true;
    bool addOperation = true;
};

struct OHNNGraphArgsMulti {
    std::vector<OH_NN_OperationType> operationTypes;
    std::vector<std::vector<OHNNOperandTest>> operands;
    std::vector<std::vector<uint32_t>> paramIndices;
    std::vector<std::vector<uint32_t>> inputIndices;
    std::vector<std::vector<uint32_t>> outputIndices;
    std::vector<uint32_t> graphInput;
    std::vector<uint32_t> graphOutput;
};

struct OHNNCompileParam {
    int32_t deviceId = 0;
    std::string cacheDir;
    uint32_t cacheVersion = 0;
    OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;
    OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
    bool enableFp16 = false;
};

int BuildSingleOpGraph(OH_NNModel *modelptr, const OHNNGraphArgs &args);

int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, OH_NN_Memory *OHNNMemory[],
    float *expect);

void Free(OH_NNModel *model = nullptr, OH_NNCompilation *compilation = nullptr, OH_NNExecutor *executor = nullptr);

int CompileGraphMock(OH_NNCompilation *compilation, const OHNNCompileParam &compileParam);

int ExecuteGraphMock(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, float *expect);

int SetDevice(OH_NNCompilation *compilation);

int BuildMultiOpGraph(OH_NNModel *model, const OHNNGraphArgsMulti &graphArgs);

OH_NN_UInt32Array GetUInt32Array(std::vector<uint32_t> indices);

bool CheckOutput(const float *output, const float *expect);

enum class PathType { FILE, DIR, UNKNOWN, NOT_FOUND };

PathType CheckPath(const std::string &path);

bool DeleteFile(const std::string &path);

void CopyFile(const std::string &srcPath, const std::string &dstPath);

std::string ConcatPath(const std::string &str1, const std::string &str2);

void DeleteFolder(const std::string &path);

bool CreateFolder(const std::string &path);

} // namespace Test
} // namespace NeuralNetworkRuntime
} // namespace OHOS
#endif // NNRT_UTILS_H
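
Taken together, these helpers are the shared fixture of the function tests: a test describes a graph with OHNNGraphArgs, builds it, compiles it against the mock device, runs it, and compares the output with the expected values. A minimal sketch of that flow, assuming the AddModel fixture from model.h above and OH_NNExecutor_Construct from the NNRt C API (the executor-construction call does not appear in this diff, so treat it as an assumption):

// Sketch of the common build -> compile -> execute -> check flow used by the tests.
AddModel addModel;                                  // fixture from model.h
OHNNGraphArgs graphArgs = addModel.graphArgs;

OH_NNModel *model = OH_NNModel_Construct();
ASSERT_NE(nullptr, model);
ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));

OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
ASSERT_NE(nullptr, compilation);
OHNNCompileParam compileParam;                      // defaults: no cache, no fp16
ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));

OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);  // assumed NNRt C API call
ASSERT_NE(nullptr, executor);
ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addModel.expectValue));
EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));

Free(model, compilation, executor);                 // destroys all three handles
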

ai/neural_network_runtime/interface/BUILD.gn (deleted, 100644 → 0)

# Copyright (c) 2022 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import("//test/xts/tools/build/suite.gni")
ohos_moduletest_suite("ActsAiNnrtFunctionTest") {
  testonly = true
  module_out_path = "acts/nnrt"
  sources = [
    "../common/mock_idevice.cpp",
    "../common/nnrt_utils.cpp",
    "src/CompileTest.cpp",
    "src/DeviceTest.cpp",
    "src/ExecutorTest.cpp",
    "src/MemoryTest.cpp",
    "src/ModelTest.cpp",
  ]
  include_dirs = [
    "../common",
    "//foundation/ai/neural_network_runtime",
    "//foundation/ai/neural_network_runtime/third_party/include",
    "//third_party/googletest/googletest/include",
    "//third_party/googletest/googlemock/include",
    "//third_party/mindspore/mindspore/lite/mindir/include",
  ]
  deps = [
    "//foundation/ai/neural_network_runtime/frameworks:libneural_network_runtime",
    "//third_party/googletest:gmock",
    "//third_party/googletest:gtest",
  ]
  external_deps = [
    "c_utils:utils",
    "drivers_interface_nnrt:libnnrt_proxy_1.0",
    "hdf_core:libhdf_utils",
    "hdf_core:libhdi",
    "hilog_native:libhilog",
    "hitrace_native:libhitracechain",
    "ipc:ipc_single",
    "mindspore:mindir",
  ]
  cflags = [ "-Wno-error" ]
}

ai/neural_network_runtime/interface/Test.json (deleted, 100644 → 0)

{
    "kits": [
        {
            "push": [
                "ActsAiNnrtFunctionTest->/data/local/tmp/ActsAiNnrtFunctionTest"
            ],
            "type": "PushKit"
        }
    ],
    "driver": {
        "native-test-timeout": "120000",
        "type": "CppTest",
        "module-name": "ActsAiNnrtFunctionTest",
        "runtime-hint": "1s",
        "native-test-device-path": "/data/local/tmp"
    },
    "description": "Configuration for ActsAiNnrtFunctionTest Tests"
}

ai/neural_network_runtime/interface/src/CompileTest.cpp (deleted, 100644 → 0)

/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cmath>
#include <cstdio>
#include <vector>
#include <thread>
#include <fstream>
#include "nnrt_utils.h"
#include "model.h"
using
namespace
testing
::
ext
;
using
namespace
OHOS
::
NeuralNetworkRuntime
;
using
namespace
OHOS
::
NeuralNetworkRuntime
::
Test
;
using
namespace
OHOS
::
HDI
::
Nnrt
::
V1_0
;
namespace
{
class
CompileTest
:
public
testing
::
Test
{
public:
void
SetUp
()
{
CreateFolder
(
CACHE_DIR
);
}
void
TearDown
()
{
DeleteFolder
(
CACHE_DIR
);
}
void
GenCacheFile
()
{
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildSingleOpGraph
(
model
,
graphArgs
));
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
OHNNCompileParam
compileParam
{
.
cacheDir
=
"./cache"
,
.
cacheVersion
=
10
,
};
ASSERT_EQ
(
OH_NN_SUCCESS
,
CompileGraphMock
(
compilation
,
compileParam
));
Free
(
model
,
compilation
);
ASSERT_TRUE
(
CheckPath
(
CACHE_PATH
)
==
PathType
::
FILE
);
ASSERT_TRUE
(
CheckPath
(
CACHE_INFO_PATH
)
==
PathType
::
FILE
);
}
void
DestroyCache
()
{
std
::
ifstream
ifs
(
CACHE_PATH
.
c_str
(),
std
::
ios
::
in
|
std
::
ios
::
binary
);
char
*
ptr
{
nullptr
};
int
cacheSize
=
ifs
.
tellg
();
int
invalidCacheSize
=
cacheSize
*
0.9
;
ifs
.
read
(
ptr
,
cacheSize
);
ifs
.
close
();
std
::
ofstream
ofs
(
CACHE_PATH
.
c_str
(),
std
::
ios
::
out
|
std
::
ios
::
binary
);
ofs
.
write
(
ptr
,
invalidCacheSize
);
ofs
.
close
();
}
protected:
OHNNCompileParam
compileParam
;
AddModel
addModel
;
OHNNGraphArgs
graphArgs
=
addModel
.
graphArgs
;
};
void
CompileModel
(
OH_NNCompilation
*
compilation
,
const
OHNNCompileParam
&
compileParam
)
{
ASSERT_EQ
(
OH_NN_SUCCESS
,
CompileGraphMock
(
compilation
,
compileParam
));
}
}
// namespace
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_Create_0100
* @tc.name : 创建编译实例,model为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_Create_0100
,
Function
|
MediumTest
|
Level3
)
{
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
nullptr
);
ASSERT_EQ
(
nullptr
,
compilation
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_Create_0200
* @tc.name : 创建编译实例,model未完成构图
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_Create_0200
,
Function
|
MediumTest
|
Level3
)
{
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_EQ
(
nullptr
,
compilation
);
Free
(
model
,
compilation
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_Create_0300
* @tc.name : 创建编译实例,model已完成构图,存在算子不支持
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_Create_0300
,
Function
|
MediumTest
|
Level2
)
{
OHOS
::
sptr
<
V1_0
::
MockIDevice
>
device
=
V1_0
::
MockIDevice
::
GetInstance
();
std
::
vector
<
bool
>
isSupported
=
{
true
,
false
};
device
->
SetOperationsSupported
(
isSupported
);
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
AddTopKModel
addTopKModel
;
OHNNGraphArgsMulti
graphArgsMulti
=
addTopKModel
.
graphArgs
;
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildMultiOpGraph
(
model
,
graphArgsMulti
));
const
size_t
*
devicesID
{
nullptr
};
const
bool
*
realSupported
{
nullptr
};
uint32_t
opCount
;
uint32_t
devicesCount
;
ASSERT_EQ
(
OH_NN_SUCCESS
,
OH_NNDevice_GetAllDevicesID
(
&
devicesID
,
&
devicesCount
));
size_t
targetDevice
=
devicesID
[
0
];
OH_NN_ReturnCode
ret
=
OH_NNModel_GetAvailableOperations
(
model
,
targetDevice
,
&
realSupported
,
&
opCount
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
ret
);
for
(
int
i
=
0
;
i
<
opCount
;
i
++
)
{
EXPECT_EQ
(
realSupported
[
i
],
isSupported
[
i
]);
}
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
ASSERT_EQ
(
OH_NN_FAILED
,
OH_NNCompilation_SetDevice
(
compilation
,
targetDevice
));
Free
(
model
,
compilation
);
device
->
SetOperationsSupported
({
true
});
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetDevice_0100
* @tc.name : 设置device,compilation为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetDevice_0100
,
Function
|
MediumTest
|
Level3
)
{
const
size_t
*
devicesID
{
nullptr
};
uint32_t
devicesCount
{
0
};
ASSERT_EQ
(
OH_NN_SUCCESS
,
OH_NNDevice_GetAllDevicesID
(
&
devicesID
,
&
devicesCount
));
size_t
targetDevice
=
devicesID
[
0
];
// Use the first device in system test.
ASSERT_EQ
(
OH_NN_INVALID_PARAMETER
,
OH_NNCompilation_SetDevice
(
nullptr
,
targetDevice
));
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetDevice_0200
* @tc.name : 设置device,deviceID不存在
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetDevice_0200
,
Function
|
MediumTest
|
Level3
)
{
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildSingleOpGraph
(
model
,
graphArgs
));
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
ASSERT_EQ
(
OH_NN_INVALID_PARAMETER
,
OH_NNCompilation_SetDevice
(
compilation
,
100000
));
Free
(
model
,
compilation
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetDevice_0300
* @tc.name : 设置device,deviceID存在
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetDevice_0300
,
Function
|
MediumTest
|
Level3
)
{
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildSingleOpGraph
(
model
,
graphArgs
));
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
OHNNCompileParam
compileParam
;
ASSERT_EQ
(
OH_NN_SUCCESS
,
CompileGraphMock
(
compilation
,
compileParam
));
Free
(
model
,
compilation
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetCache_0100
* @tc.name : 设置cache路径及版本,compilation为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetCache_0100
,
Function
|
MediumTest
|
Level3
)
{
ASSERT_EQ
(
OH_NN_INVALID_PARAMETER
,
OH_NNCompilation_SetCache
(
nullptr
,
"./"
,
0
));
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetCache_0200
* @tc.name : 设置cache路径及版本,cacheDir为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetCache_0200
,
Function
|
MediumTest
|
Level3
)
{
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildSingleOpGraph
(
model
,
graphArgs
));
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
ASSERT_EQ
(
OH_NN_INVALID_PARAMETER
,
OH_NNCompilation_SetCache
(
compilation
,
nullptr
,
0
));
Free
(
model
,
compilation
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetCache_0300
* @tc.name : device不支持,设置cache路径及版本
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetCache_0300
,
Function
|
MediumTest
|
Level3
)
{
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildSingleOpGraph
(
model
,
graphArgs
));
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
// set model cache unavailabel
OHOS
::
sptr
<
V1_0
::
MockIDevice
>
device
=
V1_0
::
MockIDevice
::
GetInstance
();
device
->
SetModelCacheSupported
(
false
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
SetDevice
(
compilation
));
ASSERT_EQ
(
OH_NN_OPERATION_FORBIDDEN
,
OH_NNCompilation_SetCache
(
compilation
,
"./cache"
,
10
));
Free
(
model
,
compilation
);
device
->
SetModelCacheSupported
(
true
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetCache_0400
* @tc.name : 设置不存在cache路径
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetCache_0400
,
Function
|
MediumTest
|
Level3
)
{
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildSingleOpGraph
(
model
,
graphArgs
));
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
OHNNCompileParam
compileParam
{.
cacheDir
=
"./test"
};
ASSERT_EQ
(
OH_NN_INVALID_PARAMETER
,
CompileGraphMock
(
compilation
,
compileParam
));
Free
(
model
,
compilation
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetCache_0500
* @tc.name : 设置cache路径,cache不完整
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetCache_0500
,
Function
|
MediumTest
|
Level2
)
{
// generate cache file in cache diretory
GenCacheFile
();
// destroy cache file to invalid size
DestroyCache
();
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildSingleOpGraph
(
model
,
graphArgs
));
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
OHNNCompileParam
compileParam
{
.
cacheDir
=
"./cache"
,
.
cacheVersion
=
10
,
};
ASSERT_EQ
(
OH_NN_INVALID_FILE
,
CompileGraphMock
(
compilation
,
compileParam
));
Free
(
model
,
compilation
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetCache_0600
* @tc.name : 设置version,小于cache版本号
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetCache_0600
,
Function
|
MediumTest
|
Level2
)
{
GenCacheFile
();
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildSingleOpGraph
(
model
,
graphArgs
));
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
OHNNCompileParam
compileParam
{
.
cacheDir
=
"./cache"
,
.
cacheVersion
=
9
,
};
ASSERT_EQ
(
OH_NN_OPERATION_FORBIDDEN
,
CompileGraphMock
(
compilation
,
compileParam
));
Free
(
model
,
compilation
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetCache_0700
* @tc.name : 设置version,等于cache版本号
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetCache_0700
,
Function
|
MediumTest
|
Level2
)
{
GenCacheFile
();
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildSingleOpGraph
(
model
,
graphArgs
));
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
OHNNCompileParam
compileParam
{
.
cacheDir
=
"./cache"
,
.
cacheVersion
=
10
,
};
ASSERT_EQ
(
OH_NN_SUCCESS
,
CompileGraphMock
(
compilation
,
compileParam
));
Free
(
model
,
compilation
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetCache_0800
* @tc.name : 设置version,大于cache版本号
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetCache_0800
,
Function
|
MediumTest
|
Level2
)
{
GenCacheFile
();
OH_NNModel
*
model
=
OH_NNModel_Construct
();
ASSERT_NE
(
nullptr
,
model
);
ASSERT_EQ
(
OH_NN_SUCCESS
,
BuildSingleOpGraph
(
model
,
graphArgs
));
OH_NNCompilation
*
compilation
=
OH_NNCompilation_Construct
(
model
);
ASSERT_NE
(
nullptr
,
compilation
);
OHNNCompileParam
compileParam
{
.
cacheDir
=
"./cache"
,
.
cacheVersion
=
11
,
};
ASSERT_EQ
(
OH_NN_SUCCESS
,
CompileGraphMock
(
compilation
,
compileParam
));
Free
(
model
,
compilation
);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0100
* @tc.name : 设置priority,compilation为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F
(
CompileTest
,
SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0100
,
Function
|
MediumTest
|
Level3
)
{
ASSERT_EQ
(
OH_NN_INVALID_PARAMETER
,
OH_NNCompilation_SetPerformanceMode
(
nullptr
,
OH_NN_PERFORMANCE_MEDIUM
));
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0200
 * @tc.name : Set performance mode when the device does not support it
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_Mock_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    OHNNCompileParam compileParam{
        .performanceMode = OH_NN_PERFORMANCE_LOW,
    };
    OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
    device->SetPerformanceSupported(false);
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN, CompileGraphMock(compilation, compileParam));
    Free(model, compilation);
    device->SetPerformanceSupported(true);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0300
 * @tc.name : Set performanceMode to NONE
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0300, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetPerformanceMode(compilation, OH_NN_PERFORMANCE_NONE));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0400
 * @tc.name : Set performanceMode to LOW
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0400, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetPerformanceMode(compilation, OH_NN_PERFORMANCE_LOW));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0500
 * @tc.name : Set performanceMode to MEDIUM
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0500, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetPerformanceMode(compilation, OH_NN_PERFORMANCE_MEDIUM));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0600
 * @tc.name : Set performanceMode to HIGH
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0600, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetPerformanceMode(compilation, OH_NN_PERFORMANCE_HIGH));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0700
 * @tc.name : Set performanceMode to EXTREME
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0700, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetPerformanceMode(compilation, OH_NN_PERFORMANCE_EXTREME));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0800
 * @tc.name : Set performanceMode to NONE-1
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0800, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNCompilation_SetPerformanceMode(compilation, static_cast<OH_NN_PerformanceMode>(OH_NN_PERFORMANCE_NONE - 1)));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0900
 * @tc.name : Set performanceMode to EXTREME+1
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPerformanceMode_0900, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNCompilation_SetPerformanceMode(compilation, static_cast<OH_NN_PerformanceMode>(OH_NN_PERFORMANCE_EXTREME + 1)));
    Free(model, compilation);
}
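// Illustrative sketch, assuming the nnrt_utils.h helpers used above (BuildSingleOpGraph, SetDevice,
// Free) and the fixture's graphArgs: every valid-enum SetPerformanceMode case shares this flow and
// only the enum value differs.
static void SetPerformanceModeExample(const OHNNGraphArgs &graphArgs, OH_NN_PerformanceMode mode)
{
    OH_NNModel *model = OH_NNModel_Construct();                         // build a single-operator model
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);  // wrap it in a compilation
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));                    // bind the mock NNRt device
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetPerformanceMode(compilation, mode));
    Free(model, compilation);                                            // release model and compilation
}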
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPriority_0100
 * @tc.name : Set priority with compilation being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPriority_0100, Function | MediumTest | Level3)
{
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNCompilation_SetPriority(nullptr, OH_NN_PRIORITY_MEDIUM));
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPriority_0200
 * @tc.name : Set priority when the device does not support it
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPriority_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    // set device not supported
    OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
    device->SetPrioritySupported(false);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN, OH_NNCompilation_SetPriority(compilation, OH_NN_PRIORITY_NONE));
    Free(model, compilation);
    device->SetPrioritySupported(true);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPriority_0300
 * @tc.name : Set priority to NONE
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPriority_0300, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetPriority(compilation, OH_NN_PRIORITY_NONE));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPriority_0400
 * @tc.name : Set priority to LOW
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPriority_0400, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetPriority(compilation, OH_NN_PRIORITY_LOW));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPriority_0500
 * @tc.name : Set priority to MEDIUM
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPriority_0500, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetPriority(compilation, OH_NN_PRIORITY_MEDIUM));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPriority_0600
 * @tc.name : Set priority to HIGH
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPriority_0600, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetPriority(compilation, OH_NN_PRIORITY_HIGH));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPriority_0700
 * @tc.name : Set priority to NONE-1
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPriority_0700, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNCompilation_SetPriority(compilation, static_cast<OH_NN_Priority>(OH_NN_PRIORITY_NONE - 1)));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_SetPriority_0800
 * @tc.name : Set priority to HIGH+1
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_SetPriority_0800, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNCompilation_SetPriority(compilation, static_cast<OH_NN_Priority>(OH_NN_PRIORITY_HIGH + 1)));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_EnableFloat16_0100
 * @tc.name : Enable float16 with compilation being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_EnableFloat16_0100, Function | MediumTest | Level3)
{
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNCompilation_EnableFloat16(nullptr, OH_NN_PERFORMANCE_MEDIUM));
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_EnableFloat16_0200
 * @tc.name : Device supports fp16; set fp16 inference to false
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_EnableFloat16_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_EnableFloat16(compilation, false));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_EnableFloat16_0300
 * @tc.name : Device does not support fp16; set fp16 inference to false
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_EnableFloat16_0300, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    // set fp16 unavailable
    OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
    device->SetFP16Supported(false);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN, OH_NNCompilation_EnableFloat16(compilation, false));
    Free(model, compilation);
    device->SetFP16Supported(true);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_EnableFloat16_0400
 * @tc.name : Device does not support fp16; set fp16 inference to true
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_EnableFloat16_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    // set fp16 unavailable
    OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
    device->SetFP16Supported(false);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN, OH_NNCompilation_EnableFloat16(compilation, true));
    Free(model, compilation);
    device->SetFP16Supported(true);
}
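// Illustrative sketch of the "device does not support X" pattern used above: the V1_0::MockIDevice
// singleton exposes the capability switches seen in these tests (SetPerformanceSupported,
// SetPrioritySupported, SetFP16Supported). The pattern is toggle-off, exercise, toggle-back so that
// later cases still see a fully capable mock device.
static void ExpectFp16ForbiddenOnLimitedDevice(OH_NNCompilation *compilation)
{
    OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
    device->SetFP16Supported(false);                                   // simulate a device without fp16
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN, OH_NNCompilation_EnableFloat16(compilation, true));
    device->SetFP16Supported(true);                                    // restore for subsequent cases
}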
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_Build_0100
 * @tc.name : Build the model with compilation being a null pointer
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_Build_0100, Function | MediumTest | Level3)
{
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNCompilation_Build(nullptr));
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_Build_0200
 * @tc.name : Build the model without setting a device
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_Build_0200, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN, OH_NNCompilation_Build(compilation));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_Build_0300
 * @tc.name : Build the model with only the device set (default configuration)
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_Build_0300, Function | MediumTest | Level1)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    OHNNCompileParam compileParam;
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_Build_0400
 * @tc.name : Set cache path and version, build the model and export the cache
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_Build_0400, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    OHNNCompileParam compileParam{
        .cacheDir = "./cache",
        .cacheVersion = 10,
    };
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    Free(model, compilation);
}
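// Illustrative sketch, assuming the OHNNCompileParam fields used above (cacheDir, cacheVersion) and
// the CompileGraphMock helper from nnrt_utils.h: Build_0400 drives cache export by filling those
// fields before the helper applies them to the compilation and calls OH_NNCompilation_Build.
static OH_NN_ReturnCode BuildWithCache(OH_NNCompilation *compilation)
{
    OHNNCompileParam compileParam{
        .cacheDir = "./cache",   // directory that receives the exported model cache
        .cacheVersion = 10,      // version tag checked when the cache is loaded again
    };
    return CompileGraphMock(compilation, compileParam);
}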
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_Destroy_0100
 * @tc.name : Destroy the compilation instance with compilation being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_Destroy_0100, Function | MediumTest | Level3)
{
    OH_NNCompilation *compilation = nullptr;
    OH_NNCompilation_Destroy(&compilation);
    ASSERT_EQ(nullptr, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_Destroy_0200
 * @tc.name : Destroy the compilation instance before the model is built
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_Destroy_0200, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, SetDevice(compilation));
    OH_NNCompilation_Destroy(&compilation);
    ASSERT_EQ(nullptr, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_Destroy_0300
 * @tc.name : Destroy the compilation instance after the model has been built
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_Destroy_0300, Function | MediumTest | Level0)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    OHNNCompileParam compileParam;
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNR_Func_North_Compilation_Combine_0100
 * @tc.name : Compile models concurrently in multiple threads; compilation succeeds
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNR_Func_North_Compilation_Combine_0100, Function | MediumTest | Level2)
{
    OH_NNModel *model1 = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model1);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model1, graphArgs));
    OH_NNModel *model2 = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model2);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model2, graphArgs));
    OH_NNCompilation *compilation1 = OH_NNCompilation_Construct(model1);
    ASSERT_NE(nullptr, compilation1);
    OH_NNCompilation *compilation2 = OH_NNCompilation_Construct(model2);
    ASSERT_NE(nullptr, compilation2);
    std::thread th1(CompileModel, compilation1, compileParam);
    std::thread th2(CompileModel, compilation2, compileParam);
    th1.join();
    th2.join();
    Free(model1, compilation1);
    Free(model2, compilation2);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Compilation_Combine_0200
 * @tc.name : Build an already built model again
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(CompileTest, SUB_AI_NNRt_Func_North_Compilation_Combine_0200, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    OHNNCompileParam compileParam;
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN, OH_NNCompilation_Build(compilation));
    Free(model, compilation);
}
ai/neural_network_runtime/interface/src/DeviceTest.cpp (deleted, 100644 → 0)
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cmath>
#include <cstdio>
#include <vector>
#include "nnrt_utils.h"
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime::Test;

class DeviceTest : public testing::Test {};
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceID_0100
 * @tc.name : Get device IDs with *allDevicesID being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceID_0100, Function | MediumTest | Level3)
{
    uint32_t count{0};
    OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(nullptr, &count);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceID_0200
 * @tc.name : Get device IDs with *allDevicesID already pointing to a non-null buffer
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceID_0200, Function | MediumTest | Level3)
{
    const size_t allDeviceIds = 0;
    const size_t *pAllDeviceIds = &allDeviceIds;
    uint32_t count{0};
    OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(&pAllDeviceIds, &count);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceID_0300
 * @tc.name : Get device IDs with deviceCount being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceID_0300, Function | MediumTest | Level3)
{
    const size_t *allDeviceIds = nullptr;
    OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(&allDeviceIds, nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceID_0400
 * @tc.name : Get device IDs and verify the device count
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceID_0400, Function | MediumTest | Level2)
{
    const size_t *allDeviceIds = nullptr;
    uint32_t count{0};
    OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(&allDeviceIds, &count);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
    uint32_t expectCount = 1;
    EXPECT_EQ(expectCount, count);
}
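// Illustrative sketch of the device-discovery call verified above: the caller hands in a null
// pointer that the runtime points at its own ID array (the tests never free it, which suggests
// the buffer is runtime-owned; treat that as an assumption).
static void ListDevices()
{
    const size_t *devicesID{nullptr};
    uint32_t count{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &count));
    for (uint32_t i = 0; i < count; ++i) {
        printf("device[%u] id = %zu\n", i, devicesID[i]);   // one registered device expected here
    }
}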
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceName_0100
 * @tc.name : Get the device name with a nonexistent deviceID
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceName_0100, Function | MediumTest | Level3)
{
    const size_t deviceID{100000};
    const char *name = nullptr;
    OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceID, &name);
    EXPECT_EQ(OH_NN_FAILED, ret);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceName_0200
 * @tc.name : Get the device name with name being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceName_0200, Function | MediumTest | Level3)
{
    const size_t *devicesID{nullptr};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    OH_NN_ReturnCode ret = OH_NNDevice_GetName(targetDevice, nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceName_0300
 * @tc.name : Get the device name with *name already pointing to a non-null string
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceName_0300, Function | MediumTest | Level3)
{
    const size_t *devicesID{nullptr};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    const char *name = "name";
    OH_NN_ReturnCode ret = OH_NNDevice_GetName(targetDevice, &name);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceName_0400
 * @tc.name : Get the device name and verify the result
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceName_0400, Function | MediumTest | Level1)
{
    const size_t *devicesID{nullptr};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    const char *name = nullptr;
    std::string m_deviceName{"RK3568-CPU_Rockchip"};
    OH_NN_ReturnCode ret = OH_NNDevice_GetName(targetDevice, &name);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
    std::string sName(name);
    EXPECT_EQ(m_deviceName, sName);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceType_0100
 * @tc.name : Get the device type with deviceType being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceType_0100, Function | MediumTest | Level3)
{
    const size_t *devicesID{nullptr};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    OH_NN_ReturnCode ret = OH_NNDevice_GetType(targetDevice, nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceType_0200
 * @tc.name : Get the device type with a nonexistent deviceID
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceType_0200, Function | MediumTest | Level3)
{
    const size_t deviceID{100000};
    OH_NN_DeviceType type{OH_NN_OTHERS};
    OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, &type);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Device_DeviceType_0300
 * @tc.name : Get the device type and verify the result
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(DeviceTest, SUB_AI_NNRt_Func_North_Device_DeviceType_0300, Function | MediumTest | Level1)
{
    const size_t *devicesID{nullptr};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    OH_NN_DeviceType type{OH_NN_OTHERS};
    OH_NN_ReturnCode ret = OH_NNDevice_GetType(targetDevice, &type);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}
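// Illustrative sketch combining the two query APIs exercised above: with a valid device ID, name
// and type are returned through out-parameters that must start out null (name) or point at a valid
// enum slot (type), matching the parameter checks in the tests.
static void PrintDeviceInfo(size_t targetDevice)
{
    const char *name = nullptr;
    OH_NN_DeviceType type{OH_NN_OTHERS};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetName(targetDevice, &name));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetType(targetDevice, &type));
    printf("device %zu: name=%s type=%d\n", targetDevice, name, static_cast<int>(type));
}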
ai/neural_network_runtime/interface/src/ExecutorTest.cpp (deleted, 100644 → 0)
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cmath>
#include <cstdio>
#include <vector>
#include <thread>
#include "nnrt_utils.h"
#include "model.h"
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime;
using namespace OHOS::NeuralNetworkRuntime::Test;
using namespace OHOS::HDI::Nnrt::V1_0;

namespace {
class ExecutorTest : public testing::Test {
protected:
    OHOS::sptr<V1_0::MockIDevice> device;
    AddModel addModel;
    OHNNGraphArgs graphArgs = addModel.graphArgs;
    OHNNCompileParam compileParam;
};

void ExecuteModel(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, float *expect)
{
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, expect));
}
} // namespace
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_Create_0100
 * @tc.name : Create an executor instance with compilation being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Create_0100, Function | MediumTest | Level3)
{
    OH_NNExecutor *executor = OH_NNExecutor_Construct(nullptr);
    ASSERT_EQ(nullptr, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_Create_0200
 * @tc.name : Create an executor instance before the compilation is built
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Create_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    const size_t *devicesID{nullptr};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNCompilation_SetDevice(compilation, targetDevice));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_EQ(nullptr, executor);
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetInput_0100
 * @tc.name : Set input with executor being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetInput_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    uint32_t inputIndex = 0;
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                            quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_SetInput(nullptr, inputIndex, &operand, operandTem.data, operandTem.length));
    Free(model, compilation);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetInput_0200
 * @tc.name : Set input with a nonexistent inputIndex
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetInput_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 100000;
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                            quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetInput_0300
 * @tc.name : Set input with inconsistent operand parameters
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetInput_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                            quantParam, OH_NN_ADD_ACTIVATIONTYPE};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetInput_0400
 * @tc.name : Set input with a changed operand shape
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetInput_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    int32_t dimensions[3]{3, 3, 3};
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), dimensions,
                            quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetInput_0500
 * @tc.name : Set input with buffer being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetInput_0500, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                            quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_SetInput(executor, inputIndex, &operand, nullptr, operandTem.length));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetInput_0600
 * @tc.name : Set input with length smaller than the input length
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetInput_0600, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                            quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, 0));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetInput_0700
 * @tc.name : Set the same inputIndex repeatedly
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetInput_0700, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                            quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_SUCCESS,
              OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
    ASSERT_EQ(OH_NN_SUCCESS,
              OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
    Free(model, compilation, executor);
}
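// Illustrative sketch of the descriptor construction repeated in every SetInput case above: an
// OH_NN_Tensor is filled from the OHNNOperandTest entry in graphArgs, while the raw data buffer and
// its length are passed separately. Assumes the OHNNOperandTest fields used throughout this file.
static OH_NN_ReturnCode SetFirstInput(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs)
{
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                            operandTem.shape.data(), operandTem.quantParam, operandTem.type};
    // index 0 means the first model input, not the operand's position in graphArgs.operands
    return OH_NNExecutor_SetInput(executor, 0, &operand, operandTem.data, operandTem.length);
}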
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetOutput_0100
 * @tc.name : Set output with executor being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetOutput_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (int i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                                quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_INVALID_PARAMETER,
                      OH_NNExecutor_SetOutput(nullptr, outputIndex, operandTem.data, operandTem.length));
            outputIndex += 1;
        }
    }
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetOutput_0200
 * @tc.name : Set output with a nonexistent outputIndex
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetOutput_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 10000;
    for (int i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                                quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_INVALID_PARAMETER,
                      OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, operandTem.length));
            outputIndex += 1;
        }
    }
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetOutput_0300
 * @tc.name : Set output with buffer being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetOutput_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (int i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                                quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_INVALID_PARAMETER,
                      OH_NNExecutor_SetOutput(executor, outputIndex, nullptr, operandTem.length));
            outputIndex += 1;
        }
    }
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetOutput_0400
 * @tc.name : Set output with length smaller than the output length
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetOutput_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (int i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                                quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_INVALID_PARAMETER,
                      OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, 0));
            outputIndex += 1;
        }
    }
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_SetOutput_0500
 * @tc.name : Set the same outputIndex repeatedly
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_SetOutput_0500, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (int i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                                quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, operandTem.length));
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, operandTem.length));
            outputIndex += 1;
        }
    }
    Free(model, compilation, executor);
}
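// Illustrative sketch of the binding loop repeated in the SetInput/SetOutput cases above: it walks
// graphArgs.operands once and maps each operand either to the next executor input slot or to the
// next output slot, depending on whether its position appears in inputIndices or outputIndices.
static void BindAllTensors(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs)
{
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (size_t i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                                operandTem.shape.data(), operandTem.quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex++, &operand, operandTem.data, operandTem.length));
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetOutput(executor, outputIndex++, operandTem.data, operandTem.length));
        }
    }
}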
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_Run_0100
 * @tc.name : Run inference with executor being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Run_0100, Function | MediumTest | Level3)
{
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(nullptr));
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_Run_0200
 * @tc.name : Run inference without inputs set on the executor
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Run_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t outputIndex = 0;
    for (int i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
            graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, operandTem.length));
        }
    }
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_Run_0300
 * @tc.name : Run inference without outputs set on the executor
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Run_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    for (int i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                                quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
            inputIndex += 1;
        }
    }
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_Run_0400
 * @tc.name : Run inference with too few inputs set on the executor
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Run_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                            quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_SUCCESS,
              OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
    inputIndex += 1;
    const OHNNOperandTest &operandOut = graphArgs.operands[3];
    ASSERT_EQ(OH_NN_SUCCESS,
              OH_NNExecutor_SetOutput(executor, outputIndex, operandOut.data, operandOut.length));
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_Run_0500
 * @tc.name : Run inference with too few outputs set on the executor
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Run_0500, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    TopKModel topKModel;
    graphArgs = topKModel.graphArgs;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    graphArgs.outputIndices = {3};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, ExecuteGraphMock(executor, graphArgs, addModel.expectValue));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_Run_0600
 * @tc.name : Fixed-shape model inference test
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Run_0600, Function | MediumTest | Level1)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addModel.expectValue));
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_Run_0700
 * @tc.name : Dynamic-shape model inference test
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Run_0700, Function | MediumTest | Level1)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    AvgPoolDynamicModel avgModel;
    graphArgs = avgModel.graphArgs;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    avgModel.dynamicInput.shape = {1, 3, 3, 1};
    avgModel.output.shape = {1, 2, 2, 1};
    graphArgs.operands = {avgModel.dynamicInput, avgModel.kernel, avgModel.strides,
                          avgModel.padMode, avgModel.activation, avgModel.output};
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, avgModel.expectValue));
    // check result
    EXPECT_TRUE(CheckOutput(avgModel.outputValue, avgModel.expectValue));
    Free(model, compilation, executor);
}
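// Illustrative sketch of the dynamic-shape flow used in Run_0700 above: AvgPoolDynamicModel (from
// model.h) carries operands with unspecified dimensions; concrete shapes are pinned on the dynamic
// input and output just before execution, and ExecuteGraphMock feeds the updated operand list to
// the executor. Assumes the AvgPoolDynamicModel members referenced in the test.
static void RunDynamicAvgPool(OH_NNExecutor *executor, AvgPoolDynamicModel &avgModel, OHNNGraphArgs &graphArgs)
{
    avgModel.dynamicInput.shape = {1, 3, 3, 1};   // concrete input shape for this run
    avgModel.output.shape = {1, 2, 2, 1};         // matching output shape
    graphArgs.operands = {avgModel.dynamicInput, avgModel.kernel, avgModel.strides,
                          avgModel.padMode, avgModel.activation, avgModel.output};
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, avgModel.expectValue));
    EXPECT_TRUE(CheckOutput(avgModel.outputValue, avgModel.expectValue));
}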
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0100
 * @tc.name : Get output dimensions with executor being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addModel.expectValue));
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    int32_t *outputDimensions = nullptr;
    uint32_t outputDimensionCount{0};
    uint32_t addOutputIndex = {0};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_GetOutputShape(nullptr, addOutputIndex, &outputDimensions, &outputDimensionCount));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0200
 * @tc.name : Get output dimensions with a nonexistent outputIndex
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addModel.expectValue));
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    int32_t *outputDimensions = nullptr;
    uint32_t outputDimensionCount{0};
    uint32_t addOutputIndex = {10000};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_GetOutputShape(executor, addOutputIndex, &outputDimensions, &outputDimensionCount));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0300
 * @tc.name : Get output dimensions with *dimensions being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addModel.expectValue));
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    uint32_t outputDimensionCount{0};
    uint32_t addOutputIndex = {0};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_GetOutputShape(executor, addOutputIndex, nullptr, &outputDimensionCount));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0400
 * @tc.name : Get output dimensions with *dimensions already pointing to a non-null buffer
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addModel.expectValue));
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    int32_t outputDimensions{2};
    int32_t *pOutputDimensions = &outputDimensions;
    uint32_t outputDimensionCount{0};
    uint32_t addOutputIndex = {0};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_GetOutputShape(executor, addOutputIndex, &pOutputDimensions, &outputDimensionCount));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0500
 * @tc.name : Get output dimensions with *dimensionCount being nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0500, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addModel.expectValue));
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    int32_t *outputDimensions = nullptr;
    uint32_t addOutputIndex = {0};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
              OH_NNExecutor_GetOutputShape(executor, addOutputIndex, &outputDimensions, nullptr));
    Free(model, compilation, executor);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0600
 * @tc.name : Get output dimensions without calling the run interface
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0600, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (int i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(), operandTem.shape.data(),
                                quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, operandTem.length));
            outputIndex += 1;
        }
    }
    int32_t *outputDimensions = nullptr;
    uint32_t outputDimensionCount{0};
    uint32_t addOutputIndex = {0};
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN,
              OH_NNExecutor_GetOutputShape(executor, addOutputIndex, &outputDimensions, &outputDimensionCount));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0700
* @tc.name : 模型推理成功,获取输出维度
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0700, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addModel.expectValue));
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    int32_t *outputDimensions = nullptr;
    uint32_t outputDimensionCount{0};
    uint32_t addOutputIndex = {0};
    ASSERT_EQ(OH_NN_SUCCESS,
              OH_NNExecutor_GetOutputShape(executor, addOutputIndex, &outputDimensions, &outputDimensionCount));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0800
* @tc.name : 变长模型推理成功,获取输出维度
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_GetOutputDimensions_0800, Function | MediumTest | Level1)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    AvgPoolDynamicModel avgModel;
    graphArgs = avgModel.graphArgs;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    avgModel.dynamicInput.shape = {1, 3, 3, 1};
    avgModel.output.shape = {1, 2, 2, 1};
    graphArgs.operands = {avgModel.dynamicInput, avgModel.kernel, avgModel.strides,
                          avgModel.padMode, avgModel.activation, avgModel.output};
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, avgModel.expectValue));
    // check result
    EXPECT_TRUE(CheckOutput(avgModel.outputValue, avgModel.expectValue));
    int32_t *outputDimensions = nullptr;
    uint32_t outputDimensionCount{0};
    uint32_t addOutputIndex = {0};
    ASSERT_EQ(OH_NN_SUCCESS,
              OH_NNExecutor_GetOutputShape(executor, addOutputIndex, &outputDimensions, &outputDimensionCount));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Destroy_0100
* @tc.name : 销毁执行器实例,*executor为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Destroy_0100, Function | MediumTest | Level3)
{
    OH_NNExecutor *executor = nullptr;
    ASSERT_NO_THROW(OH_NNExecutor_Destroy(&executor));
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Destroy_0200
* @tc.name : 销毁执行器实例,executor释放
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Destroy_0200, Function | MediumTest | Level1)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addModel.expectValue));
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    OH_NNExecutor_Destroy(&executor);
    ASSERT_EQ(nullptr, executor);
    Free(model, compilation);
}
/**
* @tc.number : SUB_AI_NNR_Func_North_Executor_Combine_0100
* @tc.name : 并发模型推理,推理成功
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(ExecutorTest, SUB_AI_NNR_Func_North_Executor_Combine_0100, Function | MediumTest | Level2)
{
    OH_NNModel *model1 = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model1);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model1, graphArgs));
    OH_NNModel *model2 = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model2);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model2, graphArgs));
    OH_NNCompilation *compilation1 = OH_NNCompilation_Construct(model1);
    ASSERT_NE(nullptr, compilation1);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation1, compileParam));
    OH_NNCompilation *compilation2 = OH_NNCompilation_Construct(model2);
    ASSERT_NE(nullptr, compilation2);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation2, compileParam));
    OH_NNExecutor *executor1 = OH_NNExecutor_Construct(compilation1);
    ASSERT_NE(nullptr, executor1);
    OH_NNExecutor *executor2 = OH_NNExecutor_Construct(compilation2);
    ASSERT_NE(nullptr, executor2);
    std::thread th1(ExecuteModel, executor1, graphArgs, addModel.expectValue);
    std::thread th2(ExecuteModel, executor2, graphArgs, addModel.expectValue);
    th1.join();
    th2.join();
    Free(model1, compilation1, executor1);
    Free(model2, compilation2, executor2);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Combine_0200
* @tc.name : 多次设置输入,仅首次成功,模型推理
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0200, Function | MediumTest | Level1)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    float valueX2[4] = {3, 2, 1, 0};
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (auto i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                                operandTem.shape.data(), quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
            EXPECT_EQ(OH_NN_INVALID_PARAMETER,
                      OH_NNExecutor_SetInput(executor, 3, &operand, valueX2, operandTem.length));
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, operandTem.length));
            OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
            ASSERT_EQ(OH_NN_SUCCESS, device->MemoryCopy(addModel.expectValue, operandTem.length));
            outputIndex += 1;
        }
    }
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor));
    // check result
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Combine_0300
* @tc.name : 多次设置输出,仅首次生效,模型推理
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0300, Function | MediumTest | Level1)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (auto i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                                operandTem.shape.data(), quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, operandTem.length));
            ASSERT_EQ(OH_NN_INVALID_PARAMETER,
                      OH_NNExecutor_SetOutput(executor, outputIndex + 10, operandTem.data, operandTem.length));
            OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
            ASSERT_EQ(OH_NN_SUCCESS, device->MemoryCopy(addModel.expectValue, operandTem.length));
            outputIndex += 1;
        }
    }
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor));
    // check result
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Combine_0400
* @tc.name : 模型推理,共享输入非共享输出
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0400, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory[graphArgs.inputIndices.size()];
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (auto i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                                operandTem.shape.data(), quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, operandTem.length);
            ASSERT_NE(nullptr, inputMemory);
            ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory));
            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length));
            OHNNMemory[inputIndex] = inputMemory;
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetOutput(executor, outputIndex, operandTem.data, operandTem.length));
            OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
            ASSERT_EQ(OH_NN_SUCCESS, device->MemoryCopy(addModel.expectValue, operandTem.length));
            outputIndex += 1;
        }
    }
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor));
    // check result
    EXPECT_TRUE(CheckOutput(addModel.outputValue, addModel.expectValue));
    for (auto i = 0; i < graphArgs.inputIndices.size(); i++) {
        OH_NNExecutor_DestroyInputMemory(executor, i, &OHNNMemory[i]);
        ASSERT_EQ(OHNNMemory[i], nullptr);
    }
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Combine_0500
* @tc.name : 模型推理,非共享输入共享输出
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0500, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    OH_NN_Memory *outputMemory;
    for (auto i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                                operandTem.shape.data(), quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            ASSERT_EQ(OH_NN_SUCCESS,
                      OH_NNExecutor_SetInput(executor, inputIndex, &operand, operandTem.data, operandTem.length));
            inputIndex += 1;
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            outputMemory = OH_NNExecutor_AllocateOutputMemory(executor, outputIndex, operandTem.length);
            ASSERT_NE(nullptr, outputMemory);
            ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetOutputWithMemory(executor, outputIndex, outputMemory));
            OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
            ASSERT_EQ(OH_NN_SUCCESS, device->MemoryCopy(addModel.expectValue, operandTem.length));
            outputIndex += 1;
        }
    }
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor));
    // check result
    EXPECT_TRUE(CheckOutput(static_cast<float *>(const_cast<void *>(outputMemory->data)),
                            (float *)addModel.expectValue));
    OH_NNExecutor_DestroyOutputMemory(executor, 0, &outputMemory);
    ASSERT_EQ(outputMemory, nullptr);
    Free(model, compilation, executor);
}
ai/neural_network_runtime/interface/src/MemoryTest.cpp
已删除 100644 → 0
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cmath>
#include <cstdio>
#include <vector>
#include <thread>
#include <cstdlib>
#include "nnrt_utils.h"
#include "model.h"
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime;
using namespace OHOS::NeuralNetworkRuntime::Test;
using namespace OHOS::HDI::Nnrt::V1_0;

namespace {
class MemoryTest : public testing::Test {
protected:
    AddModel addModel;
    OHNNGraphArgs graphArgs = addModel.graphArgs;
    OHNNCompileParam compileParam;
};

void CheckCreateInputMemory(OH_NNExecutor *executor, uint32_t inputIndex, size_t length)
{
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, length);
    ASSERT_NE(nullptr, OHNNMemory);
}

void CheckCreateOutputMemory(OH_NNExecutor *executor, uint32_t outputIndex, size_t length)
{
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, outputIndex, length);
    ASSERT_NE(nullptr, OHNNMemory);
}
} // namespace
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0100
* @tc.name : 创建输入共享内存,executor为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0100, Function | MediumTest | Level3)
{
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(nullptr, 0, 4);
    ASSERT_EQ(nullptr, OHNNMemory);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0200
* @tc.name : 创建输入共享内存,inputIndex不存在
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 2, graphArgs.operands[0].length);
    ASSERT_EQ(nullptr, OHNNMemory);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0300
* @tc.name : 创建输入共享内存,length为0
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, 0);
    ASSERT_EQ(nullptr, OHNNMemory);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0400
* @tc.name :创建输入共享内存,length为最大限制2G
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, 1024 * 1024 * 1024 + 1);
    ASSERT_EQ(nullptr, OHNNMemory);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0500
* @tc.name : 创建输入共享内存,inputIndex重复创建
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0500, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    OH_NN_Memory *OHNNMemory2 = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory2);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0600
* @tc.name : 多线程创建不同index输入的共享内存
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateInputMemory_0600, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    std::thread th1(CheckCreateInputMemory, executor, 0, graphArgs.operands[0].length);
    std::thread th2(CheckCreateInputMemory, executor, 1, graphArgs.operands[1].length);
    th1.join();
    th2.join();
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0100
* @tc.name : 创建输出共享内存,executor为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0100, Function | MediumTest | Level3)
{
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(nullptr, 0, 4);
    ASSERT_EQ(nullptr, OHNNMemory);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0200
* @tc.name : 创建输出共享内存,inputIndex不存在
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 2, graphArgs.operands[0].length);
    ASSERT_EQ(nullptr, OHNNMemory);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0300
* @tc.name : 创建输出共享内存,length为0
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, 0);
    ASSERT_EQ(nullptr, OHNNMemory);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0400
* @tc.name :创建输出共享内存,length为最大限制2G
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, 1024 * 1024 * 1024 + 1);
    ASSERT_EQ(nullptr, OHNNMemory);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0500
* @tc.name : 创建输出共享内存,outputIndex重复创建
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0500, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    OH_NN_Memory *OHNNMemory2 = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory2);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0600
* @tc.name : 多线程创建不同index输出的共享内存
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_CreateOutputMemory_0600, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    TopKModel topKModel;
    graphArgs = topKModel.graphArgs;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OHNNCompileParam compileParam;
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    std::thread th1(CheckCreateOutputMemory, executor, 0, graphArgs.operands[3].length);
    std::thread th2(CheckCreateOutputMemory, executor, 1, graphArgs.operands[4].length);
    th1.join();
    th2.join();
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_DestroyInputMemory_0100
* @tc.name : 销毁输入共享内存,executor为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_DestroyInputMemory_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    OH_NNExecutor_DestroyInputMemory(nullptr, 0, &OHNNMemory);
    ASSERT_NE(nullptr, OHNNMemory);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_DestroyInputMemory_0200
* @tc.name : 销毁输入共享内存,inputIndex不存在
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_DestroyInputMemory_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    OH_NNExecutor_DestroyInputMemory(executor, 1, &OHNNMemory);
    ASSERT_NE(nullptr, OHNNMemory);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_DestroyInputMemory_0300
* @tc.name : 销毁输出共享内存,*memory为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_DestroyInputMemory_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = nullptr;
    ASSERT_NO_THROW(OH_NNExecutor_DestroyInputMemory(executor, 0, &OHNNMemory));
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_DestroyInputMemory_0400
* @tc.name : 销毁输出共享内存,inputIndex不同memory重复销毁
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_DestroyInputMemory_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    OH_NN_Memory *OHNNMemory2 = OH_NNExecutor_AllocateInputMemory(executor, 1, graphArgs.operands[1].length);
    ASSERT_NE(nullptr, OHNNMemory2);
    OH_NNExecutor_DestroyInputMemory(executor, 0, &OHNNMemory);
    ASSERT_EQ(nullptr, OHNNMemory);
    OH_NNExecutor_DestroyInputMemory(executor, 1, &OHNNMemory2);
    ASSERT_EQ(nullptr, OHNNMemory2);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_DestroyInputMemory_0500
* @tc.name : 多线销毁不同index输入的共享内存
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_DestroyInputMemory_0500, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    OH_NN_Memory *OHNNMemory2 = OH_NNExecutor_AllocateInputMemory(executor, 1, graphArgs.operands[1].length);
    ASSERT_NE(nullptr, OHNNMemory2);
    std::thread th1(OH_NNExecutor_DestroyInputMemory, executor, 0, &OHNNMemory);
    std::thread th2(OH_NNExecutor_DestroyInputMemory, executor, 1, &OHNNMemory2);
    th1.join();
    th2.join();
    ASSERT_EQ(nullptr, OHNNMemory);
    ASSERT_EQ(nullptr, OHNNMemory2);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_DestroyOutputMemory_0100
* @tc.name : 销毁输出共享内存,executor为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_DestroyOutputMemory_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    OH_NNExecutor_DestroyOutputMemory(nullptr, 0, &OHNNMemory);
    ASSERT_NE(nullptr, OHNNMemory);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_DestroyOutputMemory_0200
* @tc.name : 销毁输出共享内存,inputIndex不存在
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_DestroyOutputMemory_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    OH_NNExecutor_DestroyOutputMemory(executor, 1, &OHNNMemory);
    ASSERT_NE(nullptr, OHNNMemory);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_DestroyOutputMemory_0300
* @tc.name : 销毁输出共享内存,*memory为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_DestroyOutputMemory_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    ASSERT_NO_THROW(OH_NNExecutor_DestroyOutputMemory(executor, 0, nullptr));
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_DestroyOutputMemory_0400
* @tc.name : 销毁输出共享内存,inputIndex不同memory重复销毁
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_DestroyOutputMemory_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    OH_NN_Memory *OHNNMemory2 = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory2);
    OH_NNExecutor_DestroyOutputMemory(executor, 0, &OHNNMemory);
    ASSERT_EQ(nullptr, OHNNMemory);
    OH_NNExecutor_DestroyOutputMemory(executor, 0, &OHNNMemory2);
    ASSERT_EQ(nullptr, OHNNMemory2);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_DestroyOutputMemory_0500
* @tc.name : 多线销毁不同index输出的共享内存
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_DestroyOutputMemory_0500, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    TopKModel topKModel;
    graphArgs = topKModel.graphArgs;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    OH_NN_Memory *OHNNMemory2 = OH_NNExecutor_AllocateOutputMemory(executor, 1, graphArgs.operands[1].length);
    ASSERT_NE(nullptr, OHNNMemory2);
    std::thread th1(OH_NNExecutor_DestroyOutputMemory, executor, 0, &OHNNMemory);
    std::thread th2(OH_NNExecutor_DestroyOutputMemory, executor, 1, &OHNNMemory2);
    th1.join();
    th2.join();
    ASSERT_EQ(nullptr, OHNNMemory);
    ASSERT_EQ(nullptr, OHNNMemory2);
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0100
* @tc.name : 设置输入共享内存,executor为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                            operandTem.shape.data(), quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInputWithMemory(nullptr, 0, &operand, OHNNMemory));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0200
* @tc.name : 设置输入共享内存,inputIndex不存在
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                            operandTem.shape.data(), quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInputWithMemory(executor, 2, &operand, OHNNMemory));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0300
* @tc.name : 设置输入共享内存,operand为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInputWithMemory(executor, 0, nullptr, OHNNMemory));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0400
* @tc.name : 设置输入共享内存,operand与输入不匹配
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0400, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory1 = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory1);
    const OHNNOperandTest &operandTem = graphArgs.operands[2];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                            operandTem.shape.data(), quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInputWithMemory(executor, 0, &operand, OHNNMemory1));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0500
* @tc.name : 设置输入共享内存,memory为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0500, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                            operandTem.shape.data(), quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetInputWithMemory(executor, 0, &operand, nullptr));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0600
* @tc.name : 设置输入共享内存,重复设置相同inputIndex
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_SetInputFromMemory_0600, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateInputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    const OHNNOperandTest &operandTem = graphArgs.operands[0];
    auto quantParam = operandTem.quantParam;
    OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                            operandTem.shape.data(), quantParam, operandTem.type};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, 0, &operand, OHNNMemory));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, 0, &operand, OHNNMemory));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_SetOutputFromMemory_0100
* @tc.name : 设置输出共享内存,executor为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_SetOutputFromMemory_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutputWithMemory(nullptr, 0, OHNNMemory));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_SetOutputFromMemory_0200
* @tc.name : 设置输出共享内存,outputIndex不存在
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_SetOutputFromMemory_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutputWithMemory(executor, 1, OHNNMemory));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_SetOutputFromMemory_0300
* @tc.name : 设置输出共享内存,memory为nullptr
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_SetOutputFromMemory_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_SetOutputWithMemory(executor, 0, nullptr));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_SetOutputFromMemory_0400
* @tc.name : 设置输出共享内存,重复设置相同outputIndex
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_SetOutputFromMemory_0400, Function | MediumTest | Level2)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    OH_NN_Memory *OHNNMemory = OH_NNExecutor_AllocateOutputMemory(executor, 0, graphArgs.operands[0].length);
    ASSERT_NE(nullptr, OHNNMemory);
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetOutputWithMemory(executor, 0, OHNNMemory));
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetOutputWithMemory(executor, 0, OHNNMemory));
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_Run_0100
* @tc.name : 共享内存模型推理,executor设置输入个数不足
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    uint32_t outputIndex = 0;
    for (auto i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                                operandTem.shape.data(), quantParam, operandTem.type};
        if (i == 0) {
            OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, operandTem.length);
            ASSERT_NE(nullptr, inputMemory);
            ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory));
            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length));
        } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                   graphArgs.outputIndices.end()) {
            OH_NN_Memory *outputMemory = OH_NNExecutor_AllocateOutputMemory(executor, outputIndex, operandTem.length);
            ASSERT_NE(nullptr, outputMemory);
            ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetOutputWithMemory(executor, outputIndex, outputMemory));
            outputIndex += 1;
        }
    }
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor));
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_Run_0200
* @tc.name : 共享内存模型推理,executor设置输出个数不足
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    uint32_t inputIndex = 0;
    for (auto i = 0; i < graphArgs.operands.size(); i++) {
        const OHNNOperandTest &operandTem = graphArgs.operands[i];
        auto quantParam = operandTem.quantParam;
        OH_NN_Tensor operand = {operandTem.dataType, (uint32_t)operandTem.shape.size(),
                                operandTem.shape.data(), quantParam, operandTem.type};
        if (std::find(graphArgs.inputIndices.begin(), graphArgs.inputIndices.end(), i) !=
            graphArgs.inputIndices.end()) {
            OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, operandTem.length);
            ASSERT_NE(nullptr, inputMemory);
            ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory));
            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *)operandTem.data, operandTem.length));
        }
    }
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor));
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_Run_0300
* @tc.name : 共享内存,定长模型推理测试
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0300, Function | MediumTest | Level1)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    size_t ioSize = graphArgs.inputIndices.size() + graphArgs.outputIndices.size();
    OH_NN_Memory *OHNNMemory[ioSize];
    ASSERT_EQ(OH_NN_SUCCESS, ExecutorWithMemory(executor, graphArgs, OHNNMemory, addModel.expectValue));
    for (auto i = 0; i < graphArgs.inputIndices.size(); i++) {
        OH_NNExecutor_DestroyInputMemory(executor, i, &OHNNMemory[i]);
        ASSERT_EQ(OHNNMemory[i], nullptr);
    }
    for (auto j = 0; j < graphArgs.outputIndices.size(); j++) {
        auto outputIndex = graphArgs.inputIndices.size() + j;
        // check memory output
        EXPECT_TRUE(CheckOutput(static_cast<float *>(const_cast<void *>(OHNNMemory[outputIndex]->data)),
                                (float *)addModel.expectValue));
        OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]);
        ASSERT_EQ(OHNNMemory[outputIndex], nullptr);
    }
    Free(model, compilation, executor);
}
/**
* @tc.number : SUB_AI_NNRt_Func_North_Executor_Memory_Run_0400
* @tc.name : 共享内存,变长模型推理测试
* @tc.desc : [C- SOFTWARE -0200]
*/
HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0400, Function | MediumTest | Level1)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    AvgPoolDynamicModel avgModel;
    graphArgs = avgModel.graphArgs;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNCompilation *compilation = OH_NNCompilation_Construct(model);
    ASSERT_NE(nullptr, compilation);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
    OH_NNExecutor *executor = OH_NNExecutor_Construct(compilation);
    ASSERT_NE(nullptr, executor);
    avgModel.dynamicInput.shape = {1, 3, 3, 1};
    avgModel.output.shape = {1, 2, 2, 1};
    graphArgs.operands = {avgModel.dynamicInput, avgModel.kernel, avgModel.strides,
                          avgModel.padMode, avgModel.activation, avgModel.output};
    size_t ioSize = graphArgs.inputIndices.size() + graphArgs.outputIndices.size();
    OH_NN_Memory *OHNNMemory[ioSize];
    ASSERT_EQ(OH_NN_SUCCESS, ExecutorWithMemory(executor, graphArgs, OHNNMemory, avgModel.expectValue));
    for (auto i = 0; i < graphArgs.inputIndices.size(); i++) {
        OH_NNExecutor_DestroyInputMemory(executor, i, &OHNNMemory[i]);
        ASSERT_EQ(OHNNMemory[i], nullptr);
    }
    for (auto j = 0; j < graphArgs.outputIndices.size(); j++) {
        auto outputIndex = graphArgs.inputIndices.size() + j;
        // check memory output
        EXPECT_TRUE(CheckOutput(static_cast<float *>(const_cast<void *>(OHNNMemory[outputIndex]->data)),
                                (float *)avgModel.expectValue));
        OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]);
        ASSERT_EQ(OHNNMemory[outputIndex], nullptr);
    }
    Free(model, compilation, executor);
}
\ No newline at end of file
ai/neural_network_runtime/interface/src/ModelTest.cpp
deleted 100644 → 0
View file @ 44e50725
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cmath>
#include <cstdio>
#include <vector>
#include <thread>
#include "nnrt_utils.h"
#include "model.h"
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime::Test;
using namespace OHOS::HDI::Nnrt::V1_0;

namespace {

class ModelTest : public testing::Test {
protected:
    AddModel addModel;
    OHNNGraphArgs graphArgs = addModel.graphArgs;
    OHNNCompileParam compileParam;
};

void BuildAddTopKGraph(OH_NNModel *model)
{
    AddTopKModel addTopKModel;
    OHNNGraphArgsMulti graphArgsMulti = addTopKModel.graphArgs;
    ASSERT_EQ(OH_NN_SUCCESS, BuildMultiOpGraph(model, graphArgsMulti));
}

void BuildModel(OH_NNModel *model, const OHNNGraphArgs &graphArgs)
{
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
}

} // namespace
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_CreateModel_0100
 * @tc.name : Create a model instance and verify the pointer
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_CreateModel_0100, Function | MediumTest | Level0)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_CreateModel_0200
 * @tc.name : Create multiple model instances and verify the pointers
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_CreateModel_0200, Function | MediumTest | Level2)
{
    OH_NNModel *model_first = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model_first);
    OH_NNModel *model_second = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model_second);
    OH_NNModel *model_third = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model_third);
    ASSERT_NE(model_first, model_second);
    ASSERT_NE(model_first, model_third);
    ASSERT_NE(model_second, model_third);
    Free(model_first);
    Free(model_second);
    Free(model_third);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperand_0100
 * @tc.name : Add tensor, model is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperand_0100, Function | MediumTest | Level3)
{
    int32_t dimensions[3]{3, 2, 2};
    OH_NN_Tensor operand{OH_NN_FLOAT32, 3, dimensions, nullptr, OH_NN_TENSOR};
    OH_NN_ReturnCode ret = OH_NNModel_AddTensor(nullptr, &operand);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperand_0200
 * @tc.name : Add tensor, operand is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperand_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, nullptr);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperand_0300
 * @tc.name : Add tensor, dataType in operand is 100000
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperand_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    int32_t dimensions[3]{3, 2, 2};
    OH_NN_Tensor operand{static_cast<OH_NN_DataType>(100000), 3, dimensions, nullptr, OH_NN_TENSOR};
    OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &operand);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperand_0400
 * @tc.name : Add tensor, type in operand is 100000
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperand_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    int32_t dimensions[3]{3, 2, 2};
    OH_NN_Tensor operand{OH_NN_FLOAT32, 3, dimensions, nullptr, static_cast<OH_NN_TensorType>(100000)};
    OH_NN_ReturnCode ret = OH_NNModel_AddTensor(model, &operand);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
    Free(model);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SetOperandValue_0100
 * @tc.name : Set tensor data, model is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SetOperandValue_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    int8_t activationValue{0};
    int32_t dimensions[3]{3, 2, 2};
    OH_NN_Tensor operand{OH_NN_FLOAT32, 3, dimensions, nullptr, OH_NN_TENSOR};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNModel_AddTensor(model, &operand));
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SetTensorData(nullptr, 1, (void *)&activationValue, sizeof(int8_t)));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SetOperandValue_0200
 * @tc.name : Set tensor data, the tensor does not exist
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SetOperandValue_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    int8_t activationValue{0};
    int32_t dimensions[3]{3, 2, 2};
    OH_NN_Tensor operand{OH_NN_FLOAT32, 3, dimensions, nullptr, OH_NN_TENSOR};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNModel_AddTensor(model, &operand));
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SetTensorData(model, 1000, (void *)&activationValue, sizeof(int8_t)));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SetOperandValue_0300
 * @tc.name : Set tensor data, buffer is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SetOperandValue_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    int32_t dimensions[3]{3, 2, 2};
    OH_NN_Tensor operand{OH_NN_FLOAT32, 3, dimensions, nullptr, OH_NN_TENSOR};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNModel_AddTensor(model, &operand));
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_SetTensorData(model, 1, nullptr, sizeof(int8_t)));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SetOperandValue_0400
 * @tc.name : Set tensor data, length is 0
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SetOperandValue_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    int8_t activationValue{0};
    int32_t dimensions[3]{3, 2, 2};
    OH_NN_Tensor operand{OH_NN_FLOAT32, 3, dimensions, nullptr, OH_NN_TENSOR};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNModel_AddTensor(model, &operand));
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_SetTensorData(model, 1, (void *)&activationValue, 0));
    Free(model);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_0100
 * @tc.name : Add operation, model is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{const_cast<uint32_t *>(graphArgs.paramIndices.data()),
        graphArgs.paramIndices.size()};
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(nullptr, graphArgs.operationType,
        &paramIndices, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_0200
 * @tc.name : Add operation, paramIndices is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        nullptr, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_0300
 * @tc.name : Add operation, data in paramIndices is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{nullptr, graphArgs.paramIndices.size()};
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_0400
 * @tc.name : Add operation, the index referenced by paramIndices data does not exist
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    uint32_t paramIndicesValue{10};
    OH_NN_UInt32Array paramIndices{&paramIndicesValue, graphArgs.paramIndices.size()};
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_0500
 * @tc.name : Add operation, size in paramIndices is 0
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_0500, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{const_cast<uint32_t *>(graphArgs.paramIndices.data()), 0};
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, &inputIndices, &outputIndices));
    Free(model);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_0600
 * @tc.name : Add operation, inputIndices is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_0600, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{const_cast<uint32_t *>(graphArgs.paramIndices.data()),
        graphArgs.paramIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, nullptr, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_0700
 * @tc.name : Add operation, data in inputIndices is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_0700, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{const_cast<uint32_t *>(graphArgs.paramIndices.data()),
        graphArgs.paramIndices.size()};
    OH_NN_UInt32Array inputIndices{nullptr, graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_0800
 * @tc.name : Add operation, the index referenced by inputIndices data does not exist
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_0800, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{const_cast<uint32_t *>(graphArgs.paramIndices.data()),
        graphArgs.paramIndices.size()};
    uint32_t inputIndicesValue{10};
    OH_NN_UInt32Array inputIndices{&inputIndicesValue, graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_0900
 * @tc.name : Add operation, size in inputIndices is 0
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_0900, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{const_cast<uint32_t *>(graphArgs.paramIndices.data()),
        graphArgs.paramIndices.size()};
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()), 0};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, &inputIndices, &outputIndices));
    Free(model);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_1000
 * @tc.name : Add operation, outputIndices is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_1000, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{const_cast<uint32_t *>(graphArgs.paramIndices.data()),
        graphArgs.paramIndices.size()};
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()), 0};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, &inputIndices, nullptr));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_1100
 * @tc.name : Add operation, data in outputIndices is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_1100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{const_cast<uint32_t *>(graphArgs.paramIndices.data()),
        graphArgs.paramIndices.size()};
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{nullptr, graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_1200
 * @tc.name : Add operation, the index referenced by outputIndices data does not exist
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_1200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{const_cast<uint32_t *>(graphArgs.paramIndices.data()),
        graphArgs.paramIndices.size()};
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    uint32_t outputIndicesValue{10};
    OH_NN_UInt32Array outputIndices{&outputIndicesValue, graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_AddOperation_1300
 * @tc.name : Add operation, size in outputIndices is 0
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_AddOperation_1300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    graphArgs.addOperation = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array paramIndices{const_cast<uint32_t *>(graphArgs.paramIndices.data()),
        graphArgs.paramIndices.size()};
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()), 0};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_AddOperation(model, graphArgs.operationType,
        &paramIndices, &inputIndices, &outputIndices));
    Free(model);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0100
 * @tc.name : Specify inputs and outputs, model is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SpecifyInputsAndOutputs(nullptr, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0200
 * @tc.name : Specify inputs and outputs, inputIndices is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SpecifyInputsAndOutputs(model, nullptr, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0300
 * @tc.name : Specify inputs and outputs, data in inputIndices is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array inputIndices{nullptr, 2};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0400
 * @tc.name : Specify inputs and outputs, the index referenced by inputIndices data does not exist
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    uint32_t modelInputIndicesValue{5};
    OH_NN_UInt32Array inputIndices{&modelInputIndicesValue, 1};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0500
 * @tc.name : Specify inputs and outputs, size in inputIndices is 0
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0500, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()), 0};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()),
        graphArgs.outputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices));
    Free(model);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0600
 * @tc.name : Specify inputs and outputs, outputIndices is a null pointer
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0600, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, nullptr));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0700
 * @tc.name : Specify inputs and outputs, data in outputIndices is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0700, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{nullptr, 1};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0800
 * @tc.name : Specify inputs and outputs, the index referenced by outputIndices data does not exist
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0800, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    uint32_t modelOutputIndicesValue{5};
    OH_NN_UInt32Array outputIndices{&modelOutputIndicesValue, 1};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0900
 * @tc.name : Specify inputs and outputs, size in outputIndices is 0
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_SpecifyInputsAndOutputs_0900, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NN_UInt32Array inputIndices{const_cast<uint32_t *>(graphArgs.inputIndices.data()),
        graphArgs.inputIndices.size()};
    OH_NN_UInt32Array outputIndices{const_cast<uint32_t *>(graphArgs.outputIndices.data()), 0};
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_SpecifyInputsAndOutputs(model, &inputIndices, &outputIndices));
    Free(model);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_Finish_0100
 * @tc.name : Finish model composition, model is a null pointer
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_Finish_0100, Function | MediumTest | Level3)
{
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNModel_Finish(nullptr));
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_Finish_0200
 * @tc.name : Finish model composition, no tensor added
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_Finish_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN, OH_NNModel_Finish(model));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_Finish_0300
 * @tc.name : Finish model composition, inputs and outputs not specified
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_Finish_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.specifyIO = false;
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN, BuildSingleOpGraph(model, graphArgs));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_Finish_0400
 * @tc.name : Finish model composition, inputs and outputs specified, composition succeeds
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_Finish_0400, Function | MediumTest | Level1)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_Destroy_0100
 * @tc.name : Destroy model, model is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_Destroy_0100, Function | MediumTest | Level3)
{
    OH_NNModel *model = nullptr;
    ASSERT_NO_THROW(OH_NNModel_Destroy(&model));
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_Destroy_0200
 * @tc.name : Destroy model, model composition not finished
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_Destroy_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    OH_NNModel_Destroy(&model);
    ASSERT_EQ(nullptr, model);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0100
 * @tc.name : Query supported operations, model is a null pointer
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0100, Function | MediumTest | Level3)
{
    const size_t *devicesID{nullptr};
    const bool *isSupported{nullptr};
    uint32_t opCount{0};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(nullptr, targetDevice, &isSupported, &opCount);
    ASSERT_EQ(OH_NN_INVALID_PARAMETER, ret);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0200
 * @tc.name : Query supported operations, deviceID does not exist
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0200, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    size_t targetDevice{100000};
    const bool *isSupported{nullptr};
    uint32_t opCount{0};
    ASSERT_EQ(OH_NN_FAILED, OH_NNModel_GetAvailableOperations(model, targetDevice, &isSupported, &opCount));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0300
 * @tc.name : Query supported operations, *isSupported is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0300, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    const size_t *devicesID{nullptr};
    uint32_t opCount{0};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_GetAvailableOperations(model, targetDevice, nullptr, &opCount));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0400
 * @tc.name : Query supported operations, **isSupported is not nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0400, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    const size_t *devicesID{nullptr};
    const bool isSupported = true;
    const bool *realSupported = &isSupported;
    uint32_t opCount{0};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_GetAvailableOperations(model, targetDevice, &realSupported, &opCount));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0500
 * @tc.name : Query supported operations, *opCount is nullptr
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0500, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    const size_t *devicesID{nullptr};
    const bool *isSupported{nullptr};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    ASSERT_EQ(OH_NN_INVALID_PARAMETER,
        OH_NNModel_GetAvailableOperations(model, targetDevice, &isSupported, nullptr));
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0600
 * @tc.name : Query supported operations, model composition not finished
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0600, Function | MediumTest | Level3)
{
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    graphArgs.build = false;
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model, graphArgs));
    const size_t *devicesID{nullptr};
    const bool *isSupported{nullptr};
    uint32_t opCount{0};
    uint32_t devicesCount{0};
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    ASSERT_EQ(OH_NN_OPERATION_FORBIDDEN,
        OH_NNModel_GetAvailableOperations(model, targetDevice, &isSupported, &opCount));
    Free(model);
}
/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0700
 * @tc.name : Query supported operations, all operations supported
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0700, Function | MediumTest | Level1)
{
    OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
    std::vector<bool> isSupported{true, true};
    device->SetOperationsSupported(isSupported);
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    BuildAddTopKGraph(model);
    const size_t *devicesID{nullptr};
    const bool *realSupported{nullptr};
    uint32_t opCount;
    uint32_t devicesCount;
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, targetDevice, &realSupported, &opCount);
    ASSERT_EQ(OH_NN_SUCCESS, ret);
    for (int i = 0; i < opCount; i++) {
        EXPECT_EQ(realSupported[i], isSupported[i]);
    }
    Free(model);
}

/**
 * @tc.number : SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0800
 * @tc.name : Query supported operations, operations partially supported
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNRt_Func_North_Model_GetSupportedOperation_0800, Function | MediumTest | Level2)
{
    OHOS::sptr<V1_0::MockIDevice> device = V1_0::MockIDevice::GetInstance();
    std::vector<bool> isSupported{true, false};
    device->SetOperationsSupported(isSupported);
    OH_NNModel *model = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model);
    BuildAddTopKGraph(model);
    const size_t *devicesID{nullptr};
    const bool *realSupported{nullptr};
    uint32_t opCount;
    uint32_t devicesCount;
    ASSERT_EQ(OH_NN_SUCCESS, OH_NNDevice_GetAllDevicesID(&devicesID, &devicesCount));
    size_t targetDevice = devicesID[0];
    OH_NN_ReturnCode ret = OH_NNModel_GetAvailableOperations(model, targetDevice, &realSupported, &opCount);
    ASSERT_EQ(OH_NN_SUCCESS, ret);
    for (int i = 0; i < opCount; i++) {
        EXPECT_EQ(realSupported[i], isSupported[i]);
    }
    Free(model);
    device->SetOperationsSupported({true});
}
/**
 * @tc.number : SUB_AI_NNR_Func_North_Model_Combine_0100
 * @tc.name : Different models, concurrent multi-thread online composition, composition succeeds
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNR_Func_North_Model_Combine_0100, Function | MediumTest | Level2)
{
    OH_NNModel *model1 = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model1);
    OH_NNModel *model2 = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model2);
    std::thread th1(BuildModel, model1, graphArgs);
    std::thread th2(BuildModel, model2, graphArgs);
    th1.join();
    th2.join();
    Free(model1);
    Free(model2);
}

/**
 * @tc.number : SUB_AI_NNR_Func_North_Model_Combine_0200
 * @tc.name : Multi-model composition, destroy another model while a model is being composed
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(ModelTest, SUB_AI_NNR_Func_North_Model_Combine_0200, Function | MediumTest | Level1)
{
    OH_NNModel *model1 = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model1);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model1, graphArgs));
    OH_NNModel *model2 = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model2);
    std::thread th1(BuildModel, model2, graphArgs);
    std::thread th2(OH_NNModel_Destroy, &model1);
    th1.join();
    th2.join();
    ASSERT_EQ(nullptr, model1);
    Free(model2);
}
ai/neural_network_runtime/stability/BUILD.gn
deleted 100644 → 0
View file @ 44e50725
# Copyright (c) 2022 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import("//test/xts/tools/build/suite.gni")
ohos_moduletest_suite("ActsAiNnrtStabilityTest") {
testonly = true
module_out_path = "acts/nnrt"
sources = [
"../common/mock_idevice.cpp",
"../common/nnrt_utils.cpp",
"src/MultiThreadTest.cpp",
]
include_dirs = [
"../common",
"//foundation/ai/neural_network_runtime",
"//foundation/ai/neural_network_runtime/third_party/include",
"//third_party/googletest/googletest/include",
"//third_party/googletest/googlemock/include",
"//third_party/mindspore/mindspore/lite/mindir/include",
]
deps = [
"//foundation/ai/neural_network_runtime/frameworks:libneural_network_runtime",
"//third_party/googletest:gmock",
"//third_party/googletest:gtest",
]
external_deps = [
"c_utils:utils",
"drivers_interface_nnrt:libnnrt_proxy_1.0",
"hdf_core:libhdf_utils",
"hdf_core:libhdi",
"hilog_native:libhilog",
"hitrace_native:libhitracechain",
"ipc:ipc_single",
"mindspore:mindir",
]
cflags = [ "-Wno-error" ]
}
ai/neural_network_runtime/stability/Test.json
deleted 100644 → 0
View file @ 44e50725
{
    "kits": [
        {
            "push": [
                "ActsAiNnrtStabilityTest->/data/local/tmp/ActsAiNnrtStabilityTest"
            ],
            "type": "PushKit"
        }
    ],
    "driver": {
        "native-test-timeout": "120000",
        "type": "CppTest",
        "module-name": "ActsAiNnrtStabilityTest",
        "runtime-hint": "1s",
        "native-test-device-path": "/data/local/tmp"
    },
    "description": "Configuration for ActsAiNnrtStabilityTest Tests"
}
\ No newline at end of file
ai/neural_network_runtime/stability/src/MultiThreadTest.cpp
deleted 100644 → 0
View file @ 44e50725
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cmath>
#include <cstdio>
#include <vector>
#include <thread>
#include "interfaces/kits/c/neural_network_runtime.h"
#include "nnrt_utils.h"
#include "model.h"
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime;
using namespace OHOS::NeuralNetworkRuntime::Test;
using namespace OHOS::HDI::Nnrt::V1_0;

class MultiThreadTest : public testing::Test {
public:
    void SetUp() {}
    void TearDown() {}

protected:
    OHNNCompileParam compileParam;
    AddModel addModel;
    OHNNGraphArgs graphArgs = addModel.graphArgs;
};

void CompileModel(OH_NNCompilation *compilation, const OHNNCompileParam &compileParam)
{
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation, compileParam));
}

void ExecuteModel(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs)
{
    float addExpectValue[4] = {0, 1, 2, 3};
    ASSERT_EQ(OH_NN_SUCCESS, ExecuteGraphMock(executor, graphArgs, addExpectValue));
}
/**
 * @tc.number : SUB_AI_NNR_Reliability_North_Stress_0100
 * @tc.name : Multi-thread concurrent model compilation long-run stability test
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(MultiThreadTest, SUB_AI_NNR_Reliability_North_Stress_0100, Reliability | MediumTest | Level2)
{
    for (int i = 0; i < STRESS_COUNT; i++) {
        OH_NNModel *model1 = OH_NNModel_Construct();
        ASSERT_NE(nullptr, model1);
        ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model1, graphArgs));
        OH_NNModel *model2 = OH_NNModel_Construct();
        ASSERT_NE(nullptr, model2);
        ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model2, graphArgs));
        OH_NNCompilation *compilation1 = OH_NNCompilation_Construct(model1);
        ASSERT_NE(nullptr, compilation1);
        OH_NNCompilation *compilation2 = OH_NNCompilation_Construct(model2);
        ASSERT_NE(nullptr, compilation2);
        std::thread th1(CompileModel, compilation1, compileParam);
        std::thread th2(CompileModel, compilation2, compileParam);
        th1.join();
        th2.join();
        Free(model1, compilation1);
        Free(model2, compilation2);
        if (i % PRINT_FREQ == 0) {
            printf("[NnrtTest] SUB_AI_NNR_Reliability_North_Stress_0100 times: %d/%d\n", i, STRESS_COUNT);
        }
    }
}
/**
 * @tc.number : SUB_AI_NNR_Reliability_North_Stress_0200
 * @tc.name : Multi-thread concurrent model inference long-run stability test
 * @tc.desc : [C- SOFTWARE -0200]
 */
HWTEST_F(MultiThreadTest, SUB_AI_NNR_Reliability_North_Stress_0200, Reliability | MediumTest | Level2)
{
    OH_NNModel *model1 = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model1);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model1, graphArgs));
    OH_NNModel *model2 = OH_NNModel_Construct();
    ASSERT_NE(nullptr, model2);
    ASSERT_EQ(OH_NN_SUCCESS, BuildSingleOpGraph(model2, graphArgs));
    OH_NNCompilation *compilation1 = OH_NNCompilation_Construct(model1);
    ASSERT_NE(nullptr, compilation1);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation1, compileParam));
    OH_NNCompilation *compilation2 = OH_NNCompilation_Construct(model2);
    ASSERT_NE(nullptr, compilation2);
    ASSERT_EQ(OH_NN_SUCCESS, CompileGraphMock(compilation2, compileParam));
    for (int i = 0; i < STRESS_COUNT; i++) {
        OH_NNExecutor *executor1 = OH_NNExecutor_Construct(compilation1);
        ASSERT_NE(nullptr, executor1);
        OH_NNExecutor *executor2 = OH_NNExecutor_Construct(compilation2);
        ASSERT_NE(nullptr, executor2);
        std::thread th1(ExecuteModel, executor1, graphArgs);
        std::thread th2(ExecuteModel, executor2, graphArgs);
        th1.join();
        th2.join();
        OH_NNExecutor_Destroy(&executor1);
        ASSERT_EQ(nullptr, executor1);
        OH_NNExecutor_Destroy(&executor2);
        ASSERT_EQ(nullptr, executor2);
        if (i % PRINT_FREQ == 0) {
            printf("[NnrtTest] SUB_AI_NNR_Reliability_North_Stress_0200 times: %d/%d\n", i, STRESS_COUNT);
        }
    }
    Free(model1, compilation1);
    Free(model2, compilation2);
}
test_packages.gni
View file @ a22633e9
...
...
@@ -14,7 +14,6 @@
import("//test/xts/tools/build/suite.gni")
_all_test_packages = [
"${ACTS_ROOT}/ai:ai",
"${ACTS_ROOT}/arkXtest:arkXtest",
"${ACTS_ROOT}/global:global",
"${ACTS_ROOT}/security:security",
...
...