OpenHarmony / Xts Acts
Commit 27ecc5d5 (not verified)
Authored by openharmony_ci on Jun 27, 2023; committed via Gitee on Jun 27, 2023

!9083 Fix security-scan warnings in the acts repository
Merge pull request !9083 from scholar-lc/master

Parents: 5e0dc942, fbb10dfa
14 changed files with 34 additions and 24 deletions (+34, -24)
Changed files:
  ai/neural_network_runtime/v1_0/common/mock_idevice.cpp          +5  -0
  ai/neural_network_runtime/v1_0/common/mock_idevice.h            +1  -1
  ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp            +1  -1
  ai/neural_network_runtime/v1_0/common/nnrt_utils.h              +1  -1
  ai/neural_network_runtime/v1_0/interface/src/CompileTest.cpp    +3  -3
  ai/neural_network_runtime/v1_0/interface/src/ExecutorTest.cpp   +2  -2
  ai/neural_network_runtime/v1_0/interface/src/MemoryTest.cpp     +4  -4
  ai/neural_network_runtime/v2_0/common/mock_idevice.cpp          +5  -0
  ai/neural_network_runtime/v2_0/common/mock_idevice.h            +1  -1
  ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp            +1  -1
  ai/neural_network_runtime/v2_0/common/nnrt_utils.h              +1  -1
  ai/neural_network_runtime/v2_0/interface/src/CompileTest.cpp    +3  -3
  ai/neural_network_runtime/v2_0/interface/src/ExecutorTest.cpp   +2  -2
  ai/neural_network_runtime/v2_0/interface/src/MemoryTest.cpp     +4  -4
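The diffs below apply the same three fixes to the v1_0 and v2_0 test suites: C-style casts are replaced with static_cast, MockIDevice gains an explicit constructor that initializes m_bufferFd, and the CompileTest fixture member compileParam is renamed to m_compileParam. As a rough, self-contained sketch of the cast cleanup (ReadFirstFloat and its parameter are hypothetical, not code from this repository):

#include <cstddef>

// Illustrative only. A C-style cast such as (float *) buffer compiles even
// between unrelated pointer types; static_cast allows only the conversions
// that make sense here, which is the spelling this commit moves the tests to.
float ReadFirstFloat(const void *buffer)
{
    const float *values = static_cast<const float *>(buffer);
    return values[0];
}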
ai/neural_network_runtime/v1_0/common/mock_idevice.cpp  (+5, -0)

@@ -34,6 +34,11 @@ sptr<INnrtDevice> INnrtDevice::Get(const std::string &serviceName, bool isStub)
     return mockIDevice;
 }
 
+MockIDevice::MockIDevice()
+{
+    m_bufferFd = 0;
+}
+
 MockIDevice::~MockIDevice()
 {
     for (auto ash : m_ashmems) {
ai/neural_network_runtime/v1_0/common/mock_idevice.h  (+1, -1)

@@ -89,7 +89,7 @@ public:
     static MockIDevice *GetInstance();
 
-    MockIDevice() = default;
+    MockIDevice();
 
     virtual ~MockIDevice();
 
 private:
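The header and source change go together: mock_idevice.h replaces the defaulted constructor with a declaration, and mock_idevice.cpp defines it so that m_bufferFd starts at 0 rather than holding an indeterminate value, which is the kind of finding a static security scan reports. A minimal sketch of the pattern, with a hypothetical class name and member type (the real declarations live in mock_idevice.h):

// Sketch only: explicit constructor instead of "= default" so the raw member
// has a defined value before anything reads it.
class MockDeviceSketch {
public:
    MockDeviceSketch()              // was: MockDeviceSketch() = default;
    {
        m_bufferFd = 0;             // mirrors the assignment the commit adds
    }
    virtual ~MockDeviceSketch() = default;

private:
    int m_bufferFd;                 // an in-class initializer (int m_bufferFd = 0;) would have the same effect
};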
ai/neural_network_runtime/v1_0/common/nnrt_utils.cpp  (+1, -1)

@@ -278,7 +278,7 @@ int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs,
                 LOGE("[NNRtTest] OH_NNExecutor_SetInputWithMemory failed! ret=%d\n", ret);
                 return ret;
             }
-            memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length);
+            memcpy_s(inputMemory->data, operandTem.length, static_cast<void*>(operandTem.data), operandTem.length);
             OHNNMemory[inputIndex] = inputMemory;
             inputIndex += 1;
         } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
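In nnrt_utils.cpp only the spelling of the cast changes; the gtest files in this commit additionally assert that memcpy_s returns EOK. A sketch of that checked-copy pattern, assuming the securec bounds-checking library these tests already use (securec.h, memcpy_s, EOK); CopyOperandData and its parameters are hypothetical:

#include <cstddef>
#include "securec.h"   // assumption: the securec headers are on the include path, as in the test build

// memcpy_s returns EOK (0) on success. Propagating a non-EOK result instead of
// discarding it is the same idea the test files express with
// ASSERT_EQ(EOK, memcpy_s(...)).
int CopyOperandData(void *dst, std::size_t dstLen, float *src, std::size_t srcLen)
{
    errno_t ret = memcpy_s(dst, dstLen, static_cast<void *>(src), srcLen);
    if (ret != EOK) {
        return ret;    // surface the failure to the caller
    }
    return EOK;
}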
ai/neural_network_runtime/v1_0/common/nnrt_utils.h  (+1, -1)

@@ -67,7 +67,7 @@ struct OHNNCompileParam {
     bool enableFp16 = false;
 };
 
-int BuildSingleOpGraph(OH_NNModel *modelptr, const OHNNGraphArgs &args);
+int BuildSingleOpGraph(OH_NNModel *model, const OHNNGraphArgs &graphArgs);
 int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, OH_NN_Memory *OHNNMemory[],
     float* expect);
ai/neural_network_runtime/v1_0/interface/src/CompileTest.cpp  (+3, -3)

@@ -68,7 +68,7 @@ public:
     }
 
 protected:
-    OHNNCompileParam compileParam;
+    OHNNCompileParam m_compileParam;
     AddModel addModel;
     OHNNGraphArgs graphArgs = addModel.graphArgs;
 };

@@ -835,8 +835,8 @@ HWTEST_F(CompileTest, SUB_AI_NNR_Func_North_Compilation_Combine_0100, Function |
     OH_NNCompilation *compilation2 = OH_NNCompilation_Construct(model2);
     ASSERT_NE(nullptr, compilation2);
-    std::thread th1(CompileModel, compilation1, compileParam);
-    std::thread th2(CompileModel, compilation2, compileParam);
+    std::thread th1(CompileModel, compilation1, m_compileParam);
+    std::thread th2(CompileModel, compilation2, m_compileParam);
     th1.join();
     th2.join();
     Free(model1, compilation1);
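The CompileTest change is a rename: the fixture member compileParam becomes m_compileParam, and the two std::thread call sites are updated to match, so class members are visibly distinct from locals and parameters. A stripped-down sketch with hypothetical types (CompileParamSketch stands in for OHNNCompileParam, CompileModelSketch for the real CompileModel):

#include <thread>

struct CompileParamSketch {                      // stand-in for OHNNCompileParam
    bool enableFp16 = false;
};

void CompileModelSketch(CompileParamSketch) {}   // placeholder for the real CompileModel

class CompileFixtureSketch {
public:
    void RunBothCompilations()
    {
        // std::thread copies its arguments, so both threads get their own copy
        // of the member even though they start from the same fixture.
        std::thread th1(CompileModelSketch, m_compileParam);
        std::thread th2(CompileModelSketch, m_compileParam);
        th1.join();
        th2.join();
    }

protected:
    CompileParamSketch m_compileParam;           // was: compileParam
};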
ai/neural_network_runtime/v1_0/interface/src/ExecutorTest.cpp  (+2, -2)

@@ -1135,7 +1135,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0400, Function |
             ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory));
-            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length));
+            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void*>(operandTem.data), operandTem.length));
             OHNNMemory[inputIndex] = inputMemory;
             inputIndex += 1;
         } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=

@@ -1205,7 +1205,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0500, Function |
     ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor));
 
     // check result
     EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(outputMemory->data)),
-                            (float*) addModel.expectValue));
+                            static_cast<float*>(addModel.expectValue)));
     OH_NNExecutor_DestroyOutputMemory(executor, 0, &outputMemory);
     ASSERT_EQ(outputMemory, nullptr);
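In ExecutorTest the output-memory pointer was already unpacked with const_cast plus static_cast; the commit brings the expected-value argument in line by replacing the remaining (float*) cast with static_cast<float*>. Writing the conversions out matters because a single C-style cast may silently combine a const_cast with a reinterpret_cast. A small sketch of the same unpacking, with a made-up name and assuming the result buffer is exposed as a pointer to const, as the existing const_cast in the tests suggests:

// Sketch only: converting a read-only untyped result buffer for a float
// comparison. Each conversion is spelled out: const_cast removes constness,
// static_cast changes the pointee type, and nothing else can sneak in the way
// one C-style cast could.
float *AsMutableFloats(const void *data)
{
    return static_cast<float *>(const_cast<void *>(data));
}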
ai/neural_network_runtime/v1_0/interface/src/MemoryTest.cpp  (+4, -4)

@@ -776,7 +776,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0100, Function |
             ASSERT_NE(nullptr, inputMemory);
             ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory));
-            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length));
+            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void*>(operandTem.data), operandTem.length));
         } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                    graphArgs.outputIndices.end()) {

@@ -815,7 +815,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0200, Function |
             OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, operandTem.length);
             ASSERT_NE(nullptr, inputMemory);
             ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory));
-            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length));
+            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void*>(operandTem.data), operandTem.length));
         }
     }
     ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor));

@@ -847,7 +847,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0300, Function |
         auto outputIndex = graphArgs.inputIndices.size() + j;
         // check memory output
         EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)),
-                                (float*) addModel.expectValue));
+                                static_cast<float*>(addModel.expectValue)));
         OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]);
         ASSERT_EQ(OHNNMemory[outputIndex], nullptr);
     }

@@ -891,7 +891,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0400, Function |
         auto outputIndex = graphArgs.inputIndices.size() + j;
         // check memory output
         EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)),
-                                (float*) avgModel.expectValue));
+                                static_cast<float*>(avgModel.expectValue)));
         OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]);
         ASSERT_EQ(OHNNMemory[outputIndex], nullptr);
     }
ai/neural_network_runtime/v2_0/common/mock_idevice.cpp  (+5, -0)

@@ -42,6 +42,11 @@ MockIDevice::~MockIDevice()
     }
 }
 
+MockIDevice::MockIDevice()
+{
+    m_bufferFd = 0;
+}
+
 MockIPreparedModel::~MockIPreparedModel()
 {
     for (auto ash : m_ashmems) {
ai/neural_network_runtime/v2_0/common/mock_idevice.h  (+1, -1)

@@ -92,7 +92,7 @@ public:
     static MockIDevice *GetInstance();
 
-    MockIDevice() = default;
+    MockIDevice();
 
     virtual ~MockIDevice();
 
 private:
ai/neural_network_runtime/v2_0/common/nnrt_utils.cpp  (+1, -1)

@@ -278,7 +278,7 @@ int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs,
                 LOGE("[NNRtTest] OH_NNExecutor_SetInputWithMemory failed! ret=%d\n", ret);
                 return ret;
             }
-            memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length);
+            memcpy_s(inputMemory->data, operandTem.length, static_cast<void*>(operandTem.data), operandTem.length);
             OHNNMemory[inputIndex] = inputMemory;
             inputIndex += 1;
         } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
ai/neural_network_runtime/v2_0/common/nnrt_utils.h  (+1, -1)

@@ -67,7 +67,7 @@ struct OHNNCompileParam {
     bool enableFp16 = false;
 };
 
-int BuildSingleOpGraph(OH_NNModel *modelptr, const OHNNGraphArgs &args);
+int BuildSingleOpGraph(OH_NNModel *model, const OHNNGraphArgs &graphArgs);
 int ExecutorWithMemory(OH_NNExecutor *executor, const OHNNGraphArgs &graphArgs, OH_NN_Memory *OHNNMemory[],
     float* expect);
ai/neural_network_runtime/v2_0/interface/src/CompileTest.cpp  (+3, -3)

@@ -68,7 +68,7 @@ public:
     }
 
 protected:
-    OHNNCompileParam compileParam;
+    OHNNCompileParam m_compileParam;
     AddModel addModel;
     OHNNGraphArgs graphArgs = addModel.graphArgs;
 };

@@ -835,8 +835,8 @@ HWTEST_F(CompileTest, SUB_AI_NNR_Func_North_Compilation_Combine_0100, Function |
     OH_NNCompilation *compilation2 = OH_NNCompilation_Construct(model2);
     ASSERT_NE(nullptr, compilation2);
-    std::thread th1(CompileModel, compilation1, compileParam);
-    std::thread th2(CompileModel, compilation2, compileParam);
+    std::thread th1(CompileModel, compilation1, m_compileParam);
+    std::thread th2(CompileModel, compilation2, m_compileParam);
     th1.join();
     th2.join();
     Free(model1, compilation1);
ai/neural_network_runtime/v2_0/interface/src/ExecutorTest.cpp  (+2, -2)

@@ -1240,7 +1240,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0400, Function |
             ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory));
-            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length));
+            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void*>(operandTem.data), operandTem.length));
             OHNNMemory[inputIndex] = inputMemory;
             inputIndex += 1;
         } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=

@@ -1310,7 +1310,7 @@ HWTEST_F(ExecutorTest, SUB_AI_NNRt_Func_North_Executor_Combine_0500, Function |
     ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_Run(executor));
 
     // check result
     EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(outputMemory->data)),
-                            (float*) addModel.expectValue));
+                            static_cast<float*>(addModel.expectValue)));
     OH_NNExecutor_DestroyOutputMemory(executor, 0, &outputMemory);
     ASSERT_EQ(outputMemory, nullptr);
ai/neural_network_runtime/v2_0/interface/src/MemoryTest.cpp  (+4, -4)

@@ -776,7 +776,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0100, Function |
             ASSERT_NE(nullptr, inputMemory);
             ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory));
-            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length));
+            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void*>(operandTem.data), operandTem.length));
         } else if (std::find(graphArgs.outputIndices.begin(), graphArgs.outputIndices.end(), i) !=
                    graphArgs.outputIndices.end()) {

@@ -815,7 +815,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0200, Function |
             OH_NN_Memory *inputMemory = OH_NNExecutor_AllocateInputMemory(executor, inputIndex, operandTem.length);
             ASSERT_NE(nullptr, inputMemory);
             ASSERT_EQ(OH_NN_SUCCESS, OH_NNExecutor_SetInputWithMemory(executor, inputIndex, &operand, inputMemory));
-            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, (void *) operandTem.data, operandTem.length));
+            ASSERT_EQ(EOK, memcpy_s(inputMemory->data, operandTem.length, static_cast<void*>(operandTem.data), operandTem.length));
         }
     }
     ASSERT_EQ(OH_NN_INVALID_PARAMETER, OH_NNExecutor_Run(executor));

@@ -847,7 +847,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0300, Function |
         auto outputIndex = graphArgs.inputIndices.size() + j;
         // check memory output
         EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)),
-                                (float*) addModel.expectValue));
+                                static_cast<float*>(addModel.expectValue)));
         OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]);
         ASSERT_EQ(OHNNMemory[outputIndex], nullptr);
     }

@@ -891,7 +891,7 @@ HWTEST_F(MemoryTest, SUB_AI_NNRt_Func_North_Executor_Memory_Run_0400, Function |
         auto outputIndex = graphArgs.inputIndices.size() + j;
         // check memory output
         EXPECT_TRUE(CheckOutput(static_cast<float*>(const_cast<void*>(OHNNMemory[outputIndex]->data)),
-                                (float*) avgModel.expectValue));
+                                static_cast<float*>(avgModel.expectValue)));
         OH_NNExecutor_DestroyOutputMemory(executor, j, &OHNNMemory[outputIndex]);
         ASSERT_EQ(OHNNMemory[outputIndex], nullptr);
     }