Commit 6a08d3ac (unverified)
Authored Jul 17, 2020 by ysh329; committed via GitHub on Jul 17, 2020.
[OPENCL][API] add opencl valid api for device. test=develop (#3951)
* add opencl valid api for device
Parent: 0db9f728
Showing 10 changed files with 123 additions and 27 deletions (+123, −27).
docs/demo_guides/opencl.md  +14 −3
lite/api/paddle_api.cc  +13 −0
lite/api/paddle_api.h  +3 −0
lite/backends/opencl/cl_runtime.cc  +22 −12
lite/backends/opencl/cl_runtime.h  +28 −3
lite/backends/opencl/cl_wrapper.cc  +9 −4
lite/backends/opencl/cl_wrapper.h  +8 −1
lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc  +22 −0
lite/tools/build.sh  +2 −2
lite/tools/ci_build.sh  +2 −2
docs/demo_guides/opencl.md

@@ -37,14 +37,25 @@ rm ./lite/api/paddle_use_kernels.h
 rm ./lite/api/paddle_use_ops.h
 
 # Set the build options and start the build
+# android-armv7:cpu+gpu+cv+extra
 ./lite/tools/build_android.sh \
   --arch=armv7 \
   --toolchain=clang \
-  --with_cv=OFF \
   --with_log=OFF \
-  --with_extra=OFF \
+  --with_extra=ON \
+  --with_cv=ON \
   --with_opencl=ON
+
+# android-armv8:cpu+gpu+cv+extra
+./lite/tools/build_android.sh \
+  --arch=armv8 \
+  --toolchain=clang \
+  --with_log=OFF \
+  --with_extra=ON \
+  --with_cv=ON \
+  --with_opencl=ON
 
 # Note: for build help, run: ./lite/tools/build_android.sh help
 ```

@@ -206,7 +217,7 @@ adb shell "export GLOG_v=4; \
 
 ## 3. How to use it in code
 
-That is, the code under the `demo/cxx/mobile_light` directory of the build output; for an online version, refer to [./lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc](https://github.com/PaddlePaddle/Paddle-Lite/blob/develop/lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc) in the GitHub repository;
+That is, the code under the `demo/cxx/mobile_light` directory of the build output; for an online version, refer to [./lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc](https://github.com/PaddlePaddle/Paddle-Lite/blob/develop/lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc) in the GitHub repository, which also includes a way to check whether the current device supports OpenCL;
 
 Note: the link above points to the latest code on the develop branch, which may well differ from your local copy; it is recommended to consult the code in your local `lite/demo/cxx/` directory to see how it is used.
lite/api/paddle_api.cc

@@ -32,9 +32,22 @@
 #include "lite/backends/mlu/target_wrapper.h"
 #endif
+#ifdef LITE_WITH_OPENCL
+#include "lite/backends/opencl/cl_runtime.h"
+#endif
 
 namespace paddle {
 namespace lite_api {
 
+bool IsOpenCLBackendValid() {
+  bool opencl_valid = false;
+#ifdef LITE_WITH_OPENCL
+  opencl_valid = paddle::lite::CLRuntime::Global()->OpenCLAvaliableForDevice();
+#endif
+  LOG(INFO) << "opencl_valid:" << opencl_valid;
+  return opencl_valid;
+}
+
 Tensor::Tensor(void *raw) : raw_tensor_(raw) {}
 
 // TODO(Superjomn) refine this by using another `const void* const_raw`;
lite/api/paddle_api.h

@@ -33,6 +33,9 @@ using lod_t = std::vector<std::vector<uint64_t>>;
 enum class LiteModelType { kProtobuf = 0, kNaiveBuffer, UNK };
 
+// return true if current device supports OpenCL model
+LITE_API bool IsOpenCLBackendValid();
+
 struct LITE_API Tensor {
   explicit Tensor(void* raw);
   explicit Tensor(const void* raw);
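For reference, below is a minimal sketch of how an application could use the new public API declared above. It assumes the usual Paddle-Lite light-API flow (MobileConfig plus CreatePaddlePredictor) and uses placeholder .nb model paths; it is an illustration, not part of this commit.

#include <iostream>
#include <string>
#include "paddle_api.h"  // light API header shipped with the Paddle-Lite inference lib

int main() {
  // Hypothetical .nb model paths produced by the opt tool.
  const std::string opencl_model_path = "mobilenet_v1_opencl.nb";
  const std::string cpu_model_path = "mobilenet_v1_cpu.nb";

  paddle::lite_api::MobileConfig config;
  if (paddle::lite_api::IsOpenCLBackendValid()) {
    // Device passed the OpenCL checks: load the OpenCL nb model.
    config.set_model_from_file(opencl_model_path);
  } else {
    // Fall back to a CPU nb model.
    std::cout << "OpenCL backend not available, using CPU model." << std::endl;
    config.set_model_from_file(cpu_model_path);
  }

  auto predictor =
      paddle::lite_api::CreatePaddlePredictor<paddle::lite_api::MobileConfig>(config);
  // ... set inputs, predictor->Run(), fetch outputs ...
  return 0;
}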
lite/backends/opencl/cl_runtime.cc

@@ -38,17 +38,20 @@ CLRuntime::~CLRuntime() {
 }
 
 bool CLRuntime::Init() {
-  if (initialized_) {
+  if (is_cl_runtime_initialized_) {
     return true;
   }
   bool is_platform_init = InitializePlatform();
   bool is_device_init = InitializeDevice();
-  is_init_success_ = is_platform_init && is_device_init;
-  initialized_ = true;
-  context_ = CreateContext();
-  command_queue_ = CreateCommandQueue(context());
-  return initialized_;
+  LOG(INFO) << "is_platform_init:" << is_platform_init;
+  LOG(INFO) << "is_device_init:" << is_device_init;
+  if ((is_platform_init == true) && (is_device_init == true)) {
+    is_platform_device_init_success_ = true;
+    context_ = CreateContext();
+    command_queue_ = CreateCommandQueue(context());
+    is_cl_runtime_initialized_ = true;
+  }
+  return is_cl_runtime_initialized_;
 }
 
 cl::Platform& CLRuntime::platform() {

@@ -64,7 +67,9 @@ cl::Context& CLRuntime::context() {
 }
 
 cl::Device& CLRuntime::device() {
-  CHECK(device_ != nullptr) << "device_ is not initialized!";
+  if (device_ == nullptr) {
+    LOG(ERROR) << "device_ is not initialized!";
+  }
   return *device_;
 }

@@ -150,6 +155,14 @@ GpuType CLRuntime::ParseGpuTypeFromDeviceName(std::string device_name) {
 }
 
 bool CLRuntime::InitializeDevice() {
+  VLOG(3) << "device_info_.size():" << device_info_.size();
+  for (auto i : device_info_) {
+    VLOG(3) << ">>> " << i.first << " " << i.second;
+  }
+  if (device_info_.size() > 0 && device_info_.size() <= 2) {
+    return false;
+  }
+  device_info_["PLACEHOLDER"] = 1;
   // ===================== BASIC =====================
   // CL_DEVICE_TYPE_GPU
   // CL_DEVICE_NAME

@@ -160,7 +173,7 @@ bool CLRuntime::InitializeDevice() {
   status_ = platform_->getDevices(CL_DEVICE_TYPE_GPU, &all_devices);
   CL_CHECK_ERROR(status_);
   if (all_devices.empty()) {
-    LOG(FATAL) << "No OpenCL GPU device found!";
+    LOG(ERROR) << "No available OpenCL GPU device found!";
     return false;
   }
   device_ = std::make_shared<cl::Device>();

@@ -313,9 +326,6 @@ bool CLRuntime::InitializeDevice() {
 }
 
 std::map<std::string, size_t>& CLRuntime::GetDeviceInfo() {
-  if (0 != device_info_.size()) {
-    return device_info_;
-  }
   InitializeDevice();
   return device_info_;
 }
lite/backends/opencl/cl_runtime.h

@@ -18,6 +18,7 @@ limitations under the License. */
 #include <vector>
 #include "lite/backends/opencl/cl_include.h"
 #include "lite/backends/opencl/cl_utility.h"
+#include "lite/backends/opencl/cl_wrapper.h"
 
 typedef enum {
   UNKNOWN = 0,

@@ -68,6 +69,28 @@ class CLRuntime {
  public:
   static CLRuntime* Global();
 
+  bool OpenCLAvaliableForDevice() {
+    bool opencl_lib_found = paddle::lite::CLWrapper::Global()->OpenclLibFound();
+    LOG(INFO) << "opencl_lib_found:" << opencl_lib_found;
+    if (opencl_lib_found == false) return false;
+
+    bool dlsym_success = paddle::lite::CLWrapper::Global()->DlsymSuccess();
+    LOG(INFO) << "dlsym_success:" << dlsym_success;
+    if (opencl_lib_found == false) return false;
+
+    InitializeDevice();
+    bool support_fp16 =
+        static_cast<bool>(device_info_["CL_DEVICE_EXTENSIONS_FP16"]);
+    LOG(INFO) << "support_fp16:" << support_fp16;
+    if (support_fp16 == false) return false;
+
+    is_device_avaliable_for_opencl_ =
+        dlsym_success && opencl_lib_found && support_fp16;
+    LOG(INFO) << "is_device_avaliable_for_opencl_:"
+              << is_device_avaliable_for_opencl_;
+    return is_device_avaliable_for_opencl_;
+  }
+
   bool Init();
 
   cl::Platform& platform();

@@ -85,7 +108,7 @@ class CLRuntime {
 
   bool BuildProgram(cl::Program* program, const std::string& options = "");
 
-  bool IsInitSuccess() { return is_init_success_; }
+  bool IsInitSuccess() { return is_platform_device_init_success_; }
 
   std::string cl_path() { return cl_path_; }

@@ -167,9 +190,11 @@ class CLRuntime {
 
   cl_int status_{CL_SUCCESS};
 
-  bool initialized_{false};
+  bool is_device_avaliable_for_opencl_{false};
+
+  bool is_cl_runtime_initialized_{false};
 
-  bool is_init_success_{false};
+  bool is_platform_device_init_success_{false};
 };
 
 }  // namespace lite
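The fp16 gate in OpenCLAvaliableForDevice() reads a `CL_DEVICE_EXTENSIONS_FP16` entry that InitializeDevice() is expected to fill in; presumably it reflects whether the device's CL_DEVICE_EXTENSIONS string lists `cl_khr_fp16`. Below is a standalone sketch of that check against the plain OpenCL C API, independent of Paddle-Lite and assuming an OpenCL ICD and headers are available; the mapping to Paddle-Lite's internal key is an assumption.

// Standalone sketch: does the first OpenCL GPU advertise cl_khr_fp16?
// Build with, e.g.: g++ fp16_check.cc -lOpenCL  (assumes an OpenCL ICD is installed)
#include <CL/cl.h>
#include <cstdio>
#include <string>
#include <vector>

int main() {
  cl_platform_id platform = nullptr;
  cl_device_id device = nullptr;
  if (clGetPlatformIDs(1, &platform, nullptr) != CL_SUCCESS ||
      clGetDeviceIDs(platform, CL_DEVICE_TYPE_GPU, 1, &device, nullptr) != CL_SUCCESS) {
    std::printf("no OpenCL GPU device found\n");
    return 1;
  }

  // Query the extensions string in two steps: size first, then contents.
  size_t ext_size = 0;
  clGetDeviceInfo(device, CL_DEVICE_EXTENSIONS, 0, nullptr, &ext_size);
  std::vector<char> buf(ext_size);
  clGetDeviceInfo(device, CL_DEVICE_EXTENSIONS, ext_size, buf.data(), nullptr);

  const bool support_fp16 =
      std::string(buf.begin(), buf.end()).find("cl_khr_fp16") != std::string::npos;
  std::printf("support_fp16:%d\n", support_fp16);
  return support_fp16 ? 0 : 1;
}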
lite/backends/opencl/cl_wrapper.cc

@@ -19,14 +19,16 @@ limitations under the License. */
 namespace paddle {
 namespace lite {
 
 CLWrapper *CLWrapper::Global() {
   static CLWrapper wrapper;
   return &wrapper;
 }
 
 CLWrapper::CLWrapper() {
-  CHECK(InitHandle()) << "Fail to initialize the OpenCL library!";
-  InitFunctions();
+  opencl_lib_found_ = InitHandle();
+  CHECK(opencl_lib_found_) << "Fail to initialize the OpenCL library!";
+  dlsym_success_ = InitFunctions();
 }
 
 bool CLWrapper::InitHandle() {

@@ -68,15 +70,17 @@ bool CLWrapper::InitHandle() {
   }
 }
 
-void CLWrapper::InitFunctions() {
+bool CLWrapper::InitFunctions() {
   CHECK(handle_ != nullptr) << "The library handle can't be null!";
+  bool dlsym_success = true;
 
 #define PADDLE_DLSYM(cl_func)                                        \
   do {                                                               \
     cl_func##_ = (cl_func##Type)dlsym(handle_, #cl_func);            \
     if (cl_func##_ == nullptr) {                                     \
-      LOG(FATAL) << "Cannot find the " << #cl_func                   \
+      LOG(ERROR) << "Cannot find the " << #cl_func                   \
                  << " symbol in libOpenCL.so!";                      \
+      dlsym_success = false;                                         \
       break;                                                         \
     }                                                                \
     VLOG(4) << "Loaded the " << #cl_func << " symbol successfully."; \

@@ -137,6 +141,7 @@ void CLWrapper::InitFunctions() {
   PADDLE_DLSYM(clEnqueueCopyImage);
 
 #undef PADDLE_DLSYM
+  return dlsym_success;
 }
 
 }  // namespace lite
lite/backends/opencl/cl_wrapper.h

@@ -508,13 +508,20 @@ class CLWrapper final {
     return clEnqueueCopyImage_;
   }
 
+  bool OpenclLibFound() { return opencl_lib_found_; }
+
+  bool DlsymSuccess() { return dlsym_success_; }
+
  private:
   CLWrapper();
   CLWrapper(const CLWrapper &) = delete;
   CLWrapper &operator=(const CLWrapper &) = delete;
   bool InitHandle();
-  void InitFunctions();
+  bool InitFunctions();
+  bool opencl_lib_found_{true};
+  bool dlsym_success_{true};
   void *handle_{nullptr};
   clGetPlatformIDsType clGetPlatformIDs_{nullptr};
   clGetPlatformInfoType clGetPlatformInfo_{nullptr};
   clBuildProgramType clBuildProgram_{nullptr};
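The wrapper changes above record whether the OpenCL library could be loaded and whether every symbol resolved, instead of failing hard, so OpenCLAvaliableForDevice() can report them. Below is a standalone sketch of that dlopen/dlsym probing pattern; `ProbeOpenCL` and the candidate library names are illustrative assumptions, not Paddle-Lite code.

// Minimal sketch of the "probe, don't abort" pattern: record whether an OpenCL
// library can be loaded and whether a representative symbol resolves.
#include <dlfcn.h>
#include <cstdio>

struct OpenCLProbe {
  bool lib_found{false};
  bool dlsym_success{false};
};

OpenCLProbe ProbeOpenCL() {
  OpenCLProbe probe;
  // Candidate library names; real code (including CLWrapper) tries several paths.
  const char* candidates[] = {"libOpenCL.so", "libGLES_mali.so", "libmali.so"};
  void* handle = nullptr;
  for (const char* name : candidates) {
    handle = dlopen(name, RTLD_LAZY);
    if (handle != nullptr) break;
  }
  probe.lib_found = (handle != nullptr);
  if (!probe.lib_found) return probe;

  // Resolve one representative symbol instead of aborting on failure.
  probe.dlsym_success = (dlsym(handle, "clGetPlatformIDs") != nullptr);
  return probe;
}

int main() {
  OpenCLProbe probe = ProbeOpenCL();
  std::printf("opencl_lib_found:%d dlsym_success:%d\n",
              probe.lib_found, probe.dlsym_success);
  return 0;
}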
lite/demo/cxx/mobile_light/mobilenetv1_light_api.cc

@@ -78,6 +78,28 @@ void RunModel(std::string model_dir,
   // 1. Set MobileConfig
   MobileConfig config;
   config.set_model_from_file(model_dir);
+  // NOTE: Use android gpu with opencl, you should ensure:
+  //  first, [compile **cpu+opencl** paddlelite
+  //    lib](https://github.com/PaddlePaddle/Paddle-Lite/blob/develop/docs/demo_guides/opencl.md);
+  //  second, [convert and use opencl nb
+  //    model](https://github.com/PaddlePaddle/Paddle-Lite/blob/develop/docs/user_guides/opt/opt_bin.md).
+  //
+  /* Uncomment code below to enable OpenCL
+  bool is_opencl_backend_valid = ::IsOpenCLBackendValid();
+  std::cout << "is_opencl_backend_valid:" << is_opencl_backend_valid <<
+  std::endl;
+  if (is_opencl_backend_valid) {
+    // give opencl nb model dir
+    config.set_model_from_file(model_dir);
+  } else {
+    std::cout << "Unsupport opencl nb model." << std::endl;
+    exit(1);
+    // you can give backup cpu nb model instead
+    // config.set_model_from_file(cpu_nb_model_dir);
+  }
+  */
+
   // NOTE: To load model transformed by model_optimize_tool before
   // release/v2.3.0, plese use `set_model_dir` API as listed below.
   // config.set_model_dir(model_dir);
lite/tools/build.sh

@@ -39,8 +39,8 @@ readonly THIRDPARTY_TAR=https://paddle-inference-dist.bj.bcebos.com/PaddleLite/t
 readonly workspace=$PWD
 
 # if operating in mac env, we should expand the maximum file num
-os_nmae=`uname -s`
-if [ ${os_nmae} == "Darwin" ]; then
+os_name=`uname -s`
+if [ ${os_name} == "Darwin" ]; then
    ulimit -n 1024
 fi
lite/tools/ci_build.sh

@@ -21,8 +21,8 @@ USE_ADB_EMULATOR=ON
 LITE_WITH_COVERAGE=OFF
 
 # if operating in mac env, we should expand the maximum file num
-os_nmae=`uname -s`
-if [ ${os_nmae} == "Darwin" ]; then
+os_name=`uname -s`
+if [ ${os_name} == "Darwin" ]; then
    ulimit -n 1024
 fi