机器未来 / Paddle (forked from PaddlePaddle / Paddle, in sync with the fork source)
Commit 8c6bbb48 (unverified)
Authored May 27, 2021 by Jacek Czaja; committed via GitHub on May 27, 2021
[oneDNN] Accesses to oneDNN cache optimized for conv2d (#33048)
Parent: 9b203ef3
Showing 2 changed files with 307 additions and 205 deletions (+307 -205)

paddle/fluid/operators/mkldnn/conv_mkldnn_op.cc   +245 -204
paddle/fluid/platform/mkldnn_reuse.h              +62 -1
paddle/fluid/operators/mkldnn/conv_mkldnn_op.cc (view file @ 8c6bbb48)
This diff is collapsed.
paddle/fluid/platform/mkldnn_reuse.h (view file @ 8c6bbb48)
@@ -35,7 +35,8 @@ using user_function = std::function<std::shared_ptr<float>(const float*)>;
 using memory = mkldnn::memory;
 
 template <typename T, typename TForward,
-          typename TBackward = mkldnn_dummy_primitive>
+          typename TBackward = mkldnn_dummy_primitive,
+          typename TBackward_params = mkldnn_dummy_primitive>
 class MKLDNNHandlerT {
  public:
   MKLDNNHandlerT(const MKLDNNDeviceContext& dev_ctx, mkldnn::engine engine,
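The new TBackward_params parameter defaults to mkldnn_dummy_primitive, so existing handlers that never touch a backward-weights primitive compile unchanged. The handler only relies on the primitive type exposing nested desc and primitive_desc types; the sketch below is an illustrative placeholder that satisfies this contract (an assumption for illustration, not Paddle's actual mkldnn_dummy_primitive).

// Illustrative placeholder for the interface MKLDNNHandlerT expects of its
// TBackward_params argument: nested ::desc and ::primitive_desc types that
// can be constructed from arbitrary arguments. This is a sketch, not the
// real mkldnn_dummy_primitive used by Paddle.
struct dummy_backward_weights_primitive {
  struct desc {
    template <typename... Args>
    explicit desc(Args&&...) {}
  };
  struct primitive_desc {
    template <typename... Args>
    explicit primitive_desc(Args&&...) {}
  };
  template <typename... Args>
  explicit dummy_backward_weights_primitive(Args&&...) {}
};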
@@ -72,6 +73,21 @@ class MKLDNNHandlerT {
     return backward_p;
   }
 
+  std::shared_ptr<TBackward_params> AcquireBackwardWeightsPrimitive() {
+    const std::string key_p = key_ + "@bwd_w_p";
+    auto backward_p =
+        std::static_pointer_cast<TBackward_params>(dev_ctx_.GetBlob(key_p));
+    if (backward_p == nullptr) {
+      PADDLE_ENFORCE_NOT_NULL(bwd_w_pd_, platform::errors::Unavailable(
+                                             "Error: BWD_PD should be set when "
+                                             "getting BWD prim with key: %s .",
+                                             key_p));
+      backward_p = std::make_shared<TBackward_params>(*bwd_w_pd_);
+      dev_ctx_.SetBlob(key_p, backward_p);
+    }
+    return backward_p;
+  }
+
   std::shared_ptr<mkldnn::memory> AcquireSrcMemory(
       const framework::Tensor* input) {
     const T* input_data = input->data<T>();
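The added AcquireBackwardWeightsPrimitive follows the handler's usual get-or-create pattern against the device context's blob cache: look the primitive up under a string key, build it from the cached primitive descriptor on a miss, and store it back for the next call. A self-contained sketch of that pattern follows; BlobCache is a stand-in for MKLDNNDeviceContext's GetBlob/SetBlob storage, not the real Paddle class.

#include <map>
#include <memory>
#include <string>

// Stand-in for the device context blob storage (assumption; the real
// MKLDNNDeviceContext additionally scopes its blobs per session and thread).
class BlobCache {
 public:
  std::shared_ptr<void> GetBlob(const std::string& key) const {
    auto it = blobs_.find(key);
    return it == blobs_.end() ? nullptr : it->second;
  }
  void SetBlob(const std::string& key, std::shared_ptr<void> value) {
    blobs_[key] = std::move(value);
  }

 private:
  std::map<std::string, std::shared_ptr<void>> blobs_;
};

// Get-or-create: reuse a cached primitive if present, otherwise build it
// from its primitive descriptor and cache it for the next call.
template <typename Primitive, typename PrimitiveDesc>
std::shared_ptr<Primitive> AcquireCachedPrimitive(BlobCache* cache,
                                                  const std::string& key,
                                                  const PrimitiveDesc& pd) {
  auto p = std::static_pointer_cast<Primitive>(cache->GetBlob(key));
  if (p == nullptr) {
    p = std::make_shared<Primitive>(pd);
    cache->SetBlob(key, p);
  }
  return p;
}

In the handler itself the cached objects are distinguished by appending suffixes such as "@bwd_w_p" and "@bwd_w_pd" to the per-instance key_.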
@@ -116,6 +132,29 @@ class MKLDNNHandlerT {
                                             "@diff_src_mem_p");
   }
 
+  // Buffer of given Tensor is used for oneDNN computation
+  std::shared_ptr<mkldnn::memory> AcquireDiffWeightsMemory(
+      framework::Tensor* diff_weights) {
+    PADDLE_ENFORCE_NOT_NULL(
+        bwd_w_pd_,
+        platform::errors::Unavailable(
+            "Error: BWD_W_PD should be set when getting BWD grad of weights."));
+    T* ptr = diff_weights->mutable_data<T>(
+        place_, bwd_w_pd_->diff_weights_desc().get_size());
+    return this->AcquireMemoryFromPrimitive(bwd_w_pd_->diff_weights_desc(), ptr,
+                                            "@diff_wei_mem_p");
+  }
+
+  // Buffer is allocated by oneDNN to store computation results
+  std::shared_ptr<mkldnn::memory> AcquireDiffWeightsMemory(void) {
+    PADDLE_ENFORCE_NOT_NULL(
+        bwd_w_pd_,
+        platform::errors::Unavailable(
+            "Error: BWD_W_PD should be set when getting BWD grad of weights."));
+    return this->AcquireMemoryFromPrimitive(bwd_w_pd_->diff_weights_desc(),
+                                            "@diff_wei_mem_p");
+  }
+
  protected:
   bool isCached() {
     const std::string key_pd = key_common_ + "@fwd_pd";
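The two AcquireDiffWeightsMemory overloads differ only in who owns the output buffer: the first maps the caller's Tensor storage onto the diff-weights memory descriptor, the second lets oneDNN allocate the buffer itself. The sketch below shows the oneDNN memory constructors that correspond to this choice; the descriptor, engine, and buffer arguments are illustrative, and the reorder remark is an assumption about when the second overload is useful.

#include "mkldnn.hpp"

// Illustration of the ownership choice behind the two overloads above.
void diff_weights_memory_sketch(const mkldnn::memory::desc& diff_w_md,
                                const mkldnn::engine& eng, float* tensor_buf) {
  // Caller-owned buffer: oneDNN writes results directly into the Tensor's
  // storage (what the Tensor* overload does via mutable_data<T>).
  mkldnn::memory into_tensor(diff_w_md, eng, tensor_buf);

  // Library-owned buffer: oneDNN allocates memory for the result, e.g. when
  // the primitive's preferred layout differs from the Tensor layout and the
  // result is reordered afterwards (what the void overload does).
  mkldnn::memory owned_by_onednn(diff_w_md, eng);
}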
@@ -243,6 +282,27 @@ class MKLDNNHandlerT {
     }
   }
 
+  template <typename... Args>
+  void AcquireBackwardWeightsPrimitiveDescriptorNonBlocking(Args&&... args) {
+    // fwd_pd_ is set during grad by calling
+    // AcquireForwardPrimitiveDescriptorNonBlocking
+    PADDLE_ENFORCE_NOT_NULL(fwd_pd_,
+                            platform::errors::Unavailable(
+                                "Get MKLDNN Forward primitive %s failed.",
+                                key_ + "@fwd_pd"));
+    const std::string key_pd = key_ + "@bwd_w_pd";
+    bwd_w_pd_ =
+        std::static_pointer_cast<typename TBackward_params::primitive_desc>(
+            dev_ctx_.GetBlob(key_pd));
+    if (bwd_w_pd_ == nullptr) {
+      auto bwd_desc =
+          typename TBackward_params::desc(std::forward<Args>(args)...);
+      bwd_w_pd_ = std::make_shared<typename TBackward_params::primitive_desc>(
+          bwd_desc, engine_, *fwd_pd_);
+      dev_ctx_.SetBlob(key_pd, bwd_w_pd_);
+    }
+  }
+
   std::shared_ptr<mkldnn::memory> AcquireMemoryFromPrimitive(
       const std::string& suffix) {
     return std::static_pointer_cast<mkldnn::memory>(
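Together with the conv_mkldnn_op.cc changes (collapsed above), the PADDLE_ENFORCE_NOT_NULL checks imply a calling order for the backward-weights path: acquire the primitive descriptor first, then the primitive and its memories. The mock below only demonstrates that ordering; it is a hypothetical sketch, not the real conv2d grad kernel.

#include <cassert>

// Minimal mock of the handler showing the ordering implied by the
// enforcement checks above: the backward-weights primitive descriptor
// (bwd_w_pd_) must exist before the primitive or its memories are acquired.
struct MockBwdWeightsHandler {
  bool has_bwd_w_pd = false;

  void AcquireBackwardWeightsPrimitiveDescriptorNonBlocking() {
    has_bwd_w_pd = true;  // corresponds to setting bwd_w_pd_
  }
  void AcquireBackwardWeightsPrimitive() {
    assert(has_bwd_w_pd && "BWD_PD should be set first");
  }
  void AcquireDiffWeightsMemory() {
    assert(has_bwd_w_pd && "BWD_W_PD should be set first");
  }
};

int main() {
  MockBwdWeightsHandler handler;
  handler.AcquireBackwardWeightsPrimitiveDescriptorNonBlocking();  // 1. pd
  handler.AcquireBackwardWeightsPrimitive();                       // 2. prim
  handler.AcquireDiffWeightsMemory();                              // 3. memory
  // 4. execute the primitive with src/diff_dst/diff_weights arguments.
  return 0;
}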
@@ -370,6 +430,7 @@ class MKLDNNHandlerT {
   std::string key_;
   std::shared_ptr<typename TForward::primitive_desc> fwd_pd_;
   std::shared_ptr<typename TBackward::primitive_desc> bwd_pd_;
+  std::shared_ptr<typename TBackward_params::primitive_desc> bwd_w_pd_;
 };
 
 // TODO(grygielski) this class will be deleted later.