机器未来 / Paddle (forked from PaddlePaddle / Paddle)
Commit 2e72a0e3
Authored Aug 17, 2020 by zlsh80826
skip layer norm w/ nvinfer plugin
Parent: ea6ff5a2
Showing 1 changed file with 46 additions and 1 deletion (+46, -1)
paddle/fluid/inference/tensorrt/convert/skip_layernorm.cc
@@ -47,17 +47,62 @@ class SkipLayerNormOpConverter : public OpConverter {
    framework::DDim bias_dims, scale_dims;
    auto* bias = get_persistable_data("Bias", &bias_dims);
    auto* scale = get_persistable_data("Scale", &scale_dims);
    float eps = BOOST_GET_CONST(float, op_desc.GetAttr("epsilon"));
    int bias_size = framework::product(bias_dims);
    int scale_size = framework::product(scale_dims);

    nvinfer1::ILayer* layer = nullptr;
    if (engine_->with_dynamic_shape()) {
#ifdef USE_NVINFER_PLUGIN
      auto creator = GetPluginRegistry()->getPluginCreator(
          "CustomSkipLayerNormPluginDynamic", "1");
      assert(creator != nullptr);
      int type = static_cast<int>((engine_->WithFp16() == 1)
                                      ? nvinfer1::DataType::kHALF
                                      : nvinfer1::DataType::kFLOAT);
      int ld = input1->getDimensions().d[2];  // hidden dimension
      assert(ld > 0);

      const std::vector<nvinfer1::PluginField> fields{
          {"type_id", &type, nvinfer1::PluginFieldType::kINT32, 1},
          {"ld", &ld, nvinfer1::PluginFieldType::kINT32, 1},
          {"beta", bias, nvinfer1::PluginFieldType::kFLOAT32, bias_size},
          {"gamma", scale, nvinfer1::PluginFieldType::kFLOAT32, scale_size},
      };
      nvinfer1::PluginFieldCollection* pluginPtr =
          static_cast<nvinfer1::PluginFieldCollection*>(malloc(
              sizeof(*pluginPtr) +
              fields.size() * sizeof(nvinfer1::PluginField)));  // remember to free
      pluginPtr->nbFields = static_cast<int>(fields.size());
      pluginPtr->fields = fields.data();

      auto pluginObj =
          creator->createPlugin("CustomSkipLayerNormPluginDynamic", pluginPtr);

      nvinfer1::Permutation permutation{1, 0, 2, 3, 4};
      auto trans_layer0 = TRT_ENGINE_ADD_LAYER(engine_, Shuffle, *inputs[0]);
      auto trans_layer1 = TRT_ENGINE_ADD_LAYER(engine_, Shuffle, *inputs[1]);
      trans_layer0->setFirstTranspose(permutation);
      trans_layer1->setFirstTranspose(permutation);

      std::vector<nvinfer1::ITensor*> trans_tensors;
      trans_tensors.emplace_back(trans_layer0->getOutput(0));
      trans_tensors.emplace_back(trans_layer1->getOutput(0));

      auto plugin_layer = engine_->network()->addPluginV2(
          trans_tensors.data(), trans_tensors.size(), *pluginObj);
      assert(plugin_layer != nullptr);

      auto trans_layer = TRT_ENGINE_ADD_LAYER(engine_, Shuffle,
                                              *plugin_layer->getOutput(0));
      assert(trans_layer != nullptr);
      trans_layer->setFirstTranspose(permutation);
      layer = trans_layer;
#else
      float eps = BOOST_GET_CONST(float, op_desc.GetAttr("epsilon"));
      bool ban_fp16 = engine_->disable_trt_plugin_fp16();
      plugin::SkipLayerNormPluginDynamic* plugin =
          new plugin::SkipLayerNormPluginDynamic(bias, scale, bias_size,
                                                 scale_size, eps, ban_fp16);
      layer = engine_->AddPluginV2(inputs.data(), 2, plugin);
#endif
    } else {
      PADDLE_THROW(platform::errors::Fatal(
          "You are running the Ernie(Bert) model in static"
          ...
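For context, the USE_NVINFER_PLUGIN branch above builds the plugin's constant arguments by hand: it looks up the "CustomSkipLayerNormPluginDynamic" creator in the plugin registry, heap-allocates a nvinfer1::PluginFieldCollection, points it at a local std::vector of PluginField entries, and passes it to createPlugin. The sketch below shows that same pattern in isolation against the plain TensorRT API. It is a minimal sketch, not Paddle code: it calls TensorRT's getPluginRegistry() directly rather than Paddle's GetPluginRegistry() wrapper, it assumes the plugin library that registers CustomSkipLayerNormPluginDynamic has already been loaded (for example via initLibNvInferPlugins), and the helper name MakeSkipLayerNormPlugin and its parameters are illustrative only.

// Sketch: create a skip-layernorm plugin instance through the TensorRT
// plugin registry using a PluginFieldCollection, mirroring the diff above.
#include <NvInfer.h>

#include <cassert>
#include <vector>

nvinfer1::IPluginV2* MakeSkipLayerNormPlugin(const float* beta,
                                             const float* gamma,
                                             int param_size, int hidden_size,
                                             bool with_fp16) {
  // Look up the creator registered under this name/version (assumes the
  // plugin library has been initialized beforehand).
  auto* creator = getPluginRegistry()->getPluginCreator(
      "CustomSkipLayerNormPluginDynamic", "1");
  assert(creator != nullptr);

  int type_id = static_cast<int>(with_fp16 ? nvinfer1::DataType::kHALF
                                           : nvinfer1::DataType::kFLOAT);

  // Same fields as the converter above: data type, hidden size, beta, gamma.
  std::vector<nvinfer1::PluginField> fields{
      {"type_id", &type_id, nvinfer1::PluginFieldType::kINT32, 1},
      {"ld", &hidden_size, nvinfer1::PluginFieldType::kINT32, 1},
      {"beta", beta, nvinfer1::PluginFieldType::kFLOAT32, param_size},
      {"gamma", gamma, nvinfer1::PluginFieldType::kFLOAT32, param_size},
  };

  // The collection only needs to outlive the createPlugin() call, so a stack
  // object suffices here; the diff instead malloc's it and must free it later.
  nvinfer1::PluginFieldCollection fc;
  fc.nbFields = static_cast<int>(fields.size());
  fc.fields = fields.data();

  return creator->createPlugin("skip_layernorm", &fc);
}

As a side note on the converter's design, the Shuffle layers with permutation {1, 0, 2, 3, 4} appear to transpose the two inputs into the sequence-major layout this plugin family expects and then transpose the plugin output back, which is why the plugin layer is sandwiched between three Shuffle layers rather than wired to the inputs directly.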