Unverified  Commit 4805da50  Authored by: W Wangzheee  Committed by: GitHub

[Paddle Inference]Disable skip layernorm half (#45047)

* disable_skip_layernorm_fp16
Parent 3f49817a
@@ -163,8 +163,10 @@ class SkipLayerNormOpConverter : public OpConverter {
     auto scale_weight = GetFp32Weight("Scale").get();
     float eps = PADDLE_GET_CONST(float, op_desc.GetAttr("epsilon"));
-    bool with_fp16 =
-        engine_->WithFp16() && !engine_->disable_trt_plugin_fp16();
+    // bool with_fp16 =
+    //     engine_->WithFp16() && !engine_->disable_trt_plugin_fp16();
+    bool with_fp16 = false;
     plugin::SkipLayerNormPluginDynamic* plugin =
         new plugin::SkipLayerNormPluginDynamic(
             static_cast<const float*>(bias_weight.values),
......
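For context, a minimal hypothetical C++ sketch of the precision decision this commit changes: previously the skip_layernorm plugin's precision followed the engine's FP16 flags; after this change the plugin is always built in FP32. The FakeEngine type and main() below are illustrative stand-ins, not Paddle code; only the two accessor names mirror what is visible in the diff.

#include <cstdio>

// Hypothetical stand-in for the TensorRT engine wrapper seen in the diff.
struct FakeEngine {
  bool fp16_enabled = true;          // engine built with FP16 enabled
  bool plugin_fp16_disabled = false; // user opted out of plugin FP16
  bool WithFp16() const { return fp16_enabled; }
  bool disable_trt_plugin_fp16() const { return plugin_fp16_disabled; }
};

int main() {
  FakeEngine engine;

  // Before this commit: plugin precision followed the engine flags.
  bool with_fp16_old = engine.WithFp16() && !engine.disable_trt_plugin_fp16();

  // After this commit: the skip_layernorm plugin is pinned to FP32
  // regardless of the engine configuration.
  bool with_fp16_new = false;

  std::printf("old=%d new=%d\n", with_fp16_old, with_fp16_new);
  return 0;
}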