Unverified commit d5ff367b authored by Pei Yang and committed by GitHub

conflict (#29498)

Parent 6bfc5721
@@ -65,13 +65,21 @@ class HardSwishOpConverter : public OpConverter {
  const float offset = op_desc.HasAttr("offset")
                           ? BOOST_GET_CONST(float, op_desc.GetAttr("offset"))
                           : 3.0f;
  nvinfer1::ILayer* layer = nullptr;
  if (threshold == scale) {
    auto* hsig_layer = TRT_ENGINE_ADD_LAYER(
        engine_, Activation, *input, nvinfer1::ActivationType::kHARD_SIGMOID);
    hsig_layer->setAlpha(1.0 / scale);
    hsig_layer->setBeta(offset / scale);
    nvinfer1::IElementWiseLayer* eltwise_layer = TRT_ENGINE_ADD_LAYER(
        engine_, ElementWise, *input, *(hsig_layer->getOutput(0)),
        nvinfer1::ElementWiseOperation::kPROD);
    layer = eltwise_layer;
  } else {
    plugin::HardSwishPlugin* plugin =
        new plugin::HardSwishPlugin(threshold, scale, offset);
    layer = engine_->AddPlugin(&input, input_num, plugin);
  }
  auto output_name = op_desc.Output("Out")[0];
  RreplenishLayerAndOutput(layer, "hard_swish", {output_name}, test_mode);
}
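The key change in this hunk is the threshold == scale fast path. Paddle defines hard_swish(x) = x * min(max(x + offset, 0), threshold) / scale; when threshold equals scale, this is exactly x multiplied by TensorRT's kHARD_SIGMOID activation with alpha = 1/scale and beta = offset/scale, so the op can be built from native TensorRT layers (Activation followed by an ElementWise kPROD) instead of the HardSwishPlugin. Below is a minimal NumPy sketch of that equivalence; the helper names are illustrative and not part of the commit.

import numpy as np

def hard_swish_ref(x, threshold=6.0, scale=6.0, offset=3.0):
    # Paddle's hard_swish definition: x * min(max(x + offset, 0), threshold) / scale
    return x * np.clip(x + offset, 0.0, threshold) / scale

def trt_hard_sigmoid(x, alpha, beta):
    # TensorRT kHARD_SIGMOID activation: clip(alpha * x + beta, 0, 1)
    return np.clip(alpha * x + beta, 0.0, 1.0)

x = np.linspace(-10.0, 10.0, 2001)
threshold = scale = 6.0
offset = 3.0
# The converter's decomposition: x * hard_sigmoid(x; alpha = 1/scale, beta = offset/scale)
decomposed = x * trt_hard_sigmoid(x, 1.0 / scale, offset / scale)
assert np.allclose(hard_swish_ref(x, threshold, scale, offset), decomposed)

If threshold != scale, min(x + offset, threshold) / scale is no longer min((x + offset) / scale, 1), so the converter falls back to HardSwishPlugin; the new TensorRTSubgraphPassHardSwishPluginTest below (threshold=4.0, scale=8.0) exercises that path.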
@@ -341,6 +341,12 @@ class TensorRTSubgraphPassHardSwishTest(TensorRTSubgraphPassActivationTest):
        return fluid.layers.hard_swish(x)


class TensorRTSubgraphPassHardSwishPluginTest(
        TensorRTSubgraphPassActivationTest):
    def append_act(self, x):
        return fluid.layers.hard_swish(x, threshold=4.0, scale=8.0)


class TensorRTSubgraphPassHardSigmoidTest(TensorRTSubgraphPassActivationTest):
    def append_act(self, x):
        return fluid.layers.hard_sigmoid(x)