Unverified commit d255bfe0 authored by JingZhuangzhuang, committed by GitHub

fix_conv2d_trt_convert_test_case (#39882)

* fix_conv2d_trt_convert_test_case

* fix_conv2d_trt_convert_test_case

* fix_conv2d_trt_convert_test_case

* fix_conv2d_trt_convert_test_case
Parent b46e49de
@@ -37,6 +37,13 @@ class TrtConvertConv2dTransposeTest(TrtLayerAutoScanTest):
         if inputs['input_data'].shape[1] != weights['conv2d_weight'].shape[0]:
             return False
+        if attrs[0]['dilations'][0] != 1 or attrs[0]['dilations'][1] != 1:
+            return False
+        ver = paddle_infer.get_trt_compile_version()
+        if ver[0] * 1000 + ver[1] * 100 + ver[2] * 10 < 7000:
+            return False
         return True

     def sample_program_configs(self):
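The guard added in the hunk above rejects configurations this converter test cannot cover: conv2d_transpose is only exercised with dilations of [1, 1], and the case is dropped entirely when the TensorRT build Paddle was compiled against is older than 7.0 (the check encodes the version tuple as major*1000 + minor*100 + patch*10 and compares it with 7000). Below is a minimal standalone sketch of that gate, assuming paddle_infer.get_trt_compile_version() returns a (major, minor, patch) tuple as it does in the test; the helper name is ours, not part of the test:

    import paddle.inference as paddle_infer

    def trt_can_run_conv2d_transpose(dilations):
        # Hypothetical helper mirroring the validity check added above.
        if dilations[0] != 1 or dilations[1] != 1:
            return False
        # Reports the TensorRT version Paddle was built with, e.g. (8, 4, 1).
        major, minor, patch = paddle_infer.get_trt_compile_version()
        return major * 1000 + minor * 100 + patch * 10 >= 7000

    # e.g. on a TensorRT 8.4.1 build: 8*1000 + 4*100 + 1*10 = 8410 >= 7000 -> True
    print(trt_can_run_conv2d_transpose([1, 1]))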
@@ -175,9 +182,9 @@ class TrtConvertConv2dTransposeTest(TrtLayerAutoScanTest):
         self.trt_param.precision = paddle_infer.PrecisionType.Half
         yield self.create_inference_config(), generate_trt_nodes_num(
             attrs, False), (1e-5, 1e-3)
-        self.trt_param.precision = paddle_infer.PrecisionType.Int8
-        yield self.create_inference_config(), generate_trt_nodes_num(
-            attrs, False), (1e-5, 1e-5)
+        # self.trt_param.precision = paddle_infer.PrecisionType.Int8
+        # yield self.create_inference_config(), generate_trt_nodes_num(
+        #     attrs, False), (1e-5, 1e-5)

         # for dynamic_shape
         generate_dynamic_shape(attrs)
@@ -187,41 +194,18 @@ class TrtConvertConv2dTransposeTest(TrtLayerAutoScanTest):
         self.trt_param.precision = paddle_infer.PrecisionType.Half
         yield self.create_inference_config(), generate_trt_nodes_num(
             attrs, True), (1e-5, 1e-3)
-        self.trt_param.precision = paddle_infer.PrecisionType.Int8
-        yield self.create_inference_config(), generate_trt_nodes_num(
-            attrs, True), (1e-5, 1e-5)
+        # self.trt_param.precision = paddle_infer.PrecisionType.Int8
+        # yield self.create_inference_config(), generate_trt_nodes_num(
+        #     attrs, True), (1e-5, 1e-5)

     def add_skip_trt_case(self):
         def teller1(program_config, predictor_config):
-            if program_config.ops[0].attrs[
-                    'padding_algorithm'] == "SAME" or program_config.ops[
-                        0].attrs['padding_algorithm'] == "VALID":
-                return True
-            return False
-
-        self.add_skip_case(
-            teller1, SkipReasons.TRT_NOT_IMPLEMENTED,
-            "When padding_algorithm is 'SAME' or 'VALID', Trt dose not support. In this case, trt build error is caused by scale op."
-        )
-
-        def teller2(program_config, predictor_config):
-            if program_config.ops[0].attrs['dilations'][
-                    0] != 1 or program_config.ops[0].attrs['dilations'][1] != 1:
-                return True
-            return False
-
-        self.add_skip_case(
-            teller2, SkipReasons.TRT_NOT_IMPLEMENTED,
-            "When dilations's element is not equal 1, there are different behaviors between Trt and Paddle."
-        )
-
-        def teller3(program_config, predictor_config):
             if self.trt_param.precision == paddle_infer.PrecisionType.Int8:
                 return True
             return False

         self.add_skip_case(
-            teller3, SkipReasons.TRT_NOT_IMPLEMENTED,
+            teller1, SkipReasons.TRT_NOT_IMPLEMENTED,
             "When precisionType is int8 without relu op, output is different between Trt and Paddle."
         )
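With the dilation and TensorRT-version conditions moved into is_program_valid, only the Int8 skip survives and is renamed back to teller1. For readers unfamiliar with the harness, a teller is just a predicate over the generated program and predictor configs; conceptually the skip list behaves like the simplified model below (SkipRegistry and reason_to_skip are illustrative names, not the actual TrtLayerAutoScanTest API):

    # Simplified, hypothetical model of the teller/skip mechanism used above.
    class SkipRegistry:
        def __init__(self):
            self._cases = []  # (predicate, reason) pairs

        def add_skip_case(self, teller, reason):
            self._cases.append((teller, reason))

        def reason_to_skip(self, program_config, predictor_config):
            # Return the first matching reason, or None if the case should run.
            for teller, reason in self._cases:
                if teller(program_config, predictor_config):
                    return reason
            return None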
......
@@ -147,7 +147,7 @@ class TrtConvertDeformableConvTest(TrtLayerAutoScanTest):
             if len(attrs[0]['paddings']) == 4:
                 return 1, 2
             else:
-                return 1, 2
+                return 1, 4

         attrs = [
             program_config.ops[i].attrs
@@ -160,20 +160,8 @@ class TrtConvertDeformableConvTest(TrtLayerAutoScanTest):
         yield self.create_inference_config(), generate_trt_nodes_num(
             attrs, False), 1e-5

-    def add_skip_trt_case(self):
-        def teller1(program_config, predictor_config):
-            if len(program_config.ops[0].attrs["strides"]) != 2:
-                return False
-            return True
-
-        self.add_skip_case(
-            teller1, SkipReasons.TRT_NOT_IMPLEMENTED,
-            "In deformable conv, length of Attr(strides) should be 2.")
-
     def test(self):
         self.trt_param.workspace_size = 1 << 28
-        self.add_skip_trt_case()
         self.run_test()
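The deformable-conv test keeps its TensorRT workspace of 1 << 28 bytes and only drops the strides-length skip case. As a quick reminder of what that constant amounts to:

    workspace_size = 1 << 28             # 268435456 bytes
    print(workspace_size / (1024 ** 2))  # 256.0 MiB of TensorRT workspace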
......
@@ -40,6 +40,13 @@ class TrtConvertDepthwiseConv2dTransposeTest(TrtLayerAutoScanTest):
         if inputs['input_data'].shape[1] != attrs[0]['groups']:
             return False
+        if attrs[0]['dilations'][0] != 1 or attrs[0]['dilations'][1] != 1:
+            return False
+        ver = paddle_infer.get_trt_compile_version()
+        if ver[0] * 1000 + ver[1] * 100 + ver[2] * 10 < 7000:
+            return False
         return True

     def sample_program_configs(self):
@@ -139,9 +146,9 @@ class TrtConvertDepthwiseConv2dTransposeTest(TrtLayerAutoScanTest):
         self.trt_param.precision = paddle_infer.PrecisionType.Half
         yield self.create_inference_config(), generate_trt_nodes_num(
             attrs, False), (1e-5, 1e-3)
-        self.trt_param.precision = paddle_infer.PrecisionType.Int8
-        yield self.create_inference_config(), generate_trt_nodes_num(
-            attrs, False), (1e-5, 1e-5)
+        # self.trt_param.precision = paddle_infer.PrecisionType.Int8
+        # yield self.create_inference_config(), generate_trt_nodes_num(
+        #     attrs, False), (1e-5, 1e-5)

         # for dynamic_shape
         generate_dynamic_shape(attrs)
@@ -151,41 +158,18 @@ class TrtConvertDepthwiseConv2dTransposeTest(TrtLayerAutoScanTest):
         self.trt_param.precision = paddle_infer.PrecisionType.Half
         yield self.create_inference_config(), generate_trt_nodes_num(
             attrs, True), (1e-5, 1e-5)
-        self.trt_param.precision = paddle_infer.PrecisionType.Int8
-        yield self.create_inference_config(), generate_trt_nodes_num(
-            attrs, True), (1e-5, 1e-5)
+        # self.trt_param.precision = paddle_infer.PrecisionType.Int8
+        # yield self.create_inference_config(), generate_trt_nodes_num(
+        #     attrs, True), (1e-5, 1e-5)

     def add_skip_trt_case(self):
         def teller1(program_config, predictor_config):
-            if program_config.ops[0].attrs[
-                    'padding_algorithm'] == "SAME" or program_config.ops[
-                        0].attrs['padding_algorithm'] == "VALID":
-                return True
-            return False
-
-        self.add_skip_case(
-            teller1, SkipReasons.TRT_NOT_IMPLEMENTED,
-            "When padding_algorithm is 'SAME' or 'VALID', Trt dose not support. In this case, trt build error is caused by scale op."
-        )
-
-        def teller2(program_config, predictor_config):
-            if program_config.ops[0].attrs['dilations'][
-                    0] != 1 or program_config.ops[0].attrs['dilations'][1] != 1:
-                return True
-            return False
-
-        self.add_skip_case(
-            teller2, SkipReasons.TRT_NOT_IMPLEMENTED,
-            "When dilations's element is not equal 1, there are different behaviors between Trt and Paddle."
-        )
-
-        def teller3(program_config, predictor_config):
             if self.trt_param.precision == paddle_infer.PrecisionType.Int8:
                 return True
             return False

         self.add_skip_case(
-            teller3, SkipReasons.TRT_NOT_IMPLEMENTED,
+            teller1, SkipReasons.TRT_NOT_IMPLEMENTED,
             "When precisionType is int8 without relu op, output is different between Trt and Paddle."
         )
......