Unverified commit f82baed8, authored by Pei Yang, committed by GitHub

fix trt instance norm plugin on gcc8. test=develop (#25730)

Parent 920d998f
@@ -111,6 +111,7 @@ int InstanceNormPlugin::enqueue(int batch_size, const void *const *inputs,
       handle_, CUDNN_BATCHNORM_SPATIAL_PERSISTENT, &alpha, &beta, x_desc_,
       x_ptr, y_desc_, y_ptr, b_desc_, scale_d, bias_d, 1., nullptr, nullptr,
       eps_, nullptr, nullptr);
+  return cudaGetLastError() != cudaSuccess;
 }

 }  // namespace plugin
...
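For context: enqueue() previously reached the end of a non-void function without a return statement after the cuDNN call, which is undefined behavior and is likely what made the unit test fail under GCC 8; the added line instead reports any pending CUDA error as a nonzero status. Below is a minimal standalone sketch of that pattern, not the actual Paddle source; ReportEnqueueStatus is an illustrative name.

// Sketch of the error-reporting pattern the fix introduces: a
// TensorRT-plugin-style enqueue() ends by returning nonzero when the
// CUDA runtime holds a pending error.
#include <cuda_runtime_api.h>

// Hypothetical helper; in the real plugin the cuDNN instance-norm call
// runs before this point on the plugin's stream.
inline int ReportEnqueueStatus() {
  // cudaGetLastError() returns and clears the last error recorded on this
  // host thread; TensorRT treats a nonzero enqueue() return as a failure.
  return cudaGetLastError() != cudaSuccess;
}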
@@ -389,10 +389,9 @@ if(WITH_GPU AND TENSORRT_FOUND)
   inference_analysis_test(trt_split_converter_test SRCS trt_split_converter_test.cc
           EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
           ARGS --infer_model=${TEST_SPLIT_CONVERTER_MODEL}/)
-  #TODO(peiyang): Fix this unitest failed on GCC8.
-  #inference_analysis_test(trt_instance_norm_test SRCS trt_instance_norm_converter_test.cc
-  #        EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
-  #        ARGS --infer_model=${TEST_INSTANCE_NORM_MODEL}/)
+  inference_analysis_test(trt_instance_norm_test SRCS trt_instance_norm_converter_test.cc
+          EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
+          ARGS --infer_model=${TEST_INSTANCE_NORM_MODEL}/)
   inference_analysis_test(test_analyzer_capi_gpu SRCS analyzer_capi_gpu_tester.cc
           EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} paddle_fluid_c
           ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_inference_test_models)
...