diff --git a/tools/ci_op_benchmark.sh b/tools/ci_op_benchmark.sh
index 80efd32ecf14eebac990dd8a531c134e95e7c039..1db79418b2d8f296c37d7757cace7b7bc2a8141c 100644
--- a/tools/ci_op_benchmark.sh
+++ b/tools/ci_op_benchmark.sh
@@ -106,7 +106,7 @@ function prepare_benchmark_environment {
   [ $? -ne 0 ] && LOG "[FATAL] Clone benchmark repo fail." && exit -1
   LOG "[INFO] Collect api info ..."
   python benchmark/api/deploy/collect_api_info.py \
-    --test_module_name tests_v2 \
+    --test_module_name dynamic_tests_v2 \
     --info_file api_info.txt >& 2
   [ $? -ne 0 ] && LOG "[FATAL] Collect api info fail." && exit -1
   [ ! -f benchmark/ci/scripts/op_benchmark.config ] && LOG "[FATAL] Missing op_benchmark.config!" && exit -1
@@ -185,7 +185,7 @@ function run_op_benchmark_test {
   logs_dir="$(pwd)/logs-${branch_name}"
   [ -d $logs_dir ] && rm -rf $logs_dir/* || mkdir -p $logs_dir
   pushd benchmark/api > /dev/null
-  bash deploy/main_control.sh tests_v2 \
+  bash deploy/main_control.sh dynamic_tests_v2 \
     tests_v2/configs \
     $logs_dir \
     $VISIBLE_DEVICES \
@@ -212,7 +212,7 @@ function check_op_benchmark_result {
     # there is no need to recompile and install paddle
     LOG "[INFO] retry ${retry_time} times ..."
     pushd benchmark/api > /dev/null
-    bash deploy/main_control.sh tests_v2 \
+    bash deploy/main_control.sh dynamic_tests_v2 \
      tests_v2/configs \
      ${logs_dir} \
      $VISIBLE_DEVICES \
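
All three hunks switch the benchmark suite name passed to the tooling from tests_v2 to dynamic_tests_v2. The sketch below is not part of the patch; it only illustrates the first call site being updated, with the LOG helper stubbed in and the module name factored into a variable as a hedged alternative to hard-coding it at each of the three call sites.

#!/usr/bin/env bash
# Sketch only, assuming the repo layout shown in the diff; the real script
# defines its own LOG helper and hard-codes the module name at each call site.
TEST_MODULE_NAME="dynamic_tests_v2"  # value introduced by this change

function LOG {
  # Stand-in logger for this sketch.
  echo "[$(date +'%F %T')] $*"
}

# Mirrors the collect_api_info.py invocation changed in the first hunk.
python benchmark/api/deploy/collect_api_info.py \
  --test_module_name "${TEST_MODULE_NAME}" \
  --info_file api_info.txt >& 2
[ $? -ne 0 ] && LOG "[FATAL] Collect api info fail." && exit 1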