未验证 提交 dc26d07b 编写于 作者: zhangchunle 提交者: GitHub

Unittest parallel (#43042)

unittest parallel
Co-authored-by: zhangbo9674 <zhangbo54@baidu.com>
上级 c4b7c485
...@@ -1564,6 +1564,10 @@ set +x ...@@ -1564,6 +1564,10 @@ set +x
card_test "$exclusive_tests_non_parallel" -1 2 # run cases exclusively, in this cases would be run with 2/4/8 GPUs card_test "$exclusive_tests_non_parallel" -1 2 # run cases exclusively, in this cases would be run with 2/4/8 GPUs
exclu_ut_endTime_s=`date +%s` exclu_ut_endTime_s=`date +%s`
echo "ipipe_log_param_1_TestCases_Total_Time: $[ $single_ut_endTime_s - $single_ut_startTime_s ]s"
echo "ipipe_log_param_2_TestCases_Total_Time: $[ $multi_ut_endTime_s - $multi_ut_startTime_s ]s"
echo "ipipe_log_param_Exclusive_TestCases_Total_Time: $[ $exclu_ut_endTime_s - $exclu_ut_startTime_s ]s"
echo "ipipe_log_param_1_TestCases_Total_Time: $[ $single_ut_endTime_s - $single_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt echo "ipipe_log_param_1_TestCases_Total_Time: $[ $single_ut_endTime_s - $single_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
echo "ipipe_log_param_2_TestCases_Total_Time: $[ $multi_ut_endTime_s - $multi_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt echo "ipipe_log_param_2_TestCases_Total_Time: $[ $multi_ut_endTime_s - $multi_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
echo "ipipe_log_param_Exclusive_TestCases_Total_Time: $[ $exclu_ut_endTime_s - $exclu_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt echo "ipipe_log_param_Exclusive_TestCases_Total_Time: $[ $exclu_ut_endTime_s - $exclu_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
...@@ -1684,6 +1688,68 @@ set -ex ...@@ -1684,6 +1688,68 @@ set -ex
fi fi
} }
# Classify every ctest case by the number of GPU cards it needs and record
# the result in ${PADDLE_ROOT}/build/classify_case_by_cardNum.txt.
# Reads:  PADDLE_ROOT, NIGHTLY_MODE (env; defaults to OFF)
# Writes: ${PADDLE_ROOT}/build/classify_case_by_cardNum.txt (regenerated),
#         ${PADDLE_ROOT}/build/nightly_case (appended)
# Fixes over the previous version: all parameter expansions and command
# substitutions are quoted (SC2086/SC2046), and `[ x == y ]` is replaced by
# the bash `[[ ... ]]` test, which is what the rest of the loop already uses.
function classify_case_by_cardNum() {
    cd "${PADDLE_ROOT}/build"
    test_cases=$(ctest -N -V)    # get all test cases, including LABELS lines
    single_card_tests='^job$'    # all cases which take a single GPU
    multiple_card_tests='^job$'  # all cases which take two GPUs (RUN_TYPE=DIST)
    exclusive_card_tests='^job$' # all cases which must run exclusively
    nightly_tests='^job$'        # all cases which only run at night
    is_exclusive=''              # indicate whether the case is exclusive type
    is_multicard=''              # indicate whether the case is multiple GPUs type
    is_nightly=''                # indicate whether the case will only run at night
    set +x
    while read -r line; do
        if [[ "$line" == "" ]]; then
            continue
        fi
        read matchstr <<< "$(echo "$line"|grep -oEi 'Test[ \t]+#')"
        if [[ "$matchstr" == "" ]]; then
            # Any test case with LABELS property would be parsed here:
            #   RUN_TYPE=EXCLUSIVE means the case runs exclusively
            #   RUN_TYPE=DIST means the case takes two GPUs during runtime
            #   RUN_TYPE=NIGHTLY / DIST:NIGHTLY / EXCLUSIVE:NIGHTLY means the
            #   case will ONLY run at night
            read is_exclusive <<< "$(echo "$line"|grep -oEi "RUN_TYPE=EXCLUSIVE")"
            read is_multicard <<< "$(echo "$line"|grep -oEi "RUN_TYPE=DIST")"
            read is_nightly <<< "$(echo "$line"|grep -oEi "RUN_TYPE=NIGHTLY|RUN_TYPE=DIST:NIGHTLY|RUN_TYPE=EXCLUSIVE:NIGHTLY")"
            continue
        fi
        read testcase <<< "$(echo "$line"|grep -oEi "\w+$")"
        if [[ "$is_nightly" != "" ]] && [[ "${NIGHTLY_MODE:-OFF}" == "OFF" ]]; then
            echo "$testcase will only run at night."
            nightly_tests="$nightly_tests|^$testcase$"
            echo "$testcase" >> "${PADDLE_ROOT}/build/nightly_case"
            continue
        fi
        if [[ "$is_multicard" == "" ]]; then
            # trick: treat all test cases with prefix "test_dist_" as dist
            # cases; they run on 2 GPUs
            read is_multicard <<< "$(echo "$testcase"|grep -oEi "test_dist_")"
        fi
        if [[ "$is_exclusive" != "" ]]; then
            exclusive_card_tests="$exclusive_card_tests|^$testcase$"
        elif [[ "$is_multicard" != "" ]]; then
            multiple_card_tests="$multiple_card_tests|^$testcase$"
        else
            single_card_tests="$single_card_tests|^$testcase$"
        fi
        # reset per-case state before parsing the next entry
        is_exclusive=''
        is_multicard=''
        is_nightly=''
        matchstr=''
        testcase=''
    done <<< "$test_cases"
    set -x
    rm -rf "${PADDLE_ROOT}/build/classify_case_by_cardNum.txt"
    touch "${PADDLE_ROOT}/build/classify_case_by_cardNum.txt"
    echo "single_card_tests: $single_card_tests" >> "${PADDLE_ROOT}/build/classify_case_by_cardNum.txt"
    echo "multiple_card_tests: $multiple_card_tests" >> "${PADDLE_ROOT}/build/classify_case_by_cardNum.txt"
    echo "exclusive_card_tests: $exclusive_card_tests" >> "${PADDLE_ROOT}/build/classify_case_by_cardNum.txt"
    echo "nightly_tests: $nightly_tests" >> "${PADDLE_ROOT}/build/classify_case_by_cardNum.txt"
}
function show_ut_retry_result() { function show_ut_retry_result() {
if [ "$SYSTEM" == "Darwin" ]; then if [ "$SYSTEM" == "Darwin" ]; then
exec_retry_threshold_count=10 exec_retry_threshold_count=10
...@@ -1921,8 +1987,15 @@ set -x ...@@ -1921,8 +1987,15 @@ set -x
#generate ut file map #generate ut file map
python ${PADDLE_ROOT}/tools/get_ut_file_map.py 'get_ut_map' ${PADDLE_ROOT} python ${PADDLE_ROOT}/tools/get_ut_file_map.py 'get_ut_map' ${PADDLE_ROOT}
wait;
#classify_case_by_cardNum
classify_case_by_cardNum
#generate ut mem map #generate ut mem map
python ${PADDLE_ROOT}/tools/get_ut_mem_map.py $tmp_dir python ${PADDLE_ROOT}/tools/get_ut_mem_map.py $tmp_dir
python ${PADDLE_ROOT}/tools/final_ut_parallel_rule.py ${PADDLE_ROOT}
} }
function get_failedUts_precise_map_file { function get_failedUts_precise_map_file {
...@@ -2288,6 +2361,220 @@ set -ex ...@@ -2288,6 +2361,220 @@ set -ex
fi fi
} }
# Run the GPU unit-test suite grouped for parallel execution:
#   1. sanity checks on newly added / duplicate unit tests,
#   2. single-/multiple-/exclusive-card groups (memory-zero groups first, at
#      higher parallelism), then a no-parallel bucket,
#   3. up to 4 retry rounds for failed cases, then a final report.
# NOTE(review): relies on helpers/globals defined elsewhere in this script:
# card_test, collect_failed_tests, get_quickly_disable_ut,
# show_ut_retry_result, $tmp_dir, $failed_test_lists, $cfs_dir — confirm they
# are in scope wherever this function is called.
function parallel_test_base_gpu_test() {
if [ ${WITH_TESTING:-ON} == "ON" ] ; then
cat <<EOF
========================================
Running unit tests in parallel way ...
========================================
EOF
set -x
# set trt_convert ut to run 15% cases.
export TEST_NUM_PERCENT_CASES=0.15
# NOTE(review): 'precison_cases' (sic) is assigned but never read in this
# function — looks like dead code; confirm before removing.
precison_cases=""
bash $PADDLE_ROOT/tools/check_added_ut.sh
if [ ${PRECISION_TEST:-OFF} == "ON" ]; then
python3.7 $PADDLE_ROOT/tools/get_pr_ut.py
fi
# Abort with exit code 102 when a newly added unit test duplicates the name
# of an existing one (duplicate_ut is produced by check_added_ut.sh).
if [ -a "$PADDLE_ROOT/duplicate_ut" ];then
duplicate_uts=$(cat $PADDLE_ROOT/duplicate_ut|sed -e 's/\r//g')
if [[ "$duplicate_uts" != "" ]];then
set +x
echo "========================================"
echo "The new unit test has the same name as the existing unit test"
cat "$PADDLE_ROOT/duplicate_ut"
echo "========================================"
exit 102;
# NOTE(review): unreachable — placed after 'exit 102'.
set -x
fi
fi
# Newly added UTs must pass 3 consecutive runs within a 15s timeout, in both
# the non-distributed (single GPU) and the DIST/EXCLUSIVE label groups.
if [ -a "$PADDLE_ROOT/added_ut" ];then
# Build a ctest -R regex "^a$|^b$|..." from the newline-separated added_ut
# file (awk with empty RS reads the whole file; gsub joins lines with "$|^").
added_uts=^$(awk BEGIN{RS=EOF}'{gsub(/\n/,"$|^");print}' $PADDLE_ROOT/added_ut)$
env CUDA_VISIBLE_DEVICES=0 ctest -R "(${added_uts})" -LE "RUN_TYPE=DIST|RUN_TYPE=EXCLUSIVE" --output-on-failure --repeat-until-fail 3 --timeout 15;added_ut_error=$?
ctest -R "(${added_uts})" -L "RUN_TYPE=DIST|RUN_TYPE=EXCLUSIVE" --output-on-failure --repeat-until-fail 3 --timeout 15;added_ut_error_1=$?
# NOTE(review): aborts only when BOTH runs fail ('&&') — presumably because
# each added UT matches only one of the two label groups; confirm intent.
if [ "$added_ut_error" != 0 ] && [ "$added_ut_error_1" != 0 ];then
echo "========================================"
echo "Added UT should not exceed 15 seconds"
echo "========================================"
exit 8;
fi
fi
set +x
EXIT_CODE=0;
# Seed ctest's cost data with the published history so test scheduling and
# the memory/time grouping below have data to work with.
wget --no-proxy https://paddle-docker-tar.bj.bcebos.com/pre_test/CTestCostData.txt --no-check-certificate
mkdir -p ${PADDLE_ROOT}/build/Testing/Temporary/
cp -r ${PADDLE_ROOT}/build/CTestCostData.txt ${PADDLE_ROOT}/build/Testing/Temporary/
# Full case list: strip blank lines and ctest's trailing summary line.
ctest -N | awk -F ': ' '{print $2}' | sed '/^$/d' | sed '$d' > all_ut_list
get_quickly_disable_ut||disable_ut_quickly='disable_ut' # indicate whether the case was in quickly disable list
test_cases=$(ctest -N -V) # get all test cases
# Produce the grouped "*_new" case files consumed by the loops below.
python ${PADDLE_ROOT}/tools/group_case_for_parallel.py ${PADDLE_ROOT}
# --- single-card cases with zero recorded GPU memory: 4 jobs per card ---
single_ut_mem_0_startTime_s=`date +%s`
while read line
do
card_test "$line" 1 4
done < $PADDLE_ROOT/tools/single_card_tests_mem0_new
single_ut_mem_0_endTime_s=`date +%s`
echo "ipipe_log_param_1_mem_0_TestCases_Total_Time: $[ $single_ut_mem_0_endTime_s - $single_ut_mem_0_startTime_s ]s"
echo "ipipe_log_param_1_mem_0_TestCases_Total_Time: $[ $single_ut_mem_0_endTime_s - $single_ut_mem_0_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
# --- remaining single-card cases: parallelism derived from the number of
# cases on the line (one "$"-terminated name per case; ~6 cases per job,
# minimum 1) ---
single_ut_startTime_s=`date +%s`
while read line
do
num=$[(`echo $line | awk -F"$" '{print NF-1}'`-1)/6]
if [ $num -eq 0 ]; then
num=1
fi
card_test "$line" 1 $num
done < $PADDLE_ROOT/tools/single_card_tests_new
single_ut_endTime_s=`date +%s`
echo "ipipe_log_param_1_TestCases_Total_Time: $[ $single_ut_endTime_s - $single_ut_startTime_s ]s"
echo "ipipe_log_param_1_TestCases_Total_Time: $[ $single_ut_endTime_s - $single_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
# --- two-card cases with zero recorded GPU memory ---
multiple_ut_mem_0_startTime_s=`date +%s`
while read line
do
card_test "$line" 2 4
done < $PADDLE_ROOT/tools/multiple_card_tests_mem0_new
multiple_ut_mem_0_endTime_s=`date +%s`
echo "ipipe_log_param_2_mem0_TestCases_Total_Time: $[ $multiple_ut_mem_0_endTime_s - $multiple_ut_mem_0_startTime_s ]s"
echo "ipipe_log_param_2_mem0_TestCases_Total_Time: $[ $multiple_ut_mem_0_endTime_s - $multiple_ut_mem_0_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
# --- remaining two-card cases ---
multiple_ut_startTime_s=`date +%s`
while read line
do
num=$[(`echo $line | awk -F"$" '{print NF-1}'`-1)/6]
if [ $num -eq 0 ]; then
num=1
fi
card_test "$line" 2 $num
done < $PADDLE_ROOT/tools/multiple_card_tests_new
multiple_ut_endTime_s=`date +%s`
echo "ipipe_log_param_2_TestCases_Total_Time: $[ $multiple_ut_endTime_s - $multiple_ut_startTime_s ]s"
echo "ipipe_log_param_2_TestCases_Total_Time: $[ $multiple_ut_endTime_s - $multiple_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
# --- exclusive cases (card_test count -1 = use all cards) with zero memory ---
exclusive_ut_mem_0_startTime_s=`date +%s`
while read line
do
card_test "$line" -1 4
done < $PADDLE_ROOT/tools/exclusive_card_tests_mem0_new
exclusive_ut_mem_0_endTime_s=`date +%s`
echo "ipipe_log_param_-1_mem0_TestCases_Total_Time: $[ $exclusive_ut_mem_0_endTime_s - $exclusive_ut_mem_0_startTime_s ]s"
echo "ipipe_log_param_-1_mem0_TestCases_Total_Time: $[ $exclusive_ut_mem_0_endTime_s - $exclusive_ut_mem_0_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
# --- remaining exclusive cases ---
exclusive_ut_startTime_s=`date +%s`
while read line
do
num=$[(`echo $line | awk -F"$" '{print NF-1}'`-1)/6]
if [ $num -eq 0 ]; then
num=1
fi
card_test "$line" -1 $num
done < $PADDLE_ROOT/tools/exclusive_card_tests_new
exclusive_ut_endTime_s=`date +%s`
echo "ipipe_log_param_-1_TestCases_Total_Time: $[ $exclusive_ut_endTime_s - $exclusive_ut_startTime_s ]s"
echo "ipipe_log_param_-1_TestCases_Total_Time: $[ $exclusive_ut_endTime_s - $exclusive_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
# --- cases with no grouping info: run without parallelism ---
noparallel_ut_startTime_s=`date +%s`
while read line
do
card_test "$line" -1 2
done < $PADDLE_ROOT/tools/no_parallel_case_file
noparallel_ut_endTime_s=`date +%s`
echo "ipipe_log_param_noparallel_TestCases_Total_Time: $[ $noparallel_ut_endTime_s - $noparallel_ut_startTime_s ]s"
echo "ipipe_log_param_noparallel_TestCases_Total_Time: $[ $noparallel_ut_endTime_s - $noparallel_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
###retry
collect_failed_tests
rm -f $tmp_dir/*
exec_times=0
retry_unittests_record=''
retry_time=4
exec_time_array=('first' 'second' 'third' 'fourth')
# Round 0 may retry up to 120 failed cases; later rounds only up to 30.
parallel_failed_tests_exec_retry_threshold=120
exec_retry_threshold=30
is_retry_execuate=0
rerun_ut_startTime_s=`date +%s`
if [ -n "$failed_test_lists" ];then
if [ ${TIMEOUT_DEBUG_HELP:-OFF} == "ON" ];then
bash $PADDLE_ROOT/tools/timeout_debug_help.sh "$failed_test_lists" # cat logs for tiemout uts which killed by ctest
fi
# Extract bare test names from the "- name (reason)" failure-list format.
read need_retry_ut_str <<< $(echo "$failed_test_lists" | grep -oEi "\-.+\(.+\)" | sed 's/(.\+)//' | sed 's/- //' )
need_retry_ut_arr=(${need_retry_ut_str})
need_retry_ut_count=${#need_retry_ut_arr[@]}
read retry_unittests <<< $(echo "$failed_test_lists" | grep -oEi "\-.+\(.+\)" | sed 's/(.\+)//' | sed 's/- //' )
while ( [ $exec_times -lt $retry_time ] )
do
# Decide whether the current failure count is small enough to retry;
# otherwise mark the run as a real failure (is_retry_execuate=1).
if [[ "${exec_times}" == "0" ]] ;then
if [ $need_retry_ut_count -lt $parallel_failed_tests_exec_retry_threshold ];then
is_retry_execuate=0
else
is_retry_execuate=1
fi
elif [[ "${exec_times}" == "1" ]] ;then
# Re-count failures left after the first retry round against the
# stricter threshold.
read need_retry_ut_str <<< $(echo "$failed_test_lists" | grep -oEi "\-.+\(.+\)" | sed 's/(.\+)//' | sed 's/- //' )
need_retry_ut_arr=(${need_retry_ut_str})
need_retry_ut_count=${#need_retry_ut_arr[@]}
if [ $need_retry_ut_count -lt $exec_retry_threshold ];then
is_retry_execuate=0
else
is_retry_execuate=1
fi
fi
if [[ "$is_retry_execuate" == "0" ]];then
set +e
retry_unittests_record="$retry_unittests_record$failed_test_lists"
failed_test_lists_ult=`echo "${failed_test_lists}" |grep -Po '[^ ].*$'`
set -e
if [[ "${exec_times}" == "1" ]] || [[ "${exec_times}" == "2" ]];then
# Stop as soon as a retry round produced no failures.
if [[ "${failed_test_lists}" == "" ]];then
break
else
read retry_unittests <<< $(echo "$failed_test_lists" | grep -oEi "\-.+\(.+\)" | sed 's/(.\+)//' | sed 's/- //' )
fi
fi
echo "========================================="
echo "This is the ${exec_time_array[$exec_times]} time to re-run"
echo "========================================="
echo "The following unittest will be re-run:"
echo "${retry_unittests}"
# Build a single ctest regex "^a$|^b$|..." from the retry list.
for line in ${retry_unittests[@]} ;
do
if [[ "$retry_cases" == "" ]]; then
retry_cases="^$line$"
else
retry_cases="$retry_cases|^$line$"
fi
done
if [[ "$retry_cases" != "" ]]; then
card_test "$retry_cases" -1 2
fi
exec_times=$[$exec_times+1]
failed_test_lists=''
collect_failed_tests
rm -f $tmp_dir/*
retry_cases=''
else
break
fi
done
retry_unittests_record="$retry_unittests_record$failed_test_lists"
fi
rerun_ut_endTime_s=`date +%s`
echo "ipipe_log_param_Rerun_TestCases_Total_Time: $[ $rerun_ut_endTime_s - $rerun_ut_startTime_s ]s"
echo "ipipe_log_param_Rerun_TestCases_Total_Time: $[ $rerun_ut_endTime_s - $rerun_ut_startTime_s ]s" >> ${PADDLE_ROOT}/build/build_summary.txt
# Publish the updated cost data for the next run to download.
cp $PADDLE_ROOT/build/Testing/Temporary/CTestCostData.txt ${cfs_dir}/coverage/${AGILE_PULL_ID}/${AGILE_REVISION}/
if [[ "$EXIT_CODE" != "0" ]]; then
show_ut_retry_result
fi
set -ex
fi
}
function parallel_test_base_ipu() { function parallel_test_base_ipu() {
mkdir -p ${PADDLE_ROOT}/build mkdir -p ${PADDLE_ROOT}/build
cd ${PADDLE_ROOT}/build/python/paddle/fluid/tests/unittests/ipu cd ${PADDLE_ROOT}/build/python/paddle/fluid/tests/unittests/ipu
...@@ -2424,7 +2711,7 @@ function parallel_test() { ...@@ -2424,7 +2711,7 @@ function parallel_test() {
if [ "$WITH_CINN" == "ON" ];then if [ "$WITH_CINN" == "ON" ];then
parallel_test_base_cinn parallel_test_base_cinn
elif [ "$WITH_GPU" == "ON" ] || [ "$WITH_ROCM" == "ON" ];then elif [ "$WITH_GPU" == "ON" ] || [ "$WITH_ROCM" == "ON" ];then
parallel_test_base_gpu parallel_test_base_gpu_test
elif [ "$WITH_XPU" == "ON" ];then elif [ "$WITH_XPU" == "ON" ];then
parallel_test_base_xpu parallel_test_base_xpu
elif [ "$WITH_ASCEND_CL" == "ON" ];then elif [ "$WITH_ASCEND_CL" == "ON" ];then
...@@ -3294,6 +3581,10 @@ function main() { ...@@ -3294,6 +3581,10 @@ function main() {
# only test trt convert. # only test trt convert.
trt_convert_test trt_convert_test
;; ;;
classify_case_by_cardNum)
# only class case by card num
classify_case_by_cardNum
;;
*) *)
print_usage print_usage
exit 1 exit 1
......
# -*- coding: utf-8 -*-
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import json
import datetime
import codecs
import sys
def classify_cases_by_mem(rootPath):
    """Classify unit-test cases by GPU memory usage and group them for parallel runs.

    Reads ``<rootPath>/build/classify_case_by_cardNum.txt`` (produced by the
    ``classify_case_by_cardNum`` shell function) and ``/pre_test/ut_mem_map.json``,
    then writes, for each card type:
      * ``/pre_test/<cardType>_mem0`` -- one ctest regex line of cases whose
        recorded GPU memory usage is 0, and
      * ``/pre_test/<cardType>``      -- regex lines of the remaining cases,
        greedily packed so each line's summed memory stays below 16*1024*2
        (units follow ut_mem_map.json -- presumably MB; confirm).
    Also dumps the parsed classification to
    ``/pre_test/classify_case_by_cardNum.json`` and copies
    ``<rootPath>/build/nightly_case`` into ``/pre_test/``.

    Args:
        rootPath: Paddle repository root containing the ``build`` directory.

    Fix over the previous version: the ``exclusive_card_tests`` line was split
    on ``'... ^job$'`` (missing the ``|`` used for the other two card types),
    which injected a spurious empty-string case into the output; parsing is
    now uniform and empty names are dropped.
    """
    case_filename = '%s/build/classify_case_by_cardNum.txt' % rootPath
    # Cases taking roughly 70s-100s each; grouped separately so one slow case
    # does not drag down a whole parallel group (bucket principle).
    case_exec_100 = [
        'test_conv_eltwiseadd_bn_fuse_pass', 'test_trt_convert_pool2d',
        'test_fc_fuse_pass', 'test_trt_convert_depthwise_conv2d',
        'test_quant2_int8_resnet50_mkldnn',
        'test_conv_elementwise_add_act_fuse_pass', 'test_trt_convert_conv2d',
        'test_paddle_save_load', 'test_logical_op', 'test_nearest_interp_op',
        'test_pool2d_op', 'test_conv3d_transpose_op', 'test_lstmp_op',
        'test_cross_entropy2_op', 'test_sgd_op', 'test_imperative_ptq',
        'test_model', 'test_custom_relu_op_setup', 'test_dropout_op',
        'test_concat_op'
    ]
    # Cases taking roughly 110s-200s each, or that time out easily.
    case_exec_200 = [
        'test_post_training_quantization_mnist',
        'test_imperative_auto_mixed_precision',
        'test_trt_dynamic_shape_ernie_fp16_ser_deser',
        'test_trt_dynamic_shape_ernie', 'test_layer_norm_op',
        'trt_quant_int8_yolov3_r50_test', 'test_gru_op',
        'test_post_training_quantization_while', 'test_mkldnn_log_softmax_op',
        'test_mkldnn_matmulv2_op', 'test_mkldnn_shape_op',
        'interceptor_pipeline_short_path_test',
        'interceptor_pipeline_long_path_test', 'test_cpuonly_spawn'
    ]
    # Cases that always time out.
    case_always_timeout = [
        'test_quant2_int8_resnet50_channelwise_mkldnn',
        'test_parallel_dygraph_unused_variables_gloo',
        'test_seq2seq',
        'test_pool3d_op',
        'test_trilinear_interp_op',
        'test_trilinear_interp_v2_op',
        'test_dropout_op',
        'test_parallel_dygraph_sync_batch_norm',
        'test_conv3d_op',
        'test_quant2_int8_resnet50_range_mkldnn',
    ]

    def _parse_cases(line, card_type):
        """Parse "<card_type>: ^job$|^a$|^b$" into ['a', 'b'], dropping empties."""
        body = line.split('%s: ^job$' % card_type, 1)[1]
        cases = []
        for case in body.split('|'):
            case = case.replace('^', '').replace('$', '').strip()
            if case:
                cases.append(case)
        return cases

    all_tests_by_card = {}
    with open(case_filename) as f:
        for line in f.readlines():
            for card_type in ('single_card_tests', 'multiple_card_tests',
                              'exclusive_card_tests'):
                if line.startswith('%s:' % card_type):
                    all_tests_by_card[card_type] = _parse_cases(line, card_type)
                    break

    with open("/pre_test/classify_case_by_cardNum.json", "w") as f:
        json.dump(all_tests_by_card, f)

    with open("/pre_test/ut_mem_map.json", 'r') as load_f:
        new_lastest_mem = json.load(load_f)

    # NOTE(review): accumulated but never written out in this function; kept
    # for parity with the original -- confirm whether it is really unused.
    no_parallel_case = '^job$'
    for cardType in all_tests_by_card:
        case_mem_0 = '^job$'  # regex of cases whose recorded memory usage is 0
        case_mem_1 = {}       # case name -> nonzero memory usage
        for case in all_tests_by_card[cardType]:
            # long-running buckets are scheduled separately below
            if case in case_exec_100 or case in case_exec_200:
                continue
            if case in case_always_timeout:
                no_parallel_case = no_parallel_case + '|^' + case + '$'
            if case not in new_lastest_mem:
                continue
            if new_lastest_mem[case]["mem_nvidia"] == 0:
                case_mem_0 = case_mem_0 + '|^' + case + '$'
            else:
                case_mem_1[case] = new_lastest_mem[case]["mem_nvidia"]
        with open('/pre_test/%s_mem0' % cardType, 'w') as f:
            f.write(case_mem_0)
        # Greedily pack nonzero-memory cases (ascending by usage) into regex
        # lines whose summed memory stays below the 16*1024*2 budget.
        case_mem_1_sort = sorted(case_mem_1.items(), key=lambda x: x[1])
        case_mem_1_line = '^job$'
        mem_1_sum = 0
        with open('/pre_test/%s' % cardType, 'w') as f_not_0:
            for case, mem in case_mem_1_sort:
                if mem_1_sum < 16 * 1024 * 2:
                    mem_1_sum += mem
                    case_mem_1_line = case_mem_1_line + '|^' + case + '$'
                else:
                    f_not_0.write(case_mem_1_line + '\n')
                    case_mem_1_line = '^job$|^' + case + '$'
                    mem_1_sum = mem
            f_not_0.write(case_mem_1_line + '\n')
            if cardType == 'single_card_tests':
                # Each long-running bucket runs as its own single-card line.
                for cases in [case_exec_100, case_exec_200]:
                    case_mem_1_line = '^job$'
                    for case in cases:
                        case_mem_1_line = case_mem_1_line + '|^' + case + '$'
                    f_not_0.write(case_mem_1_line + '\n')
    os.system('cp %s/build/nightly_case /pre_test/' % rootPath)
if __name__ == '__main__':
    # CLI entry point: the first argument is the Paddle repository root.
    classify_cases_by_mem(sys.argv[1])
...@@ -14,6 +14,7 @@ ...@@ -14,6 +14,7 @@
import os import os
import json import json
import sys
def get_ut_mem(rootPath): def get_ut_mem(rootPath):
...@@ -24,7 +25,7 @@ def get_ut_mem(rootPath): ...@@ -24,7 +25,7 @@ def get_ut_mem(rootPath):
continue continue
ut = f.replace('^', '').replace('$.log', '') ut = f.replace('^', '').replace('$.log', '')
case_dic[ut] = {} case_dic[ut] = {}
filename = '%s%s' % (parent, f) filename = '%s/%s' % (parent, f)
fi = open(filename) fi = open(filename)
lines = fi.readlines() lines = fi.readlines()
mem_reserved1 = -1 mem_reserved1 = -1
...@@ -56,7 +57,7 @@ def get_ut_mem(rootPath): ...@@ -56,7 +57,7 @@ def get_ut_mem(rootPath):
if caseTime != -1: if caseTime != -1:
case_dic[ut]['time'] = caseTime case_dic[ut]['time'] = caseTime
ut_mem_map_file = "/pre_test/ut_mem_map.json" % rootPath ut_mem_map_file = "/pre_test/ut_mem_map.json"
with open(ut_mem_map_file, "w") as f: with open(ut_mem_map_file, "w") as f:
json.dump(case_dic, f) json.dump(case_dic, f)
......
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
def group_case_for_parallel(rootPath):
    """Regroup the pre-classified case lists against the cases this build runs.

    Downloads the published classification files into ``<rootPath>/tools``,
    intersects each grouped regex line with the cases that actually need to
    run (``<rootPath>/build/ut_list``, falling back to ``all_ut_list``), and
    writes the filtered groups to ``<filename>_new``. Cases left over after
    all groups are processed (minus nightly-only cases) are written as one
    regex line to ``<rootPath>/tools/no_parallel_case_file``.

    Args:
        rootPath: Paddle repository root containing ``build`` and ``tools``.

    Fix over the previous version: all files are opened with ``with`` so the
    handles are closed even when an exception is raised mid-processing.
    """
    # Download the latest pre-classified case lists (best effort: a failed
    # download surfaces below as a missing-file error when reading).
    for filename in [
            'nightly_tests', 'single_card_tests', 'single_card_tests_mem0',
            'multiple_card_tests', 'multiple_card_tests_mem0',
            'exclusive_card_tests', 'exclusive_card_tests_mem0'
    ]:
        os.system(
            'cd %s/tools && wget --no-proxy https://paddle-docker-tar.bj.bcebos.com/pre_test/%s --no-check-certificate'
            % (rootPath, filename))

    # Nightly-only cases are excluded from the final no-parallel bucket.
    with open('%s/tools/nightly_tests' % rootPath, 'r') as nightly_tests_file:
        nightly_tests = nightly_tests_file.read().strip().split('\n')

    parallel_case_file_list = [
        '%s/tools/single_card_tests_mem0' % rootPath,
        '%s/tools/single_card_tests' % rootPath,
        '%s/tools/multiple_card_tests_mem0' % rootPath,
        '%s/tools/multiple_card_tests' % rootPath,
        '%s/tools/exclusive_card_tests_mem0' % rootPath,
        '%s/tools/exclusive_card_tests' % rootPath
    ]

    # Prefer the PR-specific ut_list; fall back to all_ut_list when ut_list
    # is missing or empty.
    case_file = '%s/build/ut_list' % rootPath
    all_need_run_cases = None
    if os.path.exists(case_file):
        with open(case_file, 'r') as f:
            all_need_run_cases = f.read().strip().split('\n')
        if len(all_need_run_cases) == 1 and all_need_run_cases[0] == '':
            all_need_run_cases = None
    if all_need_run_cases is None:
        case_file = '%s/build/all_ut_list' % rootPath
        with open(case_file, 'r') as f:
            all_need_run_cases = f.read().strip().split('\n')

    print("case_file: %s" % case_file)

    all_group_case = []
    for filename in parallel_case_file_list:
        with open(filename, 'r') as fi, open('%s_new' % filename, 'w') as new_f:
            new_case_file_list = []
            for line in fi.readlines():
                # "^job$|^a$|^b$" -> ['job', 'a', 'b', ...]
                case_line_list = line.replace('^', '').replace('|', '').split('$')
                new_case_line_list = list(
                    set(all_need_run_cases).intersection(set(case_line_list)))
                if len(new_case_line_list) != 0:
                    new_case_file_list.append(new_case_line_list)
                    all_group_case += new_case_line_list
                    all_need_run_cases = list(
                        set(all_need_run_cases).difference(set(all_group_case)))
            for line in new_case_file_list:
                cases = '$|^'.join(case for case in line)
                cases = '^job$|^%s$' % cases
                new_f.write(cases + '\n')

    # Whatever is left has no grouping info; run it without parallelism.
    cases = '^job'
    if len(all_need_run_cases) != 0:
        for case in all_need_run_cases:
            if case not in nightly_tests:
                cases = cases + '$|^%s' % case
        cases = '%s$' % cases
    with open('%s/tools/no_parallel_case_file' % rootPath, 'w') as new_f:
        new_f.write(cases + '\n')
if __name__ == "__main__":
    # CLI entry point: the first argument is the Paddle repository root.
    group_case_for_parallel(sys.argv[1])
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册