Unverified commit 23c9c885 authored by risemeup1, committed by GitHub

precise_test_logic_update (#47387)

* test,test=ljd_test

* modify get_ut_file_map.py,test=ljd_test

* modify paddle_build.sh,test=ljd_test

* test,test=ljd_test

* update precise test logic,test=ljd_test

* it is a test,test=ljd_test

* it is a test,test=ljd_test

* it is a test,test=ljd_test

* it is a test,test=ljd_test

* it is a test,test=ljd_test

* it is a test,test=ljd_test

* precise test logic update
Parent daf98c15
......@@ -1825,16 +1825,30 @@ function precise_card_test_single {
    set +x
    testcases=$1
    num=$2
    for case in $(echo $testcases | tr "$|^" "\n")
    for case in $(echo $testcases | tr "$|^" "\n" | awk '!/^$/')
    do
        cd ${PADDLE_ROOT}/build
        precise_card_test "^${case}$" $num
        # c++
        # if the test failed, skip it (it is written into prec_delta later); if it succeeded, collect its coverage
        if_test_failed=$(cat $tmp_dir/^${case}$.log | grep "The following tests FAILED:")
        if [[ "$if_test_failed" == "The following tests FAILED:" ]];then
            echo "$case failed, put it into prec_delta"
            continue
        else
            echo "$case succeeded"
        fi
        # c++
        if [ ! -d "${PADDLE_ROOT}/build/ut_map/$case" ];then
            mkdir ${PADDLE_ROOT}/build/ut_map/$case
        fi
        set -x
        find paddle/fluid -name '*.gcda'|xargs -I {} cp --path {} ut_map/$case
        find paddle/phi -name '*.gcda'|xargs -I {} cp --path {} ut_map/$case
        find paddle/utils -name '*.gcda'|xargs -I {} cp --path {} ut_map/$case
        find paddle/phi -name '*.gcno'|xargs -I {} cp --path {} ut_map/$case
        find paddle/utils -name '*.gcno'|xargs -I {} cp --path {} ut_map/$case
        find paddle/fluid -name '*.gcno'|xargs -I {} cp --path {} ut_map/$case
        python ${PADDLE_ROOT}/tools/get_single_test_cov.py ${PADDLE_ROOT} $case &
......@@ -1847,7 +1861,9 @@ function precise_card_test_single {
            fi
            mv python-coverage.data.* ${PADDLE_ROOT}/build/pytest/$case
        fi
        find paddle/fluid -name *.gcda | xargs rm -f #delete gcda
        find paddle/fluid -name *.gcda | xargs rm -f
        find paddle/phi -name *.gcda | xargs rm -f
        find paddle/utils -name *.gcda | xargs rm -f
    done
}
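The added check above decides, per test case, whether ctest reported any failure before coverage for that case is collected; failed cases are skipped and later written into prec_delta. A minimal Python sketch of the same decision rule (the helper name and the missing-log handling are illustrative, not part of paddle_build.sh):

```python
# Illustrative sketch: a ctest run is treated as failed when its log contains
# ctest's failure banner, mirroring the grep check in the shell code above.
from pathlib import Path

FAILURE_BANNER = "The following tests FAILED:"

def case_failed(log_path):
    log = Path(log_path)
    if not log.exists():
        # No log usually means the case never ran; treat it as failed so it
        # also ends up in prec_delta (assumption, not the script's behavior).
        return True
    return FAILURE_BANNER in log.read_text(errors="ignore")
```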
......@@ -2009,12 +2025,12 @@ set -x
#get notSuccessut, including the failed unittests and the unittests that were not executed
python ${PADDLE_ROOT}/tools/get_ut_file_map.py 'get_not_success_ut' ${PADDLE_ROOT}
#rerun the notSuccessut and get the mapping between notSuccessut and .cu files
get_failedUts_precise_map_file
#analyze the mapping between unit tests and .cu files
python ${PADDLE_ROOT}/tools/handle_h_cu_file.py 'analy_h_cu_file' $tmp_dir ${PADDLE_ROOT}
wait;
#rerun the notSuccessut and get the mapping between notSuccessut and .cu files
get_failedUts_precise_map_file
#generate python coverage and the python-file-to-tests map file
python ${PADDLE_ROOT}/tools/pyCov_multithreading.py ${PADDLE_ROOT}
......@@ -2117,12 +2133,6 @@ function get_failedUts_precise_map_file {
    if [[ -f "${PADDLE_ROOT}/build/utNotSuccess" ]]; then
        rerun_tests=`cat ${PADDLE_ROOT}/build/utNotSuccess`
        #remove pile to full h/cu file
        python ${PADDLE_ROOT}/tools/handle_h_cu_file.py 'remove_pile_from_h_file' ${PADDLE_ROOT}
        cd ${PADDLE_ROOT}/build
        cmake_base ${PYTHON_ABI:-""}
        build ${parallel_number}
        pip uninstall -y paddlepaddle-gpu
        pip install ${PADDLE_ROOT}/build/python/dist/*whl
        precise_card_test_single "$rerun_tests"
        wait;
......
......@@ -141,16 +141,6 @@ def classify_cases_by_mem(rootPath):
                    case_mem_1_line = case_mem_1_line + '|^' + index[0] + '$'
                else:
                    f_not_0.write(case_mem_1_line + '\n')
                    '''
                    if len(always_timeout_list
                           ) != 0 and cardType == 'single_card_tests' and count > 25:
                        f.write(case_mem_1_line + '|^%s$\n' %
                                always_timeout_list[0])
                        always_timeout_list.pop(0)
                    else:
                        f.write(case_mem_1_line + '\n')
                    count += 1
                    '''
                    case_mem_1_line = '^job$|^' + index[0] + '$'
                    mem_1_sum = index[1]
            f_not_0.write(case_mem_1_line + '\n')
......
......@@ -464,7 +464,8 @@ class PRChecker(object):
            if ret:
                with open('prec_delta') as delta:
                    for ut in delta:
                        ut_list.append(ut.rstrip('\r\n'))
                        ut = ut.rstrip('\r\n')
                        if ut not in ut_list:
                            ut_list.append(ut)
            else:
                print('PREC download prec_delta failed')
                exit(1)
......
......@@ -21,7 +21,12 @@ def getFNDAFile(rootPath, test):
    filename = '%s/build/ut_map/%s/coverage.info.tmp' % (rootPath, test)
    fn_filename = '%s/build/ut_map/%s/fnda.tmp' % (rootPath, test)
    os.system('touch %s' % fn_filename)
    f = open(filename)
    try:
        f = open(filename)
        print("open %s successfully" % filename)
    except FileNotFoundError:
        print("%s is not found." % filename)
        return
    lines = f.readlines()
    for line in lines:
        line = line.replace('\n', '')
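An equivalent form of the guarded open added above would use a context manager so the handle is always closed; a short sketch, assuming a missing coverage.info.tmp should simply abort this step (the helper name is illustrative, not part of the tool):

```python
# Illustrative sketch: read the lcov tmp file if it exists; return None so the
# caller can bail out, as getFNDAFile does above when the file is missing.
def read_lines_if_exists(filename):
    try:
        with open(filename) as f:
            print("open %s successfully" % filename)
            return f.readlines()
    except FileNotFoundError:
        print("%s is not found." % filename)
        return None
```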
......@@ -47,8 +52,22 @@ def analysisFNDAFile(rootPath, test):
    )
    os.system('touch %s' % related_ut_map_file)
    os.system('touch %s' % notrelated_ut_map_file)
    if os.path.isfile(related_ut_map_file) and os.path.isfile(
        notrelated_ut_map_file
    ):
        print("made related.txt and not_related.txt successfully")
    else:
        print("failed to make related.txt and not_related.txt")
        return
    fn_filename = '%s/build/ut_map/%s/fnda.tmp' % (rootPath, test)
    f = open(fn_filename)
    try:
        f = open(fn_filename)
        print("open %s successfully" % fn_filename)
    except FileNotFoundError:
        print("%s is not found." % fn_filename)
        return
    data = f.read().split('SF:')
    related_file_list = []
    for message in data:
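For context, coverage.info.tmp (and the fnda.tmp derived from it) is lcov tracefile data: each section begins with an `SF:<source file>` record followed by `FNDA:<hit count>,<function>` lines. A hedged sketch of splitting such data into files the test did or did not exercise (the parsing details are illustrative, not analysisFNDAFile's exact logic):

```python
# Illustrative sketch: classify lcov sections by whether any FNDA record
# reports a non-zero hit count for the current test.
def classify_sections(fnda_text):
    related, notrelated = [], []
    for section in fnda_text.split('SF:'):
        lines = [l for l in section.splitlines() if l.strip()]
        if not lines:
            continue
        source_file = lines[0]
        hit = any(
            l.startswith('FNDA:') and not l.startswith('FNDA:0,')
            for l in lines[1:]
        )
        (related if hit else notrelated).append(source_file)
    return related, notrelated
```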
......@@ -99,14 +118,30 @@ def getCovinfo(rootPath, test):
        'cd %s && lcov --capture -d . -o coverage.info --rc lcov_branch_coverage=0 > /dev/null 2>&1'
        % ut_map_path
    )
    coverage_info_path = ut_map_path + '/coverage.info'
    file_size = os.path.getsize(coverage_info_path)
    if file_size == 0:
        print("coverage.info is empty, collecting coverage failed")
        return
    else:
        print("got coverage.info successfully")
    os.system(
        "cd %s && lcov --extract coverage.info '/paddle/paddle/fluid/framework/*' '/paddle/paddle/fluid/imperative/*' '/paddle/paddle/fluid/inference/*' '/paddle/paddle/fluid/memory/*' '/paddle/paddle/fluid/operators/*' '/paddle/paddle/fluid/string/*' '/paddle/paddle/fluid/distributed/*' '/paddle/paddle/fluid/platform/*' '/paddle/paddle/fluid/pybind/*' '/paddle/build/*' -o coverage.info.tmp --rc lcov_branch_coverage=0 > /dev/null 2>&1"
        "cd %s && lcov --extract coverage.info '/paddle/paddle/phi/*' '/paddle/paddle/utils/*' '/paddle/paddle/fluid/framework/*' '/paddle/paddle/fluid/imperative/*' '/paddle/paddle/fluid/inference/*' '/paddle/paddle/fluid/memory/*' '/paddle/paddle/fluid/operators/*' '/paddle/paddle/fluid/string/*' '/paddle/paddle/fluid/distributed/*' '/paddle/paddle/fluid/platform/*' '/paddle/paddle/fluid/pybind/*' '/paddle/build/*' -o coverage.info.tmp --rc lcov_branch_coverage=0 > /dev/null 2>&1"
        % ut_map_path
    )
    coverage_info_tmp = ut_map_path + '/coverage.info.tmp'
    coverage_tmp_size = os.path.getsize(coverage_info_tmp)
    if coverage_tmp_size == 0:
        print("coverage.info.tmp is empty, collecting coverage failed")
        return
    else:
        print("got coverage.info.tmp successfully")
    os.system('rm -rf %s/paddle' % ut_map_path)
    os.system('rm -rf %s/coverage.info' % ut_map_path)
    getFNDAFile(rootPath, test)
    analysisFNDAFile(rootPath, test)
    os.system('rm -rf %s/coverage.info.tmp' % ut_map_path)
if __name__ == "__main__":
......
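The extended lcov --extract call above hard-codes one long pattern string that now also covers paddle/phi and paddle/utils. A hedged sketch that builds the same invocation from a pattern list and checks that the output is non-empty, as the diff does (the function name and the use of subprocess are illustrative; the tool itself shells out via os.system):

```python
# Illustrative sketch: run lcov --extract with the pattern set from the diff
# above and report whether a non-empty coverage.info.tmp was produced.
import os
import subprocess

EXTRACT_PATTERNS = [
    '/paddle/paddle/phi/*',
    '/paddle/paddle/utils/*',
    '/paddle/paddle/fluid/framework/*',
    '/paddle/paddle/fluid/imperative/*',
    '/paddle/paddle/fluid/inference/*',
    '/paddle/paddle/fluid/memory/*',
    '/paddle/paddle/fluid/operators/*',
    '/paddle/paddle/fluid/string/*',
    '/paddle/paddle/fluid/distributed/*',
    '/paddle/paddle/fluid/platform/*',
    '/paddle/paddle/fluid/pybind/*',
    '/paddle/build/*',
]

def extract_coverage(ut_map_path):
    cmd = ['lcov', '--extract', 'coverage.info', *EXTRACT_PATTERNS,
           '-o', 'coverage.info.tmp', '--rc', 'lcov_branch_coverage=0']
    subprocess.run(cmd, cwd=ut_map_path, check=False,
                   stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    tmp = os.path.join(ut_map_path, 'coverage.info.tmp')
    return os.path.exists(tmp) and os.path.getsize(tmp) > 0
```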
......@@ -62,13 +62,33 @@ def handle_ut_file_map(rootPath):
    ut_file_map = {}
    count = 0
    not_success_file = open("%s/build/prec_delta" % rootPath, 'w')
    # if the test's ut_map dir was not created, write the test into prec_delta
    get_all_uts(rootPath)
    all_ut = '%s/build/all_uts_paddle' % rootPath
    with open(all_ut, 'r') as f:
        all_ut_list = []
        for ut in f.readlines():
            ut = ut.replace('\n', '')
            all_ut_list.append(ut.strip())
        f.close()
    for ut in all_ut_list:
        filedir = '%s/build/ut_map/%s' % (rootPath, ut)
        if not os.path.exists(filedir):
            not_success_file.write('%s\n' % ut)
            utNotSuccess_list.append(ut)
    # if fnda.tmp does not exist, write the test into prec_delta
    for ut in files:
        count = count + 1
        print("ut %s: %s" % (count, ut))
        coverage_info = '%s/%s/coverage.info.tmp' % (ut_map_path, ut)
        coverage_info = '%s/%s/fnda.tmp' % (ut_map_path, ut)
        if os.path.exists(coverage_info):
            filename = '%s/%s/related_%s.txt' % (ut_map_path, ut, ut)
            f = open(filename)
            try:
                f = open(filename)
                print("open %s successfully" % filename)
            except FileNotFoundError:
                print("%s is not found." % filename)
                return
            lines = f.readlines()
            for line in lines:
                line = line.replace('\n', '').strip()
......@@ -87,6 +107,7 @@ def handle_ut_file_map(rootPath):
                    ut_file_map[source_file] = []
                if ut not in ut_file_map[source_file]:
                    ut_file_map[source_file].append(ut)
            f.close()
        else:
            not_success_file.write('%s\n' % ut)
            utNotSuccess_list.append(ut)
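Taken together, the hunk above adds two reasons for a test to be written into prec_delta: its ut_map directory was never created, or it produced no fnda.tmp. A compact sketch of that rule (the helper name is illustrative, not part of get_ut_file_map.py):

```python
# Illustrative sketch: a test goes into prec_delta when its ut_map directory
# is missing or when no fnda.tmp was produced for it.
import os

def should_go_to_prec_delta(root_path, ut):
    ut_dir = '%s/build/ut_map/%s' % (root_path, ut)
    fnda_tmp = os.path.join(ut_dir, 'fnda.tmp')
    return not os.path.exists(ut_dir) or not os.path.exists(fnda_tmp)
```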
......@@ -98,7 +119,11 @@ def handle_ut_file_map(rootPath):
    for ut in files:
        if ut not in utNotSuccess_list:
            filename = '%s/%s/notrelated_%s.txt' % (ut_map_path, ut, ut)
            f = open(filename)
            try:
                f = open(filename)
                print("open %s successfully" % filename)
            except FileNotFoundError:
                print("%s is not found." % filename)
            lines = f.readlines()
            for line in lines:
                line = line.replace('\n', '').strip()
......@@ -110,7 +135,7 @@ def handle_ut_file_map(rootPath):
                    source_file = line
                if source_file not in ut_file_map:
                    ut_file_map[source_file] = []
            f.close()
    with open("%s/build/ut_file_map.json" % rootPath, "w") as f:
        json.dump(ut_file_map, f, indent=4)
......@@ -122,7 +147,7 @@ def notsuccessfuc(rootPath):
    count = 0
    # ut failed!!
    for ut in files:
        coverage_info = '%s/%s/coverage.info.tmp' % (ut_map_path, ut)
        coverage_info = '%s/%s/fnda.tmp' % (ut_map_path, ut)
        if os.path.exists(coverage_info):
            pass
        else:
......@@ -130,6 +155,7 @@ def notsuccessfuc(rootPath):
            utNotSuccess = utNotSuccess + '^%s$|' % ut
    # ut not executed
    get_all_uts(rootPath)
    with open("/paddle/build/all_uts_paddle", "r") as f:
        data = f.readlines()
......@@ -149,35 +175,32 @@ def notsuccessfuc(rootPath):
def ut_file_map_supplement(rootPath):
    ut_file_map_new = "%s/build/ut_file_map.json" % rootPath
    os.system('mkdir /pre_test')
    os.system('mkdir /pre_test_tmp')
    os.system(
        'cd /pre_test && wget --no-proxy https://paddle-docker-tar.bj.bcebos.com/pre_test/ut_file_map.json --no-check-certificate'
        'cd /pre_test_tmp && wget --no-proxy https://paddle-docker-tar.bj.bcebos.com/pre_test/ut_file_map.json --no-check-certificate'
    )
    ut_file_map_old = "/pre_test/ut_file_map.json"
    ut_file_map_old = "/pre_test_tmp/ut_file_map.json"
    with open(ut_file_map_new, 'r') as load_f:
        load_dict_new = json.load(load_f)
    with open(ut_file_map_old, 'r') as f:
        load_dict_old = json.load(f)
    all_uts_paddle = '%s/build/all_uts_paddle' % rootPath
    with open(all_uts_paddle, 'r') as f:
        all_uts_paddle_list = []
        for ut in f.readlines():
            all_uts_paddle_list.append(ut.strip())
        f.close()
    for filename in load_dict_old:
        if filename not in load_dict_new:
            load_dict_new[filename] = load_dict_old[filename]
    with open("/pre_test/ut_file_map.json", "w") as f:
    with open("/pre_test_tmp/ut_file_map.json", "w") as f:
        json.dump(load_dict_new, f, indent=4)
    print("load_dict_new success!!")
    os.system(
        'cd /pre_test && wget --no-proxy https://paddle-docker-tar.bj.bcebos.com/pre_test/prec_delta --no-check-certificate'
        'cd /pre_test_tmp && wget --no-proxy https://paddle-docker-tar.bj.bcebos.com/pre_test/prec_delta --no-check-certificate'
    )
    prec_delta_old = '/pre_test/prec_delta'
    prec_delta_old = '/pre_test_tmp/prec_delta'
    prec_delta_new = "%s/build/prec_delta" % rootPath
    with open(prec_delta_old, 'r') as f:
        prec_delta_old_list = []
......@@ -189,15 +212,16 @@ def ut_file_map_supplement(rootPath):
        for ut in f.readlines():
            prec_delta_new_list.append(ut.strip())
        f.close()
    for ut in prec_delta_old_list:
        filename = '%s/build/ut_map/%s/coverage.info.tmp' % (rootPath, ut)
        filename = '%s/build/ut_map/%s/fnda.tmp' % (rootPath, ut)
        if ut in all_uts_paddle_list:
            if not os.path.exists(filename) and ut not in prec_delta_new_list:
                prec_delta_new_list.append(ut)
    prec_delta_new_list.append(
        'test_py_reader_error_msg'
    )  # add a python case for pycoverage
    prec_delta_file = open("/pre_test/prec_delta", 'w')
    prec_delta_file = open("/pre_test_tmp/prec_delta", 'w')
    for ut in prec_delta_new_list:
        prec_delta_file.write(ut + '\n')
    print("prec_delta_file success!!")
......
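The supplement step above merges the previously published ut_file_map.json into the freshly generated one, keeping old entries only for source files this run did not cover, and writes the result to /pre_test_tmp. A hedged sketch of that merge (the file paths are parameters here purely for illustration):

```python
# Illustrative sketch: merge the previously published mapping into the new
# one; entries produced by the current run always win.
import json

def merge_ut_file_maps(new_path, old_path, out_path):
    with open(new_path) as f:
        new_map = json.load(f)
    with open(old_path) as f:
        old_map = json.load(f)
    for source_file, uts in old_map.items():
        if source_file not in new_map:
            new_map[source_file] = uts
    with open(out_path, 'w') as f:
        json.dump(new_map, f, indent=4)
```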