# --- Precise-coverage helpers added to paddle/scripts/paddle_build.sh ---
# NOTE(review): the patch also adds a `ci_preciseTest` branch to main() that
# calls insert_pile_to_h_cu_diff, rebuilds, and runs get_precise_tests_map_file.

# Instrument every .h/.cu file (outside build/) so each translation unit
# prints its own path at load time; also record the files' md5 sums.
function insert_pile_to_h_cu_diff {
    # TODO get develop h/cu md5
    cd ${PADDLE_ROOT}
    find ${PADDLE_ROOT} -name '*.h'  | grep -v ${PADDLE_ROOT}/build >> ${PADDLE_ROOT}/tools/h_cu_files.log
    find ${PADDLE_ROOT} -name '*.cu' | grep -v ${PADDLE_ROOT}/build >> ${PADDLE_ROOT}/tools/h_cu_files.log
    python ${PADDLE_ROOT}/tools/handle_h_cu_file.py 'get_h_file_md5' ${PADDLE_ROOT}

    # TODO insert pile to diff h/cu file

    # insert pile to full h/cu file
    python ${PADDLE_ROOT}/tools/handle_h_cu_file.py 'insert_pile_to_h_file' ${PADDLE_ROOT}
}

# Run every case in the '^a$|^b$' pattern list $1 on $2 GPU(s), one at a
# time, then collect its C++ (.gcda/.gcno) and Python coverage artifacts
# into per-case directories under build/ut_map and build/pytest.
function precise_card_test_single {
    set +e
    set +x
    testcases=$1
    num=$2
    # '$', '|' and '^' are all translated to newlines, turning the joined
    # ctest regex back into one bare test name per line.
    for case in $(echo $testcases | tr "$|^" "\n")
    do
        cd ${PADDLE_ROOT}/build
        precise_card_test "^${case}$" $num
        # C++ coverage: start each case from a clean per-case directory.
        if [ -d "${PADDLE_ROOT}/build/ut_map/$case" ]; then
            rm -rf ${PADDLE_ROOT}/build/ut_map/$case
        fi
        set -x
        mkdir -p ${PADDLE_ROOT}/build/ut_map/$case
        find paddle/fluid -name '*.gcda' | xargs -I {} cp --path {} ut_map/$case
        find paddle/fluid -name '*.gcno' | xargs -I {} cp --path {} ut_map/$case
        python ${PADDLE_ROOT}/tools/get_single_test_cov.py ${PADDLE_ROOT} $case &

        # Python coverage: stash the per-case python-coverage data, if any.
        ls python-coverage.data.*
        if [[ $? == 0 ]]; then
            mkdir -p ${PADDLE_ROOT}/build/pytest/$case
            mv python-coverage.data.* ${PADDLE_ROOT}/build/pytest/$case
        fi
        # Reset gcov counters so the next case starts from zero.
        # BUGFIX: quote the glob so find receives the pattern, not an
        # expansion from the current directory.
        find paddle/fluid -name '*.gcda' | xargs rm -f
    done
}

# Run the ctest pattern $1 with a 500s timeout; $2 selects 1 or 2 GPUs
# (default when omitted: 2, i.e. the full CUDA_VISIBLE_DEVICES list).
function precise_card_test() {
    set -m
    testcases=$1
    if (( $# > 1 )); then
        cardnumber=$2
        cuda_list="0"
        if [ $cardnumber -eq 2 ]; then
            cuda_list=${CUDA_VISIBLE_DEVICES}
        else
            cuda_list="0"
        fi
    else
        cardnumber=2
        cuda_list=${CUDA_VISIBLE_DEVICES}
    fi

    if [[ "$testcases" == "" ]]; then
        return 0
    fi

    echo "****************************************************************"
    echo "***Running ut: $testcases***"
    echo "****************************************************************"

    tmpfile=$tmp_dir/$testcases".log"
    env CUDA_VISIBLE_DEVICES=$cuda_list ctest -I 0,,1 -R "($testcases)" --timeout 500 --output-on-failure -V -j 1 > $tmpfile
    set +m
}

# Classify every registered ctest case (exclusive / multi-GPU / single-GPU),
# run them through precise_card_test_single, then build the file->ut map.
function get_precise_tests_map_file {
    cd ${PADDLE_ROOT}/build
    pip install ${PADDLE_ROOT}/build/python/dist/*whl
    ut_total_startTime_s=`date +%s`
    EXIT_CODE=0;
    test_cases=$(ctest -N -V)        # get all test cases
    single_card_tests=''             # cases that take one GPU
    exclusive_tests=''               # cases that must run exclusively
    multiple_card_tests=''           # cases that take two GPUs
    is_exclusive=''                  # current case is exclusive type
    is_multicard=''                  # current case is multi-GPU type
set +x

    while read -r line; do
        if [[ "$line" == "" ]]; then
            continue
        fi
        read matchstr <<< $(echo "$line"|grep -oEi 'Test[ \t]+#')
        if [[ "$matchstr" == "" ]]; then
            # LABELS lines are parsed here:
            # RUN_TYPE=EXCLUSIVE -> run alone; RUN_TYPE=DIST -> two GPUs
            read is_exclusive <<< $(echo "$line"|grep -oEi "RUN_TYPE=EXCLUSIVE")
            read is_multicard <<< $(echo "$line"|grep -oEi "RUN_TYPE=DIST")
            continue
        fi
        read testcase <<< $(echo "$line"|grep -oEi "\w+$")

        if [[ "$is_multicard" == "" ]]; then
            # trick: a "test_dist_" prefix implies a 2-GPU case
            read is_multicard <<< $(echo "$testcase"|grep -oEi "test_dist_")
        fi

        if [[ "$is_exclusive" != "" ]]; then
            if [[ "$exclusive_tests" == "" ]]; then
                exclusive_tests="^$testcase$"
            else
                exclusive_tests="$exclusive_tests|^$testcase$"
            fi
        elif [[ "$is_multicard" != "" ]]; then
            if [[ "$multiple_card_tests" == "" ]]; then
                multiple_card_tests="^$testcase$"
            else
                multiple_card_tests="$multiple_card_tests|^$testcase$"
            fi
        else
            # BUGFIX: compare the joined pattern's LENGTH; the original
            # "[[ \"$single_card_tests\" -gt 3000 ]]" forces an arithmetic
            # evaluation of the pattern string and errors out.  Spill the
            # overflow into a second batch to keep the regex short.
            if [[ ${#single_card_tests} -gt 3000 ]]; then
                if [[ "$single_card_tests_1" == "" ]]; then
                    single_card_tests_1="^$testcase$"
                else
                    single_card_tests_1="$single_card_tests_1|^$testcase$"
                fi
                continue
            fi
            if [[ "$single_card_tests" == "" ]]; then
                single_card_tests="^$testcase$"
            else
                single_card_tests="$single_card_tests|^$testcase$"
            fi
        fi
        is_exclusive=''
        is_multicard=''
        is_nightly=''
        matchstr=''
        testcase=''
    done <<< "$test_cases";

set -x
    mkdir -p ${PADDLE_ROOT}/build/ut_map
    mkdir -p ${PADDLE_ROOT}/build/pytest

    precise_card_test_single "$single_card_tests" 1
    precise_card_test_single "$single_card_tests_1" 1
    precise_card_test_single "$multiple_card_tests" 2
    precise_card_test_single "$exclusive_tests"      # defaults to 2 GPUs

    python ${PADDLE_ROOT}/tools/get_ut_file_map.py 'get_not_success_ut' ${PADDLE_ROOT}

    # rerun everything that produced no coverage the first time
    if [[ -f "${PADDLE_ROOT}/build/utNotSuccess" ]]; then
        rerun_tests=`cat ${PADDLE_ROOT}/build/utNotSuccess`
        precise_card_test_single "$rerun_tests"
    fi
    wait;

    # generate python coverage and per-test python file lists
    python ${PADDLE_ROOT}/tools/pyCov_multithreading.py ${PADDLE_ROOT}

    # analyse instrumented h/cu output into per-test file lists
    python ${PADDLE_ROOT}/tools/handle_h_cu_file.py 'analy_h_cu_file' $tmp_dir ${PADDLE_ROOT}

    # merge everything into ut_file_map.json
    python ${PADDLE_ROOT}/tools/get_ut_file_map.py 'get_ut_map' ${PADDLE_ROOT}
    wait;
}
import os
import re
import sys
from xml.etree import ElementTree

# Hit lines whose source text starts with one of these prefixes are
# structural boilerplate, not evidence that a test really exercised the file.
_SKIP_PREFIXES = ('from', 'import', '__all__', 'def', 'class', '"""', '@',
                  '\'\'\'', 'logger', '_logger', 'logging', 'r"""', 'pass',
                  'try', 'except', 'if __name__ == "__main__"')

# Trivial constant assignments (a='b', a="b", a=0, a=None, a=[ ...) are
# likewise ignored when deciding whether a file counts as covered.
_TRIVIAL_ASSIGN = re.compile(
    r"(.*) = ('*')|(.*) = (\"*\")|(.*) = (\d)|(.*) = (-\d)|"
    r"(.*) = (None)|(.*) = (True)|(.*) = (False)|(.*) = (URL_PREFIX*)|"
    r"(.*) = (\[)|(.*) = (\{)|(.*) = (\()")


def analysisPyXml(rootPath, ut):
    """Parse the python-coverage XML report of unit test *ut* and append every
    source file it genuinely executed to build/ut_map/<ut>/<ut>.txt.

    A file counts as covered as soon as one of its hit lines is a real
    statement (not an import/def/class/decorator/docstring/trivial constant).

    Args:
        rootPath: repository root ('/paddle' on CI).
        ut: unit test name; locates both the XML report and the output file.

    Returns:
        (pyCov_file, error_files): files counted as covered, and files whose
        source could not be read back from disk.  Printed as well, matching
        the original CI log output.
    """
    xml_path = '%s/build/pytest/%s/python-coverage.xml' % (rootPath, ut)
    ut_map_file = '%s/build/ut_map/%s/%s.txt' % (rootPath, ut, ut)
    root = ElementTree.parse(xml_path).getroot()
    error_files = []
    pyCov_file = []
    for clazz in root.findall('packages/package/classes/class'):
        clazz_filename = clazz.attrib.get('filename')
        if not clazz_filename.startswith('/paddle'):
            # coverage sometimes reports repo-relative paths
            clazz_filename = '/paddle/%s' % clazz_filename
        source_lines = None  # read lazily, once per class
        for line in clazz.findall('lines/line'):
            if int(line.attrib.get('hits')) == 0:
                continue
            # BUGFIX: the original shelled out to `sed -n Np` through the
            # Python-2-only `commands` module (the file also imports the
            # Python-3-only `queue`, so it could never import).  Read the
            # source file directly instead — no subprocess per line.
            if source_lines is None:
                try:
                    with open(clazz_filename) as src:
                        source_lines = src.readlines()
                except (IOError, OSError):
                    # unreadable source: same as the original's sed failure
                    error_files.append(clazz_filename)
                    break
            line_number = int(line.attrib.get('number'))
            statement = (source_lines[line_number - 1].strip()
                         if 0 < line_number <= len(source_lines) else '')
            if statement.startswith(_SKIP_PREFIXES):
                continue
            if _TRIVIAL_ASSIGN.match(statement) is None:
                pyCov_file.append(clazz_filename)
                with open(ut_map_file, 'a') as out:
                    out.write('%s\n' % clazz_filename)
                break  # one real hit is enough for this file
    print("============len(pyCov_file)")
    print(len(pyCov_file))
    print("============error")
    print(error_files)
    return pyCov_file, error_files


if __name__ == "__main__":
    rootPath = sys.argv[1]
    ut = sys.argv[2]
    analysisPyXml(rootPath, ut)
Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import json +import time +import sys +import re + + +def getFNDAFile(rootPath, test): + filename = '%s/build/ut_map/%s/coverage.info.tmp' % (rootPath, test) + fn_filename = '%s/build/ut_map/%s/fnda.tmp' % (rootPath, test) + os.system('touch %s' % fn_filename) + f = open(filename) + lines = f.readlines() + for line in lines: + line = line.replace('\n', '') + if line.startswith(('SF:')): + os.system('echo %s >> %s' % (line, fn_filename)) + elif line.startswith(('FNDA:')): + hit = int(line.split('FNDA:')[1].split(',')[0]) + if hit != 0: + os.system('echo %s >> %s' % (line, fn_filename)) + f.close() + + +def analysisFNDAFile(rootPath, test): + ut_map_file = '%s/build/ut_map/%s/%s.txt' % (rootPath, test, test) + os.system('touch %s' % ut_map_file) + fn_filename = '%s/build/ut_map/%s/fnda.tmp' % (rootPath, test) + f = open(fn_filename) + data = f.read().split('SF:') + for message in data: + if 'FNDA:' in message: + message_list = message.split('\n') + clazz_filename = message_list[0] + if not clazz_filename.endswith('.h'): #filter .h's Analysis + for i in range(1, len(message_list) - 1): + fn = message_list[i] + matchObj = re.match( + r'(.*)Maker(.*)|(.*)Touch(.*)Regist(.*)|(.*)Touch(.*)JitKernel(.*)|(.*)converterC2Ev(.*)', + fn, re.I) + if matchObj == None: + os.system('echo %s >> %s' % + (clazz_filename, ut_map_file)) + break + f.close() + + +def getCovinfo(rootPath, test): + 
ut_map_path = '%s/build/ut_map/%s' % (rootPath, test) + os.system( + 'cd %s && lcov --capture -d . -o coverage.info --rc lcov_branch_coverage=0 > /dev/null 2>&1' + % ut_map_path) + os.system( + "cd %s && lcov --extract coverage.info '/paddle/paddle/fluid/framework/*' '/paddle/paddle/fluid/imperative/*' '/paddle/paddle/fluid/inference/*' '/paddle/paddle/fluid/memory/*' '/paddle/paddle/fluid/operators/*' '/paddle/paddle/fluid/string/*' '/paddle/paddle/fluid/distributed/*' '/paddle/paddle/fluid/extension/*' '/paddle/paddle/fluid/platform/*' '/paddle/paddle/fluid/pybind/*' -o coverage.info.tmp --rc lcov_branch_coverage=0 > /dev/null 2>&1" + % ut_map_path) + os.system('rm -rf %s/paddle' % ut_map_path) + os.system('rm -rf %s/coverage.info' % ut_map_path) + getFNDAFile(rootPath, test) + analysisFNDAFile(rootPath, test) + + +if __name__ == "__main__": + rootPath = sys.argv[1] + case = sys.argv[2] + getCovinfo(rootPath, case) diff --git a/tools/get_ut_file_map.py b/tools/get_ut_file_map.py new file mode 100644 index 00000000000..d952a299d49 --- /dev/null +++ b/tools/get_ut_file_map.py @@ -0,0 +1,196 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import json
import os
import sys


def get_all_paddle_file(rootPath):
    """Return every file path under paddle/fluid/ and python/, and dump the
    list to build/all_file_paddle, one path per line.

    BUGFIX: the file used to be opened for writing and left empty.
    """
    traverse_files = ['%s/paddle/fluid' % rootPath, '%s/python' % rootPath]
    all_file_paddle = '%s/build/all_file_paddle' % rootPath
    all_file_paddle_list = []
    with open(all_file_paddle, 'w') as f:
        for filename in traverse_files:
            for path, dir_list, file_list in os.walk(filename):
                for file_name in file_list:
                    full_path = os.path.join(path, file_name)
                    all_file_paddle_list.append(full_path)
                    f.write('%s\n' % full_path)
    return all_file_paddle_list


def get_all_uts(rootPath):
    """List every registered ctest case name into build/all_uts_paddle.
    Requires a configured build directory (runs `ctest -N -V`)."""
    all_uts_paddle = '%s/build/all_uts_paddle' % rootPath
    os.system(
        'cd %s/build && ctest -N -V | grep -Ei "Test[ \t]+#" | grep -oEi "\w+$" > %s'
        % (rootPath, all_uts_paddle))


def remove_useless_file(rootPath):
    """Drop entries from build/ut_file_map.json whose source file no longer
    exists in the repository (paddle/fluid or python trees)."""
    all_file_paddle_list = get_all_paddle_file(rootPath)
    ut_file_map_new = {}
    ut_file_map = "%s/build/ut_file_map.json" % rootPath
    with open(ut_file_map, 'r') as load_f:
        load_dict = json.load(load_f)
    for key in load_dict:
        if key in all_file_paddle_list:
            ut_file_map_new[key] = load_dict[key]

    with open("%s/build/ut_file_map.json" % rootPath, "w") as f:
        json.dump(ut_file_map_new, f, indent=4)
    print("remove_useless_file ut_file_map success!!")


def handle_ut_file_map(rootPath):
    """Build build/ut_file_map.json: {source file -> [uts that cover it]}.

    Walks every per-test directory under build/ut_map.  A test directory
    without coverage.info.tmp is considered failed and is written to
    build/prec_delta instead of the map.
    """
    ut_map_path = "%s/build/ut_map" % rootPath
    files = os.listdir(ut_map_path)
    ut_file_map = {}
    count = 0
    utNotSuccess = ''
    with open("%s/build/prec_delta" % rootPath, 'w') as not_success_file:
        for ut in files:
            count = count + 1
            print("ut %s: %s" % (count, ut))
            coverage_info = '%s/%s/coverage.info.tmp' % (ut_map_path, ut)
            if not os.path.exists(coverage_info):
                not_success_file.write('%s\n' % ut)
                utNotSuccess = utNotSuccess + '^%s$|' % ut
                continue
            filename = '%s/%s/%s.txt' % (ut_map_path, ut, ut)
            with open(filename) as f:
                for line in f:
                    line = line.replace('\n', '').strip()
                    if line == '':
                        continue
                    if line.startswith('/paddle/build'):
                        # a build artifact maps back to its source path
                        source_file = line.replace('/build', '')
                        #source_file = re.sub('.pb.*', '.proto', source_file)
                    elif 'precise test map fileeee:' in line:
                        # line printed by an instrumented .h/.cu constructor
                        source_file = line.split(
                            'precise test map fileeee:')[1].strip()
                    else:
                        source_file = line
                    if source_file not in ut_file_map:
                        ut_file_map[source_file] = []
                    if ut not in ut_file_map[source_file]:
                        ut_file_map[source_file].append(ut)

    with open("%s/build/ut_file_map.json" % rootPath, "w") as f:
        json.dump(ut_file_map, f, indent=4)

    print("utNotSuccess:")
    print(utNotSuccess)


def notsuccessfuc(rootPath):
    """Collect uts that failed (no coverage produced) or never ran, and
    write them as one ctest '^a$|^b$' regex into build/utNotSuccess."""
    utNotSuccess = ''
    ut_map_path = "%s/build/ut_map" % rootPath
    files = os.listdir(ut_map_path)
    count = 0
    # ut failed: directory exists but lcov produced no coverage.info.tmp
    for ut in files:
        coverage_info = '%s/%s/coverage.info.tmp' % (ut_map_path, ut)
        if not os.path.exists(coverage_info):
            count = count + 1
            utNotSuccess = utNotSuccess + '^%s$|' % ut

    # ut not exec: registered with ctest but no per-test directory at all
    get_all_uts(rootPath)
    # BUGFIX: was hard-coded "/paddle/build/all_uts_paddle", ignoring the
    # rootPath parameter every other path here is derived from.
    with open("%s/build/all_uts_paddle" % rootPath, "r") as f:
        data = f.readlines()
    for ut in data:
        ut = ut.replace('\n', '').strip()
        if ut not in files:
            print(ut)
            count = count + 1
            utNotSuccess = utNotSuccess + '^%s$|' % ut

    if utNotSuccess != '':
        print("utNotSuccess count: %s" % count)
        with open('%s/build/utNotSuccess' % rootPath, 'w') as f:
            f.write(utNotSuccess[:-1])  # drop the trailing '|'


def ut_file_map_supplement(rootPath):
    """Merge the freshly built ut_file_map.json / prec_delta with the
    previously published versions downloaded from BOS, writing the merged
    results to /pre_test.  Requires network access."""
    ut_file_map_new = "%s/build/ut_file_map.json" % rootPath
    os.system('mkdir /pre_test')
    os.system(
        'cd /pre_test && wget --no-proxy https://paddle-docker-tar.bj.bcebos.com/pre_test/ut_file_map.json --no-check-certificate'
    )
    ut_file_map_old = "/pre_test/ut_file_map.json"
    ut_file_map_full = {}
    with open(ut_file_map_new, 'r') as load_f:
        load_dict_new = json.load(load_f)
    with open(ut_file_map_old, 'r') as f:
        load_dict_old = json.load(f)

    # new entries win; old uts for the same file are appended, and files
    # only present in the old map are kept as-is
    for filename in load_dict_new:
        ut_file_map_full[filename] = load_dict_new[filename]
        if filename in load_dict_old:
            for ut in load_dict_old[filename]:
                if ut not in ut_file_map_full[filename]:
                    ut_file_map_full[filename].append(ut)

    for filename in load_dict_old:
        if filename not in load_dict_new:
            ut_file_map_full[filename] = load_dict_old[filename]

    with open("/pre_test/ut_file_map.json", "w") as f:
        json.dump(ut_file_map_full, f, indent=4)
    print("ut_file_map_full success!!")

    all_uts_paddle = '%s/build/all_uts_paddle' % rootPath
    with open(all_uts_paddle, 'r') as f:
        all_uts_paddle_list = f.readlines()
    os.system(
        'cd /pre_test && wget --no-proxy https://paddle-docker-tar.bj.bcebos.com/pre_test/prec_delta --no-check-certificate'
    )
    prec_delta_old = '/pre_test/prec_delta'
    prec_delta_new = "%s/build/prec_delta" % rootPath
    with open(prec_delta_old, 'r') as f:
        prec_delta_old_list = f.readlines()
    with open(prec_delta_new, 'r') as f:
        prec_delta_new_list = f.readlines()
    # carry over old delta entries that neither succeeded this run nor
    # disappeared from the test suite
    for ut in prec_delta_old_list:
        if ut not in prec_delta_new_list and ut not in all_uts_paddle_list:
            prec_delta_new_list.append(ut)
    with open("/pre_test/prec_delta", 'w') as prec_delta_file:
        for ut in prec_delta_new_list:
            prec_delta_file.write(ut)


if __name__ == "__main__":
    func = sys.argv[1]
    if func == 'get_not_success_ut':
        rootPath = sys.argv[2]
        notsuccessfuc(rootPath)
    elif func == 'get_ut_map':
        rootPath = sys.argv[2]
        handle_ut_file_map(rootPath)
        remove_useless_file(rootPath)
        ut_file_map_supplement(rootPath)
import hashlib
import os
import queue
import sys
import threading
import time

taskQueue = queue.Queue()

# Include-guarded constructor appended to each instrumented header/.cu file.
# When any translation unit built from the file is loaded, it prints the
# 'precise test map fileeee:' marker line that the log analysis greps for.
# NOTE(review): the original echoed a bare '#include' with no header name
# (apparently lost in patch transit); printf requires <cstdio>.
_PILE_TEMPLATE = '''
#ifndef _PRECISE%(guard)s_

#define _PRECISE%(guard)s_

#include <cstdio>

__attribute__((constructor)) static void calledFirst%(func)s()
{
    printf("precise test map fileeee: %%s\\n", __FILE__);
}

#endif
'''


def worker(fun):
    """Thread body: apply *fun* to queued work items forever."""
    while True:
        temp = taskQueue.get()
        fun(temp)
        taskQueue.task_done()


def threadPool(threadPoolNum):
    """Create (without starting) *threadPoolNum* daemon worker threads."""
    pool = []
    for _ in range(threadPoolNum):
        # BUGFIX: args was the set literal {doFun, }; it must be a tuple.
        thread = threading.Thread(target=worker, args=(doFun, ))
        thread.daemon = True
        pool.append(thread)
    return pool


def get_h_file_md5(rootPath):
    """Append '<md5>  <path>' (md5sum format) for every file listed in
    tools/h_cu_files.log to tools/h_cu_md5.log.

    Computed with hashlib instead of one `md5sum` subprocess per file;
    unreadable files are skipped, matching md5sum's keep-going behaviour.
    """
    h_cu_files = '%s/tools/h_cu_files.log' % rootPath
    md5_log = '%s/tools/h_cu_md5.log' % rootPath
    with open(h_cu_files) as listing, open(md5_log, 'a') as out:
        for line in listing:
            path = line.strip()
            try:
                with open(path, 'rb') as f:
                    digest = hashlib.md5(f.read()).hexdigest()
            except (IOError, OSError):
                continue
            out.write('%s  %s\n' % (digest, path))


def insert_pile_to_h_file(rootPath):
    """Append the self-reporting constructor (see _PILE_TEMPLATE) to every
    file listed in tools/h_cu_files.log, in one write per file instead of
    seven `echo` subprocesses."""
    h_cu_files = '%s/tools/h_cu_files.log' % rootPath
    with open(h_cu_files) as listing:
        for line in listing:
            path = line.strip()
            # '/a/b.h' -> '_a_b_h': a unique identifier per file
            func = path.replace('/', '_').replace('.', '_')
            with open(path, 'a') as f:
                f.write(_PILE_TEMPLATE % {'guard': func.upper(),
                                          'func': func})


def get_h_cu_file(file_path):
    """Extract 'precise test map fileeee:' lines from one test log and
    append them to build/ut_map/<ut>/<ut>.txt, collapsing adjacent
    duplicates exactly like the original `cat | grep | uniq` pipeline."""
    rootPath, dir_path, filename = file_path
    ut = filename.replace('^', '').replace('$', '').replace('.log', '')
    out_file = '%s/build/ut_map/%s/%s.txt' % (rootPath, ut, ut)
    previous = None
    with open('%s/%s' % (dir_path, filename)) as log, \
            open(out_file, 'a') as out:
        for line in log:
            if 'precise test map fileeee:' in line and line != previous:
                out.write(line)
                previous = line


def doFun(file_path):
    get_h_cu_file(file_path)


def main(rootPath, dir_path):
    """Feed every per-test log in *dir_path* to a 23-thread worker pool
    that maps instrumented-header output into build/ut_map."""
    startTime = int(time.time())
    pool = threadPool(23)
    for thread in pool:
        thread.start()
    for filename in os.listdir(dir_path):
        taskQueue.put([rootPath, dir_path, filename])
    taskQueue.join()
    endTime = int(time.time())
    print('analy h/cu file cost Time: %s' % (endTime - startTime))


if __name__ == "__main__":
    func = sys.argv[1]
    if func == 'get_h_file_md5':
        rootPath = sys.argv[2]
        get_h_file_md5(rootPath)
    elif func == 'insert_pile_to_h_file':
        rootPath = sys.argv[2]
        insert_pile_to_h_file(rootPath)
    elif func == 'analy_h_cu_file':
        dir_path = sys.argv[2]
        rootPath = sys.argv[3]
        main(rootPath, dir_path)
import os
import queue
import sys
import threading
import time

# BUGFIX: dropped `import commands` — a Python-2-only module that made this
# script fail at import on Python 3 (it was never used here anyway).

taskQueue = queue.Queue()


def worker(fun):
    """Thread body: apply *fun* to queued work items forever."""
    while True:
        temp = taskQueue.get()
        fun(temp)
        taskQueue.task_done()


def threadPool(threadPoolNum):
    """Create (without starting) *threadPoolNum* daemon worker threads."""
    pool = []
    for _ in range(threadPoolNum):
        # BUGFIX: args was the set literal {doFun, }; it must be a tuple.
        thread = threading.Thread(target=worker, args=(doFun, ))
        thread.daemon = True
        pool.append(thread)
    return pool


def getPyCovResult(params):
    """Combine one test's python-coverage data files into XML, then run
    analysisPyXml over it.  Requires the `coverage` CLI on PATH."""
    rootPath, ut = params[0], params[1]
    print("ut: %s" % ut)
    startTime = int(time.time())
    path = '%s/build/pytest/%s' % (rootPath, ut)
    os.system('cd %s && coverage combine `ls python-coverage.data.*`' % path)
    os.system('cd %s && pwd && coverage xml -i -o python-coverage.xml' % path)
    os.system("python %s/tools/analysisPyXml.py %s %s" %
              (rootPath, rootPath, ut))
    endTime = int(time.time())
    print('pyCov Time: %s' % (endTime - startTime))


def doFun(params):
    getPyCovResult(params)


def main(rootPath):
    """Process every per-test directory under build/pytest through a
    23-thread worker pool (coverage combine/xml + analysis per test)."""
    path = '%s/build/pytest' % rootPath
    dirs = os.listdir(path)
    pool = threadPool(23)
    for thread in pool:
        thread.start()
    for ut in dirs:
        taskQueue.put([rootPath, ut])
    taskQueue.join()


if __name__ == "__main__":
    rootPath = sys.argv[1]
    main(rootPath)