diff --git a/Jenkinsfile b/Jenkinsfile index edbe11d428d0381a81f64bbc823b5b51a5edf199..6dc55be4bd0e8a0c4c5cddfb0a265898d9211c3a 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,3 +1,47 @@ + +properties([pipelineTriggers([githubPush()])]) +node { + git url: 'https://github.com/taosdata/TDengine' +} + + +// execute this before anything else, including requesting any time on an agent +if (currentBuild.rawBuild.getCauses().toString().contains('BranchIndexingCause')) { + print "INFO: Build skipped due to trigger being Branch Indexing" + currentBuild.result = 'ABORTED' // optional, gives a better hint to the user that it's been skipped, rather than the default which shows it's successful + return +} + + +def pre_test(){ + catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { + sh ''' + sudo rmtaos + ''' + } + sh ''' + cd ${WKC} + rm -rf * + cd ${WK} + git reset --hard + git checkout develop + git pull + cd ${WKC} + rm -rf * + mv ${WORKSPACE}/* . + cd ${WK} + export TZ=Asia/Harbin + date + rm -rf ${WK}/debug + mkdir debug + cd debug + cmake .. > /dev/null + make > /dev/null + make install > /dev/null + cd ${WKC}/tests + ''' + return 1 +} pipeline { agent none environment{ @@ -8,85 +52,31 @@ pipeline { stages { stage('Parallel test stage') { parallel { - stage('pytest') { - agent{label '184'} + stage('python p1') { + agent{label 'p1'} steps { + pre_test() sh ''' - date - cd ${WKC} - git reset --hard - git checkout develop - git pull - git submodule update - cd ${WK} - git reset --hard - git checkout develop - git pull - export TZ=Asia/Harbin - date - rm -rf ${WK}/debug - mkdir debug - cd debug - cmake .. 
> /dev/null - make > /dev/null - make install > /dev/null cd ${WKC}/tests - #./test-all.sh smoke - ./test-all.sh pytest + ./test-all.sh p1 date''' } } stage('test_b1') { - agent{label 'master'} + agent{label 'b1'} steps { + pre_test() sh ''' - cd ${WKC} - git reset --hard - git checkout develop - git pull - - git submodule update - cd ${WK} - git reset --hard - git checkout develop - git pull - export TZ=Asia/Harbin - date - rm -rf ${WK}/debug - mkdir debug - cd debug - cmake .. > /dev/null - make > /dev/null cd ${WKC}/tests - #./test-all.sh smoke ./test-all.sh b1 date''' } } stage('test_crash_gen') { - agent{label "185"} + agent{label "b2"} steps { - sh ''' - cd ${WKC} - git reset --hard - git checkout develop - git pull - - git submodule update - cd ${WK} - git reset --hard - git checkout develop - git pull - export TZ=Asia/Harbin - - rm -rf ${WK}/debug - mkdir debug - cd debug - cmake .. > /dev/null - make > /dev/null - cd ${WKC}/tests/pytest - ''' + pre_test() catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { sh ''' cd ${WKC}/tests/pytest @@ -109,193 +99,42 @@ pipeline { } stage('test_valgrind') { - agent{label "186"} + agent{label "b3"} steps { + pre_test() + catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { + sh ''' + cd ${WKC}/tests/pytest + ./valgrind-test.sh 2>&1 > mem-error-out.log + ./handle_val_log.sh + ''' + } sh ''' - cd ${WKC} - git reset --hard - git checkout develop - git pull - - git submodule update - cd ${WK} - git reset --hard - git checkout develop - git pull - export TZ=Asia/Harbin - date - rm -rf ${WK}/debug - mkdir debug - cd debug - cmake .. 
> /dev/null - make > /dev/null - cd ${WKC}/tests/pytest - ./valgrind-test.sh 2>&1 > mem-error-out.log - ./handle_val_log.sh - date cd ${WKC}/tests ./test-all.sh b3 date''' } } - stage('connector'){ - agent{label "release"} + stage('python p2'){ + agent{label "p2"} steps{ - sh''' - cd ${WORKSPACE} - git checkout develop + pre_test() + sh ''' + date + cd ${WKC}/tests + ./test-all.sh p2 + date ''' - catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { - sh ''' - cd ${WORKSPACE}/tests/gotest - bash batchtest.sh - ''' - } - catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { - sh ''' - cd ${WORKSPACE}/tests/examples/python/PYTHONConnectorChecker - python3 PythonChecker.py - ''' - } - catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { - sh ''' - cd ${WORKSPACE}/tests/examples/JDBC/JDBCDemo/ - mvn clean package assembly:single >/dev/null - java -jar target/jdbcChecker-SNAPSHOT-jar-with-dependencies.jar -host 127.0.0.1 - ''' - } - catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { - sh ''' - cd ${JENKINS_HOME}/workspace/C#NET/src/CheckC# - dotnet run - ''' - } } } - stage('arm64_build'){ - agent{label 'arm64'} - steps{ - sh ''' - cd ${WK} - git fetch - git checkout develop - git pull - cd ${WKC} - git fetch - git checkout develop - git pull - git submodule update - cd ${WKC}/packaging - ./release.sh -v cluster -c aarch64 -n 2.0.0.0 -m 2.0.0.0 - - ''' - } - } - stage('arm32_build'){ - agent{label 'arm32'} - steps{ - catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { - sh ''' - cd ${WK} - git fetch - git checkout develop - git pull - cd ${WKC} - git fetch - git checkout develop - git pull - git submodule update - cd ${WKC}/packaging - ./release.sh -v cluster -c aarch32 -n 2.0.0.0 -m 2.0.0.0 - - ''' - } - - } - } + + } } } - post { - success { - emailext ( - subject: "SUCCESSFUL: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'", - body: ''' - - - - - - - - - - - - -

- 构建信息 -
-
    -
    -
  • 构建名称>>分支:${PROJECT_NAME}
  • -
  • 构建结果: Successful
  • -
  • 构建编号:${BUILD_NUMBER}
  • -
  • 触发用户:${CAUSE}
  • -
  • 变更概要:${CHANGES}
  • -
  • 构建地址:${BUILD_URL}
  • -
  • 构建日志:${BUILD_URL}console
  • -
  • 变更集:${JELLY_SCRIPT}
  • -
    -
-
- - ''', - to: "yqliu@taosdata.com,pxiao@taosdata.com", - from: "support@taosdata.com" - ) - } - failure { - emailext ( - subject: "FAILED: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'", - body: ''' - - - - - - - - - - - - -

- 构建信息 -
-
    -
    -
  • 构建名称>>分支:${PROJECT_NAME}
  • -
  • 构建结果: Successful
  • -
  • 构建编号:${BUILD_NUMBER}
  • -
  • 触发用户:${CAUSE}
  • -
  • 变更概要:${CHANGES}
  • -
  • 构建地址:${BUILD_URL}
  • -
  • 构建日志:${BUILD_URL}console
  • -
  • 变更集:${JELLY_SCRIPT}
  • -
    -
-
- - ''', - to: "yqliu@taosdata.com,pxiao@taosdata.com", - from: "support@taosdata.com" - ) - } - } -} \ No newline at end of file + +} diff --git a/tests/Jenkinsfile b/tests/Jenkinsfile index 5e9fcd15cd3c300429ff65d980fbaf5f8ccf2365..e343de789e466143ca05a1e2c42275709aaa9c2a 100644 --- a/tests/Jenkinsfile +++ b/tests/Jenkinsfile @@ -1,18 +1,3 @@ - -properties([pipelineTriggers([githubPush()])]) -node { - git url: 'https://github.com/taosdata/TDengine' -} - - -// execute this before anything else, including requesting any time on an agent -if (currentBuild.rawBuild.getCauses().toString().contains('BranchIndexingCause')) { - print "INFO: Build skipped due to trigger being Branch Indexing" - currentBuild.result = 'ABORTED' // optional, gives a better hint to the user that it's been skipped, rather than the default which shows it's successful - return -} - - def pre_test(){ catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { sh ''' @@ -21,15 +6,14 @@ def pre_test(){ } sh ''' cd ${WKC} - rm -rf * - cd ${WK} git reset --hard - git checkout develop + git checkout ${BRANCH} git pull - cd ${WKC} - rm -rf * - mv ${WORKSPACE}/* . + git submodule update cd ${WK} + git reset --hard + git checkout ${BRANCH} + git pull export TZ=Asia/Harbin date rm -rf ${WK}/debug @@ -38,13 +22,13 @@ def pre_test(){ cmake .. 
> /dev/null make > /dev/null make install > /dev/null - cd ${WKC}/tests ''' return 1 } pipeline { agent none environment{ + BRANCH = 'develop' WK = '/var/lib/jenkins/workspace/TDinternal' WKC= '/var/lib/jenkins/workspace/TDinternal/community' } @@ -52,13 +36,13 @@ pipeline { stages { stage('Parallel test stage') { parallel { - stage('python p1') { - agent{label 'p1'} + stage('pytest') { + agent{label '184'} steps { pre_test() sh ''' cd ${WKC}/tests - ./test-all.sh p1 + ./test-all.sh pytest date''' } } @@ -66,6 +50,12 @@ pipeline { agent{label 'master'} steps { pre_test() + catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { + sh ''' + cd ${WKC}/tests/pytest + python3 concurrent_inquiry.py -c 1 + ''' + } sh ''' cd ${WKC}/tests ./test-all.sh b1 @@ -74,9 +64,12 @@ pipeline { } stage('test_crash_gen') { - agent{label "b2"} + agent{label "185"} steps { pre_test() + sh ''' + cd ${WKC}/tests/pytest + ''' catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { sh ''' cd ${WKC}/tests/pytest @@ -90,7 +83,6 @@ pipeline { ''' } sh ''' - date cd ${WKC}/tests ./test-all.sh b2 date @@ -99,42 +91,177 @@ pipeline { } stage('test_valgrind') { - agent{label "b3"} + agent{label "186"} steps { pre_test() - catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { - sh ''' - cd ${WKC}/tests/pytest - ./valgrind-test.sh 2>&1 > mem-error-out.log - ./handle_val_log.sh - ''' - } sh ''' + cd ${WKC}/tests/pytest + ./valgrind-test.sh 2>&1 > mem-error-out.log + ./handle_val_log.sh + date cd ${WKC}/tests ./test-all.sh b3 date''' } } - stage('python p2'){ - agent{label "p2"} + stage('connector'){ + agent{label "release"} steps{ - pre_test() - sh ''' - date - cd ${WKC}/tests - ./test-all.sh p2 - date + sh''' + cd ${WORKSPACE} + git checkout develop ''' + catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { + sh ''' + cd ${WORKSPACE}/tests/gotest + bash batchtest.sh + ''' + } + catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { + sh ''' + cd 
${WORKSPACE}/tests/examples/python/PYTHONConnectorChecker + python3 PythonChecker.py + ''' + } + catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { + sh ''' + cd ${WORKSPACE}/tests/examples/JDBC/JDBCDemo/ + mvn clean package assembly:single >/dev/null + java -jar target/jdbcChecker-SNAPSHOT-jar-with-dependencies.jar -host 127.0.0.1 + ''' + } + catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { + sh ''' + cd ${JENKINS_HOME}/workspace/C#NET/src/CheckC# + dotnet run + ''' + } } } - - + stage('arm64_build'){ + agent{label 'arm64'} + steps{ + sh ''' + cd ${WK} + git fetch + git checkout develop + git pull + cd ${WKC} + git fetch + git checkout develop + git pull + git submodule update + cd ${WKC}/packaging + ./release.sh -v cluster -c aarch64 -n 2.0.0.0 -m 2.0.0.0 + + ''' + } + } + stage('arm32_build'){ + agent{label 'arm32'} + steps{ + catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { + sh ''' + cd ${WK} + git fetch + git checkout develop + git pull + cd ${WKC} + git fetch + git checkout develop + git pull + git submodule update + cd ${WKC}/packaging + ./release.sh -v cluster -c aarch32 -n 2.0.0.0 -m 2.0.0.0 + + ''' + } + + } + } } } } - -} + post { + success { + emailext ( + subject: "SUCCESSFUL: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'", + body: ''' + + + + + + + + + + + + +

+ 构建信息 +
+
    +
    +
  • 构建名称>>分支:${PROJECT_NAME}
  • +
  • 构建结果: Successful
  • +
  • 构建编号:${BUILD_NUMBER}
  • +
  • 触发用户:${CAUSE}
  • +
  • 变更概要:${CHANGES}
  • +
  • 构建地址:${BUILD_URL}
  • +
  • 构建日志:${BUILD_URL}console
  • +
  • 变更集:${JELLY_SCRIPT}
  • +
    +
+
+ + ''', + to: "yqliu@taosdata.com,pxiao@taosdata.com", + from: "support@taosdata.com" + ) + } + failure { + emailext ( + subject: "FAILED: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'", + body: ''' + + + + + + + + + + + + +

+ 构建信息 +
+
    +
    +
  • 构建名称>>分支:${PROJECT_NAME}
  • +
  • 构建结果: Failure
  • +
  • 构建编号:${BUILD_NUMBER}
  • +
  • 触发用户:${CAUSE}
  • +
  • 变更概要:${CHANGES}
  • +
  • 构建地址:${BUILD_URL}
  • +
  • 构建日志:${BUILD_URL}console
  • +
  • 变更集:${JELLY_SCRIPT}
  • +
    +
+
+ + ''', + to: "yqliu@taosdata.com,pxiao@taosdata.com", + from: "support@taosdata.com" + ) + } + } +} \ No newline at end of file diff --git a/tests/pytest/concurrent_inquiry.py b/tests/pytest/concurrent_inquiry.py index 17a08386928fcd3d632b39e12eeb9266f198fecd..8ae74c5c863a9dcb5521fc9447f51aa13e7c4c1d 100644 --- a/tests/pytest/concurrent_inquiry.py +++ b/tests/pytest/concurrent_inquiry.py @@ -43,7 +43,7 @@ class ConcurrentInquiry: self.subtb_stru_list=[] self.stb_tag_list=[] self.subtb_tag_list=[] - self.probabilities = [0.95,0.05] + self.probabilities = [0.05,0.95] self.ifjoin = [0,1] def SetThreadsNum(self,num): self.numOfTherads=num @@ -117,15 +117,15 @@ class ConcurrentInquiry: return 'where '+random.choice([' and ',' or ']).join(l) def con_interval(self,tlist,col_list,tag_list): - interval = 'interval' + str(random.randint(0,100)) + random.choice(['a','s','d','w','n','y']) + interval = 'interval(' + str(random.randint(0,100)) + random.choice(['a','s','d','w','n','y']) + ')' return interval def con_limit(self,tlist,col_list,tag_list): rand1 = str(random.randint(0,1000)) rand2 = str(random.randint(0,1000)) - return random.choice(['limit ' + rand1,'limit ' + rand1 + 'offset '+rand2, - 'slimit ' + rand1,'slimit ' + rand1 + 'offset ' + rand2,'limit '+rand1 + 'slimit '+ rand2, - 'limit '+ rand1 + 'offset' + rand2 + 'slimit '+ rand1 + 'soffset ' + rand2 ]) + return random.choice(['limit ' + rand1,'limit ' + rand1 + ' offset '+rand2, + ' slimit ' + rand1,' slimit ' + rand1 + ' offset ' + rand2,'limit '+rand1 + ' slimit '+ rand2, + 'limit '+ rand1 + ' offset' + rand2 + ' slimit '+ rand1 + ' soffset ' + rand2 ]) def con_fill(self,tlist,col_list,tag_list): return random.choice(['fill(null)','fill(prev)','fill(none)','fill(LINEAR)']) @@ -194,9 +194,10 @@ class ConcurrentInquiry: tag_list = [] col_intersection = [] tag_intersection = [] - + subtable = None if bool(random.getrandbits(1)): + subtable = True tbname = random.sample(self.subtb_list,2) for i in tbname: 
col_list.append(self.subtb_stru_list[self.subtb_list.index(i)]) @@ -227,14 +228,15 @@ class ConcurrentInquiry: sel_col_tag.append('t2.' + str(random.choice(col_list[1] + tag_list[1]))) sql += ','.join(sel_col_tag) - sql = sql + 'from '+ ','.join(tbname) + ' ' #select col & func - con_func=[self.con_where,self.con_interval,self.con_limit,self.con_group,self.con_order,self.con_fill] - sel_con=random.sample(con_func,random.randint(0,len(con_func))) - sel_con_list=[] + sql = sql + ' from '+ str(tbname[0]) +' t1,' + str(tbname[1]) + ' t2 ' #select col & func + join_section = None + if subtable: + join_section = ''.join(random.choices(col_intersection)) + sql += 'where t1._c0 = t2._c0 and ' + 't1.' + join_section + '=t2.' + join_section + else: + join_section = ''.join(random.choices(col_intersection+tag_intersection)) + sql += 'where t1._c0 = t2._c0 and ' + 't1.' + join_section + '=t2.' + join_section - # for i in sel_con: - # sel_con_list.append(i(tlist,col_list,tag_list)) #获取对应的条件函数 - sql+=' '.join(sel_con_list) # condition print(sql) return sql