diff --git a/tests/parallel_test/Jenkinsfile b/tests/parallel_test/Jenkinsfile
new file mode 100644
index 0000000000000000000000000000000000000000..e975527df90e3d2c35e6b08fa8abba44f01846c5
--- /dev/null
+++ b/tests/parallel_test/Jenkinsfile
@@ -0,0 +1,305 @@
+import hudson.model.Result
+import hudson.model.*;
+import jenkins.model.CauseOfInterruption
+node {
+}
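+// sync_source: reset the TDinternal (${WK}) and community (${WKC}) checkouts, switch to the
+// branch matching CHANGE_TARGET, and check out the PR merge commit plus submodules.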
+def sync_source() {
+ sh 'hostname'
+ sh '''
+ cd ${WKC}
+ [ -f src/connector/grafanaplugin/README.md ] && rm -f src/connector/grafanaplugin/README.md > /dev/null || echo "failed to remove grafanaplugin README.md"
+ git reset --hard HEAD~10 >/dev/null
+ '''
+ script {
+ if (env.CHANGE_TARGET == 'master') {
+ sh '''
+ cd ${WKC}
+ git checkout master
+ '''
+ } else if (env.CHANGE_TARGET == '2.0') {
+ sh '''
+ cd ${WKC}
+ git checkout 2.0
+ '''
+ } else if (env.CHANGE_TARGET == '2.4') {
+ sh '''
+ cd ${WKC}
+ git checkout 2.4
+ '''
+ } else {
+ sh '''
+ cd ${WKC}
+ git checkout develop
+ '''
+ }
+ }
+ sh '''
+ cd ${WKC}
+ git remote prune origin
+ [ -f src/connector/grafanaplugin/README.md ] && rm -f src/connector/grafanaplugin/README.md > /dev/null || echo "failed to remove grafanaplugin README.md"
+ git pull >/dev/null
+ git fetch origin +refs/pull/${CHANGE_ID}/merge
+ git checkout -qf FETCH_HEAD
+ echo "// git clean -dfx"
+ git submodule update --init --recursive
+ cd ${WK}
+ git reset --hard HEAD~10
+ '''
+ script {
+ if (env.CHANGE_TARGET == 'master') {
+ sh '''
+ cd ${WK}
+ git checkout master
+ '''
+ } else if (env.CHANGE_TARGET == '2.0') {
+ sh '''
+ cd ${WK}
+ git checkout 2.0
+ '''
+ } else if (env.CHANGE_TARGET == '2.4') {
+ sh '''
+ cd ${WK}
+ git checkout 2.4
+ '''
+ } else {
+ sh '''
+ cd ${WK}
+ git checkout develop
+ '''
+ }
+ }
+ sh '''
+ cd ${WK}
+ git pull >/dev/null
+ export TZ=Asia/Harbin
+ date
+ echo "// git clean -dfx"
+ '''
+}
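+// pre_test: sync sources, then build TDinternal under ${WK}/debug with cmake and make.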
+def pre_test() {
+ sync_source()
+ sh '''
+ cd ${WK}
+ mkdir -p debug
+ cd debug
+ cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true > /dev/null
+ make -j8 > /dev/null
+ echo "make install > /dev/null"
+ cd ${WKC}/tests
+ echo "pip3 install ${WKC}/src/connector/python/"
+ '''
+ return 1
+}
+pipeline {
+ agent {label " dispatcher "}
+ options { skipDefaultCheckout() }
+ environment {
+ WK = '/root/jenkins/workspace/TDinternal'
+ WKC = '/root/jenkins/workspace/TDinternal/community'
+ LOGDIR = '/root/jenkins/workspace/log'
+ }
+ stages {
+ stage ('pre_build') {
+ steps {
+ sh '''
+ date
+ pwd
+ env
+ hostname
+ '''
+ }
+ }
+ stage ('Parallel build stage') {
+ // only build PRs; skip docs-only branches
+ options { skipDefaultCheckout() }
+ when {
+ allOf {
+ changeRequest()
+ not { expression { env.CHANGE_BRANCH =~ /docs\// }}
+ }
+ }
+ parallel {
+ stage ('dispatcher sync source') {
+ steps {
+ sync_source()
+ timeout(time: 100, unit: 'MINUTES') {
+ script {
+ sh '''
+ echo "dispatcher ready"
+ date
+ '''
+ }
+ }
+ }
+ }
+ stage ('build worker01') {
+ agent {label " worker01 "}
+ steps {
+ pre_test()
+ timeout(time: 100, unit: 'MINUTES') {
+ script {
+ sh '''
+ echo "worker01 build done"
+ date
+ '''
+ }
+ }
+ }
+ }
+ stage ('build worker02') {
+ agent {label " worker02 "}
+ steps {
+ pre_test()
+ timeout(time: 100, unit: 'MINUTES') {
+ script {
+ sh '''
+ echo "worker02 build done"
+ date
+ '''
+ }
+ }
+ }
+ }
+ stage ('build worker03') {
+ agent {label " worker03 "}
+ steps {
+ pre_test()
+ timeout(time: 100, unit: 'MINUTES') {
+ script {
+ sh '''
+ echo "worker03 build done"
+ date
+ '''
+ }
+ }
+ }
+ }
+ stage ('build worker04') {
+ agent {label " worker04 "}
+ steps {
+ pre_test()
+ timeout(time: 100, unit: 'MINUTES') {
+ script {
+ sh '''
+ echo "worker04 build done"
+ date
+ '''
+ }
+ }
+ }
+ }
+ stage ('build worker05') {
+ agent {label " worker05 "}
+ steps {
+ pre_test()
+ timeout(time: 100, unit: 'MINUTES') {
+ script {
+ sh '''
+ echo "worker05 build done"
+ date
+ '''
+ }
+ }
+ }
+ }
+ }
+ }
+ stage('run test') {
+ steps {
+ sh '''
+ date
+ hostname
+ '''
+ timeout(time: 100, unit: 'MINUTES') {
+ sh '''
+ date
+ cd ${WKC}/tests/parallel_test
+ time ./run.sh -m m.json -t cases.task -l ${LOGDIR} -b ${CHANGE_TITLE}
+ date
+ hostname
+ '''
+ }
+ }
+ }
+ }
+ post {
+ success {
+ emailext (
+ subject: "PR-result: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]' SUCCESS",
+ body: """
+ Build Information
+ - Build Name >> Branch: ${env.BRANCH_NAME}
+ - Build Result: Successful
+ - Build Number: ${BUILD_NUMBER}
+ - Triggered By: ${env.CHANGE_AUTHOR}
+ - Commit Message: ${env.CHANGE_TITLE}
+ - Build URL: ${BUILD_URL}
+ - Build Log: ${BUILD_URL}console
+ """,
+ to: "${env.CHANGE_AUTHOR_EMAIL}",
+ from: "support@taosdata.com"
+ )
+ }
+ failure {
+ emailext (
+ subject: "PR-result: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]' FAIL",
+ body: """
+ Build Information
+ - Build Name >> Branch: ${env.BRANCH_NAME}
+ - Build Result: Failure
+ - Build Number: ${BUILD_NUMBER}
+ - Triggered By: ${env.CHANGE_AUTHOR}
+ - Commit Message: ${env.CHANGE_TITLE}
+ - Build URL: ${BUILD_URL}
+ - Build Log: ${BUILD_URL}console
+ """,
+ to: "${env.CHANGE_AUTHOR_EMAIL}",
+ from: "support@taosdata.com"
+ )
+ }
+ }
+}
diff --git a/tests/parallel_test/m.json b/tests/parallel_test/m.json
new file mode 100644
index 0000000000000000000000000000000000000000..13d7cb776fd7147f6ffe1a17a87e4f3046939a9e
--- /dev/null
+++ b/tests/parallel_test/m.json
@@ -0,0 +1,30 @@
+[{
+ "host":"192.168.0.210",
+ "username":"root",
+ "workdir":"/root/jenkins/workspace",
+ "thread":25
+},
+{
+ "host":"192.168.0.211",
+ "username":"root",
+ "workdir":"/root/jenkins/workspace",
+ "thread":25
+},
+{
+ "host":"192.168.0.212",
+ "username":"root",
+ "workdir":"/root/jenkins/workspace",
+ "thread":25
+},
+{
+ "host":"192.168.0.213",
+ "username":"root",
+ "workdir":"/root/jenkins/workspace",
+ "thread":25
+},
+{
+ "host":"192.168.0.214",
+ "username":"root",
+ "workdir":"/root/jenkins/workspace",
+ "thread":25
+}]
diff --git a/tests/parallel_test/run.sh b/tests/parallel_test/run.sh
new file mode 100755
index 0000000000000000000000000000000000000000..cf881300c1a13526e96a3be8163bc2c501be95e0
--- /dev/null
+++ b/tests/parallel_test/run.sh
@@ -0,0 +1,350 @@
+#!/bin/bash
+
+function usage() {
+ echo "$0"
+ echo -e "\t -m vm config file"
+ echo -e "\t -t task file"
+ echo -e "\t -b branch"
+ echo -e "\t -l log dir"
+ echo -e "\t -h help"
+}
+
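+# Example invocation (as used by the Jenkinsfile):
+#   ./run.sh -m m.json -t cases.task -l ${LOGDIR} -b ${CHANGE_TITLE}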
+while getopts "m:t:b:l:h" opt; do
+ case $opt in
+ m)
+ config_file=$OPTARG
+ ;;
+ t)
+ t_file=$OPTARG
+ ;;
+ b)
+ branch=$OPTARG
+ ;;
+ l)
+ log_dir=$OPTARG
+ ;;
+ h)
+ usage
+ exit 0
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG"
+ usage
+ exit 0
+ ;;
+ esac
+done
+#config_file=$1
+if [ -z "$config_file" ]; then
+ usage
+ exit 1
+fi
+if [ ! -f "$config_file" ]; then
+ echo "$config_file not found"
+ usage
+ exit 1
+fi
+#t_file=$2
+if [ -z "$t_file" ]; then
+ usage
+ exit 1
+fi
+if [ ! -f "$t_file" ]; then
+ echo "$t_file not found"
+ usage
+ exit 1
+fi
+date_tag=`date +%Y%m%d-%H%M%S`
+if [ -z "$log_dir" ]; then
+ log_dir="log/${branch}_${date_tag}"
+else
+ log_dir="$log_dir/${branch}_${date_tag}"
+fi
+
+hosts=()
+usernames=()
+passwords=()
+workdirs=()
+threads=()
+
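+# Read host entries from the JSON config with jq; stop at the first entry missing
+# host, username, workdir or thread. password is optional (empty means key-based ssh).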
+i=0
+while [ 1 ]; do
+ host=`jq .[$i].host $config_file`
+ if [ "$host" = "null" ]; then
+ break
+ fi
+ username=`jq .[$i].username $config_file`
+ if [ "$username" = "null" ]; then
+ break
+ fi
+ password=`jq .[$i].password $config_file`
+ if [ "$password" = "null" ]; then
+ password=""
+ fi
+ workdir=`jq .[$i].workdir $config_file`
+ if [ "$workdir" = "null" ]; then
+ break
+ fi
+ thread=`jq .[$i].thread $config_file`
+ if [ "$thread" = "null" ]; then
+ break
+ fi
+ hosts[i]=`echo $host|sed 's/\"$//'|sed 's/^\"//'`
+ usernames[i]=`echo $username|sed 's/\"$//'|sed 's/^\"//'`
+ passwords[i]=`echo $password|sed 's/\"$//'|sed 's/^\"//'`
+ workdirs[i]=`echo $workdir|sed 's/\"$//'|sed 's/^\"//'`
+ threads[i]=$thread
+ i=$(( i + 1 ))
+done
+
+
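+# prepare_cases: queue every case from the task file, then append one %%FINISHED%%
+# marker per worker thread ($1 = total thread count) so each thread knows when to exit.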
+function prepare_cases() {
+ cat $t_file >>$task_file
+ local i=0
+ while [ $i -lt $1 ]; do
+ echo "%%FINISHED%%" >>$task_file
+ i=$(( i + 1 ))
+ done
+}
+
+function clean_tmp() {
+ # clean tmp dir
+ local index=$1
+ local ssh_script="sshpass -p ${passwords[index]} ssh -o StrictHostKeyChecking=no ${usernames[index]}@${hosts[index]}"
+ if [ -z "${passwords[index]}" ]; then
+ ssh_script="ssh -o StrictHostKeyChecking=no ${usernames[index]}@${hosts[index]}"
+ fi
+ local cmd="${ssh_script} rm -rf ${workdirs[index]}/tmp"
+ ${cmd}
+}
+# build source
+function build_src() {
+ echo "build source"
+ local index=$1
+ local ssh_script="sshpass -p ${passwords[index]} ssh -o StrictHostKeyChecking=no ${usernames[index]}@${hosts[index]}"
+ if [ -z "${passwords[index]}" ]; then
+ ssh_script="ssh -o StrictHostKeyChecking=no ${usernames[index]}@${hosts[index]}"
+ fi
+ local script=". ~/.bashrc;cd ${workdirs[index]}/TDinternal;mkdir -p debug;cd debug;cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true;make -j8;make install"
+ local cmd="${ssh_script} sh -c \"$script\""
+ echo "$cmd"
+ ${cmd}
+ if [ $? -ne 0 ]; then
+ flock -x $lock_file -c "echo \"${hosts[index]} TDengine build failed\" >>$log_dir/failed.log"
+ return
+ fi
+ script=". ~/.bashrc;cd ${workdirs[index]}/taos-tools;git submodule update --init --recursive;mkdir -p build;cd build;cmake ..;make -j4"
+ cmd="${ssh_script} sh -c \"$script\""
+ ${cmd}
+ if [ $? -ne 0 ]; then
+ flock -x $lock_file -c "echo \"${hosts[index]} taos-tools build failed\" >>$log_dir/failed.log"
+ return
+ fi
+ script="cp -rf ${workdirs[index]}/taos-tools/build/build/bin/* ${workdirs[index]}/TDinternal/debug/build/bin/;cp -rf ${workdirs[index]}/taos-tools/build/build/lib/* ${workdirs[index]}/TDinternal/debug/build/lib/;cp -rf ${workdirs[index]}/taos-tools/build/build/lib64/* ${workdirs[index]}/TDinternal/debug/build/lib/;cp -rf ${workdirs[index]}/TDinternal/debug/build/bin/demo ${workdirs[index]}/TDinternal/debug/build/bin/taosdemo"
+ cmd="${ssh_script} sh -c \"$script\""
+ ${cmd}
+}
+function rename_taosdemo() {
+ local index=$1
+ local ssh_script="sshpass -p ${passwords[index]} ssh -o StrictHostKeyChecking=no ${usernames[index]}@${hosts[index]}"
+ if [ -z "${passwords[index]}" ]; then
+ ssh_script="ssh -o StrictHostKeyChecking=no ${usernames[index]}@${hosts[index]}"
+ fi
+ local script="cp -rf ${workdirs[index]}/TDinternal/debug/build/bin/demo ${workdirs[index]}/TDinternal/debug/build/bin/taosdemo"
+ cmd="${ssh_script} sh -c \"$script\""
+ ${cmd}
+}
+
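+# run_thread: worker loop for host index $1, thread $2. Atomically pops one line from the
+# shared task file under flock; comma-separated fields: 2 = redo count, 3 = execution dir,
+# 4 = case command. Runs the case on the remote host via run_container.sh, retries a failing
+# case (up to 5 attempts) on known transient errors, and collects logs and coredumps on failure.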
+function run_thread() {
+ local index=$1
+ local thread_no=$2
+ local runcase_script="sshpass -p ${passwords[index]} ssh -o StrictHostKeyChecking=no ${usernames[index]}@${hosts[index]}"
+ if [ -z "${passwords[index]}" ]; then
+ runcase_script="ssh -o StrictHostKeyChecking=no ${usernames[index]}@${hosts[index]}"
+ fi
+ local count=0
+ local script="${workdirs[index]}/TDinternal/community/tests/parallel_test/run_container.sh"
+ local cmd="${runcase_script} ${script}"
+
+ # script="echo"
+ while [ 1 ]; do
+ local line=`flock -x $lock_file -c "head -n1 $task_file;sed -i \"1d\" $task_file"`
+ if [ "x$line" = "x%%FINISHED%%" ]; then
+ # echo "$index . $thread_no EXIT"
+ break
+ fi
+ if [ -z "$line" ]; then
+ continue
+ fi
+ echo "$line"|grep -q "^#"
+ if [ $? -eq 0 ]; then
+ continue
+ fi
+ local case_redo_time=`echo "$line"|cut -d, -f2`
+ if [ -z "$case_redo_time" ]; then
+ case_redo_time=1
+ fi
+ local exec_dir=`echo "$line"|cut -d, -f3`
+ local case_cmd=`echo "$line"|cut -d, -f4`
+ local case_file=""
+ echo "$case_cmd"|grep -q "^python3"
+ if [ $? -eq 0 ]; then
+ case_file=`echo "$case_cmd"|grep -o ".*\.py"|awk '{print $NF}'`
+ fi
+ echo "$case_cmd"|grep -q "\.sim"
+ if [ $? -eq 0 ]; then
+ case_file=`echo "$case_cmd"|grep -o ".*\.sim"|awk '{print $NF}'`
+ fi
+ if [ -z "$case_file" ]; then
+ case_file=`echo "$case_cmd"|awk '{print $NF}'`
+ fi
+ if [ -z "$case_file" ]; then
+ continue
+ fi
+ case_file="$exec_dir/${case_file}.${index}.${thread_no}"
+ count=$(( count + 1 ))
+ local case_path=`dirname "$case_file"`
+ if [ ! -z "$case_path" ]; then
+ mkdir -p $log_dir/$case_path
+ fi
+ cmd="${runcase_script} ${script} -w ${workdirs[index]} -c \"${case_cmd}\" -t ${thread_no} -d ${exec_dir}"
+ # echo "$thread_no $count $cmd"
+ local ret=0
+ local redo_count=1
+ start_time=`date +%s`
+ while [ ${redo_count} -lt 6 ]; do
+ echo "${hosts[index]}-${thread_no} order:${count}, redo:${redo_count} task:${line}" >$log_dir/$case_file.log
+ echo -e "\e[33m >>>>> \e[0m ${case_cmd}"
+ date >>$log_dir/$case_file.log
+ # $cmd 2>&1 | tee -a $log_dir/$case_file.log
+ # ret=${PIPESTATUS[0]}
+ $cmd >>$log_dir/$case_file.log 2>&1
+ ret=$?
+ if [ $ret -eq 0 ]; then
+ break
+ fi
+ redo=0
+ grep -q "wait too long for taosd start" $log_dir/$case_file.log
+ if [ $? -eq 0 ]; then
+ redo=1
+ fi
+ grep -q "kex_exchange_identification: Connection closed by remote host" $log_dir/$case_file.log
+ if [ $? -eq 0 ]; then
+ redo=1
+ fi
+ grep -q "kex_exchange_identification: read: Connection reset by peer" $log_dir/$case_file.log
+ if [ $? -eq 0 ]; then
+ redo=1
+ fi
+ grep -q "Database not ready" $log_dir/$case_file.log
+ if [ $? -eq 0 ]; then
+ redo=1
+ fi
+ grep -q "Unable to establish connection" $log_dir/$case_file.log
+ if [ $? -eq 0 ]; then
+ redo=1
+ fi
+ if [ $redo_count -lt $case_redo_time ]; then
+ redo=1
+ fi
+ if [ $redo -eq 0 ]; then
+ break
+ fi
+ redo_count=$(( redo_count + 1 ))
+ done
+ end_time=`date +%s`
+ echo >>$log_dir/$case_file.log
+ echo "${hosts[index]} execute time: $(( end_time - start_time ))s" >>$log_dir/$case_file.log
+ # echo "$thread_no ${line} DONE"
+ if [ $ret -ne 0 ]; then
+ flock -x $lock_file -c "echo \"${hosts[index]} ret:${ret} ${line}\" >>$log_dir/failed.log"
+ mkdir -p $log_dir/${case_file}.coredump
+ local remote_coredump_dir="${workdirs[index]}/tmp/thread_volume/$thread_no/coredump"
+ cmd="sshpass -p ${passwords[index]} scp -o StrictHostKeyChecking=no ${usernames[index]}@${hosts[index]}:${remote_coredump_dir}/* $log_dir/${case_file}.coredump/"
+ if [ -z "${passwords[index]}" ]; then
+ cmd="scp -o StrictHostKeyChecking=no ${usernames[index]}@${hosts[index]}:${remote_coredump_dir}/* $log_dir/${case_file}.coredump/"
+ fi
+ $cmd # 2>/dev/null
+ local case_info=`echo "$line"|cut -d, -f 3,4`
+ local corefile=`find $log_dir/${case_file}.coredump/ -name "core.*"`
+ echo -e "$case_info \e[31m failed\e[0m"
+ cat $log_dir/$case_file.log
+ echo -e "\e[34m log file: $log_dir/$case_file.log \e[0m"
+ if [ -n "$corefile" ]; then
+ echo -e "\e[34m corefiles: $corefile \e[0m"
+ fi
+ fi
+ done
+}
+
+# echo "hosts: ${hosts[@]}"
+# echo "usernames: ${usernames[@]}"
+# echo "passwords: ${passwords[@]}"
+# echo "workdirs: ${workdirs[@]}"
+# echo "threads: ${threads[@]}"
+# TODO: check host accessibility
+
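+# Remove each host's previous tmp working directory in parallel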
+i=0
+while [ $i -lt ${#hosts[*]} ]; do
+ clean_tmp $i &
+ i=$(( i + 1 ))
+done
+wait
+
+mkdir -p $log_dir
+rm -rf $log_dir/*
+task_file=$log_dir/$$.task
+lock_file=$log_dir/$$.lock
+
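+# Source build is skipped here (build_src is commented out); just copy the demo binary to taosdemo on each host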
+i=0
+while [ $i -lt ${#hosts[*]} ]; do
+ # build_src $i &
+ rename_taosdemo $i &
+ i=$(( i + 1 ))
+done
+wait
+# if [ -f "$log_dir/failed.log" ]; then
+# cat $log_dir/failed.log
+# exit 1
+# fi
+
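+# Total thread count across all hosts decides how many %%FINISHED%% markers to queue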
+i=0
+j=0
+while [ $i -lt ${#hosts[*]} ]; do
+ j=$(( j + threads[i] ))
+ i=$(( i + 1 ))
+done
+prepare_cases $j
+
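+# Launch the configured number of worker threads per host and wait for all of them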
+i=0
+while [ $i -lt ${#hosts[*]} ]; do
+ j=0
+ while [ $j -lt ${threads[i]} ]; do
+ run_thread $i $j &
+ j=$(( j + 1 ))
+ done
+ i=$(( i + 1 ))
+done
+
+wait
+
+rm -f $lock_file
+rm -f $task_file
+
+# docker ps -a|grep -v CONTAINER|awk '{print $1}'|xargs docker rm -f
+RET=0
+i=1
+if [ -f "$log_dir/failed.log" ]; then
+ echo "====================================================="
+ while read line; do
+ line=`echo "$line"|cut -d, -f 3,4`
+ echo -e "$i. $line \e[31m failed\e[0m" >&2
+ i=$(( i + 1 ))
+ done <$log_dir/failed.log
+ RET=1
+fi
+
+echo "${log_dir}" >&2
+
+exit $RET
diff --git a/tests/parallel_test/run_case.sh b/tests/parallel_test/run_case.sh
new file mode 100755
index 0000000000000000000000000000000000000000..5884f400b50701bedb70a314e346a9ed93a6b2b3
--- /dev/null
+++ b/tests/parallel_test/run_case.sh
@@ -0,0 +1,68 @@
+#!/bin/bash
+
+CONTAINER_TESTDIR=/home/community
+# CONTAINER_TESTDIR=/root/tang/repository/TDengine
+
+# export PATH=$PATH:$CONTAINER_TESTDIR/debug/build/bin
+
+function usage() {
+ echo "$0"
+ echo -e "\t -d execution dir"
+ echo -e "\t -c command"
+ echo -e "\t -h help"
+}
+
+while getopts "d:c:h" opt; do
+ case $opt in
+ d)
+ exec_dir=$OPTARG
+ ;;
+ c)
+ cmd=$OPTARG
+ ;;
+ h)
+ usage
+ exit 0
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG"
+ usage
+ exit 0
+ ;;
+ esac
+done
+
+if [ -z "$exec_dir" ]; then
+ usage
+ exit 0
+fi
+if [ -z "$cmd" ]; then
+ usage
+ exit 0
+fi
+
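+# Container environment setup: Go module proxy, ssh host-key checking, a libtaos.so symlink,
+# npm registry mirror, and the sim/tsim directory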
+go env -w GOPROXY=https://goproxy.cn
+echo "StrictHostKeyChecking no" >>/etc/ssh/ssh_config
+ln -s $CONTAINER_TESTDIR/debug/build/lib/libtaos.so /usr/lib/libtaos.so
+npm config -g set unsafe-perm
+npm config -g set registry https://registry.npm.taobao.org
+mkdir -p /home/sim/tsim
+
+# echo "$cmd"|grep -q "nodejs"
+# if [ $? -eq 0 ]; then
+# cd $CONTAINER_TESTDIR/src/connector/nodejs
+# npm install node-gyp-build@4.3.0 --ignore-scripts
+# fi
+
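+# Run the case command from its execution directory with core dumps enabled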
+cd $CONTAINER_TESTDIR/tests/$exec_dir
+ulimit -c unlimited
+
+$cmd
+RET=$?
+
+if [ $RET -ne 0 ]; then
+ pwd
+fi
+
+exit $RET
+
diff --git a/tests/parallel_test/run_container.sh b/tests/parallel_test/run_container.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ca408147586514f664ff032943d766345ec0921a
--- /dev/null
+++ b/tests/parallel_test/run_container.sh
@@ -0,0 +1,99 @@
+#!/bin/bash
+
+function usage() {
+ echo "$0"
+ echo -e "\t -w work dir"
+ echo -e "\t -d execution dir"
+ echo -e "\t -c command"
+ echo -e "\t -t thread number"
+ echo -e "\t -h help"
+}
+
+while getopts "w:d:c:t:h" opt; do
+ case $opt in
+ w)
+ WORKDIR=$OPTARG
+ ;;
+ d)
+ exec_dir=$OPTARG
+ ;;
+ c)
+ cmd=$OPTARG
+ ;;
+ t)
+ thread_no=$OPTARG
+ ;;
+ h)
+ usage
+ exit 0
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG"
+ usage
+ exit 0
+ ;;
+ esac
+done
+
+if [ -z "$WORKDIR" ]; then
+ usage
+ exit 1
+fi
+if [ -z "$exec_dir" ]; then
+ usage
+ exit 1
+fi
+if [ -z "$cmd" ]; then
+ usage
+ exit 1
+fi
+if [ -z "$thread_no" ]; then
+ usage
+ exit 1
+fi
+
+ulimit -c unlimited
+
+INTERNAL_REPDIR=$WORKDIR/TDinternal
+REPDIR=$INTERNAL_REPDIR/community
+CONTAINER_TESTDIR=/home/community
+TMP_DIR=$WORKDIR/tmp
+
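+# Per-thread volume layout under $TMP_DIR/thread_volume/$thread_no: sim, node_modules and
+# coredump directories, plus a private copy of the test subdir made on first use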
+MOUNT_DIR=""
+mkdir -p ${TMP_DIR}/thread_volume/$thread_no/sim/tsim
+mkdir -p ${TMP_DIR}/thread_volume/$thread_no/node_modules
+mkdir -p ${TMP_DIR}/thread_volume/$thread_no/coredump
+rm -rf ${TMP_DIR}/thread_volume/$thread_no/coredump/*
+if [ ! -d "${TMP_DIR}/thread_volume/$thread_no/$exec_dir" ]; then
+ subdir=`echo "$exec_dir"|cut -d/ -f1`
+ echo "cp -rf ${REPDIR}/tests/$subdir ${TMP_DIR}/thread_volume/$thread_no/"
+ cp -rf ${REPDIR}/tests/$subdir ${TMP_DIR}/thread_volume/$thread_no/
+fi
+MOUNT_DIR="$TMP_DIR/thread_volume/$thread_no/$exec_dir:$CONTAINER_TESTDIR/tests/$exec_dir"
+echo "$thread_no -> ${exec_dir}:$cmd"
+echo "$cmd"|grep -q "nodejs"
+if [ $? -eq 0 ]; then
+ MOUNT_NODE_MOD="-v $TMP_DIR/thread_volume/$thread_no/node_modules:${CONTAINER_TESTDIR}/src/connector/nodejs/node_modules \
+-v $TMP_DIR/thread_volume/$thread_no/node_modules:${CONTAINER_TESTDIR}/tests/examples/nodejs/node_modules \
+-v $TMP_DIR/thread_volume/$thread_no/node_modules:${CONTAINER_TESTDIR}/tests/connectorTest/nodejsTest/nanosupport/node_modules"
+fi
+
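+# Run the case inside the taos_test:v1.0 container: per-thread test/sim/coredump volumes,
+# node_modules mounts for nodejs cases, and the shared debug build mounted read-only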
+docker run \
+ -v $REPDIR/tests:$CONTAINER_TESTDIR/tests \
+ -v $MOUNT_DIR \
+ $MOUNT_NODE_MOD \
+ -v "$TMP_DIR/thread_volume/$thread_no/sim:${CONTAINER_TESTDIR}/sim" \
+ -v ${TMP_DIR}/thread_volume/$thread_no/coredump:/home/coredump \
+ -v $INTERNAL_REPDIR/debug:$CONTAINER_TESTDIR/debug:ro \
+ -v $REPDIR/deps:$CONTAINER_TESTDIR/deps:ro \
+ -v $REPDIR/src:$CONTAINER_TESTDIR/src \
+ -v $REPDIR/src/inc/taos.h:/usr/include/taos.h:ro \
+ -v $REPDIR/tests/examples:$CONTAINER_TESTDIR/tests/examples \
+ -v $REPDIR/snap:$CONTAINER_TESTDIR/snap:ro \
+ -v $REPDIR/alert:$CONTAINER_TESTDIR/alert:ro \
+ -v $REPDIR/packaging/cfg/taos.cfg:/etc/taos/taos.cfg:ro \
+ -v $REPDIR/README.md:$CONTAINER_TESTDIR/README.md:ro \
+ -v $REPDIR/src/connector/python/taos:/usr/local/lib/python3.8/site-packages/taos:ro \
+ -e LD_LIBRARY_PATH=$CONTAINER_TESTDIR/debug/build/lib:$CONTAINER_TESTDIR/debug/build/lib64 \
+ -e PATH=/usr/local/bin:/usr/local/sbin:/usr/sbin:/usr/bin:/sbin:/bin:$CONTAINER_TESTDIR/debug/build/bin:/usr/local/go/bin:/usr/local/node-v12.20.0-linux-x64/bin:/usr/local/apache-maven-3.8.4/bin:/usr/local/jdk1.8.0_144/bin \
+ -e JAVA_HOME=/usr/local/jdk1.8.0_144 \
+ --rm --ulimit core=-1 taos_test:v1.0 $CONTAINER_TESTDIR/tests/parallel_test/run_case.sh -d "$exec_dir" -c "$cmd"