Commit 236295b8 authored by: S Shengliang Guan

Merge remote-tracking branch 'origin/develop' into feature/wal

properties([pipelineTriggers([githubPush()])])
node {
git url: 'https://github.com/taosdata/TDengine'
}
// execute this before anything else, including requesting any time on an agent
if (currentBuild.rawBuild.getCauses().toString().contains('BranchIndexingCause')) {
print "INFO: Build skipped due to trigger being Branch Indexing"
currentBuild.result = 'ABORTED' // optional, gives a better hint to the user that it's been skipped, rather than the default which shows it's successful
return
}
def pre_test(){
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
sudo rmtaos
'''
}
sh '''
cd ${WKC}
rm -rf *
cd ${WK}
git reset --hard
git checkout develop
git pull
cd ${WKC}
rm -rf *
mv ${WORKSPACE}/* .
cd ${WK}
export TZ=Asia/Harbin
date
rm -rf ${WK}/debug
mkdir debug
cd debug
cmake .. > /dev/null
make > /dev/null
make install > /dev/null
cd ${WKC}/tests
'''
return 1
}
pipeline {
agent none
environment{
@@ -8,85 +52,31 @@ pipeline {
stages {
stage('Parallel test stage') {
parallel {
stage('pytest') {
agent{label '184'}
stage('python p1') {
agent{label 'p1'}
steps {
pre_test()
sh '''
date
cd ${WKC}
git reset --hard
git checkout develop
git pull
git submodule update
cd ${WK}
git reset --hard
git checkout develop
git pull
export TZ=Asia/Harbin
date
rm -rf ${WK}/debug
mkdir debug
cd debug
cmake .. > /dev/null
make > /dev/null
make install > /dev/null
cd ${WKC}/tests
#./test-all.sh smoke
./test-all.sh pytest
./test-all.sh p1
date'''
}
}
stage('test_b1') {
agent{label 'master'}
agent{label 'b1'}
steps {
pre_test()
sh '''
cd ${WKC}
git reset --hard
git checkout develop
git pull
git submodule update
cd ${WK}
git reset --hard
git checkout develop
git pull
export TZ=Asia/Harbin
date
rm -rf ${WK}/debug
mkdir debug
cd debug
cmake .. > /dev/null
make > /dev/null
cd ${WKC}/tests
#./test-all.sh smoke
./test-all.sh b1
date'''
}
}
stage('test_crash_gen') {
agent{label "185"}
agent{label "b2"}
steps {
sh '''
cd ${WKC}
git reset --hard
git checkout develop
git pull
git submodule update
cd ${WK}
git reset --hard
git checkout develop
git pull
export TZ=Asia/Harbin
rm -rf ${WK}/debug
mkdir debug
cd debug
cmake .. > /dev/null
make > /dev/null
cd ${WKC}/tests/pytest
'''
pre_test()
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WKC}/tests/pytest
@@ -109,193 +99,42 @@ pipeline {
}
stage('test_valgrind') {
agent{label "186"}
agent{label "b3"}
steps {
pre_test()
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WKC}/tests/pytest
./valgrind-test.sh 2>&1 > mem-error-out.log
./handle_val_log.sh
'''
}
sh '''
cd ${WKC}
git reset --hard
git checkout develop
git pull
git submodule update
cd ${WK}
git reset --hard
git checkout develop
git pull
export TZ=Asia/Harbin
date
rm -rf ${WK}/debug
mkdir debug
cd debug
cmake .. > /dev/null
make > /dev/null
cd ${WKC}/tests/pytest
./valgrind-test.sh 2>&1 > mem-error-out.log
./handle_val_log.sh
date
cd ${WKC}/tests
./test-all.sh b3
date'''
}
}
stage('connector'){
agent{label "release"}
stage('python p2'){
agent{label "p2"}
steps{
sh'''
cd ${WORKSPACE}
git checkout develop
pre_test()
sh '''
date
cd ${WKC}/tests
./test-all.sh p2
date
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WORKSPACE}/tests/gotest
bash batchtest.sh
'''
}
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WORKSPACE}/tests/examples/python/PYTHONConnectorChecker
python3 PythonChecker.py
'''
}
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WORKSPACE}/tests/examples/JDBC/JDBCDemo/
mvn clean package assembly:single >/dev/null
java -jar target/jdbcChecker-SNAPSHOT-jar-with-dependencies.jar -host 127.0.0.1
'''
}
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${JENKINS_HOME}/workspace/C#NET/src/CheckC#
dotnet run
'''
}
}
}
stage('arm64_build'){
agent{label 'arm64'}
steps{
sh '''
cd ${WK}
git fetch
git checkout develop
git pull
cd ${WKC}
git fetch
git checkout develop
git pull
git submodule update
cd ${WKC}/packaging
./release.sh -v cluster -c aarch64 -n 2.0.0.0 -m 2.0.0.0
'''
}
}
stage('arm32_build'){
agent{label 'arm32'}
steps{
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WK}
git fetch
git checkout develop
git pull
cd ${WKC}
git fetch
git checkout develop
git pull
git submodule update
cd ${WKC}/packaging
./release.sh -v cluster -c aarch32 -n 2.0.0.0 -m 2.0.0.0
'''
}
}
}
}
}
}
post {
success {
emailext (
subject: "SUCCESSFUL: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'",
body: '''<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
</head>
<body leftmargin="8" marginwidth="0" topmargin="8" marginheight="4" offset="0">
<table width="95%" cellpadding="0" cellspacing="0" style="font-size: 16pt; font-family: Tahoma, Arial, Helvetica, sans-serif">
<tr>
<td><br />
<b><font color="#0B610B"><font size="6">构建信息</font></font></b>
<hr size="2" width="100%" align="center" /></td>
</tr>
<tr>
<td>
<ul>
<div style="font-size:18px">
<li>构建名称>>分支:${PROJECT_NAME}</li>
<li>构建结果:<span style="color:green"> Successful </span></li>
<li>构建编号:${BUILD_NUMBER}</li>
<li>触发用户:${CAUSE}</li>
<li>变更概要:${CHANGES}</li>
<li>构建地址:<a href=${BUILD_URL}>${BUILD_URL}</a></li>
<li>构建日志:<a href=${BUILD_URL}console>${BUILD_URL}console</a></li>
<li>变更集:${JELLY_SCRIPT}</li>
</div>
</ul>
</td>
</tr>
</table></font>
</body>
</html>''',
to: "yqliu@taosdata.com,pxiao@taosdata.com",
from: "support@taosdata.com"
)
}
failure {
emailext (
subject: "FAILED: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'",
body: '''<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
</head>
<body leftmargin="8" marginwidth="0" topmargin="8" marginheight="4" offset="0">
<table width="95%" cellpadding="0" cellspacing="0" style="font-size: 16pt; font-family: Tahoma, Arial, Helvetica, sans-serif">
<tr>
<td><br />
<b><font color="#0B610B"><font size="6">构建信息</font></font></b>
<hr size="2" width="100%" align="center" /></td>
</tr>
<tr>
<td>
<ul>
<div style="font-size:18px">
<li>构建名称>>分支:${PROJECT_NAME}</li>
<li>构建结果:<span style="color:red"> Failure </span></li>
<li>构建编号:${BUILD_NUMBER}</li>
<li>触发用户:${CAUSE}</li>
<li>变更概要:${CHANGES}</li>
<li>构建地址:<a href=${BUILD_URL}>${BUILD_URL}</a></li>
<li>构建日志:<a href=${BUILD_URL}console>${BUILD_URL}console</a></li>
<li>变更集:${JELLY_SCRIPT}</li>
</div>
</ul>
</td>
</tr>
</table></font>
</body>
</html>''',
to: "yqliu@taosdata.com,pxiao@taosdata.com",
from: "support@taosdata.com"
)
}
}
}
\ No newline at end of file
}
@@ -14,6 +14,9 @@
*/
#include <iconv.h>
#include <sys/stat.h>
#include <sys/syscall.h>
#include "os.h"
#include "taos.h"
#include "taosdef.h"
@@ -366,6 +369,7 @@ static error_t parse_opt(int key, char *arg, struct argp_state *state) {
static struct argp argp = {options, parse_opt, args_doc, doc};
static resultStatistics g_resultStatistics = {0};
static FILE *g_fpOfResult = NULL;
static int g_numOfCores = 1;
int taosDumpOut(struct arguments *arguments);
int taosDumpIn(struct arguments *arguments);
@@ -378,7 +382,7 @@ int32_t taosDumpTable(char *table, char *metric, struct arguments *arguments, FI
int taosDumpTableData(FILE *fp, char *tbname, struct arguments *arguments, TAOS* taosCon, char* dbName);
int taosCheckParam(struct arguments *arguments);
void taosFreeDbInfos();
static void taosStartDumpOutWorkThreads(struct arguments* args, int32_t numOfThread, char *dbName);
static void taosStartDumpOutWorkThreads(void* taosCon, struct arguments* args, int32_t numOfThread, char *dbName);
struct arguments tsArguments = {
// connection option
@@ -540,6 +544,8 @@ int main(int argc, char *argv[]) {
}
}
g_numOfCores = (int32_t)sysconf(_SC_NPROCESSORS_ONLN);
time_t tTime = time(NULL);
struct tm tm = *localtime(&tTime);
@@ -692,64 +698,97 @@ int32_t taosSaveTableOfMetricToTempFile(TAOS *taosCon, char* metric, struct argu
sprintf(tmpCommand, "select tbname from %s", metric);
TAOS_RES *result = taos_query(taosCon, tmpCommand);
int32_t code = taos_errno(result);
TAOS_RES *res = taos_query(taosCon, tmpCommand);
int32_t code = taos_errno(res);
if (code != 0) {
fprintf(stderr, "failed to run command %s\n", tmpCommand);
free(tmpCommand);
taos_free_result(result);
taos_free_result(res);
return -1;
}
free(tmpCommand);
TAOS_FIELD *fields = taos_fetch_fields(result);
char tmpBuf[TSDB_FILENAME_LEN + 1];
memset(tmpBuf, 0, TSDB_FILENAME_LEN);
sprintf(tmpBuf, ".select-tbname.tmp");
fd = open(tmpBuf, O_RDWR | O_CREAT | O_TRUNC, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH);
if (fd == -1) {
fprintf(stderr, "failed to open temp file: %s\n", tmpBuf);
taos_free_result(res);
return -1;
}
int32_t numOfTable = 0;
int32_t numOfThread = *totalNumOfThread;
char tmpFileName[TSDB_FILENAME_LEN + 1];
while ((row = taos_fetch_row(result)) != NULL) {
if (0 == numOfTable) {
memset(tmpFileName, 0, TSDB_FILENAME_LEN);
sprintf(tmpFileName, ".tables.tmp.%d", numOfThread);
fd = open(tmpFileName, O_RDWR | O_CREAT, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH);
if (fd == -1) {
fprintf(stderr, "failed to open temp file: %s\n", tmpFileName);
taos_free_result(result);
for (int32_t loopCnt = 0; loopCnt < numOfThread; loopCnt++) {
sprintf(tmpFileName, ".tables.tmp.%d", loopCnt);
(void)remove(tmpFileName);
}
free(tmpCommand);
return -1;
}
numOfThread++;
}
TAOS_FIELD *fields = taos_fetch_fields(res);
int32_t numOfTable = 0;
while ((row = taos_fetch_row(res)) != NULL) {
memset(&tableRecord, 0, sizeof(STableRecord));
tstrncpy(tableRecord.name, (char *)row[0], fields[0].bytes);
tstrncpy(tableRecord.metric, metric, TSDB_TABLE_NAME_LEN);
taosWrite(fd, &tableRecord, sizeof(STableRecord));
taosWrite(fd, &tableRecord, sizeof(STableRecord));
numOfTable++;
}
taos_free_result(res);
lseek(fd, 0, SEEK_SET);
int maxThreads = arguments->thread_num;
int tableOfPerFile ;
if (numOfTable <= arguments->thread_num) {
tableOfPerFile = 1;
maxThreads = numOfTable;
} else {
tableOfPerFile = numOfTable / arguments->thread_num;
if (0 != numOfTable % arguments->thread_num) {
tableOfPerFile += 1;
}
}
if (numOfTable >= arguments->table_batch) {
numOfTable = 0;
char* tblBuf = (char*)calloc(1, tableOfPerFile * sizeof(STableRecord));
if (NULL == tblBuf){
fprintf(stderr, "failed to calloc %" PRIzu "\n", tableOfPerFile * sizeof(STableRecord));
close(fd);
return -1;
}
int32_t numOfThread = *totalNumOfThread;
int subFd = -1;
for (; numOfThread < maxThreads; numOfThread++) {
memset(tmpBuf, 0, TSDB_FILENAME_LEN);
sprintf(tmpBuf, ".tables.tmp.%d", numOfThread);
subFd = open(tmpBuf, O_RDWR | O_CREAT | O_TRUNC, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH);
if (subFd == -1) {
fprintf(stderr, "failed to open temp file: %s\n", tmpBuf);
for (int32_t loopCnt = 0; loopCnt < numOfThread; loopCnt++) {
sprintf(tmpBuf, ".tables.tmp.%d", loopCnt);
(void)remove(tmpBuf);
}
sprintf(tmpBuf, ".select-tbname.tmp");
(void)remove(tmpBuf);
close(fd);
fd = -1;
return -1;
}
// read tableOfPerFile for fd, write to subFd
ssize_t readLen = read(fd, tblBuf, tableOfPerFile * sizeof(STableRecord));
if (readLen <= 0) {
close(subFd);
break;
}
taosWrite(subFd, tblBuf, readLen);
close(subFd);
}
sprintf(tmpBuf, ".select-tbname.tmp");
(void)remove(tmpBuf);
if (fd >= 0) {
close(fd);
fd = -1;
}
taos_free_result(result);
}
*totalNumOfThread = numOfThread;
free(tmpCommand);
return 0;
}
@@ -946,7 +985,7 @@ int taosDumpOut(struct arguments *arguments) {
}
// start multi threads to dumpout
taosStartDumpOutWorkThreads(arguments, totalNumOfThread, dbInfos[0]->name);
taosStartDumpOutWorkThreads(taos, arguments, totalNumOfThread, dbInfos[0]->name);
char tmpFileName[TSDB_FILENAME_LEN + 1];
_clean_tmp_file:
@@ -1181,34 +1220,34 @@ void* taosDumpOutWorkThreadFp(void *arg)
STableRecord tableRecord;
int fd;
char tmpFileName[TSDB_FILENAME_LEN*4] = {0};
sprintf(tmpFileName, ".tables.tmp.%d", pThread->threadIndex);
fd = open(tmpFileName, O_RDWR | O_CREAT, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH);
char tmpBuf[TSDB_FILENAME_LEN*4] = {0};
sprintf(tmpBuf, ".tables.tmp.%d", pThread->threadIndex);
fd = open(tmpBuf, O_RDWR | O_CREAT, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH);
if (fd == -1) {
fprintf(stderr, "taosDumpTableFp() failed to open temp file: %s\n", tmpFileName);
fprintf(stderr, "taosDumpTableFp() failed to open temp file: %s\n", tmpBuf);
return NULL;
}
FILE *fp = NULL;
memset(tmpFileName, 0, TSDB_FILENAME_LEN + 128);
memset(tmpBuf, 0, TSDB_FILENAME_LEN + 128);
if (tsArguments.outpath[0] != 0) {
sprintf(tmpFileName, "%s/%s.tables.%d.sql", tsArguments.outpath, pThread->dbName, pThread->threadIndex);
sprintf(tmpBuf, "%s/%s.tables.%d.sql", tsArguments.outpath, pThread->dbName, pThread->threadIndex);
} else {
sprintf(tmpFileName, "%s.tables.%d.sql", pThread->dbName, pThread->threadIndex);
sprintf(tmpBuf, "%s.tables.%d.sql", pThread->dbName, pThread->threadIndex);
}
fp = fopen(tmpFileName, "w");
fp = fopen(tmpBuf, "w");
if (fp == NULL) {
fprintf(stderr, "failed to open file %s\n", tmpFileName);
fprintf(stderr, "failed to open file %s\n", tmpBuf);
close(fd);
return NULL;
}
memset(tmpFileName, 0, TSDB_FILENAME_LEN);
sprintf(tmpFileName, "use %s", pThread->dbName);
memset(tmpBuf, 0, TSDB_FILENAME_LEN);
sprintf(tmpBuf, "use %s", pThread->dbName);
TAOS_RES* tmpResult = taos_query(pThread->taosCon, tmpFileName);
TAOS_RES* tmpResult = taos_query(pThread->taosCon, tmpBuf);
int32_t code = taos_errno(tmpResult);
if (code != 0) {
fprintf(stderr, "invalid database %s\n", pThread->dbName);
@@ -1218,6 +1257,9 @@ void* taosDumpOutWorkThreadFp(void *arg)
return NULL;
}
int fileNameIndex = 1;
int tablesInOneFile = 0;
int64_t lastRowsPrint = 5000000;
fprintf(fp, "USE %s;\n\n", pThread->dbName);
while (1) {
ssize_t readLen = read(fd, &tableRecord, sizeof(STableRecord));
@@ -1228,6 +1270,33 @@ void* taosDumpOutWorkThreadFp(void *arg)
// TODO: sum table count and table rows by self
pThread->tablesOfDumpOut++;
pThread->rowsOfDumpOut += ret;
if (pThread->rowsOfDumpOut >= lastRowsPrint) {
printf(" %"PRId64 " rows already be dumpout from database %s\n", pThread->rowsOfDumpOut, pThread->dbName);
lastRowsPrint += 5000000;
}
tablesInOneFile++;
if (tablesInOneFile >= tsArguments.table_batch) {
fclose(fp);
tablesInOneFile = 0;
memset(tmpBuf, 0, TSDB_FILENAME_LEN + 128);
if (tsArguments.outpath[0] != 0) {
sprintf(tmpBuf, "%s/%s.tables.%d-%d.sql", tsArguments.outpath, pThread->dbName, pThread->threadIndex, fileNameIndex);
} else {
sprintf(tmpBuf, "%s.tables.%d-%d.sql", pThread->dbName, pThread->threadIndex, fileNameIndex);
}
fileNameIndex++;
fp = fopen(tmpBuf, "w");
if (fp == NULL) {
fprintf(stderr, "failed to open file %s\n", tmpBuf);
close(fd);
taos_free_result(tmpResult);
return NULL;
}
}
}
}
@@ -1238,7 +1307,7 @@ void* taosDumpOutWorkThreadFp(void *arg)
return NULL;
}
static void taosStartDumpOutWorkThreads(struct arguments* args, int32_t numOfThread, char *dbName)
static void taosStartDumpOutWorkThreads(void* taosCon, struct arguments* args, int32_t numOfThread, char *dbName)
{
pthread_attr_t thattr;
SThreadParaObj *threadObj = (SThreadParaObj *)calloc(numOfThread, sizeof(SThreadParaObj));
@@ -1249,12 +1318,7 @@ static void taosStartDumpOutWorkThreads(struct arguments* args, int32_t numOfTh
pThread->threadIndex = t;
pThread->totalThreads = numOfThread;
tstrncpy(pThread->dbName, dbName, TSDB_TABLE_NAME_LEN);
pThread->taosCon = taos_connect(args->host, args->user, args->password, NULL, args->port);
if (pThread->taosCon == NULL) {
fprintf(stderr, "ERROR: thread:%d failed connect to TDengine, reason:%s\n", pThread->threadIndex, taos_errstr(NULL));
exit(0);
}
pThread->taosCon = taosCon;
pthread_attr_init(&thattr);
pthread_attr_setdetachstate(&thattr, PTHREAD_CREATE_JOINABLE);
@@ -1273,7 +1337,6 @@ static void taosStartDumpOutWorkThreads(struct arguments* args, int32_t numOfTh
int64_t totalRowsOfDumpOut = 0;
int64_t totalChildTblsOfDumpOut = 0;
for (int32_t t = 0; t < numOfThread; ++t) {
taos_close(threadObj[t].taosCon);
totalChildTblsOfDumpOut += threadObj[t].tablesOfDumpOut;
totalRowsOfDumpOut += threadObj[t].rowsOfDumpOut;
}
@@ -1398,44 +1461,81 @@ int taosDumpDb(SDbInfo *dbInfo, struct arguments *arguments, FILE *fp, TAOS *tao
return -1;
}
TAOS_FIELD *fields = taos_fetch_fields(res);
int32_t numOfTable = 0;
int32_t numOfThread = 0;
char tmpFileName[TSDB_FILENAME_LEN + 1];
while ((row = taos_fetch_row(res)) != NULL) {
if (0 == numOfTable) {
memset(tmpFileName, 0, TSDB_FILENAME_LEN);
sprintf(tmpFileName, ".tables.tmp.%d", numOfThread);
fd = open(tmpFileName, O_RDWR | O_CREAT, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH);
if (fd == -1) {
fprintf(stderr, "failed to open temp file: %s\n", tmpFileName);
taos_free_result(res);
for (int32_t loopCnt = 0; loopCnt < numOfThread; loopCnt++) {
sprintf(tmpFileName, ".tables.tmp.%d", loopCnt);
(void)remove(tmpFileName);
}
return -1;
}
char tmpBuf[TSDB_FILENAME_LEN + 1];
memset(tmpBuf, 0, TSDB_FILENAME_LEN);
sprintf(tmpBuf, ".show-tables.tmp");
fd = open(tmpBuf, O_RDWR | O_CREAT | O_TRUNC, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH);
if (fd == -1) {
fprintf(stderr, "failed to open temp file: %s\n", tmpBuf);
taos_free_result(res);
return -1;
}
numOfThread++;
}
TAOS_FIELD *fields = taos_fetch_fields(res);
int32_t numOfTable = 0;
while ((row = taos_fetch_row(res)) != NULL) {
memset(&tableRecord, 0, sizeof(STableRecord));
tstrncpy(tableRecord.name, (char *)row[TSDB_SHOW_TABLES_NAME_INDEX], fields[TSDB_SHOW_TABLES_NAME_INDEX].bytes);
tstrncpy(tableRecord.metric, (char *)row[TSDB_SHOW_TABLES_METRIC_INDEX], fields[TSDB_SHOW_TABLES_METRIC_INDEX].bytes);
taosWrite(fd, &tableRecord, sizeof(STableRecord));
numOfTable++;
}
taos_free_result(res);
lseek(fd, 0, SEEK_SET);
if (numOfTable >= arguments->table_batch) {
numOfTable = 0;
int maxThreads = tsArguments.thread_num;
int tableOfPerFile ;
if (numOfTable <= tsArguments.thread_num) {
tableOfPerFile = 1;
maxThreads = numOfTable;
} else {
tableOfPerFile = numOfTable / tsArguments.thread_num;
if (0 != numOfTable % tsArguments.thread_num) {
tableOfPerFile += 1;
}
}
char* tblBuf = (char*)calloc(1, tableOfPerFile * sizeof(STableRecord));
if (NULL == tblBuf){
fprintf(stderr, "failed to calloc %" PRIzu "\n", tableOfPerFile * sizeof(STableRecord));
close(fd);
return -1;
}
int32_t numOfThread = 0;
int subFd = -1;
for (numOfThread = 0; numOfThread < maxThreads; numOfThread++) {
memset(tmpBuf, 0, TSDB_FILENAME_LEN);
sprintf(tmpBuf, ".tables.tmp.%d", numOfThread);
subFd = open(tmpBuf, O_RDWR | O_CREAT | O_TRUNC, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH);
if (subFd == -1) {
fprintf(stderr, "failed to open temp file: %s\n", tmpBuf);
for (int32_t loopCnt = 0; loopCnt < numOfThread; loopCnt++) {
sprintf(tmpBuf, ".tables.tmp.%d", loopCnt);
(void)remove(tmpBuf);
}
sprintf(tmpBuf, ".show-tables.tmp");
(void)remove(tmpBuf);
close(fd);
fd = -1;
return -1;
}
// read tableOfPerFile for fd, write to subFd
ssize_t readLen = read(fd, tblBuf, tableOfPerFile * sizeof(STableRecord));
if (readLen <= 0) {
close(subFd);
break;
}
taosWrite(subFd, tblBuf, readLen);
close(subFd);
}
sprintf(tmpBuf, ".show-tables.tmp");
(void)remove(tmpBuf);
if (fd >= 0) {
close(fd);
fd = -1;
@@ -1444,10 +1544,10 @@ int taosDumpDb(SDbInfo *dbInfo, struct arguments *arguments, FILE *fp, TAOS *tao
taos_free_result(res);
// start multi threads to dumpout
taosStartDumpOutWorkThreads(arguments, numOfThread, dbInfo->name);
taosStartDumpOutWorkThreads(taosCon, arguments, numOfThread, dbInfo->name);
for (int loopCnt = 0; loopCnt < numOfThread; loopCnt++) {
sprintf(tmpFileName, ".tables.tmp.%d", loopCnt);
(void)remove(tmpFileName);
sprintf(tmpBuf, ".tables.tmp.%d", loopCnt);
(void)remove(tmpBuf);
}
return 0;
@@ -1552,8 +1652,8 @@ void taosDumpCreateMTableClause(STableDef *tableDes, char *metric, int numOfCols
}
int taosDumpTableData(FILE *fp, char *tbname, struct arguments *arguments, TAOS* taosCon, char* dbName) {
/* char temp[MAX_COMMAND_SIZE] = "\0"; */
int64_t totalRows = 0;
int64_t lastRowsPrint = 5000000;
int64_t totalRows = 0;
int count = 0;
char *pstr = NULL;
TAOS_ROW row = NULL;
@@ -1680,9 +1780,14 @@ int taosDumpTableData(FILE *fp, char *tbname, struct arguments *arguments, TAOS*
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, ") ");
totalRows++;
totalRows++;
count++;
fprintf(fp, "%s", tmpBuffer);
if (totalRows >= lastRowsPrint) {
printf(" %"PRId64 " rows already be dumpout from %s.%s\n", totalRows, dbName, tbname);
lastRowsPrint += 5000000;
}
total_sqlstr_len += curr_sqlstr_len;
@@ -2048,6 +2153,7 @@ int taosDumpInOneFile(TAOS * taos, FILE* fp, char* fcharset, char* encode, c
return -1;
}
int lastRowsPrint = 5000000;
int lineNo = 0;
while ((read_len = getline(&line, &line_len, fp)) != -1) {
++lineNo;
@@ -2074,7 +2180,12 @@ int taosDumpInOneFile(TAOS * taos, FILE* fp, char* fcharset, char* encode, c
}
memset(cmd, 0, TSDB_MAX_ALLOWED_SQL_LEN);
cmd_len = 0;
cmd_len = 0;
if (lineNo >= lastRowsPrint) {
printf(" %d lines already be executed from file %s\n", lineNo, fileName);
lastRowsPrint += 5000000;
}
}
tfree(cmd);
@@ -2101,7 +2212,7 @@ void* taosDumpInWorkThreadFp(void *arg)
return NULL;
}
static void taosStartDumpInWorkThreads(struct arguments *args)
static void taosStartDumpInWorkThreads(void* taosCon, struct arguments *args)
{
pthread_attr_t thattr;
SThreadParaObj *pThread;
@@ -2116,11 +2227,7 @@ static void taosStartDumpInWorkThreads(struct arguments *args)
pThread = threadObj + t;
pThread->threadIndex = t;
pThread->totalThreads = totalThreads;
pThread->taosCon = taos_connect(args->host, args->user, args->password, NULL, args->port);
if (pThread->taosCon == NULL) {
fprintf(stderr, "ERROR: thread:%d failed connect to TDengine, reason:%s\n", pThread->threadIndex, taos_errstr(NULL));
exit(0);
}
pThread->taosCon = taosCon;
pthread_attr_init(&thattr);
pthread_attr_setdetachstate(&thattr, PTHREAD_CREATE_JOINABLE);
@@ -2169,7 +2276,7 @@ int taosDumpIn(struct arguments *arguments) {
taosDumpInOneFile(taos, fp, tsfCharset, arguments->encode, tsDbSqlFile);
}
taosStartDumpInWorkThreads(arguments);
taosStartDumpInWorkThreads(taos, arguments);
taos_close(taos);
taosFreeSQLFiles();
......
@@ -225,10 +225,11 @@ static void addToWheel(tmr_obj_t* timer, uint32_t delay) {
}
static bool removeFromWheel(tmr_obj_t* timer) {
if (timer->wheel >= tListLen(wheels)) {
uint8_t wheelIdx = timer->wheel;
if (wheelIdx >= tListLen(wheels)) {
return false;
}
time_wheel_t* wheel = wheels + timer->wheel;
time_wheel_t* wheel = wheels + wheelIdx;
bool removed = false;
pthread_mutex_lock(&wheel->mutex);
......
properties([pipelineTriggers([githubPush()])])
node {
git url: 'https://github.com/taosdata/TDengine'
}
// execute this before anything else, including requesting any time on an agent
if (currentBuild.rawBuild.getCauses().toString().contains('BranchIndexingCause')) {
print "INFO: Build skipped due to trigger being Branch Indexing"
currentBuild.result = 'ABORTED' // optional, gives a better hint to the user that it's been skipped, rather than the default which shows it's successful
return
}
def pre_test(){
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
@@ -21,15 +6,14 @@ def pre_test(){
}
sh '''
cd ${WKC}
rm -rf *
cd ${WK}
git reset --hard
git checkout develop
git checkout ${BRANCH}
git pull
cd ${WKC}
rm -rf *
mv ${WORKSPACE}/* .
git submodule update
cd ${WK}
git reset --hard
git checkout ${BRANCH}
git pull
export TZ=Asia/Harbin
date
rm -rf ${WK}/debug
@@ -38,13 +22,13 @@ def pre_test(){
cmake .. > /dev/null
make > /dev/null
make install > /dev/null
cd ${WKC}/tests
'''
return 1
}
pipeline {
agent none
environment{
BRANCH = 'develop'
WK = '/var/lib/jenkins/workspace/TDinternal'
WKC= '/var/lib/jenkins/workspace/TDinternal/community'
}
@@ -52,13 +36,13 @@ pipeline {
stages {
stage('Parallel test stage') {
parallel {
stage('python p1') {
agent{label 'p1'}
stage('pytest') {
agent{label '184'}
steps {
pre_test()
sh '''
cd ${WKC}/tests
./test-all.sh p1
./test-all.sh pytest
date'''
}
}
@@ -66,6 +50,12 @@ pipeline {
agent{label 'master'}
steps {
pre_test()
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WKC}/tests/pytest
python3 concurrent_inquiry.py -c 1
'''
}
sh '''
cd ${WKC}/tests
./test-all.sh b1
@@ -74,9 +64,12 @@ pipeline {
}
stage('test_crash_gen') {
agent{label "b2"}
agent{label "185"}
steps {
pre_test()
sh '''
cd ${WKC}/tests/pytest
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WKC}/tests/pytest
@@ -90,7 +83,6 @@ pipeline {
'''
}
sh '''
date
cd ${WKC}/tests
./test-all.sh b2
date
@@ -99,42 +91,177 @@ pipeline {
}
stage('test_valgrind') {
agent{label "b3"}
agent{label "186"}
steps {
pre_test()
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WKC}/tests/pytest
./valgrind-test.sh 2>&1 > mem-error-out.log
./handle_val_log.sh
'''
}
sh '''
cd ${WKC}/tests/pytest
./valgrind-test.sh 2>&1 > mem-error-out.log
./handle_val_log.sh
date
cd ${WKC}/tests
./test-all.sh b3
date'''
}
}
stage('python p2'){
agent{label "p2"}
stage('connector'){
agent{label "release"}
steps{
pre_test()
sh '''
date
cd ${WKC}/tests
./test-all.sh p2
date
sh'''
cd ${WORKSPACE}
git checkout develop
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WORKSPACE}/tests/gotest
bash batchtest.sh
'''
}
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WORKSPACE}/tests/examples/python/PYTHONConnectorChecker
python3 PythonChecker.py
'''
}
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WORKSPACE}/tests/examples/JDBC/JDBCDemo/
mvn clean package assembly:single >/dev/null
java -jar target/jdbcChecker-SNAPSHOT-jar-with-dependencies.jar -host 127.0.0.1
'''
}
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${JENKINS_HOME}/workspace/C#NET/src/CheckC#
dotnet run
'''
}
}
}
stage('arm64_build'){
agent{label 'arm64'}
steps{
sh '''
cd ${WK}
git fetch
git checkout develop
git pull
cd ${WKC}
git fetch
git checkout develop
git pull
git submodule update
cd ${WKC}/packaging
./release.sh -v cluster -c aarch64 -n 2.0.0.0 -m 2.0.0.0
'''
}
}
stage('arm32_build'){
agent{label 'arm32'}
steps{
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WK}
git fetch
git checkout develop
git pull
cd ${WKC}
git fetch
git checkout develop
git pull
git submodule update
cd ${WKC}/packaging
./release.sh -v cluster -c aarch32 -n 2.0.0.0 -m 2.0.0.0
'''
}
}
}
}
}
}
}
post {
success {
emailext (
subject: "SUCCESSFUL: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'",
body: '''<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
</head>
<body leftmargin="8" marginwidth="0" topmargin="8" marginheight="4" offset="0">
<table width="95%" cellpadding="0" cellspacing="0" style="font-size: 16pt; font-family: Tahoma, Arial, Helvetica, sans-serif">
<tr>
<td><br />
<b><font color="#0B610B"><font size="6">构建信息</font></font></b>
<hr size="2" width="100%" align="center" /></td>
</tr>
<tr>
<td>
<ul>
<div style="font-size:18px">
<li>构建名称>>分支:${PROJECT_NAME}</li>
<li>构建结果:<span style="color:green"> Successful </span></li>
<li>构建编号:${BUILD_NUMBER}</li>
<li>触发用户:${CAUSE}</li>
<li>变更概要:${CHANGES}</li>
<li>构建地址:<a href=${BUILD_URL}>${BUILD_URL}</a></li>
<li>构建日志:<a href=${BUILD_URL}console>${BUILD_URL}console</a></li>
<li>变更集:${JELLY_SCRIPT}</li>
</div>
</ul>
</td>
</tr>
</table></font>
</body>
</html>''',
to: "yqliu@taosdata.com,pxiao@taosdata.com",
from: "support@taosdata.com"
)
}
failure {
emailext (
subject: "FAILED: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'",
body: '''<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
</head>
<body leftmargin="8" marginwidth="0" topmargin="8" marginheight="4" offset="0">
<table width="95%" cellpadding="0" cellspacing="0" style="font-size: 16pt; font-family: Tahoma, Arial, Helvetica, sans-serif">
<tr>
<td><br />
<b><font color="#0B610B"><font size="6">构建信息</font></font></b>
<hr size="2" width="100%" align="center" /></td>
</tr>
<tr>
<td>
<ul>
<div style="font-size:18px">
<li>构建名称>>分支:${PROJECT_NAME}</li>
<li>构建结果:<span style="color:red"> Failure </span></li>
<li>构建编号:${BUILD_NUMBER}</li>
<li>触发用户:${CAUSE}</li>
<li>变更概要:${CHANGES}</li>
<li>构建地址:<a href=${BUILD_URL}>${BUILD_URL}</a></li>
<li>构建日志:<a href=${BUILD_URL}console>${BUILD_URL}console</a></li>
<li>变更集:${JELLY_SCRIPT}</li>
</div>
</ul>
</td>
</tr>
</table></font>
</body>
</html>''',
to: "yqliu@taosdata.com,pxiao@taosdata.com",
from: "support@taosdata.com"
)
}
}
}
\ No newline at end of file
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/
/*
* Copyright 2007-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.net.*;
import java.io.*;
import java.nio.channels.*;
import java.util.Properties;
public class MavenWrapperDownloader {
private static final String WRAPPER_VERSION = "0.5.6";
/**
* Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
*/
private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
+ WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
/**
* Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
* use instead of the default one.
*/
private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
".mvn/wrapper/maven-wrapper.properties";
/**
* Path where the maven-wrapper.jar will be saved to.
*/
private static final String MAVEN_WRAPPER_JAR_PATH =
".mvn/wrapper/maven-wrapper.jar";
/**
* Name of the property which should be used to override the default download url for the wrapper.
*/
private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
public static void main(String args[]) {
System.out.println("- Downloader started");
File baseDirectory = new File(args[0]);
System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
// If the maven-wrapper.properties exists, read it and check if it contains a custom
// wrapperUrl parameter.
File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
String url = DEFAULT_DOWNLOAD_URL;
if (mavenWrapperPropertyFile.exists()) {
FileInputStream mavenWrapperPropertyFileInputStream = null;
try {
mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
Properties mavenWrapperProperties = new Properties();
mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
} catch (IOException e) {
System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
} finally {
try {
if (mavenWrapperPropertyFileInputStream != null) {
mavenWrapperPropertyFileInputStream.close();
}
} catch (IOException e) {
// Ignore ...
}
}
}
System.out.println("- Downloading from: " + url);
File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
if (!outputFile.getParentFile().exists()) {
if (!outputFile.getParentFile().mkdirs()) {
System.out.println(
"- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
}
}
System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
try {
downloadFileFromURL(url, outputFile);
System.out.println("Done");
System.exit(0);
} catch (Throwable e) {
System.out.println("- Error downloading");
e.printStackTrace();
System.exit(1);
}
}
private static void downloadFileFromURL(String urlString, File destination) throws Exception {
if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
String username = System.getenv("MVNW_USERNAME");
char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
Authenticator.setDefault(new Authenticator() {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(username, password);
}
});
}
URL website = new URL(urlString);
ReadableByteChannel rbc;
rbc = Channels.newChannel(website.openStream());
FileOutputStream fos = new FileOutputStream(destination);
fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
fos.close();
rbc.close();
}
}
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
#!/bin/sh
# ----------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Maven Start Up Batch script
#
# Required ENV vars:
# ------------------
# JAVA_HOME - location of a JDK home dir
#
# Optional ENV vars
# -----------------
# M2_HOME - location of maven2's installed home dir
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
# e.g. to debug Maven itself, use
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
# ----------------------------------------------------------------------------
if [ -z "$MAVEN_SKIP_RC" ]; then
if [ -f /etc/mavenrc ]; then
. /etc/mavenrc
fi
if [ -f "$HOME/.mavenrc" ]; then
. "$HOME/.mavenrc"
fi
fi
# OS specific support. $var _must_ be set to either true or false.
cygwin=false
darwin=false
mingw=false
case "$(uname)" in
CYGWIN*) cygwin=true ;;
MINGW*) mingw=true ;;
Darwin*)
darwin=true
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
if [ -z "$JAVA_HOME" ]; then
if [ -x "/usr/libexec/java_home" ]; then
export JAVA_HOME="$(/usr/libexec/java_home)"
else
export JAVA_HOME="/Library/Java/Home"
fi
fi
;;
esac
if [ -z "$JAVA_HOME" ]; then
if [ -r /etc/gentoo-release ]; then
JAVA_HOME=$(java-config --jre-home)
fi
fi
if [ -z "$M2_HOME" ]; then
## resolve links - $0 may be a link to maven's home
PRG="$0"
# need this for relative symlinks
while [ -h "$PRG" ]; do
ls=$(ls -ld "$PRG")
link=$(expr "$ls" : '.*-> \(.*\)$')
if expr "$link" : '/.*' >/dev/null; then
PRG="$link"
else
PRG="$(dirname "$PRG")/$link"
fi
done
saveddir=$(pwd)
M2_HOME=$(dirname "$PRG")/..
# make it fully qualified
M2_HOME=$(cd "$M2_HOME" && pwd)
cd "$saveddir"
# echo Using m2 at $M2_HOME
fi
# For Cygwin, ensure paths are in UNIX format before anything is touched
if $cygwin; then
[ -n "$M2_HOME" ] &&
M2_HOME=$(cygpath --unix "$M2_HOME")
[ -n "$JAVA_HOME" ] &&
JAVA_HOME=$(cygpath --unix "$JAVA_HOME")
[ -n "$CLASSPATH" ] &&
CLASSPATH=$(cygpath --path --unix "$CLASSPATH")
fi
# For Mingw, ensure paths are in UNIX format before anything is touched
if $mingw; then
[ -n "$M2_HOME" ] &&
M2_HOME="$( (
cd "$M2_HOME"
pwd
))"
[ -n "$JAVA_HOME" ] &&
JAVA_HOME="$( (
cd "$JAVA_HOME"
pwd
))"
fi
if [ -z "$JAVA_HOME" ]; then
javaExecutable="$(which javac)"
if [ -n "$javaExecutable" ] && ! [ "$(expr \"$javaExecutable\" : '\([^ ]*\)')" = "no" ]; then
# readlink(1) is not available as standard on Solaris 10.
readLink=$(which readlink)
if [ ! $(expr "$readLink" : '\([^ ]*\)') = "no" ]; then
if $darwin; then
javaHome="$(dirname \"$javaExecutable\")"
javaExecutable="$(cd \"$javaHome\" && pwd -P)/javac"
else
javaExecutable="$(readlink -f \"$javaExecutable\")"
fi
javaHome="$(dirname \"$javaExecutable\")"
javaHome=$(expr "$javaHome" : '\(.*\)/bin')
JAVA_HOME="$javaHome"
export JAVA_HOME
fi
fi
fi
if [ -z "$JAVACMD" ]; then
if [ -n "$JAVA_HOME" ]; then
if [ -x "$JAVA_HOME/jre/sh/java" ]; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
else
JAVACMD="$(which java)"
fi
fi
if [ ! -x "$JAVACMD" ]; then
echo "Error: JAVA_HOME is not defined correctly." >&2
echo " We cannot execute $JAVACMD" >&2
exit 1
fi
if [ -z "$JAVA_HOME" ]; then
echo "Warning: JAVA_HOME environment variable is not set."
fi
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
# traverses directory structure from process work directory to filesystem root
# first directory with .mvn subdirectory is considered project base directory
find_maven_basedir() {
if [ -z "$1" ]; then
echo "Path not specified to find_maven_basedir"
return 1
fi
basedir="$1"
wdir="$1"
while [ "$wdir" != '/' ]; do
if [ -d "$wdir"/.mvn ]; then
basedir=$wdir
break
fi
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
if [ -d "${wdir}" ]; then
wdir=$(
cd "$wdir/.."
pwd
)
fi
# end of workaround
done
echo "${basedir}"
}
# concatenates all lines of a file
concat_lines() {
if [ -f "$1" ]; then
echo "$(tr -s '\n' ' ' <"$1")"
fi
}
BASE_DIR=$(find_maven_basedir "$(pwd)")
if [ -z "$BASE_DIR" ]; then
exit 1
fi
##########################################################################################
# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
# This allows using the maven wrapper in projects that prohibit checking in binary data.
##########################################################################################
if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found .mvn/wrapper/maven-wrapper.jar"
fi
else
if [ "$MVNW_VERBOSE" = true ]; then
echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
fi
if [ -n "$MVNW_REPOURL" ]; then
jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
else
jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
fi
while IFS="=" read key value; do
case "$key" in wrapperUrl)
jarUrl="$value"
break
;;
esac
done <"$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
if [ "$MVNW_VERBOSE" = true ]; then
echo "Downloading from: $jarUrl"
fi
wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
if $cygwin; then
wrapperJarPath=$(cygpath --path --windows "$wrapperJarPath")
fi
if command -v wget >/dev/null; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found wget ... using wget"
fi
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
wget "$jarUrl" -O "$wrapperJarPath"
else
wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath"
fi
elif command -v curl >/dev/null; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found curl ... using curl"
fi
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
curl -o "$wrapperJarPath" "$jarUrl" -f
else
curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
fi
else
if [ "$MVNW_VERBOSE" = true ]; then
echo "Falling back to using Java to download"
fi
javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
# For Cygwin, switch paths to Windows format before running javac
if $cygwin; then
javaClass=$(cygpath --path --windows "$javaClass")
fi
if [ -e "$javaClass" ]; then
if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
if [ "$MVNW_VERBOSE" = true ]; then
echo " - Compiling MavenWrapperDownloader.java ..."
fi
# Compiling the Java class
("$JAVA_HOME/bin/javac" "$javaClass")
fi
if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
# Running the downloader
if [ "$MVNW_VERBOSE" = true ]; then
echo " - Running MavenWrapperDownloader.java ..."
fi
("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
fi
fi
fi
fi
##########################################################################################
# End of extension
##########################################################################################
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
if [ "$MVNW_VERBOSE" = true ]; then
echo $MAVEN_PROJECTBASEDIR
fi
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
# For Cygwin, switch paths to Windows format before running java
if $cygwin; then
[ -n "$M2_HOME" ] &&
M2_HOME=$(cygpath --path --windows "$M2_HOME")
[ -n "$JAVA_HOME" ] &&
JAVA_HOME=$(cygpath --path --windows "$JAVA_HOME")
[ -n "$CLASSPATH" ] &&
CLASSPATH=$(cygpath --path --windows "$CLASSPATH")
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
MAVEN_PROJECTBASEDIR=$(cygpath --path --windows "$MAVEN_PROJECTBASEDIR")
fi
# Provide a "standardized" way to retrieve the CLI args that will
# work with both Windows and non-Windows executions.
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
export MAVEN_CMD_LINE_ARGS
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
exec "$JAVACMD" \
$MAVEN_OPTS \
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
@REM ----------------------------------------------------------------------------
@REM Licensed to the Apache Software Foundation (ASF) under one
@REM or more contributor license agreements. See the NOTICE file
@REM distributed with this work for additional information
@REM regarding copyright ownership. The ASF licenses this file
@REM to you under the Apache License, Version 2.0 (the
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM https://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@REM KIND, either express or implied. See the License for the
@REM specific language governing permissions and limitations
@REM under the License.
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Maven Start Up Batch script
@REM
@REM Required ENV vars:
@REM JAVA_HOME - location of a JDK home dir
@REM
@REM Optional ENV vars
@REM M2_HOME - location of maven2's installed home dir
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
@REM e.g. to debug Maven itself, use
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
@REM ----------------------------------------------------------------------------
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
@echo off
@REM set title of command window
title %0
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
@REM set %HOME% to equivalent of $HOME
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
@REM Execute a user defined script before this one
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
:skipRcPre
@setlocal
set ERROR_CODE=0
@REM To isolate internal variables from possible post scripts, we use another setlocal
@setlocal
@REM ==== START VALIDATION ====
if not "%JAVA_HOME%" == "" goto OkJHome
echo.
echo Error: JAVA_HOME not found in your environment. >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
:OkJHome
if exist "%JAVA_HOME%\bin\java.exe" goto init
echo.
echo Error: JAVA_HOME is set to an invalid directory. >&2
echo JAVA_HOME = "%JAVA_HOME%" >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
@REM ==== END VALIDATION ====
:init
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
@REM Fallback to current working directory if not found.
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
set EXEC_DIR=%CD%
set WDIR=%EXEC_DIR%
:findBaseDir
IF EXIST "%WDIR%"\.mvn goto baseDirFound
cd ..
IF "%WDIR%"=="%CD%" goto baseDirNotFound
set WDIR=%CD%
goto findBaseDir
:baseDirFound
set MAVEN_PROJECTBASEDIR=%WDIR%
cd "%EXEC_DIR%"
goto endDetectBaseDir
:baseDirNotFound
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
cd "%EXEC_DIR%"
:endDetectBaseDir
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
@setlocal EnableExtensions EnableDelayedExpansion
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
:endReadAdditionalConfig
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
)
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
if exist %WRAPPER_JAR% (
if "%MVNW_VERBOSE%" == "true" (
echo Found %WRAPPER_JAR%
)
) else (
if not "%MVNW_REPOURL%" == "" (
SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
)
if "%MVNW_VERBOSE%" == "true" (
echo Couldn't find %WRAPPER_JAR%, downloading it ...
echo Downloading from: %DOWNLOAD_URL%
)
powershell -Command "&{"^
"$webclient = new-object System.Net.WebClient;"^
"if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
"$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
"}"^
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
"}"
if "%MVNW_VERBOSE%" == "true" (
echo Finished downloading %WRAPPER_JAR%
)
)
@REM End of extension
@REM Provide a "standardized" way to retrieve the CLI args that will
@REM work with both Windows and non-Windows executions.
set MAVEN_CMD_LINE_ARGS=%*
%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
if ERRORLEVEL 1 goto error
goto end
:error
set ERROR_CODE=1
:end
@endlocal & set ERROR_CODE=%ERROR_CODE%
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
@REM check for post script, once with legacy .bat ending and once with .cmd ending
if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
:skipRcPost
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
if "%MAVEN_BATCH_PAUSE%" == "on" pause
if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
exit /B %ERROR_CODE%
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.4.0</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.taosdata</groupId>
<artifactId>taosdemo</artifactId>
<version>2.0</version>
<name>taosdemo</name>
<description>Demo project for TDengine</description>
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<!-- taos jdbc -->
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>2.0.14</version>
</dependency>
<!-- mysql -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.47</version>
</dependency>
<!-- mybatis-plus -->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>3.1.2</version>
</dependency>
<!-- log4j -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<!-- springboot -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.mybatis.spring.boot</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId>
<version>2.1.4</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-devtools</artifactId>
<scope>runtime</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/resources</directory>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
</includes>
<filtering>true</filtering>
</resource>
<resource>
<directory>src/main/java</directory>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
</includes>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
package com.taosdata.taosdemo;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@MapperScan(basePackages = {"com.taosdata.taosdemo.mapper"})
@SpringBootApplication
public class TaosdemoApplication {
public static void main(String[] args) {
SpringApplication.run(TaosdemoApplication.class, args);
}
}
package com.taosdata.taosdemo.components;
import com.taosdata.taosdemo.domain.*;
import com.taosdata.taosdemo.service.DatabaseService;
import com.taosdata.taosdemo.service.SubTableService;
import com.taosdata.taosdemo.service.SuperTableService;
import com.taosdata.taosdemo.service.data.SubTableMetaGenerator;
import com.taosdata.taosdemo.service.data.SubTableValueGenerator;
import com.taosdata.taosdemo.service.data.SuperTableMetaGenerator;
import com.taosdata.taosdemo.utils.JdbcTaosdemoConfig;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
import java.util.*;
import java.util.concurrent.TimeUnit;
@Component
public class TaosDemoCommandLineRunner implements CommandLineRunner {
private static Logger logger = Logger.getLogger(TaosDemoCommandLineRunner.class);
@Autowired
private DatabaseService databaseService;
@Autowired
private SuperTableService superTableService;
@Autowired
private SubTableService subTableService;
private SuperTableMeta superTableMeta;
private List<SubTableMeta> subTableMetaList;
private List<SubTableValue> subTableValueList;
private List<List<SubTableValue>> dataList;
@Override
public void run(String... args) throws Exception {
// Read the configuration parameters
JdbcTaosdemoConfig config = new JdbcTaosdemoConfig(args);
boolean isHelp = Arrays.asList(args).contains("--help");
if (isHelp) {
JdbcTaosdemoConfig.printHelp();
System.exit(0);
}
// Prepare the data
prepareData(config);
// Create the database
createDatabaseTask(config);
// Create the tables
createTableTask(config);
// Insert the data
insertTask(config);
// Query: 1. generate query statements, 2. execute the queries
// Drop the tables
if (config.dropTable) {
superTableService.drop(config.database, config.superTable);
}
System.exit(0);
}
private void createDatabaseTask(JdbcTaosdemoConfig config) {
long start = System.currentTimeMillis();
Map<String, String> databaseParam = new HashMap<>();
databaseParam.put("database", config.database);
databaseParam.put("keep", Integer.toString(config.keep));
databaseParam.put("days", Integer.toString(config.days));
databaseParam.put("replica", Integer.toString(config.replica));
//TODO: other database parameters
databaseService.dropDatabase(config.database);
databaseService.createDatabase(databaseParam);
databaseService.useDatabase(config.database);
long end = System.currentTimeMillis();
>> insert time cost">
logger.info(">>> create database time cost : " + (end - start) + " ms.");
}
// Create the super table in one of three ways: 1. a user-specified SQL statement, 2. specified numbers of fields and tags, 3. defaults
private void createTableTask(JdbcTaosdemoConfig config) {
long start = System.currentTimeMillis();
if (config.doCreateTable) {
superTableService.create(superTableMeta);
// Create the sub tables in batches
subTableService.createSubTable(subTableMetaList, config.numOfThreadsForCreate);
}
long end = System.currentTimeMillis();
logger.info(">>> create table time cost : " + (end - start) + " ms.");
}
private void insertTask(JdbcTaosdemoConfig config) {
long start = System.currentTimeMillis();
int numOfThreadsForInsert = config.numOfThreadsForInsert;
int sleep = config.sleep;
if (config.autoCreateTable) {
// Batch insert with automatic table creation
dataList.stream().forEach(subTableValues -> {
subTableService.insertAutoCreateTable(subTableValues, numOfThreadsForInsert);
sleep(sleep);
});
} else {
dataList.stream().forEach(subTableValues -> {
subTableService.insert(subTableValues, numOfThreadsForInsert);
sleep(sleep);
});
}
long end = System.currentTimeMillis();
logger.info(">>> insert time cost : " + (end - start) + " ms.");
}
private void prepareData(JdbcTaosdemoConfig config) {
long start = System.currentTimeMillis();
// Meta of the super table
superTableMeta = createSupertable(config);
// Meta of the sub tables
subTableMetaList = SubTableMetaGenerator.generate(superTableMeta, config.numOfTables, config.tablePrefix);
// Data of the sub tables
subTableValueList = SubTableValueGenerator.generate(subTableMetaList, config.numOfRowsPerTable, config.startTime, config.timeGap);
// If out-of-order data is requested, disrupt the generated data
if (config.order != 0) {
SubTableValueGenerator.disrupt(subTableValueList, config.rate, config.range);
}
// Split the data
int numOfTables = config.numOfTables;
int numOfTablesPerSQL = config.numOfTablesPerSQL;
int numOfRowsPerTable = config.numOfRowsPerTable;
int numOfValuesPerSQL = config.numOfValuesPerSQL;
dataList = SubTableValueGenerator.split(subTableValueList, numOfTables, numOfTablesPerSQL, numOfRowsPerTable, numOfValuesPerSQL);
long end = System.currentTimeMillis();
logger.info(">>> prepare data time cost : " + (end - start) + " ms.");
}
private SuperTableMeta createSupertable(JdbcTaosdemoConfig config) {
SuperTableMeta tableMeta;
// create super table
logger.info(">>> create super table <<<");
if (config.superTableSQL != null) {
// use a sql to create super table
tableMeta = SuperTableMetaGenerator.generate(config.superTableSQL);
} else if (config.numOfFields == 0) {
// default sql = "create table test.weather (ts timestamp, temperature float, humidity int) tags(location nchar(64), groupId int)";
SuperTableMeta superTableMeta = new SuperTableMeta();
superTableMeta.setDatabase(config.database);
superTableMeta.setName(config.superTable);
List<FieldMeta> fields = new ArrayList<>();
fields.add(new FieldMeta("ts", "timestamp"));
fields.add(new FieldMeta("temperature", "float"));
fields.add(new FieldMeta("humidity", "int"));
superTableMeta.setFields(fields);
List<TagMeta> tags = new ArrayList<>();
tags.add(new TagMeta("location", "nchar(64)"));
tags.add(new TagMeta("groupId", "int"));
superTableMeta.setTags(tags);
return superTableMeta;
} else {
// create super table with specified field size and tag size
tableMeta = SuperTableMetaGenerator.generate(config.database, config.superTable, config.numOfFields, config.prefixOfFields, config.numOfTags, config.prefixOfTags);
}
return tableMeta;
}
private static void sleep(int sleep) {
if (sleep <= 0)
return;
try {
TimeUnit.MILLISECONDS.sleep(sleep);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
package com.taosdata.taosdemo.controller;
import com.taosdata.taosdemo.service.DatabaseService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.Map;
@RestController
@RequestMapping
public class DatabaseController {
@Autowired
private DatabaseService databaseService;
/**
* create database
***/
@PostMapping
public int create(@RequestBody Map<String, String> map) {
return databaseService.createDatabase(map);
}
/**
* drop database
**/
@DeleteMapping("/{dbname}")
public int delete(@PathVariable("dbname") String dbname) {
return databaseService.dropDatabase(dbname);
}
/**
* use database
**/
@GetMapping("/{dbname}")
public int use(@PathVariable("dbname") String dbname) {
return databaseService.useDatabase(dbname);
}
}
package com.taosdata.taosdemo.controller;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class InsertController {
//TODO: multiple threads writing to one table, e.g. thread = 10, table = 1
//TODO: one batch writing to multiple tables, e.g. insert into t1 using weather values() t2 using weather values()
//      (see the illustrative sketch below)
//TODO: insertion frequency
//TODO: number of records per table
//TODO: whether the data is out of order
//TODO: proportion and range of the out-of-order data
//TODO: pre-created tables vs. auto-created tables
//TODO: one batch writing to multiple tables
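// A minimal sketch of the batch auto-create insert statement the TODOs above refer to;
// the table names, tag values and field values below are purely illustrative:
//   INSERT INTO t1 USING weather TAGS ('beijing', 1) VALUES (now, 20.5, 34)
//               t2 USING weather TAGS ('shanghai', 2) VALUES (now, 21.0, 40)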
}
package com.taosdata.taosdemo.controller;
import com.taosdata.taosdemo.domain.TableValue;
import com.taosdata.taosdemo.service.SuperTableService;
import com.taosdata.taosdemo.service.TableService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class SubTableController {
@Autowired
private TableService tableService;
@Autowired
private SuperTableService superTableService;
//TODO: create one sub table using the super table
//TODO: create multiple sub tables using the super table
//TODO: create sub tables using the super table with multiple threads
//TODO: create sub tables using the super table with multiple threads, specifying the sub table name prefix, the number of sub tables and the number of threads
/**
 * create a table: either a super table or an ordinary table
 **/
/**
 * create a sub table of a super table
 **/
@PostMapping("/{database}/{superTable}")
public int createTable(@PathVariable("database") String database,
@PathVariable("superTable") String superTable,
@RequestBody TableValue tableMetadata) {
tableMetadata.setDatabase(database);
return 0;
}
}
package com.taosdata.taosdemo.controller;
import com.taosdata.taosdemo.domain.SuperTableMeta;
import com.taosdata.taosdemo.service.SuperTableService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class SuperTableController {
@Autowired
private SuperTableService superTableService;
@PostMapping("/{database}")
public int createTable(@PathVariable("database") String database, @RequestBody SuperTableMeta tableMetadata) {
tableMetadata.setDatabase(database);
return superTableService.create(tableMetadta);
}
//TODO: drop the super table
//TODO: query the super table
//TODO: aggregate/statistics queries on the super table
}
package com.taosdata.taosdemo.controller;
public class TableController {
//TODO: create an ordinary table, e.g. create table(ts timestamp, temperature float)
//TODO: create an ordinary table with a specified number of columns, the first being the timestamp column
//TODO: create an ordinary table, specifying the name and type of each column
}
package com.taosdata.taosdemo.domain;
import lombok.Data;
@Data
public class FieldMeta {
private String name;
private String type;
public FieldMeta() {
}
public FieldMeta(String name, String type) {
this.name = name;
this.type = type;
}
}
package com.taosdata.taosdemo.domain;
import lombok.Data;
@Data
public class FieldValue<T> {
private String name;
private T value;
public FieldValue() {
}
public FieldValue(String name, T value) {
this.name = name;
this.value = value;
}
}
package com.taosdata.taosdemo.domain;
import lombok.Data;
import java.util.List;
@Data
public class RowValue {
private List<FieldValue> fields;
public RowValue(List<FieldValue> fields) {
this.fields = fields;
}
}
package com.taosdata.taosdemo.domain;
import lombok.Data;
import java.util.List;
@Data
public class SubTableMeta {
private String database;
private String supertable;
private String name;
private List<TagValue> tags;
private List<FieldMeta> fields;
}
package com.taosdata.taosdemo.domain;
import lombok.Data;
import java.util.List;
@Data
public class SubTableValue {
private String database;
private String supertable;
private String name;
private List<TagValue> tags;
private List<RowValue> values;
}
package com.taosdata.taosdemo.domain;
import lombok.Data;
import java.util.List;
@Data
public class SuperTableMeta {
private String database;
private String name;
private List<FieldMeta> fields;
private List<TagMeta> tags;
}
package com.taosdata.taosdemo.domain;
import lombok.Data;
import java.util.List;
@Data
public class TableMeta {
private String database;
private String name;
private List<FieldMeta> fields;
}
package com.taosdata.taosdemo.domain;
import lombok.Data;
import java.util.List;
@Data
public class TableValue {
private String database;
private String name;
private List<FieldMeta> columns;
private List<RowValue> values;
}
package com.taosdata.taosdemo.domain;
import lombok.Data;
@Data
public class TagMeta {
private String name;
private String type;
public TagMeta() {
}
public TagMeta(String name, String type) {
this.name = name;
this.type = type;
}
}
package com.taosdata.taosdemo.domain;
import lombok.Data;
@Data
public class TagValue<T> {
private String name;
private T value;
public TagValue() {
}
public TagValue(String name, T value) {
this.name = name;
this.value = value;
}
}
package com.taosdata.taosdemo.mapper;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;
import java.util.Map;
@Repository
public interface DatabaseMapper {
// create database if not exists XXX
int createDatabase(@Param("database") String dbname);
// drop database if exists XXX
int dropDatabase(@Param("database") String dbname);
// create database if not exists XXX keep XX days XX replica XX
int createDatabaseWithParameters(Map<String, String> map);
// use XXX
int useDatabase(@Param("database") String dbname);
//TODO: alter database
//TODO: show database
}
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.taosdata.taosdemo.mapper.DatabaseMapper">
<!-- create database XXX -->
<update id="createDatabase" parameterType="java.lang.String">
create database if not exists ${database}
</update>
<update id="dropDatabase" parameterType="java.lang.String">
DROP database if exists ${database}
</update>
<update id="createDatabaseWithParameters" parameterType="map">
CREATE database if not exists ${database}
<if test="keep != null">
KEEP ${keep}
</if>
<if test="days != null">
DAYS ${days}
</if>
<if test="replica != null">
REPLICA ${replica}
</if>
<if test="cache != null">
cache ${cache}
</if>
<if test="blocks != null">
blocks ${blocks}
</if>
<if test="minrows != null">
minrows ${minrows}
</if>
<if test="maxrows != null">
maxrows ${maxrows}
</if>
</update>
<update id="useDatabase" parameterType="java.lang.String">
use ${database}
</update>
<!-- TODO: alter database -->
<!-- TODO: show database -->
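<!-- A minimal illustration of the statement shape produced by createDatabaseWithParameters when the
     default keep/days/replica values from JdbcTaosdemoConfig are supplied (values are examples only):
     CREATE database if not exists test KEEP 3650 DAYS 30 REPLICA 1 -->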
</mapper>
package com.taosdata.taosdemo.mapper;
import com.taosdata.taosdemo.domain.SubTableMeta;
import com.taosdata.taosdemo.domain.SubTableValue;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface SubTableMapper {
// create: a sub table
int createUsingSuperTable(SubTableMeta subTableMeta);
// insert: multiple values into one sub table
int insertOneTableMultiValues(SubTableValue subTableValue);
// insert: multiple values into one sub table, auto-creating the table
int insertOneTableMultiValuesUsingSuperTable(SubTableValue subTableValue);
// insert: multiple values into multiple tables
int insertMultiTableMultiValues(@Param("tables") List<SubTableValue> tables);
// insert: multiple values into multiple tables, auto-creating the tables
int insertMultiTableMultiValuesUsingSuperTable(@Param("tables") List<SubTableValue> tables);
//<!-- TODO: modify a sub table tag value: alter table ${tablename} set tag tagName=newTagValue-->
}
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.taosdata.taosdemo.mapper.SubTableMapper">
<!-- create a sub table -->
<update id="createUsingSuperTable">
CREATE table IF NOT EXISTS ${database}.${name} USING ${supertable} TAGS
<foreach collection="tags" item="tag" index="index" open="(" close=")" separator=",">
#{tag.value}
</foreach>
</update>
<!-- insert: multiple rows into one table -->
<insert id="insertOneTableMultiValues">
INSERT INTO ${database}.${name}
VALUES
<foreach collection="values" item="value">
<foreach collection="value.fields" item="field" open="(" close=")" separator=",">
#{field.value}
</foreach>
</foreach>
</insert>
<!-- insert: multiple rows into one table, auto-creating the table -->
<insert id="insertOneTableMultiValuesUsingSuperTable">
INSERT INTO ${database}.${name} USING ${supertable} TAGS
<foreach collection="tags" item="tag" index="index" open="(" close=")" separator=",">
#{tag.value}
</foreach>
VALUES
<foreach collection="values" item="value">
<foreach collection="value.fields" item="field" open="(" close=")" separator=",">
#{field.value}
</foreach>
</foreach>
</insert>
<!-- TODO: insert: multiple rows into one table, with specified columns -->
<!-- TODO: insert: multiple rows into one table, auto-creating the table, with specified columns -->
<!-- insert: multiple rows into multiple tables -->
<insert id="insertMultiTableMultiValues">
INSERT INTO
<foreach collection="tables" item="table">
${table.database}.${table.name}
VALUES
<foreach collection="table.values" item="value">
<foreach collection="value.fields" item="field" open="(" close=")" separator=",">
#{field.value}
</foreach>
</foreach>
</foreach>
</insert>
<!-- insert: multiple rows into multiple tables, auto-creating the tables -->
<insert id="insertMultiTableMultiValuesUsingSuperTable">
INSERT INTO
<foreach collection="tables" item="table">
${table.database}.${table.name} USING ${table.supertable} TAGS
<foreach collection="table.tags" item="tag" index="index" open="(" close=")" separator=",">
#{tag.value}
</foreach>
VALUES
<foreach collection="table.values" item="value">
<foreach collection="value.fields" item="field" open="(" close=")" separator=",">
#{field.value}
</foreach>
</foreach>
</foreach>
</insert>
<!-- TODO: insert: multiple rows into multiple tables, with specified columns -->
<!-- TODO: insert: multiple rows into multiple tables, auto-creating the tables, with specified columns -->
<!-- TODO: modify a sub table tag value: alter table ${tablename} set tag tagName=newTagValue -->
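<!-- A minimal illustration (values are examples only) of the statement shape produced by
     insertMultiTableMultiValuesUsingSuperTable above:
     INSERT INTO test.t1 USING weather TAGS ('beijing', 1) VALUES (now, 20.5, 34)
                 test.t2 USING weather TAGS ('shanghai', 2) VALUES (now, 21.0, 40) -->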
</mapper>
package com.taosdata.taosdemo.mapper;
import com.taosdata.taosdemo.domain.SuperTableMeta;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;
@Repository
public interface SuperTableMapper {
// create a super table using a user-defined SQL statement
int createSuperTableUsingSQL(@Param("createSuperTableSQL") String sql);
// create a super table: create table if not exists xxx.xxx (f1 type1, f2 type2, ... ) tags( t1 type1, t2 type2 ...)
int createSuperTable(SuperTableMeta tableMetadata);
// drop a super table: drop table if exists xxx;
int dropSuperTable(@Param("database") String database, @Param("name") String name);
//<!-- TODO: list all super tables: show stables -->
//<!-- TODO: describe the table structure: describe stable -->
//<!-- TODO: add a column: alter table ${tablename} add column fieldName dataType -->
//<!-- TODO: drop a column: alter table ${tablename} drop column fieldName -->
//<!-- TODO: add a tag: alter table ${tablename} add tag new_tagName tag_type -->
//<!-- TODO: drop a tag: alter table ${tablename} drop tag_name -->
//<!-- TODO: rename a tag: alter table ${tablename} change tag old_tagName new_tagName -->
}
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.taosdata.taosdemo.mapper.SuperTableMapper">
<update id="createSuperTableUsingSQL">
${createSuperTableSQL}
</update>
<!-- create the super table -->
<update id="createSuperTable">
create table if not exists ${database}.${name}
<foreach collection="fields" item="field" index="index" open="(" close=")" separator=",">
${field.name} ${field.type}
</foreach>
tags
<foreach collection="tags" item="tag" index="index" open="(" close=")" separator=",">
${tag.name} ${tag.type}
</foreach>
</update>
<!-- drop the super table -->
<delete id="dropSuperTable">
drop table if exists ${database}.${name}
</delete>
<!-- TODO: list all super tables: show stables -->
<!-- TODO: describe the table structure: describe stable -->
<!-- TODO: add a column: alter table ${tablename} add column fieldName dataType -->
<!-- TODO: drop a column: alter table ${tablename} drop column fieldName -->
<!-- TODO: add a tag: alter table ${tablename} add tag new_tagName tag_type -->
<!-- TODO: drop a tag: alter table ${tablename} drop tag_name -->
<!-- TODO: rename a tag: alter table ${tablename} change tag old_tagName new_tagName -->
</mapper>
package com.taosdata.taosdemo.mapper;
import com.taosdata.taosdemo.domain.TableMeta;
import com.taosdata.taosdemo.domain.TableValue;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface TableMapper {
// create: an ordinary table
int create(TableMeta tableMeta);
// insert: multiple values into one table
int insertOneTableMultiValues(TableValue values);
// insert: multiple values into one table, with specified columns
int insertOneTableMultiValuesWithColumns(TableValue values);
// insert: multiple values into multiple tables
int insertMultiTableMultiValues(@Param("tables") List<TableValue> tables);
// insert: multiple values into multiple tables, with specified columns
int insertMultiTableMultiValuesWithColumns(@Param("tables") List<TableValue> tables);
}
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.taosdata.taosdemo.mapper.TableMapper">
<!-- create an ordinary table -->
<update id="create" parameterType="com.taosdata.taosdemo.domain.TableMeta">
create table if not exists ${database}.${name}
<foreach collection="fields" item="field" index="index" open="(" close=")" separator=",">
${field.name} ${field.type}
</foreach>
</update>
<!-- insert: multiple rows into one ordinary table -->
<insert id="insertOneTableMultiValues" parameterType="com.taosdata.taosdemo.domain.TableValue">
insert into ${database}.${name} values
<foreach collection="values" item="value">
<foreach collection="value.fields" item="field" open="(" close=")" separator=",">
${field.value}
</foreach>
</foreach>
</insert>
<!-- insert rows into specified columns of one table: insert into XXX.xx (f1,f2,f3...) values(v1,v2,v3...) -->
<insert id="insertOneTableMultiValuesWithColumns" parameterType="com.taosdata.taosdemo.domain.TableValue">
insert into ${database}.${name}
<foreach collection="columns" item="column" open="(" close=")" separator=",">
${column.name}
</foreach>
values
<foreach collection="values" item="value">
<foreach collection="value.fields" item="field" open="(" close=")" separator=",">
${field.value}
</foreach>
</foreach>
</insert>
<!-- insert multiple rows into multiple tables -->
<insert id="insertMultiTableMultiValues">
insert into
<foreach collection="tables" item="table">
${table.database}.${table.name} values
<foreach collection="table.values" item="value">
<foreach collection="value.fields" item="field" open="(" close=")" separator=",">
${field.value}
</foreach>
</foreach>
</foreach>
</insert>
<!-- insert multiple rows into specified columns of multiple tables -->
<insert id="insertMultiTableMultiValuesWithColumns">
insert into
<foreach collection="tables" item="table">
${table.database}.${table.name}
<foreach collection="table.columns" item="column" open="(" close=")" separator=",">
${column.name}
</foreach>
values
<foreach collection="table.values" item="value">
<foreach collection="value.fields" item="field" open="(" close=")" separator=",">
${field.value}
</foreach>
</foreach>
</foreach>
</insert>
</mapper>
package com.taosdata.taosdemo.service;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
public class AbstractService {
protected int getAffectRows(List<Future<Integer>> futureList) {
int count = 0;
for (Future<Integer> future : futureList) {
try {
count += future.get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
return count;
}
protected int getAffectRows(Future<Integer> future) {
int count = 0;
try {
count += future.get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
return count;
}
}
package com.taosdata.taosdemo.service;
import com.taosdata.taosdemo.mapper.DatabaseMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Map;
@Service
public class DatabaseService {
@Autowired
private DatabaseMapper databaseMapper;
// create a database with the given name
public int createDatabase(String database) {
return databaseMapper.createDatabase(database);
}
// create a database with parameters such as keep, days and replica
public int createDatabase(Map<String, String> map) {
if (map.isEmpty())
return 0;
if (map.containsKey("database") && map.size() == 1)
return databaseMapper.createDatabase(map.get("database"));
return databaseMapper.createDatabaseWithParameters(map);
}
// drop database
public int dropDatabase(String dbname) {
return databaseMapper.dropDatabase(dbname);
}
// use database
public int useDatabase(String dbname) {
return databaseMapper.useDatabase(dbname);
}
}
package com.taosdata.taosdemo.service;
import com.taosdata.taosdemo.domain.SubTableMeta;
import com.taosdata.taosdemo.domain.SubTableValue;
import com.taosdata.taosdemo.mapper.SubTableMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
@Service
public class SubTableService extends AbstractService {
@Autowired
private SubTableMapper mapper;
/**
 * 1. pick a database and find all of its super tables
 * 2. pick a super table to obtain its schema, including fields and tags
 * 3. specify the prefix and the number of sub tables
 * 4. specify the number of threads used to create the sub tables
 */
//TODO: specify the database, super table, sub table prefix, number of sub tables and number of threads
// create tables with the specified number of threads
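// Illustrative usage, assuming "meta" is a SuperTableMeta describing an existing super table:
//   List<SubTableMeta> subTables = SubTableMetaGenerator.generate(meta, 1000, "t");
//   subTableService.createSubTable(subTables, 10); // create the sub tables with 10 threads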
public int createSubTable(List<SubTableMeta> subTables, int threadSize) {
ExecutorService executor = Executors.newFixedThreadPool(threadSize);
List<Future<Integer>> futureList = new ArrayList<>();
for (SubTableMeta subTableMeta : subTables) {
Future<Integer> future = executor.submit(() -> createSubTable(subTableMeta));
futureList.add(future);
}
executor.shutdown();
return getAffectRows(futureList);
}
// create one sub table; the database, supertable, table name and tag values can be specified
public int createSubTable(SubTableMeta subTableMeta) {
return mapper.createUsingSuperTable(subTableMeta);
}
// create multiple sub tables in a single thread; each sub table can specify its own database, supertable, table name and tag values
public int createSubTable(List<SubTableMeta> subTables) {
return createSubTable(subTables, 1);
}
/*************************************************************************************************************************/
// insert: multiple threads, multiple tables
public int insert(List<SubTableValue> subTableValues, int threadSize) {
ExecutorService executor = Executors.newFixedThreadPool(threadSize);
Future<Integer> future = executor.submit(() -> insert(subTableValues));
executor.shutdown();
return getAffectRows(future);
}
// insert: multiple threads, multiple tables, auto-creating the tables
public int insertAutoCreateTable(List<SubTableValue> subTableValues, int threadSize) {
ExecutorService executor = Executors.newFixedThreadPool(threadSize);
Future<Integer> future = executor.submit(() -> insertAutoCreateTable(subTableValues));
executor.shutdown();
return getAffectRows(future);
}
// insert: one table, insert into xxx values(),()...
public int insert(SubTableValue subTableValue) {
return mapper.insertOneTableMultiValues(subTableValue);
}
// insert: multiple tables, insert into xxx values(),()... xxx values(),()...
public int insert(List<SubTableValue> subTableValues) {
return mapper.insertMultiTableMultiValues(subTableValues);
}
// insert: one table, auto-creating the table, insert into xxx using xxx tags(...) values(),()...
public int insertAutoCreateTable(SubTableValue subTableValue) {
return mapper.insertOneTableMultiValuesUsingSuperTable(subTableValue);
}
// insert: multiple tables, auto-creating the tables, insert into xxx using XXX tags(...) values(),()... xxx using XXX tags(...) values(),()...
public int insertAutoCreateTable(List<SubTableValue> subTableValues) {
return mapper.insertMultiTableMultiValuesUsingSuperTable(subTableValues);
}
}
package com.taosdata.taosdemo.service;
import com.taosdata.taosdemo.domain.SuperTableMeta;
import com.taosdata.taosdemo.mapper.SuperTableMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
public class SuperTableService {
@Autowired
private SuperTableMapper superTableMapper;
// create a super table, specifying the name and type of every field and every tag
public int create(SuperTableMeta superTableMeta) {
return superTableMapper.createSuperTable(superTableMeta);
}
public void drop(String database, String name) {
superTableMapper.dropSuperTable(database, name);
}
}
package com.taosdata.taosdemo.service;
import com.taosdata.taosdemo.domain.TableMeta;
import com.taosdata.taosdemo.mapper.TableMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
@Service
public class TableService extends AbstractService {
@Autowired
private TableMapper tableMapper;
// create one table
public int create(TableMeta tableMeta) {
return tableMapper.create(tableMeta);
}
// create multiple tables
public int create(List<TableMeta> tables) {
return create(tables, 1);
}
// create multiple tables with multiple threads
public int create(List<TableMeta> tables, int threadSize) {
ExecutorService executors = Executors.newFixedThreadPool(threadSize);
List<Future<Integer>> futures = new ArrayList<>();
for (TableMeta table : tables) {
Future<Integer> future = executors.submit(() -> create(table));
futures.add(future);
}
executors.shutdown();
return getAffectRows(futures);
}
}
package com.taosdata.taosdemo.service.data;
import com.taosdata.taosdemo.domain.FieldMeta;
import com.taosdata.taosdemo.domain.FieldValue;
import com.taosdata.taosdemo.domain.RowValue;
import com.taosdata.taosdemo.utils.DataGenerator;
import java.util.*;
public class FieldValueGenerator {
public static Random random = new Random(System.currentTimeMillis());
// generate a time series from start to end with strictly ordered timestamps (no disorder); field values are generated randomly
public static List<RowValue> generate(long start, long end, long timeGap, List<FieldMeta> fieldMetaList) {
List<RowValue> values = new ArrayList<>();
for (long ts = start; ts < end; ts += timeGap) {
List<FieldValue> fieldValues = new ArrayList<>();
// timestamp
fieldValues.add(new FieldValue(fieldMetaList.get(0).getName(), ts));
// other values
for (int fieldInd = 1; fieldInd < fieldMetaList.size(); fieldInd++) {
FieldMeta fieldMeta = fieldMetaList.get(fieldInd);
fieldValues.add(new FieldValue(fieldMeta.getName(), DataGenerator.randomValue(fieldMeta.getType())));
}
values.add(new RowValue(fieldValues));
}
return values;
}
// disrupt an ordered time series: rate is the percentage of rows made out of order, range is how far (in ms, beyond timeGap) a disrupted timestamp may jump backwards
public static List<RowValue> disrupt(List<RowValue> values, int rate, long range) {
long timeGap = (long) (values.get(1).getFields().get(0).getValue()) - (long) (values.get(0).getFields().get(0).getValue());
int bugSize = values.size() * rate / 100;
Set<Integer> bugIndSet = new HashSet<>();
while (bugIndSet.size() < bugSize) {
bugIndSet.add(random.nextInt(values.size()));
}
for (Integer bugInd : bugIndSet) {
Long timestamp = (Long) values.get(bugInd).getFields().get(0).getValue();
Long newTimestamp = timestamp - timeGap - random.nextInt((int) range);
values.get(bugInd).getFields().get(0).setValue(newTimestamp);
}
return values;
}
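// Example: disrupt(values, 10, 1000) rewrites the timestamps of roughly 10% of the rows, moving
// each selected timestamp backwards by timeGap plus a random offset of less than 1000 ms.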
}
package com.taosdata.taosdemo.service.data;
import com.taosdata.taosdemo.domain.SubTableMeta;
import com.taosdata.taosdemo.domain.SuperTableMeta;
import com.taosdata.taosdemo.domain.TagValue;
import java.util.ArrayList;
import java.util.List;
public class SubTableMetaGenerator {
// create tableSize sub table metas, using tablePrefix as the prefix of the sub table names and superTableMeta as the schema
// create table xxx using XXX tags(XXX)
public static List<SubTableMeta> generate(SuperTableMeta superTableMeta, int tableSize, String tablePrefix) {
List<SubTableMeta> subTableMetaList = new ArrayList<>();
for (int i = 1; i <= tableSize; i++) {
SubTableMeta subTableMeta = new SubTableMeta();
// create table xxx.xxx using xxx tags(...)
subTableMeta.setDatabase(superTableMeta.getDatabase());
subTableMeta.setName(tablePrefix + i);
subTableMeta.setSupertable(superTableMeta.getName());
subTableMeta.setFields(superTableMeta.getFields());
List<TagValue> tagValues = TagValueGenerator.generate(superTableMeta.getTags());
subTableMeta.setTags(tagValues);
subTableMetaList.add(subTableMeta);
}
return subTableMetaList;
}
}
package com.taosdata.taosdemo.service.data;
import com.taosdata.taosdemo.domain.RowValue;
import com.taosdata.taosdemo.domain.SubTableMeta;
import com.taosdata.taosdemo.domain.SubTableValue;
import com.taosdata.taosdemo.utils.TimeStampUtil;
import org.springframework.beans.BeanUtils;
import java.util.ArrayList;
import java.util.List;
public class SubTableValueGenerator {
public static List<SubTableValue> generate(List<SubTableMeta> subTableMetaList, int numOfRowsPerTable, long start, long timeGap) {
List<SubTableValue> subTableValueList = new ArrayList<>();
subTableMetaList.stream().forEach((subTableMeta) -> {
// insert into xxx.xxx using xxxx tags(...) values(),()...
SubTableValue subTableValue = new SubTableValue();
subTableValue.setDatabase(subTableMeta.getDatabase());
subTableValue.setName(subTableMeta.getName());
subTableValue.setSupertable(subTableMeta.getSupertable());
subTableValue.setTags(subTableMeta.getTags());
TimeStampUtil.TimeTuple tuple = TimeStampUtil.range(start, timeGap, numOfRowsPerTable);
List<RowValue> values = FieldValueGenerator.generate(tuple.start, tuple.end, tuple.timeGap, subTableMeta.getFields());
subTableValue.setValues(values);
subTableValueList.add(subTableValue);
});
return subTableValueList;
}
public static void disrupt(List<SubTableValue> subTableValueList, int rate, long range) {
subTableValueList.stream().forEach((tableValue) -> {
List<RowValue> values = tableValue.getValues();
FieldValueGenerator.disrupt(values, rate, range);
});
}
public static List<List<SubTableValue>> split(List<SubTableValue> subTableValueList, int numOfTables, int numOfTablesPerSQL, int numOfRowsPerTable, int numOfValuesPerSQL) {
List<List<SubTableValue>> dataList = new ArrayList<>();
if (numOfRowsPerTable < numOfValuesPerSQL)
numOfValuesPerSQL = numOfRowsPerTable;
if (numOfTables < numOfTablesPerSQL)
numOfTablesPerSQL = numOfTables;
//table
for (int tableCnt = 0; tableCnt < numOfTables; ) {
int tableSize = numOfTablesPerSQL;
if (tableCnt + tableSize > numOfTables) {
tableSize = numOfTables - tableCnt;
}
// row
for (int rowCnt = 0; rowCnt < numOfRowsPerTable; ) {
int rowSize = numOfValuesPerSQL;
if (rowCnt + rowSize > numOfRowsPerTable) {
rowSize = numOfRowsPerTable - rowCnt;
}
// System.out.println("rowCnt: " + rowCnt + ", rowSize: " + rowSize + ", tableCnt: " + tableCnt + ", tableSize: " + tableSize);
// split
List<SubTableValue> blocks = subTableValueList.subList(tableCnt, tableCnt + tableSize);
List<SubTableValue> newBlocks = new ArrayList<>();
for (int i = 0; i < blocks.size(); i++) {
SubTableValue subTableValue = blocks.get(i);
SubTableValue newSubTableValue = new SubTableValue();
BeanUtils.copyProperties(subTableValue, newSubTableValue);
List<RowValue> values = subTableValue.getValues().subList(rowCnt, rowCnt + rowSize);
newSubTableValue.setValues(values);
newBlocks.add(newSubTableValue);
}
dataList.add(newBlocks);
rowCnt += rowSize;
}
tableCnt += tableSize;
}
return dataList;
}
public static void main(String[] args) {
split(null, 99, 10, 99, 10);
}
}
package com.taosdata.taosdemo.service.data;
import com.taosdata.taosdemo.domain.FieldMeta;
import com.taosdata.taosdemo.domain.SuperTableMeta;
import com.taosdata.taosdemo.domain.TagMeta;
import com.taosdata.taosdemo.utils.TaosConstants;
import java.util.ArrayList;
import java.util.List;
public class SuperTableMetaGenerator {
// build super table metadata from a user-specified SQL statement
public static SuperTableMeta generate(String superTableSQL) {
SuperTableMeta tableMeta = new SuperTableMeta();
// for example : create table superTable (ts timestamp, temperature float, humidity int) tags(location nchar(64), groupId int)
superTableSQL = superTableSQL.trim().toLowerCase();
if (!superTableSQL.startsWith("create"))
throw new RuntimeException("invalid create super table SQL");
if (superTableSQL.contains("tags")) {
String tagSQL = superTableSQL.substring(superTableSQL.indexOf("tags") + 4).trim();
tagSQL = tagSQL.substring(tagSQL.indexOf("(") + 1, tagSQL.lastIndexOf(")"));
String[] tagPairs = tagSQL.split(",");
List<TagMeta> tagMetaList = new ArrayList<>();
for (String tagPair : tagPairs) {
String name = tagPair.trim().split("\\s+")[0];
String type = tagPair.trim().split("\\s+")[1];
tagMetaList.add(new TagMeta(name, type));
}
tableMeta.setTags(tagMetaList);
superTableSQL = superTableSQL.substring(0, superTableSQL.indexOf("tags"));
}
if (superTableSQL.contains("(")) {
String fieldSQL = superTableSQL.substring(superTableSQL.indexOf("(") + 1, superTableSQL.indexOf(")"));
String[] fieldPairs = fieldSQL.split(",");
List<FieldMeta> fieldList = new ArrayList<>();
for (String fieldPair : fieldPairs) {
String name = fieldPair.trim().split("\\s+")[0];
String type = fieldPair.trim().split("\\s+")[1];
fieldList.add(new FieldMeta(name, type));
}
tableMeta.setFields(fieldList);
superTableSQL = superTableSQL.substring(0, superTableSQL.indexOf("("));
}
superTableSQL = superTableSQL.substring(superTableSQL.indexOf("table") + 5).trim();
if (superTableSQL.contains(".")) {
String database = superTableSQL.split("\\.")[0];
tableMeta.setDatabase(database);
superTableSQL = superTableSQL.substring(superTableSQL.indexOf(".") + 1);
}
tableMeta.setName(superTableSQL.trim());
return tableMeta;
}
// build super table metadata with the specified numbers of fields and tags
public static SuperTableMeta generate(String database, String name, int fieldSize, String fieldPrefix, int tagSize, String tagPrefix) {
if (fieldSize < 2 || tagSize < 1) {
throw new RuntimeException("create super table but fieldSize less than 2 or tagSize less than 1");
}
SuperTableMeta tableMetadata = new SuperTableMeta();
tableMetadata.setDatabase(database);
tableMetadata.setName(name);
// fields
List<FieldMeta> fields = new ArrayList<>();
fields.add(new FieldMeta("ts", "timestamp"));
for (int i = 1; i <= fieldSize; i++) {
fields.add(new FieldMeta(fieldPrefix + "" + i, TaosConstants.DATA_TYPES[i % TaosConstants.DATA_TYPES.length]));
}
tableMetadata.setFields(fields);
// tags
List<TagMeta> tags = new ArrayList<>();
for (int i = 1; i <= tagSize; i++) {
tags.add(new TagMeta(tagPrefix + "" + i, TaosConstants.DATA_TYPES[i % TaosConstants.DATA_TYPES.length]));
}
tableMetadata.setTags(tags);
return tableMetadata;
}
}
package com.taosdata.taosdemo.service.data;
import com.taosdata.taosdemo.domain.TagMeta;
import com.taosdata.taosdemo.domain.TagValue;
import com.taosdata.taosdemo.utils.DataGenerator;
import java.util.ArrayList;
import java.util.List;
public class TagValueGenerator {
// generate tag values from the given tag metas
public static List<TagValue> generate(List<TagMeta> tagMetas) {
List<TagValue> tagValues = new ArrayList<>();
for (int i = 0; i < tagMetas.size(); i++) {
TagMeta tagMeta = tagMetas.get(i);
TagValue tagValue = new TagValue();
tagValue.setName(tagMeta.getName());
tagValue.setValue(DataGenerator.randomValue(tagMeta.getType()));
tagValues.add(tagValue);
}
return tagValues;
}
}
package com.taosdata.taosdemo.utils;
import java.util.Random;
public class DataGenerator {
private static Random random = new Random(System.currentTimeMillis());
private static final String alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890";
// "timestamp", "int", "bigint", "float", "double", "binary(64)", "smallint", "tinyint", "bool", "nchar(64)",
public static Object randomValue(String type) {
int length = 64;
if (type.contains("(")) {
length = Integer.parseInt(type.substring(type.indexOf("(") + 1, type.indexOf(")")));
type = type.substring(0, type.indexOf("("));
}
switch (type.trim().toLowerCase()) {
case "timestamp":
return randomTimestamp();
case "int":
return randomInt();
case "bigint":
return randomBigint();
case "float":
return randomFloat();
case "double":
return randomDouble();
case "binary":
return randomBinary(length);
case "smallint":
return randomSmallint();
case "tinyint":
return randomTinyint();
case "bool":
return randomBoolean();
case "nchar":
return randomNchar(length);
default:
throw new IllegalArgumentException("Unexpected value: " + type);
}
}
public static Long randomTimestamp() {
long start = System.currentTimeMillis();
return randomTimestamp(start, start + 60L * 60L * 1000L);
}
public static Long randomTimestamp(Long start, Long end) {
return start + (long) random.nextInt((int) (end - start));
}
public static String randomNchar(int length) {
return randomChinese(length);
}
public static Boolean randomBoolean() {
return random.nextBoolean();
}
public static Integer randomTinyint() {
return randomInt(-127, 127);
}
public static Integer randomSmallint() {
return randomInt(-32767, 32767);
}
public static String randomBinary(int length) {
return randomString(length);
}
public static String randomString(int length) {
String zh_en = "";
for (int i = 0; i < length; i++) {
zh_en += alphabet.charAt(random.nextInt(alphabet.length()));
}
return zh_en;
}
public static String randomChinese(int length) {
String zh_cn = "";
int bottom = Integer.parseInt("4e00", 16);
int top = Integer.parseInt("9fa5", 16);
for (int i = 0; i < length; i++) {
char c = (char) (random.nextInt(top - bottom + 1) + bottom);
zh_cn += new String(new char[]{c});
}
return zh_cn;
}
public static Double randomDouble() {
return randomDouble(0, 100);
}
public static Double randomDouble(double bottom, double top) {
return bottom + (top - bottom) * random.nextDouble();
}
public static Float randomFloat() {
return randomFloat(0, 100);
}
public static Float randomFloat(float bottom, float top) {
return bottom + (top - bottom) * random.nextFloat();
}
public static Long randomBigint() {
return random.nextLong();
}
public static Integer randomInt(int bottom, int top) {
return bottom + random.nextInt((top - bottom));
}
public static Integer randomInt() {
return randomInt(0, 100);
}
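// Examples: randomValue("nchar(16)") parses the length from the type string and returns a random
// 16-character Chinese string; randomValue("int") returns an Integer in the range [0, 100).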
}
package com.taosdata.taosdemo.utils;
public final class JdbcTaosdemoConfig {
// instance
public String host; //host
public int port = 6030; //port
public String user = "root"; //user
public String password = "taosdata"; //password
// database
public String database = "test"; //database
public int keep = 3650; //keep
public int days = 30; //days
public int replica = 1; //replica
//super table
public boolean doCreateTable = true;
public String superTable = "weather"; //super table name
public String prefixOfFields = "col";
public int numOfFields;
public String prefixOfTags = "tag";
public int numOfTags;
public String superTableSQL;
//sub table
public String tablePrefix = "t";
public int numOfTables = 100;
public int numOfThreadsForCreate = 1;
// insert task
public boolean autoCreateTable;
public int numOfRowsPerTable = 100;
public int numOfThreadsForInsert = 1;
public int numOfTablesPerSQL = 10;
public int numOfValuesPerSQL = 10;
public long startTime;
public long timeGap;
public int sleep = 0;
public int order = 0;
public int rate = 10;
public long range = 1000L;
// select task
// drop task
public boolean dropTable = false;
public static void printHelp() {
System.out.println("Usage: java -jar jdbc-taosdemo-2.0.jar [OPTION...]");
// instance
System.out.println("-host The host to connect to TDengine which you must specify");
System.out.println("-port The TCP/IP port number to use for the connection. Default is 6030");
System.out.println("-user The TDengine user name to use when connecting to the server. Default is 'root'");
System.out.println("-password The password to use when connecting to the server.Default is 'taosdata'");
// database
System.out.println("-database Destination database. Default is 'test'");
System.out.println("-keep database keep parameter. Default is 3650");
System.out.println("-days database days parameter. Default is 30");
System.out.println("-replica database replica parameter. Default 1, min: 1, max: 3");
// super table
System.out.println("-doCreateTable do create super table and sub table, true or false, Default true");
System.out.println("-superTable super table name. Default 'weather'");
System.out.println("-prefixOfFields The prefix of field in super table. Default is 'col'");
System.out.println("-numOfFields The number of field in super table. Default is (ts timestamp, temperature float, humidity int).");
System.out.println("-prefixOfTags The prefix of tag in super table. Default is 'tag'");
System.out.println("-numOfTags The number of tag in super table. Default is (location nchar(64), groupId int).");
System.out.println("-superTableSQL specify a sql statement for the super table.\n" +
" Default is 'create table weather(ts timestamp, temperature float, humidity int) tags(location nchar(64), groupId int). \n" +
" if you use this parameter, the numOfFields and numOfTags will be invalid'");
// sub table
System.out.println("-tablePrefix The prefix of sub tables. Default is 't'");
System.out.println("-numOfTables The number of tables. Default is 1");
System.out.println("-numOfThreadsForCreate The number of thread during create sub table. Default is 1");
// insert task
System.out.println("-autoCreateTable Use auto Create sub tables SQL. Default is false");
System.out.println("-numOfRowsPerTable The number of records per table. Default is 1");
System.out.println("-numOfThreadsForInsert The number of threads during insert row. Default is 1");
System.out.println("-numOfTablesPerSQL The number of table per SQL. Default is 1");
System.out.println("-numOfValuesPerSQL The number of value per SQL. Default is 1");
System.out.println("-startTime start time for insert task, The format is \"yyyy-MM-dd HH:mm:ss.SSS\".");
System.out.println("-timeGap the number of time gap. Default is 1000 ms");
System.out.println("-sleep The number of milliseconds for sleep after each insert. default is 0");
System.out.println("-order Insert mode--0: In order, 1: Out of order. Default is in order");
System.out.println("-rate The proportion of data out of order. effective only if order is 1. min 0, max 100, default is 10");
System.out.println("-range The range of data out of order. effective only if order is 1. default is 1000 ms");
// query task
// System.out.println("-sqlFile The select sql file");
// drop task
System.out.println("-dropTable Drop data before quit. Default is false");
System.out.println("--help Give this help list");
}
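// Illustrative invocation (host and option values are examples only):
//   java -jar jdbc-taosdemo-2.0.jar -host 127.0.0.1 -database test -numOfTables 100 -numOfRowsPerTable 100 -numOfThreadsForInsert 4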
/**
* parse args from command line
*
* @param args command line args
* @return JdbcTaosdemoConfig
*/
public JdbcTaosdemoConfig(String[] args) {
for (int i = 0; i < args.length; i++) {
// instance
if ("-host".equals(args[i]) && i < args.length - 1) {
host = args[++i];
}
if ("-port".equals(args[i]) && i < args.length - 1) {
port = Integer.parseInt(args[++i]);
}
if ("-user".equals(args[i]) && i < args.length - 1) {
user = args[++i];
}
if ("-password".equals(args[i]) && i < args.length - 1) {
password = args[++i];
}
// database
if ("-database".equals(args[i]) && i < args.length - 1) {
database = args[++i];
}
if ("-keep".equals(args[i]) && i < args.length - 1) {
keep = Integer.parseInt(args[++i]);
}
if ("-days".equals(args[i]) && i < args.length - 1) {
days = Integer.parseInt(args[++i]);
}
if ("-replica".equals(args[i]) && i < args.length - 1) {
replica = Integer.parseInt(args[++i]);
}
// super table
if ("-doCreateTable".equals(args[i]) && i < args.length - 1) {
doCreateTable = Boolean.parseBoolean(args[++i]);
}
if ("-superTable".equals(args[i]) && i < args.length - 1) {
superTable = args[++i];
}
if ("-prefixOfFields".equals(args[i]) && i < args.length - 1) {
prefixOfFields = args[++i];
}
if ("-numOfFields".equals(args[i]) && i < args.length - 1) {
numOfFields = Integer.parseInt(args[++i]);
}
if ("-prefixOfTags".equals(args[i]) && i < args.length - 1) {
prefixOfTags = args[++i];
}
if ("-numOfTags".equals(args[i]) && i < args.length - 1) {
numOfTags = Integer.parseInt(args[++i]);
}
if ("-superTableSQL".equals(args[i]) && i < args.length - 1) {
superTableSQL = args[++i];
}
// sub table
if ("-tablePrefix".equals(args[i]) && i < args.length - 1) {
tablePrefix = args[++i];
}
if ("-numOfTables".equals(args[i]) && i < args.length - 1) {
numOfTables = Integer.parseInt(args[++i]);
}
if ("-autoCreateTable".equals(args[i]) && i < args.length - 1) {
autoCreateTable = Boolean.parseBoolean(args[++i]);
}
if ("-numOfThreadsForCreate".equals(args[i]) && i < args.length - 1) {
numOfThreadsForCreate = Integer.parseInt(args[++i]);
}
// insert task
if ("-numOfRowsPerTable".equals(args[i]) && i < args.length - 1) {
numOfRowsPerTable = Integer.parseInt(args[++i]);
}
if ("-numOfThreadsForInsert".equals(args[i]) && i < args.length - 1) {
numOfThreadsForInsert = Integer.parseInt(args[++i]);
}
if ("-numOfTablesPerSQL".equals(args[i]) && i < args.length - 1) {
numOfTablesPerSQL = Integer.parseInt(args[++i]);
}
if ("-numOfValuesPerSQL".equals(args[i]) && i < args.length - 1) {
numOfValuesPerSQL = Integer.parseInt(args[++i]);
}
if ("-startTime".equals(args[i]) && i < args.length - 1) {
startTime = TimeStampUtil.datetimeToLong(args[++i]);
}
if ("-timeGap".equals(args[i]) && i < args.length - 1) {
timeGap = Long.parseLong(args[++i]);
}
if ("-sleep".equals(args[i]) && i < args.length - 1) {
sleep = Integer.parseInt(args[++i]);
}
if ("-order".equals(args[i]) && i < args.length - 1) {
order = Integer.parseInt(args[++i]);
}
if ("-rate".equals(args[i]) && i < args.length - 1) {
rate = Integer.parseInt(args[++i]);
if (rate < 0 || rate > 100)
throw new IllegalArgumentException("rate must between 0 and 100");
}
if ("-range".equals(args[i]) && i < args.length - 1) {
range = Long.parseLong(args[++i]);
}
// select task
// drop task
if ("-dropTable".equals(args[i]) && i < args.length - 1) {
dropTable = Boolean.parseBoolean(args[++i]);
}
}
}
public static void main(String[] args) {
JdbcTaosdemoConfig config = new JdbcTaosdemoConfig(args);
}
}
package com.taosdata.taosdemo.utils;
public class TaosConstants {
public static final String[] DATA_TYPES = {
"timestamp", "int", "bigint", "float", "double",
"binary(64)", "smallint", "tinyint", "bool", "nchar(64)",
};
}
package com.taosdata.taosdemo.utils;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
public class TimeStampUtil {
private static final String datetimeFormat = "yyyy-MM-dd HH:mm:ss.SSS";
public static long datetimeToLong(String dateTime) {
SimpleDateFormat sdf = new SimpleDateFormat(datetimeFormat);
try {
return sdf.parse(dateTime).getTime();
} catch (ParseException e) {
throw new IllegalArgumentException("invalid datetime string >>> " + dateTime);
}
}
public static String longToDatetime(long time) {
SimpleDateFormat sdf = new SimpleDateFormat(datetimeFormat);
return sdf.format(new Date(time));
}
public static class TimeTuple {
public Long start;
public Long end;
public Long timeGap;
TimeTuple(long start, long end, long timeGap) {
this.start = start;
this.end = end;
this.timeGap = timeGap;
}
}
public static TimeTuple range(long start, long timeGap, long size) {
long now = System.currentTimeMillis();
if (timeGap < 1)
timeGap = 1;
if (start == 0)
start = now - size * timeGap;
// throw if size is less than 1
if (size < 1)
throw new IllegalArgumentException("size less than 1.");
// if even a 1 ms gap would push the series past now, move start back and use a 1 ms gap
if (start + size > now) {
start = now - size;
return new TimeTuple(start, now, 1);
}
long end = start + (long) (timeGap * size);
if (end > now) {
// compress timeGap
end = now;
double gap = (end - start) / (size * 1.0f);
if (gap < 1.0f) {
timeGap = 1;
start = end - size;
} else {
timeGap = (long) gap;
end = start + (long) (timeGap * size);
}
}
return new TimeTuple(start, end, timeGap);
}
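// Illustrative example: range(0, 1000, 100) back-fills start to (now - 100 * 1000) and keeps the
// 1000 ms gap, so the generated series of 100 timestamps ends at roughly the current time.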
}
#spring.datasource.url=jdbc:mysql://master:3306/?useSSL=false&useUnicode=true&characterEncoding=UTF-8
#spring.datasource.driver-class-name=com.mysql.jdbc.Driver
#spring.datasource.username=root
#spring.datasource.password=123456
spring.datasource.url=jdbc:TAOS://master:6030/?charset=UTF-8&locale=en_US.UTF-8&timezone=UTC-8
spring.datasource.driver-class-name=com.taosdata.jdbc.TSDBDriver
spring.datasource.username=root
spring.datasource.password=taosdata
spring.datasource.hikari.maximum-pool-size=10
spring.datasource.hikari.minimum-idle=10
spring.datasource.hikari.max-lifetime=600000
logging.level.com.taosdata.taosdemo.mapper=debug
### Settings ###
log4j.rootLogger=debug,stdout,DebugLog,ErrorLog
### Output to the console ###
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%-5p] %d{yyyy-MM-dd HH:mm:ss,SSS} method:%l%n%m%n
### Output logs at DEBUG level and above to logs/debug.log
log4j.appender.DebugLog=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DebugLog.File=logs/debug.log
log4j.appender.DebugLog.Append=true
log4j.appender.DebugLog.Threshold=DEBUG
log4j.appender.DebugLog.layout=org.apache.log4j.PatternLayout
log4j.appender.DebugLog.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
### Output logs at ERROR level and above to logs/error.log
log4j.appender.ErrorLog=org.apache.log4j.DailyRollingFileAppender
log4j.appender.ErrorLog.File=logs/error.log
log4j.appender.ErrorLog.Append=true
log4j.appender.ErrorLog.Threshold=ERROR
log4j.appender.ErrorLog.layout=org.apache.log4j.PatternLayout
log4j.appender.ErrorLog.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Index</title>
</head>
<body>
<h1>Hello~~~</h1>
</body>
</html>
package com.taosdata.taosdemo;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class TaosdemoApplicationTests {
@Test
void contextLoads() {
}
}
package com.taosdata.taosdemo.mapper;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.HashMap;
import java.util.Map;
@RunWith(SpringRunner.class)
@SpringBootTest
public class DatabaseMapperTest {
@Autowired
private DatabaseMapper databaseMapper;
@Test
public void createDatabase() {
databaseMapper.createDatabase("db_test");
}
@Test
public void dropDatabase() {
databaseMapper.dropDatabase("db_test");
}
@Test
public void createDatabaseWithParameters() {
Map<String, String> map = new HashMap<>();
map.put("database", "weather");
map.put("keep", "3650");
map.put("days", "30");
map.put("replica", "1");
databaseMapper.createDatabaseWithParameters(map);
}
@Test
public void useDatabase() {
databaseMapper.useDatabase("test");
}
}
package com.taosdata.taosdemo.mapper;
import com.taosdata.taosdemo.domain.*;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.List;
@RunWith(SpringRunner.class)
@SpringBootTest
public class SubTableMapperTest {
@Autowired
private SubTableMapper subTableMapper;
private List<SubTableValue> tables;
@Test
public void createUsingSuperTable() {
SubTableMeta subTableMeta = new SubTableMeta();
subTableMeta.setDatabase("test");
subTableMeta.setSupertable("weather");
subTableMeta.setName("t1");
List<TagValue> tags = new ArrayList<>();
for (int i = 0; i < 3; i++) {
tags.add(new TagValue("tag" + (i + 1), "nchar(64)"));
}
subTableMeta.setTags(tags);
subTableMapper.createUsingSuperTable(subTableMeta);
}
@Test
public void insertOneTableMultiValues() {
subTableMapper.insertOneTableMultiValues(tables.get(0));
}
@Test
public void insertOneTableMultiValuesUsingSuperTable() {
subTableMapper.insertOneTableMultiValuesUsingSuperTable(tables.get(0));
}
@Test
public void insertMultiTableMultiValues() {
subTableMapper.insertMultiTableMultiValues(tables);
}
@Test
public void insertMultiTableMultiValuesUsingSuperTable() {
subTableMapper.insertMultiTableMultiValuesUsingSuperTable(tables);
}
@Before
public void before() {
tables = new ArrayList<>();
for (int ind = 0; ind < 3; ind++) {
SubTableValue table = new SubTableValue();
table.setDatabase("test");
// supertable
table.setSupertable("weather");
table.setName("t" + (ind + 1));
// tags
List<TagValue> tags = new ArrayList<>();
for (int i = 0; i < 3; i++) {
tags.add(new TagValue("tag" + (i + 1), "beijing"));
}
table.setTags(tags);
// values
List<RowValue> values = new ArrayList<>();
for (int i = 0; i < 2; i++) {
List<FieldValue> fields = new ArrayList<>();
for (int j = 0; j < 4; j++) {
fields.add(new FieldValue("f" + (j + 1), (j + 1) * 10));
}
values.add(new RowValue(fields));
}
table.setValues(values);
tables.add(table);
}
}
}
package com.taosdata.taosdemo.mapper;
import com.taosdata.taosdemo.domain.FieldMeta;
import com.taosdata.taosdemo.domain.SuperTableMeta;
import com.taosdata.taosdemo.domain.TagMeta;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.List;
@RunWith(SpringRunner.class)
@SpringBootTest
public class SuperTableMapperTest {
@Autowired
private SuperTableMapper superTableMapper;
@Test
public void testCreateSuperTableUsingSQL() {
String sql = "create table test.weather (ts timestamp, temperature float, humidity int) tags(location nchar(64), groupId int)";
superTableMapper.createSuperTableUsingSQL(sql);
}
@Test
public void createSuperTable() {
SuperTableMeta superTableMeta = new SuperTableMeta();
superTableMeta.setDatabase("test");
superTableMeta.setName("weather");
List<FieldMeta> fields = new ArrayList<>();
for (int i = 0; i < 5; i++) {
fields.add(new FieldMeta("f" + (i + 1), "int"));
}
superTableMeta.setFields(fields);
List<TagMeta> tags = new ArrayList<>();
for (int i = 0; i < 3; i++) {
tags.add(new TagMeta("t" + (i + 1), "nchar(64)"));
}
superTableMeta.setTags(tags);
superTableMapper.createSuperTable(superTableMeta);
}
@Test
public void dropSuperTable() {
superTableMapper.dropSuperTable("test", "weather");
}
}
package com.taosdata.taosdemo.mapper;
import com.taosdata.taosdemo.domain.*;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
@SpringBootTest
@RunWith(SpringRunner.class)
public class TableMapperTest {
@Autowired
private TableMapper tableMapper;
private static Random random = new Random(System.currentTimeMillis());
@Test
public void create() {
TableMeta table = new TableMeta();
table.setDatabase("test");
table.setName("t1");
List<FieldMeta> fields = new ArrayList<>();
for (int i = 0; i < 3; i++) {
FieldMeta field = new FieldMeta();
field.setName("f" + (i + 1));
field.setType("nchar(64)");
fields.add(field);
}
table.setFields(fields);
tableMapper.create(table);
}
@Test
public void insertOneTableMultiValues() {
TableValue table = new TableValue();
table.setDatabase("test");
table.setName("t1");
List<RowValue> values = new ArrayList<>();
for (int j = 0; j < 5; j++) {
List<FieldValue> fields = new ArrayList<>();
for (int k = 0; k < 2; k++) {
FieldValue field = new FieldValue<>();
field.setValue((k + 1) * 100);
fields.add(field);
}
values.add(new RowValue(fields));
}
table.setValues(values);
tableMapper.insertOneTableMultiValues(table);
}
@Test
public void insertOneTableMultiValuesWithColumns() {
TableValue tableValue = new TableValue();
tableValue.setDatabase("test");
tableValue.setName("weather");
// columns
List<FieldMeta> columns = new ArrayList<>();
for (int i = 0; i < 3; i++) {
FieldMeta field = new FieldMeta();
field.setName("f" + (i + 1));
columns.add(field);
}
tableValue.setColumns(columns);
// values
List<RowValue> values = new ArrayList<>();
for (int i = 0; i < 3; i++) {
List<FieldValue> fields = new ArrayList<>();
for (int j = 0; j < 3; j++) {
FieldValue field = new FieldValue();
field.setValue(j);
fields.add(field);
}
values.add(new RowValue(fields));
}
tableValue.setValues(values);
tableMapper.insertOneTableMultiValuesWithColumns(tableValue);
}
@Test
public void insertMultiTableMultiValues() {
List<TableValue> tables = new ArrayList<>();
for (int i = 0; i < 3; i++) {
TableValue table = new TableValue();
table.setDatabase("test");
table.setName("t" + (i + 1));
List<RowValue> values = new ArrayList<>();
for (int j = 0; j < 5; j++) {
List<FieldValue> fields = new ArrayList<>();
for (int k = 0; k < 2; k++) {
FieldValue field = new FieldValue<>();
field.setValue((k + 1) * 10);
fields.add(field);
}
values.add(new RowValue(fields));
}
table.setValues(values);
tables.add(table);
}
tableMapper.insertMultiTableMultiValues(tables);
}
@Test
public void insertMultiTableMultiValuesWithColumns() {
List<TableValue> tables = new ArrayList<>();
for (int i = 0; i < 3; i++) {
TableValue table = new TableValue();
table.setDatabase("test");
table.setName("t" + (i + 1));
// columns
List<FieldMeta> columns = new ArrayList<>();
for (int j = 0; j < 3; j++) {
FieldMeta field = new FieldMeta();
field.setName("f" + (j + 1));
columns.add(field);
}
table.setColumns(columns);
// values
List<RowValue> values = new ArrayList<>();
for (int j = 0; j < 5; j++) {
List<FieldValue> fields = new ArrayList<>();
for (int k = 0; k < columns.size(); k++) {
FieldValue field = new FieldValue<>();
field.setValue((k + 1) * 10);
fields.add(field);
}
values.add(new RowValue(fields));
}
table.setValues(values);
tables.add(table);
}
tableMapper.insertMultiTableMultiValuesWithColumns(tables);
}
}
package com.taosdata.taosdemo.service;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest
public class DatabaseServiceTest {
@Autowired
private DatabaseService service;
@Test
public void testCreateDatabase1() {
service.createDatabase("testXXXX");
}
@Test
public void dropDatabase() {
service.dropDatabase("testXXXX");
}
@Test
public void useDatabase() {
service.useDatabase("test");
}
}
package com.taosdata.taosdemo.service;
import com.taosdata.taosdemo.domain.SubTableMeta;
import com.taosdata.taosdemo.domain.TagValue;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.List;
@RunWith(SpringRunner.class)
@SpringBootTest
public class SubTableServiceTest {
@Autowired
private SubTableService service;
private List<SubTableMeta> subTables;
@Before
public void before() {
subTables = new ArrayList<>();
for (int i = 1; i <= 1; i++) {
SubTableMeta subTableMeta = new SubTableMeta();
subTableMeta.setDatabase("test");
subTableMeta.setSupertable("weather");
subTableMeta.setName("t" + i);
List<TagValue> tags = new ArrayList<>();
tags.add(new TagValue("location", "beijing"));
tags.add(new TagValue("groupId", i));
subTableMeta.setTags(tags);
subTables.add(subTableMeta);
}
}
@Test
public void testCreateSubTable() {
int count = service.createSubTable(subTables);
System.out.println("count >>> " + count);
}
@Test
public void testCreateSubTableList() {
int count = service.createSubTable(subTables, 10);
System.out.println("count >>> " + count);
}
}
\ No newline at end of file
package com.taosdata.taosdemo.service;
import com.taosdata.taosdemo.domain.FieldMeta;
import com.taosdata.taosdemo.domain.SuperTableMeta;
import com.taosdata.taosdemo.domain.TagMeta;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.List;
@RunWith(SpringRunner.class)
@SpringBootTest
public class SuperTableServiceTest {
@Autowired
private SuperTableService service;
@Test
public void testCreate() {
SuperTableMeta superTableMeta = new SuperTableMeta();
superTableMeta.setDatabase("test");
superTableMeta.setName("weather");
List<FieldMeta> fields = new ArrayList<>();
fields.add(new FieldMeta("ts", "timestamp"));
fields.add(new FieldMeta("temperature", "float"));
fields.add(new FieldMeta("humidity", "int"));
superTableMeta.setFields(fields);
List<TagMeta> tags = new ArrayList<>();
tags.add(new TagMeta("location", "nchar(64)"));
tags.add(new TagMeta("groupId", "int"));
superTableMeta.setTags(tags);
service.create(superTableMeta);
}
}
\ No newline at end of file
package com.taosdata.taosdemo.service;
import com.taosdata.taosdemo.domain.TableMeta;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.List;
@RunWith(SpringRunner.class)
@SpringBootTest
public class TableServiceTest {
@Autowired
private TableService tableService;
private List<TableMeta> tables;
@Before
public void before() {
tables = new ArrayList<>();
for (int i = 0; i < 1; i++) {
TableMeta tableMeta = new TableMeta();
tableMeta.setDatabase("test");
tableMeta.setName("weather" + (i + 1));
tables.add(tableMeta);
}
}
@Test
public void testCreate() {
int count = tableService.create(tables);
System.out.println(count);
}
@Test
public void testCreateMultiThreads() {
System.out.println(tableService.create(tables, 10));
}
}
\ No newline at end of file
package com.taosdata.taosdemo.service.data;
import com.taosdata.taosdemo.domain.FieldMeta;
import com.taosdata.taosdemo.domain.RowValue;
import com.taosdata.taosdemo.utils.TimeStampUtil;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
public class FieldValueGeneratorTest {
private List<RowValue> rowValues;
@Test
public void generate() {
List<FieldMeta> fieldMetas = new ArrayList<>();
fieldMetas.add(new FieldMeta("ts", "timestamp"));
fieldMetas.add(new FieldMeta("temperature", "float"));
fieldMetas.add(new FieldMeta("humidity", "int"));
long start = TimeStampUtil.datetimeToLong("2020-01-01 00:00:00.000");
long end = TimeStampUtil.datetimeToLong("2020-01-01 10:00:00.000");
rowValues = FieldValueGenerator.generate(start, end, 1000L * 3600, fieldMetas);
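// A hedged reading of this case: the window spans 10 hours and the step is 1000L * 3600 ms (one hour),
// so generate() should emit (end - start) / step = 10 rows, which is what the assertion below checks.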
Assert.assertEquals(10, rowValues.size());
}
@Test
public void disrupt() {
List<FieldMeta> fieldMetas = new ArrayList<>();
fieldMetas.add(new FieldMeta("ts", "timestamp"));
fieldMetas.add(new FieldMeta("temperature", "float"));
fieldMetas.add(new FieldMeta("humidity", "int"));
long start = TimeStampUtil.datetimeToLong("2020-01-01 00:00:00.000");
long end = TimeStampUtil.datetimeToLong("2020-01-01 10:00:00.000");
rowValues = FieldValueGenerator.generate(start, end, 1000L * 3600L, fieldMetas);
FieldValueGenerator.disrupt(rowValues, 20, 1000);
Assert.assertEquals(10, rowValues.size());
}
@After
public void after() {
for (RowValue row : rowValues) {
row.getFields().stream().forEach(field -> {
if (field.getName().equals("ts")) {
System.out.print(TimeStampUtil.longToDatetime((Long) field.getValue()));
} else {
System.out.print(" ," + field.getValue());
}
});
System.out.println();
}
}
}
\ No newline at end of file
package com.taosdata.taosdemo.service.data;
import com.taosdata.taosdemo.domain.FieldMeta;
import com.taosdata.taosdemo.domain.SubTableMeta;
import com.taosdata.taosdemo.domain.SuperTableMeta;
import com.taosdata.taosdemo.domain.TagMeta;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
public class SubTableMetaGeneratorTest {
List<SubTableMeta> subTableMetas;
@Test
public void generate() {
SuperTableMeta superTableMeta = new SuperTableMeta();
superTableMeta.setDatabase("test");
superTableMeta.setName("weather");
List<FieldMeta> fields = new ArrayList<>();
fields.add(new FieldMeta("ts", "timestamp"));
fields.add(new FieldMeta("temperature", "float"));
fields.add(new FieldMeta("humidity", "int"));
superTableMeta.setFields(fields);
List<TagMeta> tags = new ArrayList<>();
tags.add(new TagMeta("location", "nchar(64)"));
tags.add(new TagMeta("groupId", "int"));
superTableMeta.setTags(tags);
subTableMetas = SubTableMetaGenerator.generate(superTableMeta, 10, "t");
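// Presumably generate(superTableMeta, 10, "t") derives 10 sub-table metas from the super table,
// naming them by appending a 1-based index to the "t" prefix; the assertions below check the count
// and each generated name.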
Assert.assertEquals(10, subTableMetas.size());
Assert.assertEquals("t1", subTableMetas.get(0).getName());
Assert.assertEquals("t2", subTableMetas.get(1).getName());
Assert.assertEquals("t3", subTableMetas.get(2).getName());
Assert.assertEquals("t4", subTableMetas.get(3).getName());
Assert.assertEquals("t5", subTableMetas.get(4).getName());
Assert.assertEquals("t6", subTableMetas.get(5).getName());
Assert.assertEquals("t7", subTableMetas.get(6).getName());
Assert.assertEquals("t8", subTableMetas.get(7).getName());
Assert.assertEquals("t9", subTableMetas.get(8).getName());
Assert.assertEquals("t10", subTableMetas.get(9).getName());
}
@After
public void after() {
for (SubTableMeta subTableMeta : subTableMetas) {
System.out.println(subTableMeta);
}
}
}
\ No newline at end of file
package com.taosdata.taosdemo.service.data;
import com.taosdata.taosdemo.domain.FieldMeta;
import com.taosdata.taosdemo.domain.SuperTableMeta;
import com.taosdata.taosdemo.domain.TagMeta;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
public class SuperTableMetaGeneratorImplTest {
private SuperTableMeta meta;
@Test
public void generate() {
String sql = "create table test.weather (ts timestamp, temperature float, humidity int) tags(location nchar(64), groupId int)";
meta = SuperTableMetaGenerator.generate(sql);
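// The assertions below document the parser's expected behavior: the database and table names,
// the three fields and both tags are recovered from the CREATE TABLE statement, and the tag
// declared as "groupId" comes back as "groupid", which suggests the generator lower-cases the
// SQL before parsing.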
Assert.assertEquals("test", meta.getDatabase());
Assert.assertEquals("weather", meta.getName());
Assert.assertEquals(3, meta.getFields().size());
Assert.assertEquals("ts", meta.getFields().get(0).getName());
Assert.assertEquals("timestamp", meta.getFields().get(0).getType());
Assert.assertEquals("temperature", meta.getFields().get(1).getName());
Assert.assertEquals("float", meta.getFields().get(1).getType());
Assert.assertEquals("humidity", meta.getFields().get(2).getName());
Assert.assertEquals("int", meta.getFields().get(2).getType());
Assert.assertEquals("location", meta.getTags().get(0).getName());
Assert.assertEquals("nchar(64)", meta.getTags().get(0).getType());
Assert.assertEquals("groupid", meta.getTags().get(1).getName());
Assert.assertEquals("int", meta.getTags().get(1).getType());
}
@Test
public void generate2() {
meta = SuperTableMetaGenerator.generate("test", "weather", 10, "col", 10, "tag");
Assert.assertEquals("test", meta.getDatabase());
Assert.assertEquals("weather", meta.getName());
Assert.assertEquals(11, meta.getFields().size());
for (FieldMeta fieldMeta : meta.getFields()) {
Assert.assertNotNull(fieldMeta.getName());
Assert.assertNotNull(fieldMeta.getType());
}
for (TagMeta tagMeta : meta.getTags()) {
Assert.assertNotNull(tagMeta.getName());
Assert.assertNotNull(tagMeta.getType());
}
}
@After
public void after() {
System.out.println(meta.getDatabase());
System.out.println(meta.getName());
for (FieldMeta fieldMeta : meta.getFields()) {
System.out.println(fieldMeta);
}
for (TagMeta tagMeta : meta.getTags()) {
System.out.println(tagMeta);
}
}
}
\ No newline at end of file
package com.taosdata.taosdemo.service.data;
import com.taosdata.taosdemo.domain.TagMeta;
import com.taosdata.taosdemo.domain.TagValue;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
public class TagValueGeneratorTest {
List<TagValue> tagvalues;
@Test
public void generate() {
List<TagMeta> tagMetaList = new ArrayList<>();
tagMetaList.add(new TagMeta("location", "nchar(10)"));
tagMetaList.add(new TagMeta("groupId", "int"));
tagMetaList.add(new TagMeta("ts", "timestamp"));
tagMetaList.add(new TagMeta("temperature", "float"));
tagMetaList.add(new TagMeta("humidity", "double"));
tagMetaList.add(new TagMeta("text", "binary(10)"));
tagvalues = TagValueGenerator.generate(tagMetaList);
Assert.assertEquals("location", tagvalues.get(0).getName());
Assert.assertEquals("groupId", tagvalues.get(1).getName());
Assert.assertEquals("ts", tagvalues.get(2).getName());
Assert.assertEquals("temperature", tagvalues.get(3).getName());
Assert.assertEquals("humidity", tagvalues.get(4).getName());
Assert.assertEquals("text", tagvalues.get(5).getName());
}
@After
public void after() {
tagvalues.stream().forEach(System.out::println);
}
}
\ No newline at end of file
package com.taosdata.taosdemo.utils;
import org.junit.Assert;
import org.junit.Test;
public class DataGeneratorTest {
@Test
public void randomValue() {
for (int i = 0; i < TaosConstants.DATA_TYPES.length; i++) {
System.out.println(TaosConstants.DATA_TYPES[i] + " >>> " + DataGenerator.randomValue(TaosConstants.DATA_TYPES[i]));
}
}
@Test
public void randomNchar() {
String s = DataGenerator.randomNchar(10);
Assert.assertEquals(10, s.length());
}
}
\ No newline at end of file
package com.taosdata.taosdemo.utils;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class TimeStampUtilTest {
@Test
public void datetimeToLong() {
final String startTime = "2005-01-01 00:00:00.000";
long start = TimeStampUtil.datetimeToLong(startTime);
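// Hedged note: 1104508800000 ms is 2005-01-01 00:00:00 in a UTC+8 timezone, so this assertion
// (and the round trip below) appears to assume the JVM default timezone is UTC+8.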
assertEquals(1104508800000L, start);
String dateTimeStr = TimeStampUtil.longToDatetime(start);
assertEquals("2005-01-01 00:00:00.000", dateTimeStr);
}
@Test
public void longToDatetime() {
String datetime = TimeStampUtil.longToDatetime(1510000000000L);
assertEquals("2017-11-07 04:26:40.000", datetime);
long timestamp = TimeStampUtil.datetimeToLong(datetime);
assertEquals(1510000000000L, timestamp);
}
@Test
public void range() {
long start = TimeStampUtil.datetimeToLong("2020-10-01 00:00:00.000");
long timeGap = 1000;
long numOfRowsPerTable = 1000L * 3600L * 24L * 90L;
TimeStampUtil.TimeTuple timeTuple = TimeStampUtil.range(start, timeGap, numOfRowsPerTable);
System.out.println(TimeStampUtil.longToDatetime(timeTuple.start));
System.out.println(TimeStampUtil.longToDatetime(timeTuple.end));
System.out.println(timeTuple.timeGap);
}
}
\ No newline at end of file
......@@ -43,7 +43,7 @@ class ConcurrentInquiry:
self.subtb_stru_list=[]
self.stb_tag_list=[]
self.subtb_tag_list=[]
self.probabilities = [0.95,0.05]
self.probabilities = [0.05,0.95]
self.ifjoin = [0,1]
def SetThreadsNum(self,num):
self.numOfTherads=num
......@@ -117,15 +117,15 @@ class ConcurrentInquiry:
return 'where '+random.choice([' and ',' or ']).join(l)
def con_interval(self,tlist,col_list,tag_list):
interval = 'interval' + str(random.randint(0,100)) + random.choice(['a','s','d','w','n','y'])
interval = 'interval(' + str(random.randint(0,100)) + random.choice(['a','s','d','w','n','y']) + ')'
return interval
def con_limit(self,tlist,col_list,tag_list):
rand1 = str(random.randint(0,1000))
rand2 = str(random.randint(0,1000))
return random.choice(['limit ' + rand1,'limit ' + rand1 + 'offset '+rand2,
'slimit ' + rand1,'slimit ' + rand1 + 'offset ' + rand2,'limit '+rand1 + 'slimit '+ rand2,
'limit '+ rand1 + 'offset' + rand2 + 'slimit '+ rand1 + 'soffset ' + rand2 ])
return random.choice(['limit ' + rand1,'limit ' + rand1 + ' offset '+rand2,
' slimit ' + rand1,' slimit ' + rand1 + ' offset ' + rand2,'limit '+rand1 + ' slimit '+ rand2,
'limit '+ rand1 + ' offset ' + rand2 + ' slimit '+ rand1 + ' soffset ' + rand2 ])
def con_fill(self,tlist,col_list,tag_list):
return random.choice(['fill(null)','fill(prev)','fill(none)','fill(LINEAR)'])
......@@ -194,9 +194,10 @@ class ConcurrentInquiry:
tag_list = []
col_intersection = []
tag_intersection = []
subtable = None
if bool(random.getrandbits(1)):
subtable = True
tbname = random.sample(self.subtb_list,2)
for i in tbname:
col_list.append(self.subtb_stru_list[self.subtb_list.index(i)])
......@@ -227,14 +228,15 @@ class ConcurrentInquiry:
sel_col_tag.append('t2.' + str(random.choice(col_list[1] + tag_list[1])))
sql += ','.join(sel_col_tag)
sql = sql + 'from '+ ','.join(tbname) + ' ' #select col & func
con_func=[self.con_where,self.con_interval,self.con_limit,self.con_group,self.con_order,self.con_fill]
sel_con=random.sample(con_func,random.randint(0,len(con_func)))
sel_con_list=[]
sql = sql + ' from '+ str(tbname[0]) +' t1,' + str(tbname[1]) + ' t2 ' #select col & func
join_section = None
if subtable:
join_section = ''.join(random.choices(col_intersection))
sql += 'where t1._c0 = t2._c0 and ' + 't1.' + join_section + '=t2.' + join_section
else:
join_section = ''.join(random.choices(col_intersection+tag_intersection))
sql += 'where t1._c0 = t2._c0 and ' + 't1.' + join_section + '=t2.' + join_section
# for i in sel_con:
# sel_con_list.append(i(tlist,col_list,tag_list)) # get the corresponding condition function
sql+=' '.join(sel_con_list) # condition
print(sql)
return sql
......
......@@ -34,7 +34,6 @@ python3 ./test.py -f table/alter_column.py
python3 ./test.py -f table/boundary.py
python3 ./test.py -f table/create.py
python3 ./test.py -f table/del_stable.py
python3 ./test.py -f table/queryWithTaosdKilled.py
# tag
......@@ -172,6 +171,9 @@ python3 ./test.py -f query/sliding.py
python3 ./test.py -f query/unionAllTest.py
python3 ./test.py -f query/bug2281.py
python3 ./test.py -f query/bug2119.py
python3 ./test.py -f query/isNullTest.py
python3 ./test.py -f query/queryWithTaosdKilled.py
#stream
python3 ./test.py -f stream/metric_1.py
python3 ./test.py -f stream/new.py
......
......@@ -75,7 +75,46 @@ class TDTestCase:
tdSql.checkData(18, 1, 9.75000)
tdSql.checkData(19, 1, 10)
tdSql.execute("create table t2(ts timestamp, c int)")
tdSql.execute("insert into t2 values(%d, 1)" % (self.ts + 3000))
tdSql.query("select twa(c) from t2 where ts >= '2018-09-17 09:00:00.000' and ts <= '2018-09-17 09:01:30.000' ")
tdSql.checkRows(1)
tdSql.checkData(0, 0, 1)
tdSql.query("select twa(c) from t2 where ts >= '2018-09-17 09:00:00.000' and ts <= '2018-09-17 09:01:30.000' interval(2s) ")
tdSql.checkRows(1)
tdSql.checkData(0, 1, 1)
tdSql.query("select twa(c) from t2 where ts >= '2018-09-17 09:00:00.000' and ts <= '2018-09-17 09:01:30.000' interval(2s) sliding(1s) ")
tdSql.checkRows(2)
tdSql.checkData(0, 1, 1)
tdSql.checkData(1, 1, 1)
tdSql.query("select twa(c) from t2 where ts >= '2018-09-17 09:00:04.000' and ts <= '2018-09-17 09:01:30.000' ")
tdSql.checkRows(0)
tdSql.query("select twa(c) from t2 where ts >= '2018-09-17 08:00:00.000' and ts <= '2018-09-17 09:00:00.000' ")
tdSql.checkRows(0)
tdSql.execute("create table t3(ts timestamp, c int)")
tdSql.execute("insert into t3 values(%d, 1)" % (self.ts))
tdSql.execute("insert into t3 values(%d, -2)" % (self.ts + 3000))
tdSql.query("select twa(c) from t3 where ts >= '2018-09-17 08:59:00.000' and ts <= '2018-09-17 09:01:30.000'")
tdSql.checkRows(1)
tdSql.checkData(0, 0, -0.5)
tdSql.query("select twa(c) from t3 where ts >= '2018-09-17 08:59:00.000' and ts <= '2018-09-17 09:01:30.000' interval(1s)")
tdSql.checkRows(2)
tdSql.checkData(0, 1, 0.5005)
tdSql.checkData(1, 1, -2)
tdSql.query("select twa(c) from t3 where ts >= '2018-09-17 08:59:00.000' and ts <= '2018-09-17 09:01:30.000' interval(2s) sliding(1s)")
tdSql.checkRows(4)
tdSql.checkData(0, 1, 0.5005)
tdSql.checkData(1, 1, 0.0005)
tdSql.checkData(2, 1, -1.5)
tdSql.checkData(3, 1, -2)
def stop(self):
tdSql.close()
......
......@@ -19,6 +19,7 @@ python3 ./test.py -f insert/randomNullCommit.py
python3 insert/retentionpolicy.py
python3 ./test.py -f insert/alterTableAndInsert.py
python3 ./test.py -f insert/insertIntoTwoTables.py
python3 ./test.py -f query/isNullTest.py
#table
python3 ./test.py -f table/alter_wal0.py
......
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys
import taos
from util.log import tdLog
from util.cases import tdCases
from util.sql import tdSql
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
self.ts = 1537146000000
def run(self):
tdSql.prepare()
print("==============step1")
tdSql.execute("create table st(ts timestamp, c1 int, c2 binary(20), c3 nchar(20)) tags(t1 int, t2 binary(20), t3 nchar(20))")
tdSql.execute("create table t1 using st tags(1, 'binary1', 'nchar1')")
tdSql.execute("insert into t2(ts, c2) using st(t2) tags('') values(%d, '')" % (self.ts + 10))
tdSql.execute("insert into t3(ts, c2) using st(t3) tags('') values(%d, '')" % (self.ts + 10))
for i in range(10):
tdSql.execute("insert into t1 values(%d, %d, 'binary%d', 'nchar%d')" % (self.ts + i, i, i, i))
tdSql.execute("insert into t2 values(%d, %d, 'binary%d', 'nchar%d')" % (self.ts + i, i, i, i))
tdSql.execute("insert into t3 values(%d, %d, 'binary%d', 'nchar%d')" % (self.ts + i, i, i, i))
tdSql.execute("insert into t1(ts, c2) values(%d, '')" % (self.ts + 10))
tdSql.execute("insert into t1(ts, c3) values(%d, '')" % (self.ts + 11))
tdSql.execute("insert into t2(ts, c3) values(%d, '')" % (self.ts + 11))
tdSql.execute("insert into t3(ts, c3) values(%d, '')" % (self.ts + 11))
tdSql.query("select count(*) from st")
tdSql.checkData(0, 0, 36)
tdSql.query("select count(*) from st where t1 is null")
tdSql.checkData(0, 0, 24)
tdSql.query("select count(*) from st where t1 is not null")
tdSql.checkData(0, 0, 12)
tdSql.query("select count(*) from st where t2 is null")
tdSql.checkData(0, 0, 12)
tdSql.query("select count(*) from st where t2 is not null")
tdSql.checkData(0, 0, 24)
tdSql.error("select count(*) from st where t2 <> null")
tdSql.error("select count(*) from st where t2 = null")
tdSql.query("select count(*) from st where t2 = '' ")
tdSql.checkData(0, 0, 12)
tdSql.query("select count(*) from st where t2 <> '' ")
tdSql.checkData(0, 0, 24)
tdSql.query("select count(*) from st where t3 is null")
tdSql.checkData(0, 0, 12)
tdSql.query("select count(*) from st where t3 is not null")
tdSql.checkData(0, 0, 24)
tdSql.error("select count(*) from st where t3 <> null")
tdSql.error("select count(*) from st where t3 = null")
tdSql.query("select count(*) from st where t3 = '' ")
tdSql.checkData(0, 0, 12)
tdSql.query("select count(*) from st where t3 <> '' ")
tdSql.checkData(0, 0, 24)
tdSql.query("select count(*) from st where c1 is not null")
tdSql.checkData(0, 0, 30)
tdSql.query("select count(*) from st where c1 is null")
tdSql.checkData(0, 0, 6)
tdSql.query("select count(*) from st where c2 is not null")
tdSql.checkData(0, 0, 33)
tdSql.query("select count(*) from st where c2 is null")
tdSql.checkData(0, 0, 3)
tdSql.error("select count(*) from st where c2 <> null")
tdSql.error("select count(*) from st where c2 = null")
tdSql.query("select count(*) from st where c2 = '' ")
tdSql.checkData(0, 0, 3)
tdSql.query("select count(*) from st where c2 <> '' ")
tdSql.checkData(0, 0, 30)
tdSql.query("select count(*) from st where c3 is not null")
tdSql.checkData(0, 0, 33)
tdSql.query("select count(*) from st where c3 is null")
tdSql.checkData(0, 0, 3)
tdSql.error("select count(*) from st where c3 <> null")
tdSql.error("select count(*) from st where c3 = null")
tdSql.query("select count(*) from st where c3 = '' ")
tdSql.checkData(0, 0, 3)
tdSql.query("select count(*) from st where c3 <> '' ")
tdSql.checkData(0, 0, 30)
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
......@@ -87,6 +87,10 @@ class TDTestCase:
tdSql.checkData(0, 3, rowNum)
except Exception as e:
tdLog.info(repr(e))
tdSql.query("show streams")
tdSql.checkRows(1)
tdSql.checkData(0, 2, 's0')
tdLog.info("===== step8 =====")
tdSql.query(
......@@ -142,6 +146,12 @@ class TDTestCase:
except Exception as e:
tdLog.info(repr(e))
tdSql.query("show streams")
tdSql.checkRows(2)
tdSql.checkData(0, 2, 's1')
tdSql.checkData(1, 2, 's0')
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
......