Commit 4711bb36 authored by 陶建辉(Jeff)

Merge branch 'hotfix/rpcSendRequest' of github.com:taosdata/TDengine into hotfix/rpcSendRequest

......@@ -1259,8 +1259,6 @@ int tsParseInsertSql(SSqlObj *pSql) {
if ((code = tscMergeTableDataBlocks(pSql, pCmd->pDataBlocks)) != TSDB_CODE_SUCCESS) {
goto _error;
}
} else {
pCmd->pDataBlocks = tscDestroyBlockArrayList(pCmd->pDataBlocks);
}
code = TSDB_CODE_SUCCESS;
......
......@@ -153,6 +153,7 @@ TAOS_DEFINE_ERROR(TSDB_CODE_MND_TAG_ALREAY_EXIST, 0, 0x0369, "mnode tag
TAOS_DEFINE_ERROR(TSDB_CODE_MND_TAG_NOT_EXIST, 0, 0x036A, "mnode tag not exist")
TAOS_DEFINE_ERROR(TSDB_CODE_MND_FIELD_ALREAY_EXIST, 0, 0x036B, "mnode field already exist")
TAOS_DEFINE_ERROR(TSDB_CODE_MND_FIELD_NOT_EXIST, 0, 0x036C, "mnode field not exist")
TAOS_DEFINE_ERROR(TSDB_CODE_MND_INVALID_STABLE_NAME, 0, 0x036D, "mnode invalid stable name")
TAOS_DEFINE_ERROR(TSDB_CODE_MND_DB_NOT_SELECTED, 0, 0x0380, "mnode db not selected")
TAOS_DEFINE_ERROR(TSDB_CODE_MND_DB_ALREADY_EXIST, 0, 0x0381, "mnode database aleady exist")
......
......@@ -808,27 +808,31 @@ void *readMetric(void *sarg) {
}
void queryDB(TAOS *taos, char *command) {
int i = 5;
int i;
TAOS_RES *pSql = NULL;
int32_t code = -1;
while (i > 0 && code != 0) {
int32_t code = -1;
for (i = 0; i < 5; i++) {
if (NULL != pSql) {
taos_free_result(pSql);
pSql = NULL;
}
pSql = taos_query(taos, command);
code = taos_errno(pSql);
taos_free_result(pSql);
pSql = NULL;
if (code == 0) {
if (0 == code) {
break;
}
i--;
}
}
if (code != 0) {
fprintf(stderr, "Failed to run %s, reason: %s\n", command, taos_errstr(pSql));
taos_free_result(pSql);
taos_close(taos);
exit(EXIT_FAILURE);
}
taos_free_result(pSql);
}
// sync insertion
......
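The hunk above interleaves the old and new bodies of queryDB(): the rewrite replaces the decrementing while loop with a bounded for loop, frees the previous TAOS_RES before each retry instead of after every attempt, keeps the last result alive so taos_errstr() can report a persistent failure, and frees the final result exactly once on the success path. Read as a whole, the new control flow is roughly the sketch below; the helper name and attempt count are illustrative, and only the taos_* calls are taken from the diff.

// Sketch of the retry pattern introduced in queryDB() above; not the exact diff.
// Assumes the TDengine C client API (taos.h).
#include <stdio.h>
#include <stdlib.h>
#include <taos.h>

static void queryWithRetry(TAOS *taos, const char *command, int maxAttempts) {
  TAOS_RES *pSql = NULL;
  int       code = -1;

  for (int i = 0; i < maxAttempts; i++) {
    if (pSql != NULL) {       // drop the result of the previous failed attempt
      taos_free_result(pSql);
      pSql = NULL;
    }
    pSql = taos_query(taos, command);
    code = taos_errno(pSql);
    if (code == 0) break;     // success: stop retrying
  }

  if (code != 0) {
    fprintf(stderr, "Failed to run %s, reason: %s\n", command, taos_errstr(pSql));
    taos_free_result(pSql);
    taos_close(taos);
    exit(EXIT_FAILURE);
  }
  taos_free_result(pSql);     // freed exactly once on the success path
}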
......@@ -382,11 +382,13 @@ static void mnodeAddTableIntoStable(SSuperTableObj *pStable, SChildTableObj *pCt
pStable->numOfTables++;
if (pStable->vgHash == NULL) {
pStable->vgHash = taosHashInit(100000, taosGetDefaultHashFunction(TSDB_DATA_TYPE_INT), false);
pStable->vgHash = taosHashInit(64, taosGetDefaultHashFunction(TSDB_DATA_TYPE_INT), false);
}
if (pStable->vgHash != NULL) {
taosHashPut(pStable->vgHash, (char *)&pCtable->vgId, sizeof(pCtable->vgId), &pCtable->vgId, sizeof(pCtable->vgId));
if (taosHashGet(pStable->vgHash, &pCtable->vgId, sizeof(pCtable->vgId)) == NULL) {
taosHashPut(pStable->vgHash, &pCtable->vgId, sizeof(pCtable->vgId), &pCtable->vgId, sizeof(pCtable->vgId));
}
}
}
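The vgHash change above does two things: the initial hash size drops from 100000 to 64 buckets, and taosHashGet() is consulted before taosHashPut() so that each vgroup id is recorded at most once per super table, however many of its child tables live in that vgroup. Isolated, the check-then-put step looks roughly like this (the wrapper name is illustrative; the taosHash* helpers, declared in TDengine's hash header, and the key/value layout are the ones used in the diff):

// Illustrative sketch only: record a vgroup id in a super table's vgHash at most once.
// The hash handle type is elided here; the diff passes pStable->vgHash directly.
static void addVgroupOnce(void *vgHash, int32_t vgId) {
  if (taosHashGet(vgHash, &vgId, sizeof(vgId)) == NULL) {            // not recorded yet
    taosHashPut(vgHash, &vgId, sizeof(vgId), &vgId, sizeof(vgId));   // key and value are both the vgId
  }
}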
......@@ -1964,9 +1966,15 @@ static int32_t mnodeDoGetChildTableMeta(SMnodeMsg *pMsg, STableMetaMsg *pMeta) {
static int32_t mnodeAutoCreateChildTable(SMnodeMsg *pMsg) {
SCMTableInfoMsg *pInfo = pMsg->rpcMsg.pCont;
STagData *pTag = (STagData *)pInfo->tags;
STagData *pTags = (STagData *)pInfo->tags;
int32_t tagLen = htonl(pTags->dataLen);
if (pTags->name[0] == 0) {
mError("app:%p:%p, table:%s, failed to create table on demand for stable is empty, tagLen:%d", pMsg->rpcMsg.ahandle,
pMsg, pInfo->tableId, tagLen);
return TSDB_CODE_MND_INVALID_STABLE_NAME;
}
int32_t contLen = sizeof(SCMCreateTableMsg) + offsetof(STagData, data) + htonl(pTag->dataLen);
int32_t contLen = sizeof(SCMCreateTableMsg) + offsetof(STagData, data) + tagLen;
SCMCreateTableMsg *pCreateMsg = rpcMallocCont(contLen);
if (pCreateMsg == NULL) {
mError("app:%p:%p, table:%s, failed to create table while get meta info, no enough memory", pMsg->rpcMsg.ahandle,
......@@ -1981,9 +1989,9 @@ static int32_t mnodeAutoCreateChildTable(SMnodeMsg *pMsg) {
pCreateMsg->getMeta = 1;
pCreateMsg->contLen = htonl(contLen);
memcpy(pCreateMsg->schema, pInfo->tags, contLen - sizeof(SCMCreateTableMsg));
mDebug("app:%p:%p, table:%s, start to create on demand, stable:%s", pMsg->rpcMsg.ahandle, pMsg, pInfo->tableId,
((STagData *)(pCreateMsg->schema))->name);
memcpy(pCreateMsg->schema, pTags, contLen - sizeof(SCMCreateTableMsg));
mDebug("app:%p:%p, table:%s, start to create on demand, tagLen:%d stable:%s",
pMsg->rpcMsg.ahandle, pMsg, pInfo->tableId, tagLen, pTags->name);
rpcFreeCont(pMsg->rpcMsg.pCont);
pMsg->rpcMsg.msgType = TSDB_MSG_TYPE_CM_CREATE_TABLE;
......
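In mnodeAutoCreateChildTable() above, the old and new lines are interleaved: the rewrite renames pTag to pTags, converts dataLen out of network byte order once with htonl() and reuses the converted value, rejects requests whose super-table name is empty with the new TSDB_CODE_MND_INVALID_STABLE_NAME error, and logs the tag length when the create-on-demand message is built. A condensed view of the new prologue (all names are the ones visible in the diff; the surrounding function body is elided):

  /* condensed sketch of the new validation and sizing steps */
  STagData *pTags  = (STagData *)pInfo->tags;
  int32_t   tagLen = htonl(pTags->dataLen);        // dataLen arrives in network byte order

  if (pTags->name[0] == 0) {                       // no super table name: cannot auto-create
    return TSDB_CODE_MND_INVALID_STABLE_NAME;
  }

  int32_t contLen = sizeof(SCMCreateTableMsg) + offsetof(STagData, data) + tagLen;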
......@@ -359,6 +359,8 @@ void httpExecCmd(HttpContext *pContext) {
void httpProcessRequestCb(void *param, TAOS_RES *result, int code) {
HttpContext *pContext = param;
taos_free_result(result);
if (pContext == NULL) return;
if (code < 0) {
......
......@@ -3270,6 +3270,8 @@ static bool hasMainOutput(SQuery *pQuery) {
}
static STableQueryInfo *createTableQueryInfo( SQueryRuntimeEnv *pRuntimeEnv, void* pTable, STimeWindow win) {
SQuery* pQuery = pRuntimeEnv->pQuery;
STableQueryInfo *pTableQueryInfo = calloc(1, sizeof(STableQueryInfo));
pTableQueryInfo->win = win;
......@@ -3278,7 +3280,15 @@ static STableQueryInfo *createTableQueryInfo( SQueryRuntimeEnv *pRuntimeEnv, voi
pTableQueryInfo->pTable = pTable;
pTableQueryInfo->cur.vgroupIndex = -1;
initWindowResInfo(&pTableQueryInfo->windowResInfo, pRuntimeEnv, 100, 100, TSDB_DATA_TYPE_INT);
int32_t initialSize = 1;
int32_t initialThreshold = 1;
if (isIntervalQuery(pQuery) || isGroupbyNormalCol(pQuery->pGroupbyExpr)) {
initialSize = 20;
initialThreshold = 100;
}
initWindowResInfo(&pTableQueryInfo->windowResInfo, pRuntimeEnv, initialSize, initialThreshold, TSDB_DATA_TYPE_INT);
return pTableQueryInfo;
}
......@@ -3310,6 +3320,10 @@ void setExecutionContext(SQInfo *pQInfo, int32_t groupIndex, TSKEY nextKey) {
STableQueryInfo *pTableQueryInfo = pRuntimeEnv->pQuery->current;
SWindowResInfo *pWindowResInfo = &pRuntimeEnv->windowResInfo;
// lastKey needs to be updated
pTableQueryInfo->lastKey = nextKey;
setAdditionalInfo(pQInfo, pTableQueryInfo->pTable, pTableQueryInfo);
if (pRuntimeEnv->prevGroupId != INT32_MIN && pRuntimeEnv->prevGroupId == groupIndex) {
return;
}
......@@ -3335,9 +3349,6 @@ void setExecutionContext(SQInfo *pQInfo, int32_t groupIndex, TSKEY nextKey) {
pRuntimeEnv->prevGroupId = groupIndex;
setWindowResOutputBuf(pRuntimeEnv, pWindowRes);
initCtxOutputBuf(pRuntimeEnv);
pTableQueryInfo->lastKey = nextKey;
setAdditionalInfo(pQInfo, pTableQueryInfo->pTable, pTableQueryInfo);
}
void setWindowResOutputBuf(SQueryRuntimeEnv *pRuntimeEnv, SWindowResult *pResult) {
......
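Two related changes in qExecutor.c are shown above. createTableQueryInfo() now sizes the per-table window-result buffer by query type, starting with a single slot for plain table queries and 20 slots with a threshold of 100 for interval or group-by queries, instead of a fixed 100/100. setExecutionContext() now updates lastKey and calls setAdditionalInfo() before the prevGroupId early return, so the per-table state is refreshed even when the output buffers for the group are already set up. The new ordering, condensed (all names from the diff):

  /* condensed: per-table state is updated before the unchanged-group short circuit */
  pTableQueryInfo->lastKey = nextKey;
  setAdditionalInfo(pQInfo, pTableQueryInfo->pTable, pTableQueryInfo);

  if (pRuntimeEnv->prevGroupId != INT32_MIN && pRuntimeEnv->prevGroupId == groupIndex) {
    return;  // same group as before: window output buffers are already initialized
  }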
#!/bin/bash
WORK_DIR=/mnt/root
# Coloured Echoes #
function red_echo { echo -e "\033[31m$@\033[0m"; } #
function green_echo { echo -e "\033[32m$@\033[0m"; } #
......@@ -16,6 +18,17 @@ function echoInfo { local args="$@"; white_brackets $(green_printf "INFO") &&
function echoWarn { local args="$@"; echo "$(white_brackets "$(yellow_printf "WARN")" && echo " ${args}";)" 1>&2; } #
function echoError { local args="$@"; echo "$(white_brackets "$(red_printf "ERROR")" && echo " ${args}";)" 1>&2; } #
function setMaxTablesPerVnode {
echo "/etc/taos/taos.cfg maxTablesPerVnode will be set to $1"
hasText=`grep "maxTablesPerVnode" /etc/taos/taos.cfg`
if [[ -z "$hasText" ]]; then
echo "maxTablesPerVnode $1" >> /etc/taos/taos.cfg
else
sed -i 's/^maxTablesPerVnode.*$/maxTablesPerVnode '"$1"'/g' /etc/taos/taos.cfg
fi
}
function setMaxConnections {
echo "/etc/taos/taos.cfg maxConnection will be set to $1"
......@@ -27,6 +40,28 @@ function setMaxConnections {
fi
}
function setQDebugFlag {
echo "/etc/taos/taos.cfg qDebugFlag will be set to $1"
hasText=`grep -w "qDebugFlag" /etc/taos/taos.cfg`
if [[ -z "$hasText" ]]; then
echo "qDebugFlag $1" >> /etc/taos/taos.cfg
else
sed -i 's/^qDebugFlag.*$/qDebugFlag '"$1"'/g' /etc/taos/taos.cfg
fi
}
function setDebugFlag {
echo "/etc/taos/taos.cfg DebugFlag will be set to $1"
hasText=`grep -w "DebugFlag" /etc/taos/taos.cfg`
if [[ -z "$hasText" ]]; then
echo "DebugFlag $1" >> /etc/taos/taos.cfg
else
sed -i 's/^DebugFlag.*$/DebugFlag '"$1"'/g' /etc/taos/taos.cfg
fi
}
function setWal {
echo "/etc/taos/taos.cfg walLevel will be set to $1"
......@@ -47,9 +82,10 @@ function collectSysInfo {
}
function buildTDengine {
cd /root/TDengine
echoInfo "Build TDengine"
cd $WORK_DIR/TDengine
git remote update
git remote update > /dev/null
REMOTE_COMMIT=`git rev-parse --short remotes/origin/develop`
LOCAL_COMMIT=`git rev-parse --short @`
......@@ -59,29 +95,17 @@ function buildTDengine {
echo "repo up-to-date"
else
echo "repo need to pull"
git pull
git pull > /dev/null
LOCAL_COMMIT=`git rev-parse --short @`
cd debug
rm -rf *
cmake ..
cmake .. > /dev/null
make > /dev/null
make install
fi
}
function restartTaosd {
systemctl stop taosd
pkill -KILL -x taosd
sleep 10
rm -rf /mnt/var/log/taos/*
rm -rf /mnt/var/lib/taos/*
taosd 2>&1 > /dev/null &
sleep 10
}
function sendReport {
receiver="sdsang@taosdata.com, sangshuduo@gmail.com"
mimebody="MIME-Version: 1.0\nContent-Type: text/html; charset=utf-8\n"
......@@ -93,57 +117,90 @@ function sendReport {
(cat - && uuencode perftest-13d-wal1-$today.log perftest-13d-wal1-$today.log)| \
(cat - && uuencode perftest-13d-wal1-report.csv perftest-13d-wal1-report-$today.csv) | \
(cat - && uuencode perftest-13d-wal1-report.png perftest-13d-wal1-report-$today.png) | \
(cat - && uuencode perftest-var10k-int10s-wal1-$today.log perftest-var10k-int10s-wal1-$today.log)| \
(cat - && uuencode perftest-var10k-int10s-wal1-report.csv perftest-var10k-int10s-wal1-report-$today.csv) | \
(cat - && uuencode perftest-var10k-int10s-wal1-report.png perftest-var10k-int10s-wal1-report-$today.png) | \
(cat - && uuencode taosdemo-wal1-$today.log taosdemo-wal1-$today.log) | \
(cat - && uuencode taosdemo-wal1-report.csv taosdemo-wal1-report-$today.csv) | \
(cat - && uuencode taosdemo-rps-wal1-report.csv taosdemo-rps-wal1-report-$today.csv) | \
(cat - && uuencode taosdemo-wal1-report.png taosdemo-wal1-report-$today.png) | \
(cat - && uuencode taosdemo-rps-wal1-report.csv taosdemo-rps-wal1-report-$today.csv) | \
(cat - && uuencode taosdemo-rps-wal1-report.png taosdemo-rps-wal1-report-$today.png) | \
(cat - && uuencode perftest-1d-wal2-$today.log perftest-1d-wal2-$today.log)| \
(cat - && uuencode perftest-1d-wal2-report.csv perftest-1d-wal2-report-$today.csv) | \
(cat - && uuencode perftest-1d-wal2-report.png perftest-1d-wal2-report-$today.png) | \
(cat - && uuencode perftest-13d-wal2-$today.log perftest-13d-wal2-$today.log)| \
(cat - && uuencode perftest-13d-wal2-report.csv perftest-13d-wal2-report-$today.csv) | \
(cat - && uuencode perftest-13d-wal2-report.png perftest-13d-wal2-report-$today.png) | \
(cat - && uuencode perftest-var10k-int10s-wal2-$today.log perftest-var10k-int10s-wal2-$today.log)| \
(cat - && uuencode perftest-var10k-int10s-wal2-report.csv perftest-var10k-int10s-wal2-report-$today.csv) | \
(cat - && uuencode perftest-var10k-int10s-wal2-report.png perftest-var10k-int10s-wal2-report-$today.png) | \
(cat - && uuencode taosdemo-wal2-$today.log taosdemo-wal2-$today.log) | \
(cat - && uuencode taosdemo-wal2-report.csv taosdemo-wal2-report-$today.csv) | \
(cat - && uuencode taosdemo-wal2-report.png taosdemo-wal2-report-$today.png) | \
(cat - && uuencode taosdemo-rps-wal2-report.csv taosdemo-rps-wal2-report-$today.csv) | \
(cat - && uuencode taosdemo-rps-wal2-report.png taosdemo-rps-wal2-report-$today.png) | \
(cat - && uuencode sysinfo.log sysinfo.txt) | \
(cat - && uuencode taos.cfg taos-cfg-$today.txt) | \
ssmtp "${receiver}"
}
today=`date +"%Y%m%d"`
cd /root
echo -e "cron-ran-at-${today}" >> cron.log
cd $WORK_DIR
echo -e "cron-ran-at-${today}" >> $WORK_DIR/cron.log
echoInfo "Build TDengine"
buildTDengine
############################
setMaxConnections 100
setMaxConnections 1000
setMaxTablesPerVnode 6000
setDebugFlag 131
setQDebugFlag 131
############################
setWal "2"
cd /root
./perftest-tsdb-compare-1d.sh "wal2"
cd $WORK_DIR
date >> $WORK_DIR/cron.log
./perftest-taosdemo.sh "wal2"
date >> $WORK_DIR/cron.log
cd /root
./perftest-tsdb-compare-13d.sh "wal2"
cd $WORK_DIR
date >> $WORK_DIR/cron.log
./perftest-tsdb-compare-1d.sh
date >> $WORK_DIR/cron.log
cd /root
./perftest-taosdemo.sh "wal2"
cd $WORK_DIR
date >> $WORK_DIR/cron.log
./perftest-tsdb-compare-13d.sh
date >> $WORK_DIR/cron.log
cd $WORK_DIR
date >> $WORK_DIR/cron.log
./perftest-tsdb-compare-var10k-int10s.sh
date >> $WORK_DIR/cron.log
#############################
setWal "1"
cd /root
./perftest-tsdb-compare-1d.sh "wal1"
cd /root
./perftest-tsdb-compare-13d.sh "wal1"
cd /root
cd $WORK_DIR
date >> $WORK_DIR/cron.log
./perftest-taosdemo.sh "wal1"
date >> $WORK_DIR/cron.log
cd $WORK_DIR
date >> $WORK_DIR/cron.log
./perftest-tsdb-compare-1d.sh
date >> $WORK_DIR/cron.log
cd $WORK_DIR
date >> $WORK_DIR/cron.log
./perftest-tsdb-compare-13d.sh
date >> $WORK_DIR/cron.log
cd $WORK_DIR
date >> $WORK_DIR/cron.log
./perftest-tsdb-compare-var10k-int10s.sh
date >> $WORK_DIR/cron.log
#############################
collectSysInfo
......
#!/bin/bash
WORK_DIR=/mnt/root
walLevel=`grep "^walLevel" /etc/taos/taos.cfg | awk '{print $2}'`
if [[ "$walLevel" -eq "2" ]]; then
walPostfix="wal2"
elif [[ "$walLevel" -eq "1" ]]; then
walPostfix="wal1"
else
echo -e "${RED}wrong walLevel $walLevel found! ${NC}"
exit 1
fi
logDir=`grep "^logDir" /etc/taos/taos.cfg | awk '{print $2}'`
dataDir=`grep "^dataDir" /etc/taos/taos.cfg | awk '{print $2}'`
# Coloured Echoes
function red_echo { echo -e "\033[31m$@\033[0m"; }
function green_echo { echo -e "\033[32m$@\033[0m"; }
......@@ -17,13 +32,20 @@ function echoWarn { local args="$@"; echo "$(white_brackets "$(yellow_printf
function echoError { local args="$@"; echo "$(white_brackets "$(red_printf "ERROR")" && echo " ${args}";)" 1>&2; }
function restartTaosd {
echo "Stop taosd"
systemctl stop taosd
pkill -KILL -x taosd
sleep 10
rm -rf /mnt/var/log/taos/*
rm -rf /mnt/var/lib/taos/*
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
rm -rf $logDir/*
rm -rf $dataDir/*
echo "Start taosd"
taosd 2>&1 > /dev/null &
sleep 10
}
......@@ -32,7 +54,7 @@ function runCreateTableOnly {
echoInfo "Restart Taosd"
restartTaosd
/usr/bin/time -f "Total: %e" -o totaltime.out bash -c "yes | taosdemo -n 0 2>&1 | tee taosdemo-$1-$today.log"
/usr/bin/time -f "Total: %e" -o totaltime.out bash -c "yes | taosdemo -n 0 2>&1 | tee taosdemo-$walPostfix-$today.log"
demoCreateTableOnly=`grep "Total:" totaltime.out|awk '{print $2}'`
}
......@@ -40,7 +62,7 @@ function runDeleteTableOnly {
echoInfo "Restart Taosd"
restartTaosd
/usr/bin/time -f "Total: %e" -o totaltime.out bash -c "yes | taosdemo -t 0 -D 1 2>&1 | tee taosdemo-$1-$today.log"
/usr/bin/time -f "Total: %e" -o totaltime.out bash -c "yes | taosdemo -t 0 -D 1 2>&1 | tee taosdemo-$walPostfix-$today.log"
demoDeleteTableOnly=`grep "Total:" totaltime.out|awk '{print $2}'`
}
......@@ -48,41 +70,44 @@ function runCreateTableThenInsert {
echoInfo "Restart Taosd"
restartTaosd
/usr/bin/time -f "Total: %e" -o totaltime.out bash -c "yes | taosdemo 2>&1 | tee -a taosdemo-$1-$today.log"
/usr/bin/time -f "Total: %e" -o totaltime.out bash -c "yes | taosdemo 2>&1 | tee -a taosdemo-$walPostfix-$today.log"
demoTableAndInsert=`grep "Total:" totaltime.out|awk '{print $2}'`
demoRPS=`grep "records\/second" taosdemo-$1-$today.log | tail -n1 | awk '{print $13}'`
demoRPS=`grep "records\/second" taosdemo-$walPostfix-$today.log | tail -n1 | awk '{print $13}'`
}
function generateTaosdemoPlot {
echo "${today} $1, demoCreateTableOnly: ${demoCreateTableOnly}, demoDeleteTableOnly: ${demoDeleteTableOnly}, demoTableAndInsert: ${demoTableAndInsert}" | tee -a taosdemo-$today.log
echo "${today}, ${demoCreateTableOnly}, ${demoDeleteTableOnly}, ${demoTableAndInsert}">> taosdemo-$1-report.csv
echo "${today}, ${demoRPS}" >> taosdemo-rps-$1-report.csv
echo "${today} $walPostfix, demoCreateTableOnly: ${demoCreateTableOnly}, demoDeleteTableOnly: ${demoDeleteTableOnly}, demoTableAndInsert: ${demoTableAndInsert}" | tee -a taosdemo-$today.log
echo "${today}, ${demoCreateTableOnly}, ${demoDeleteTableOnly}, ${demoTableAndInsert}">> taosdemo-$walPostfix-report.csv
echo "${today}, ${demoRPS}" >> taosdemo-rps-$walPostfix-report.csv
csvLines=`cat taosdemo-$1-report.csv | wc -l`
csvLines=`cat taosdemo-$walPostfix-report.csv | wc -l`
if [ "$csvLines" -gt "10" ]; then
sed -i '1d' taosdemo-$1-report.csv
sed -i '1d' taosdemo-$walPostfix-report.csv
fi
csvLines=`cat taosdemo-rps-$1-report.csv | wc -l`
csvLines=`cat taosdemo-rps-$walPostfix-report.csv | wc -l`
if [ "$csvLines" -gt "10" ]; then
sed -i '1d' taosdemo-rps-$1-report.csv
sed -i '1d' taosdemo-rps-$walPostfix-report.csv
fi
gnuplot -e "filename='taosdemo-$1-report'" -p taosdemo-csv2png.gnuplot
gnuplot -e "filename='taosdemo-rps-$1-report'" -p taosdemo-rps-csv2png.gnuplot
gnuplot -e "filename='taosdemo-$walPostfix-report'" -p taosdemo-csv2png.gnuplot
gnuplot -e "filename='taosdemo-rps-$walPostfix-report'" -p taosdemo-rps-csv2png.gnuplot
}
today=`date +"%Y%m%d"`
cd /root
cd $WORK_DIR
echoInfo "Test Create Table Only "
runCreateTableOnly $1
runCreateTableOnly
echoInfo "Test Create Table then Insert data"
runDeleteTableOnly $1
runDeleteTableOnly
echoInfo "Test Create Table then Insert data"
runCreateTableThenInsert $1
runCreateTableThenInsert
echoInfo "Generate plot for taosdemo"
generateTaosdemoPlot $1
echoInfo "End of TaosDemo Test"
generateTaosdemoPlot
tar czf $WORK_DIR/taos-log-taosdemo-$today.tar.gz $logDir/*
echoInfo "End of TaosDemo Test" | tee -a $WORK_DIR/cron.log
#!/bin/bash
WORK_DIR=/mnt/root
TSDB_CMP_DIR=timeseriesdatabase-comparisons/build/tsdbcompare
walLevel=`grep "^walLevel" /etc/taos/taos.cfg | awk '{print $2}'`
if [[ "$walLevel" -eq "2" ]]; then
walPostfix="wal2"
elif [[ "$walLevel" -eq "1" ]]; then
walPostfix="wal1"
else
echo -e "${RED}wrong walLevel $walLevel found! ${NC}"
exit 1
fi
logDir=`grep "^logDir" /etc/taos/taos.cfg | awk '{print $2}'`
dataDir=`grep "^dataDir" /etc/taos/taos.cfg | awk '{print $2}'`
# Coloured Echoes #
function red_echo { echo -e "\033[31m$@\033[0m"; } #
function green_echo { echo -e "\033[32m$@\033[0m"; } #
......@@ -17,13 +33,20 @@ function echoWarn { local args="$@"; echo "$(white_brackets "$(yellow_printf
function echoError { local args="$@"; echo "$(white_brackets "$(red_printf "ERROR")" && echo " ${args}";)" 1>&2; } #
function restartTaosd {
echo "Stop taosd"
systemctl stop taosd
pkill -KILL -x taosd
sleep 10
rm -rf /mnt/var/log/taos/*
rm -rf /mnt/var/lib/taos/*
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
rm -rf $logDir/*
rm -rf $dataDir/*
echo "Start taosd"
taosd 2>&1 > /dev/null &
sleep 10
}
......@@ -32,27 +55,30 @@ function runPerfTest13d {
echoInfo "Restart Taosd"
restartTaosd
cd /home/taos/tliu/timeseriesdatabase-comparisons/build/tsdbcompare
./runreal-13d-csv.sh $1 2>&1 | tee /root/perftest-13d-$1-$today.log
cd $WORK_DIR/$TSDB_CMP_DIR
./runTDengine.sh -d 13 -w -q 2>&1 | tee $WORK_DIR/perftest-13d-$walPostfix-$today.log
}
function generatePerfPlot13d {
cd /root
cd $WORK_DIR
csvLines=`cat perftest-13d-$1-report.csv | wc -l`
csvLines=`cat perftest-13d-$walPostfix-report.csv | wc -l`
if [ "$csvLines" -gt "10" ]; then
sed -i '1d' perftest-13d-$1-report.csv
sed -i '1d' perftest-13d-$walPostfix-report.csv
fi
gnuplot -e "filename='perftest-13d-$1-report'" -p perftest-csv2png.gnuplot
gnuplot -e "filename='perftest-13d-$walPostfix-report'" -p perftest-csv2png.gnuplot
}
today=`date +"%Y%m%d"`
cd /root
cd $WORK_DIR
echoInfo "run Performance Test with 13 days data"
runPerfTest13d $1
echoInfo "Generate plot of 13 days data"
generatePerfPlot13d $1
echoInfo "End of TSDB-Compare 13-days-data Test"
tar czf $WORK_DIR/taos-log-13d-$today.tar.gz $logDir/*
echoInfo "End of TSDB-Compare 13-days-data Test" | tee -a $WORK_DIR/cron.log
#!/bin/bash
WORK_DIR=/mnt/root
TSDB_CMP_DIR=timeseriesdatabase-comparisons/build/tsdbcompare
walLevel=`grep "^walLevel" /etc/taos/taos.cfg | awk '{print $2}'`
if [[ "$walLevel" -eq "2" ]]; then
walPostfix="wal2"
elif [[ "$walLevel" -eq "1" ]]; then
walPostfix="wal1"
else
echo -e "${RED}wrong walLevel $walLevel found! ${NC}"
exit 1
fi
logDir=`grep "^logDir" /etc/taos/taos.cfg | awk '{print $2}'`
dataDir=`grep "^dataDir" /etc/taos/taos.cfg | awk '{print $2}'`
# Coloured Echoes #
function red_echo { echo -e "\033[31m$@\033[0m"; } #
function green_echo { echo -e "\033[32m$@\033[0m"; } #
......@@ -17,13 +33,20 @@ function echoWarn { local args="$@"; echo "$(white_brackets "$(yellow_printf
function echoError { local args="$@"; echo "$(white_brackets "$(red_printf "ERROR")" && echo " ${args}";)" 1>&2; } #
function restartTaosd {
echo "Stop taosd"
systemctl stop taosd
pkill -KILL -x taosd
sleep 10
rm -rf /mnt/var/log/taos/*
rm -rf /mnt/var/lib/taos/*
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
rm -rf $logDir/*
rm -rf $dataDir/*
echo "Start taosd"
taosd 2>&1 > /dev/null &
sleep 10
}
......@@ -32,27 +55,30 @@ function runPerfTest1d {
echoInfo "Restart Taosd"
restartTaosd
cd /home/taos/tliu/timeseriesdatabase-comparisons/build/tsdbcompare
./runreal-1d-csv.sh $1 2>&1 | tee /root/perftest-1d-$1-$today.log
cd $WORK_DIR/$TSDB_CMP_DIR
./runTDengine.sh -d 1 -w -q 2>&1 | tee $WORK_DIR/perftest-1d-$walPostfix-$today.log
}
function generatePerfPlot1d {
cd /root
cd $WORK_DIR
csvLines=`cat perftest-1d-$1-report.csv | wc -l`
csvLines=`cat perftest-1d-$walPostfix-report.csv | wc -l`
if [ "$csvLines" -gt "10" ]; then
sed -i '2d' perftest-1d-$1-report.csv
sed -i '1d' perftest-1d-$walPostfix-report.csv
fi
gnuplot -e "filename='perftest-1d-$1-report'" -p perftest-csv2png.gnuplot
gnuplot -e "filename='perftest-1d-$walPostfix-report'" -p perftest-csv2png.gnuplot
}
today=`date +"%Y%m%d"`
cd /root
cd $WORK_DIR
echoInfo "run Performance Test with 1 day data"
runPerfTest1d $1
runPerfTest1d
echoInfo "Generate plot of 1 day data"
generatePerfPlot1d $1
echoInfo "End of TSDB-Compare 1-day-data Test"
generatePerfPlot1d
tar czf $WORK_DIR/taos-log-1d-$today.tar.gz $logDir/*
echoInfo "End of TSDB-Compare 1-day-data Test" | tee -a $WORK_DIR/cron.log
#!/bin/bash
WORK_DIR=/mnt/root
TSDB_CMP_DIR=timeseriesdatabase-comparisons/build/tsdbcompare
walLevel=`grep "^walLevel" /etc/taos/taos.cfg | awk '{print $2}'`
if [[ "$walLevel" -eq "2" ]]; then
walPostfix="wal2"
elif [[ "$walLevel" -eq "1" ]]; then
walPostfix="wal1"
else
echo -e "${RED}wrong walLevel $walLevel found! ${NC}"
exit 1
fi
logDir=`grep "^logDir" /etc/taos/taos.cfg | awk '{print $2}'`
dataDir=`grep "^dataDir" /etc/taos/taos.cfg | awk '{print $2}'`
# Coloured Echoes #
function red_echo { echo -e "\033[31m$@\033[0m"; } #
function green_echo { echo -e "\033[32m$@\033[0m"; } #
function yellow_echo { echo -e "\033[33m$@\033[0m"; } #
function white_echo { echo -e "\033[1;37m$@\033[0m"; } #
# Coloured Printfs #
function red_printf { printf "\033[31m$@\033[0m"; } #
function green_printf { printf "\033[32m$@\033[0m"; } #
function yellow_printf { printf "\033[33m$@\033[0m"; } #
function white_printf { printf "\033[1;37m$@\033[0m"; } #
# Debugging Outputs #
function white_brackets { local args="$@"; white_printf "["; printf "${args}"; white_printf "]"; } #
function echoInfo { local args="$@"; white_brackets $(green_printf "INFO") && echo " ${args}"; } #
function echoWarn { local args="$@"; echo "$(white_brackets "$(yellow_printf "WARN")" && echo " ${args}";)" 1>&2; } #
function echoError { local args="$@"; echo "$(white_brackets "$(red_printf "ERROR")" && echo " ${args}";)" 1>&2; } #
function restartTaosd {
echo "Stop taosd"
systemctl stop taosd
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
rm -rf $logDir/*
rm -rf $dataDir/*
echo "Start taosd"
taosd 2>&1 > /dev/null &
sleep 10
}
function runPerfTestVar10K {
echoInfo "Restart Taosd"
restartTaosd
cd $WORK_DIR/$TSDB_CMP_DIR
./runTDengine.sh -v 10000 -i 100 -w -q 2>&1 | tee $WORK_DIR/perftest-var10k-int100s-$walPostfix-$today.log
}
function generatePerfPlotVar10K {
cd $WORK_DIR
csvLines=`cat perftest-var10k-int100s-$walPostfix-report.csv | wc -l`
if [ "$csvLines" -gt "10" ]; then
sed -i '1d' perftest-var10k-int100s-$walPostfix-report.csv
fi
gnuplot -e "filename='perftest-var10k-int100s-$walPostfix-report'" -p perftest-csv2png.gnuplot
}
today=`date +"%Y%m%d"`
cd $WORK_DIR
echoInfo "run Performance Test with 10K tables data"
runPerfTestVar10K
echoInfo "Generate plot of 10K tables data"
generatePerfPlotVar10K
tar czf $WORK_DIR/taos-log-var10k-int100s-$today.tar.gz $logDir/*
echoInfo "End of TSDB-Compare var10k-int100s-tables-data Test" | tee -a $WORK_DIR/cron.log
#!/bin/bash
WORK_DIR=/mnt/root
TSDB_CMP_DIR=timeseriesdatabase-comparisons/build/tsdbcompare
walLevel=`grep "^walLevel" /etc/taos/taos.cfg | awk '{print $2}'`
if [[ "$walLevel" -eq "2" ]]; then
walPostfix="wal2"
elif [[ "$walLevel" -eq "1" ]]; then
walPostfix="wal1"
else
echo -e "${RED}wrong walLevel $walLevel found! ${NC}"
exit 1
fi
logDir=`grep "^logDir" /etc/taos/taos.cfg | awk '{print $2}'`
dataDir=`grep "^dataDir" /etc/taos/taos.cfg | awk '{print $2}'`
# Coloured Echoes #
function red_echo { echo -e "\033[31m$@\033[0m"; } #
function green_echo { echo -e "\033[32m$@\033[0m"; } #
function yellow_echo { echo -e "\033[33m$@\033[0m"; } #
function white_echo { echo -e "\033[1;37m$@\033[0m"; } #
# Coloured Printfs #
function red_printf { printf "\033[31m$@\033[0m"; } #
function green_printf { printf "\033[32m$@\033[0m"; } #
function yellow_printf { printf "\033[33m$@\033[0m"; } #
function white_printf { printf "\033[1;37m$@\033[0m"; } #
# Debugging Outputs #
function white_brackets { local args="$@"; white_printf "["; printf "${args}"; white_printf "]"; } #
function echoInfo { local args="$@"; white_brackets $(green_printf "INFO") && echo " ${args}"; } #
function echoWarn { local args="$@"; echo "$(white_brackets "$(yellow_printf "WARN")" && echo " ${args}";)" 1>&2; } #
function echoError { local args="$@"; echo "$(white_brackets "$(red_printf "ERROR")" && echo " ${args}";)" 1>&2; } #
function restartTaosd {
echo "Stop taosd"
systemctl stop taosd
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
rm -rf $logDir/*
rm -rf $dataDir/*
echo "Start taosd"
taosd 2>&1 > /dev/null &
sleep 10
}
function runPerfTestVar10K {
echoInfo "Restart Taosd"
restartTaosd
cd $WORK_DIR/$TSDB_CMP_DIR
./runTDengine.sh -v 10000 -w -q 2>&1 | tee $WORK_DIR/perftest-var10k-int10s-$walPostfix-$today.log
}
function generatePerfPlotVar10K {
cd $WORK_DIR
csvLines=`cat perftest-var10k-int10s-$walPostfix-report.csv | wc -l`
if [ "$csvLines" -gt "10" ]; then
sed -i '1d' perftest-var10k-int10s-$walPostfix-report.csv
fi
gnuplot -e "filename='perftest-var10k-int10s-$walPostfix-report'" -p perftest-csv2png.gnuplot
}
today=`date +"%Y%m%d"`
cd $WORK_DIR
echoInfo "run Performance Test with 10K tables data"
runPerfTestVar10K
echoInfo "Generate plot of 10K tables data"
generatePerfPlotVar10K
tar czf $WORK_DIR/taos-log-var10k-int10s-$today.tar.gz $logDir/*
echoInfo "End of TSDB-Compare var10k-int10s-tables-data Test" | tee -a $WORK_DIR/cron.log
#!/bin/bash
#set -x
WORK_DIR=/mnt/root
DATA_DIR=/mnt/data
# Color setting
RED='\033[0;31m'
GREEN='\033[1;32m'
GREEN_DARK='\033[0;32m'
GREEN_UNDERLINE='\033[4;32m'
NC='\033[0m'
# default value
DEFAULT_BATCH=5000
DEFAULT_DAYS=1
DEFAULT_INTERVAL=1
DEFAULT_SCALEVAR=10
DEFAULT_DOPREPARE=false
DEFAULT_DOWRITE=false
DEFAULT_DOQUERY=false
# function
function do_prepare {
echo
echo "---------------Generating Data-----------------"
echo
echo
echo "Prepare data for InfluxDB...."
echo "bin/bulk_data_gen -seed 123 -format influx-bulk -sampling-interval $interval_s \
-scale-var $scalevar -use-case devops -timestamp-start $TIME_START \
-timestamp-end $TIME_END > $DATA_FILE"
bin/bulk_data_gen -seed 123 -format influx-bulk -sampling-interval $interval_s \
-scale-var $scalevar -use-case devops -timestamp-start $TIME_START \
-timestamp-end $TIME_END > $DATA_FILE
}
function do_write {
echo "cat $DATA_FILE | bin/bulk_load_influx \
--batch-size=$batch --workers=20 --urls=http://172.15.1.5:8086 | grep loaded"
INFLUXRES=`cat $DATA_FILE | bin/bulk_load_influx \
--batch-size=$batch --workers=20 --urls="http://172.15.1.5:8086" | grep loaded`
echo -e "${GREEN}InfluxDB writing result:${NC}"
echo -e "${GREEN}$INFLUXRES${NC}"
DATA=`echo $INFLUXRES|awk '{print($2)}'`
TMP=`echo $INFLUXRES|awk '{print($5)}'`
IFWTM=`echo ${TMP%s*}`
}
function do_query {
echo
echo "------------------Querying Data-----------------"
echo
echo
echo "start query test, query max from 8 hosts group by 1 hour, InfluxDB"
echo
#Test case 1
#Test case 1: across all data, match on 8 hostname tags and query the maximum of the usage_user metric in the simulated server CPU data for those 8 hosts.
#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') ;
# a,b,c,d,e,f,g,h are random 8 numbers.
echo "IFQS1=bin/bulk_query_gen -seed 123 -format influx-http -query-type 8-host-all -scale-var $scalevar -queries 1000 | bin/query_benchmarker_influxdb -urls="http://172.15.1.5:8086" -workers 50 -print-interval 0|grep wall"
IFQS1=`bin/bulk_query_gen -seed 123 -format influx-http -query-type 8-host-all -scale-var $scalevar -queries 1000 | bin/query_benchmarker_influxdb -urls="http://172.15.1.5:8086" -workers 50 -print-interval 0|grep wall`
echo -e "${GREEN}InfluxDB query test case 1 result:${NC}"
echo -e "${GREEN}$IFQS1${NC}"
TMP=`echo $IFQS1|awk '{print($4)}'`
IFQ1=`echo ${TMP%s*}`
#Test case 2
#Test case 2: across all data, match on 8 hostname tags and query the usage_user metric in the simulated server CPU data for those 8 hosts, aggregated as the maximum per 1-hour interval.
#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') interval(1h);
# a,b,c,d,e,f,g,h are random 8 numbers
echo "IFQS2=bin/bulk_query_gen -seed 123 -format influx-http -query-type 8-host-allbyhr -scale-var $scalevar -queries 1000 | bin/query_benchmarker_influxdb -urls=http://172.15.1.5:8086 -workers 50 -print-interval 0|grep wall"
IFQS2=`bin/bulk_query_gen -seed 123 -format influx-http -query-type 8-host-allbyhr \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_influxdb \
-urls="http://172.15.1.5:8086" -workers 50 -print-interval 0|grep wall`
echo -e "${GREEN}InfluxDB query test case 2 result:${NC}"
echo -e "${GREEN}$IFQS2${NC}"
TMP=`echo $IFQS2|awk '{print($4)}'`
IFQ2=`echo ${TMP%s*}`
#Test case 3
#Test case 3: over a random 12-hour window, match on 8 hostname tags and query the usage_user metric in the simulated server CPU data for those 8 hosts, aggregated as the maximum per 10-minute interval.
#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') and time >x and time <y interval(10m);
# a,b,c,d,e,f,g,h are random 8 numbers, y-x =12 hour
echo "IFQS3=bin/bulk_query_gen -seed 123 -format influx-http -query-type 8-host-12-hr -scale-var $scalevar -queries 1000 | bin/query_benchmarker_influxdb -urls=http://172.15.1.5:8086 -workers 50 -print-interval 0|grep wall"
IFQS3=`bin/bulk_query_gen -seed 123 -format influx-http -query-type 8-host-12-hr \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_influxdb \
-urls="http://172.15.1.5:8086" -workers 50 -print-interval 0|grep wall`
echo -e "${GREEN}InfluxDB query test case 3 result:${NC}"
echo -e "${GREEN}$IFQS3${NC}"
TMP=`echo $IFQS3|awk '{print($4)}'`
IFQ3=`echo ${TMP%s*}`
#Test case 4
#Test case 4: over a random 1-hour window, match on 8 hostname tags and query the usage_user metric in the simulated server CPU data for those 8 hosts, aggregated as the maximum per 1-minute interval.
#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') and time >x and time <y interval(10m);
# a,b,c,d,e,f,g,h are random 8 numbers, y-x =1 hours
echo "IFQS4=bin/bulk_query_gen -seed 123 -format influx-http -query-type 8-host-1-hr -scale-var $scalevar -queries 1000 | bin/query_benchmarker_influxdb -urls=http://172.15.1.5:8086 -workers 50 -print-interval 0|grep wall"
IFQS4=`bin/bulk_query_gen -seed 123 -format influx-http -query-type 8-host-1-hr \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_influxdb \
-urls="http://172.15.1.5:8086" -workers 50 -print-interval 0|grep wall`
echo -e "${GREEN}InfluxDB query test case 4 result:${NC}"
echo -e "${GREEN}$IFQS4${NC}"
TMP=`echo $IFQS4|awk '{print($4)}'`
IFQ4=`echo ${TMP%s*}`
}
batch=$DEFAULT_BATCH
days=$DEFAULT_DAYS
interval=$DEFAULT_INTERVAL
scalevar=$DEFAULT_SCALEVAR
doprepare=$DEFAULT_DOPREPARE
dowrite=$DEFAULT_DOWRITE
doquery=$DEFAULT_DOQUERY
help="$(basename "$0") [-h] [-b batchsize] [-d data-of-days] [-i interval] [-v scalevar] [-p false for don't prepare] [-w false for don't do write] [-q false for don't do query]"
while getopts ':b:d:i:v:pwqh' flag; do
case "${flag}" in
b) batch=${OPTARG};;
d) days=${OPTARG};;
i) interval=${OPTARG};;
v) scalevar=${OPTARG};;
p) doprepare=${OPTARG};;
w) dowrite=${OPTARG};;
q) doquery=${OPTARG};;
:) echo -e "${RED}Missing argument!${NC}"
echo "$help"
exit 1
;;
h) echo "$help"
exit 1
;;
esac
done
if [[ $scalevar -eq 10000 ]]
then
TIME_START="2018-01-01T00:00:00Z"
TIME_END="2018-01-02T00:00:00Z"
if [[ $interval -eq 100 ]]
then
interval_s=100s
DATA_FILE=$DATA_DIR/influxdb-var10k-int100s.dat
RECORD_CSV_FILE=$WORK_DIR/perftest-influxdb-report-var10k-int100s.csv
else
interval_s=10s
DATA_FILE=$DATA_DIR/influxdb-var10k-int10s.dat
RECORD_CSV_FILE=$WORK_DIR/perftest-influxdb-report-var10k-int10s.csv
fi
else
if [[ $days -eq 1 ]]
then
TIME_START="2018-01-01T00:00:00Z"
TIME_END="2018-01-02T00:00:00Z"
DATA_FILE=$DATA_DIR/influxdb-1d.dat
RECORD_CSV_FILE=$WORK_DIR/perftest-influxdb-report-1d.csv
elif [[ $days -eq 13 ]]
then
TIME_START="2018-01-01T00:00:00Z"
TIME_END="2018-01-14T00:00:00Z"
DATA_FILE=$DATA_DIR/influxdb-13d.dat
RECORD_CSV_FILE=$WORK_DIR/perftest-influxdb-report-13d.csv
else
echo -e "{RED} don't support input $days{NC}"
exit 1
fi
interval_s=${interval}s
fi
echo "TIME_START: $TIME_START, TIME_END: $TIME_END, DATA_FILE: $DATA_FILE"
echo "doprepare: $doprepare, dowrite: $dowrite, doquery: $doquery"
if $doprepare;
then
do_prepare
fi
docker network create --ip-range 172.15.1.255/24 --subnet 172.15.1.1/16 tsdbcomp >>/dev/null 2>&1
INFLUX=`docker run -d -p 8086:8086 --net tsdbcomp --ip 172.15.1.5 influxdb` >>/dev/null 2>&1
sleep 10
if $dowrite;
then
echo -e "Start test InfluxDB writting, result in ${GREEN}Green line${NC}"
do_write
fi
if $doquery;
then
echo -e "Start test InfluxDB query, result in ${GREEN}Green line${NC}"
do_query
fi
echo
echo
echo "======================================================"
echo " tsdb performance comparision "
echo "======================================================"
if $dowrite;
then
echo -e " Writing $DATA records test takes: "
printf " InfluxDB | %-4.5f Seconds \n" $IFWTM
echo "------------------------------------------------------"
fi
if $doquery;
then
echo " Query test cases: "
echo " case 1: select the max(value) from all data "
echo " filtered out 8 hosts "
echo " Query test case 1 takes: "
printf " InfluxDB | %-4.5f Seconds \n" $IFQ1
echo "------------------------------------------------------"
echo " case 2: select the max(value) from all data "
echo " filtered out 8 hosts with an interval of 1 hour "
echo " case 2 takes: "
printf " InfluxDB | %-4.5f Seconds \n" $IFQ2
echo "------------------------------------------------------"
echo " case 3: select the max(value) from random 12 hours"
echo " data filtered out 8 hosts with an interval of 10 min "
echo " filtered out 8 hosts interval(1h) "
echo " case 3 takes: "
printf " InfluxDB | %-4.5f Seconds \n" $IFQ3
echo "------------------------------------------------------"
echo " case 4: select the max(value) from random 1 hour data "
echo " data filtered out 8 hosts with an interval of 1 min "
echo " case 4 takes: "
printf " InfluxDB | %-4.5f Seconds \n" $IFQ4
echo "------------------------------------------------------"
fi
docker stop $INFLUX >>/dev/null 2>&1
docker container rm -f $INFLUX >>/dev/null 2>&1
docker network rm tsdbcomp >>/dev/null 2>&1
today=`date +"%Y%m%d"`
echo "${today}, ${IFWTM}, ${IFQ1}, ${IFQ2}, ${IFQ3}, ${IFQ4}" >> $RECORD_CSV_FILE
#!/bin/bash
#set -x
WORK_DIR=/mnt/root
DATA_DIR=/mnt/data
# Color setting
RED='\033[0;31m'
GREEN='\033[1;32m'
GREEN_DARK='\033[0;32m'
GREEN_UNDERLINE='\033[4;32m'
NC='\033[0m'
# default value
DEFAULT_BATCH=5000
DEFAULT_DAYS=1
DEFAULT_INTERVAL=1
DEFAULT_SCALEVAR=10
DEFAULT_DOPREPARE=false
DEFAULT_DOWRITE=false
DEFAULT_DOQUERY=false
# function
function do_prepare {
echo
echo "---------------Generating Data-----------------"
echo
echo
echo "Prepare data for TDengine...."
# bin/bulk_data_gen -seed 123 -format tdengine -tdschema-file config/TDengineSchema.toml -scale-var 100 -use-case devops -timestamp-start "2018-01-01T00:00:00Z" -timestamp-end "2018-01-02T00:00:00Z" > $DATA_DIR/tdengine.dat
echo "bin/bulk_data_gen -seed 123 -format tdengine -sampling-interval $interval_s \
-tdschema-file config/TDengineSchema.toml -scale-var $scalevar \
-use-case devops -timestamp-start $TIME_START \
-timestamp-end $TIME_END \
> $DATA_FILE"
bin/bulk_data_gen -seed 123 -format tdengine -sampling-interval $interval_s \
-tdschema-file config/TDengineSchema.toml -scale-var $scalevar \
-use-case devops -timestamp-start $TIME_START \
-timestamp-end $TIME_END \
> $DATA_FILE
}
function do_write {
echo "TDENGINERES=cat $DATA_FILE |bin/bulk_load_tdengine --url 127.0.0.1:0 \
--batch-size $batch -do-load -report-tags n1 -workers 20 -fileout=false| grep loaded"
TDENGINERES=`cat $DATA_FILE |bin/bulk_load_tdengine --url 127.0.0.1:0 \
--batch-size $batch -do-load -report-tags n1 -workers 20 -fileout=false| grep loaded`
echo
echo -e "${GREEN}TDengine writing result:${NC}"
echo -e "${GREEN}$TDENGINERES${NC}"
DATA=`echo $TDENGINERES|awk '{print($2)}'`
TMP=`echo $TDENGINERES|awk '{print($5)}'`
TDWTM=`echo ${TMP%s*}`
}
function do_query {
echo
echo "------------------Querying Data-----------------"
echo
echo
echo "start query test, query max from 8 hosts group by 1 hour, TDengine"
echo
#Test case 1
#Test case 1: across all data, match on 8 hostname tags and query the maximum of the usage_user metric in the simulated server CPU data for those 8 hosts.
#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') ;
# a,b,c,d,e,f,g,h are random 8 numbers.
echo "TDQS1=bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-all \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_tdengine \
-urls=http://127.0.0.1:6020 -workers 50 -print-interval 0|grep wall"
TDQS1=`bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-all \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_tdengine \
-urls="http://127.0.0.1:6020" -workers 50 -print-interval 0|grep wall`
echo
echo -e "${GREEN}TDengine query test case 1 result:${NC}"
echo -e "${GREEN}$TDQS1${NC}"
TMP=`echo $TDQS1|awk '{print($4)}'`
TDQ1=`echo ${TMP%s*}`
#Test case 2
#Test case 2: across all data, match on 8 hostname tags and query the usage_user metric in the simulated server CPU data for those 8 hosts, aggregated as the maximum per 1-hour interval.
#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') interval(1h);
# a,b,c,d,e,f,g,h are random 8 numbers
echo "TDQS2=bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-allbyhr \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_tdengine \
-urls=http://127.0.0.1:6020 -workers 50 -print-interval 0|grep wall"
TDQS2=`bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-allbyhr \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_tdengine \
-urls="http://127.0.0.1:6020" -workers 50 -print-interval 0|grep wall`
echo
echo -e "${GREEN}TDengine query test case 2 result:${NC}"
echo -e "${GREEN}$TDQS2${NC}"
TMP=`echo $TDQS2|awk '{print($4)}'`
TDQ2=`echo ${TMP%s*}`
#Test case 3
#Test case 3: over a random 12-hour window, match on 8 hostname tags and query the usage_user metric in the simulated server CPU data for those 8 hosts, aggregated as the maximum per 10-minute interval.
#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') and time >x and time <y interval(10m);
# a,b,c,d,e,f,g,h are random 8 numbers, y-x =12 hour
echo "TDQS3=bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-12-hr \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_tdengine \
-urls=http://127.0.0.1:6020 -workers 50 -print-interval 0|grep wall"
TDQS3=`bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-12-hr \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_tdengine \
-urls="http://127.0.0.1:6020" -workers 50 -print-interval 0|grep wall`
echo
echo -e "${GREEN}TDengine query test case 3 result:${NC}"
echo -e "${GREEN}$TDQS3${NC}"
TMP=`echo $TDQS3|awk '{print($4)}'`
TDQ3=`echo ${TMP%s*}`
#Test case 4
#Test case 4: over a random 1-hour window, match on 8 hostname tags and query the usage_user metric in the simulated server CPU data for those 8 hosts, aggregated as the maximum per 1-minute interval.
#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') and time >x and time <y interval(10m);
# a,b,c,d,e,f,g,h are random 8 numbers, y-x =1 hours
echo "TDQS4=bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-1-hr \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_tdengine \
-urls=http://127.0.0.1:6020 -workers 50 -print-interval 0|grep wall"
TDQS4=`bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-1-hr \
-scale-var $scalevar -queries 1000 | bin/query_benchmarker_tdengine \
-urls="http://127.0.0.1:6020" -workers 50 -print-interval 0|grep wall`
echo
echo -e "${GREEN}TDengine query test case 4 result:${NC}"
echo -e "${GREEN}$TDQS4${NC}"
TMP=`echo $TDQS4|awk '{print($4)}'`
TDQ4=`echo ${TMP%s*}`
}
batch=$DEFAULT_BATCH
days=$DEFAULT_DAYS
interval=$DEFAULT_INTERVAL
scalevar=$DEFAULT_SCALEVAR
doprepare=$DEFAULT_DOPREPARE
dowrite=$DEFAULT_DOWRITE
doquery=$DEFAULT_DOQUERY
help="$(basename "$0") [-h] [-b batchsize] [-d data-of-days] [-i interval] [-v scalevar] [-p false for don't prepare] [-w false for don't do write] [-q false for don't do query]"
while getopts ':b:d:i:v:pwqh' flag; do
case "${flag}" in
b) batch=${OPTARG};;
d) days=${OPTARG};;
i) interval=${OPTARG};;
v) scalevar=${OPTARG};;
p) doprepare=${OPTARG};;
w) dowrite=${OPTARG};;
q) doquery=${OPTARG};;
:) echo -e "${RED}Missing argument!${NC}"
echo "$help"
exit 1
;;
h) echo "$help"
exit 1
;;
esac
done
walLevel=`grep "^walLevel" /etc/taos/taos.cfg | awk '{print $2}'`
if [[ "$walLevel" -eq "2" ]]; then
walPostfix="wal2"
elif [[ "$walLevel" -eq "1" ]]; then
walPostfix="wal1"
else
echo -e "${RED}wrong walLevel $walLevel found! ${NC}"
exit 1
fi
echo -e "${GREEN} $walPostfix found! ${NC}"
if [[ $scalevar -eq 10000 ]]
then
TIME_START="2018-01-01T00:00:00Z"
TIME_END="2018-01-02T00:00:00Z"
if [[ $interval -eq 100 ]]
then
interval_s=100s
DATA_FILE=$DATA_DIR/tdengine-var10k-int100s.dat
RECORD_CSV_FILE=$WORK_DIR/perftest-var10k-int100s-$walPostfix-report.csv
else
interval_s=10s
DATA_FILE=$DATA_DIR/tdengine-var10k-int10s.dat
RECORD_CSV_FILE=$WORK_DIR/perftest-var10k-int10s-$walPostfix-report.csv
fi
else
if [[ $days -eq 1 ]]
then
TIME_START="2018-01-01T00:00:00Z"
TIME_END="2018-01-02T00:00:00Z"
DATA_FILE=$DATA_DIR/tdengine-1d.dat
RECORD_CSV_FILE=$WORK_DIR/perftest-1d-$walPostfix-report.csv
elif [[ $days -eq 13 ]]
then
TIME_START="2018-01-01T00:00:00Z"
TIME_END="2018-01-14T00:00:00Z"
DATA_FILE=$DATA_DIR/tdengine-13d.dat
RECORD_CSV_FILE=$WORK_DIR/perftest-13d-$walPostfix-report.csv
else
echo -e "{RED} don't support input $days{NC}"
exit 1
fi
interval_s=${interval}s
fi
echo "TIME_START: $TIME_START, TIME_END: $TIME_END, DATA_FILE: $DATA_FILE"
echo "doprepare: $doprepare, dowrite: $dowrite, doquery: $doquery"
if $doprepare;
then
do_prepare
fi
echo
if $dowrite;
then
echo -e "Start test TDengine writting, result in ${GREEN}Green line${NC}"
do_write
fi
if $doquery;
then
echo -e "Start test TDengine query, result in ${GREEN}Green line${NC}"
do_query
fi
echo
echo
echo "======================================================"
echo " tsdb performance comparision "
echo "======================================================"
if $dowrite;
then
echo -e " Writing $DATA records test takes: "
printf " TDengine | %-4.5f Seconds \n" $TDWTM
echo "------------------------------------------------------"
fi
if $doquery;
then
echo " Query test cases: "
echo " case 1: select the max(value) from all data "
echo " filtered out 8 hosts "
echo " Query test case 1 takes: "
printf " TDengine | %-4.5f Seconds \n" $TDQ1
echo "------------------------------------------------------"
echo " case 2: select the max(value) from all data "
echo " filtered out 8 hosts with an interval of 1 hour "
echo " case 2 takes: "
printf " TDengine | %-4.5f Seconds \n" $TDQ2
echo "------------------------------------------------------"
echo " case 3: select the max(value) from random 12 hours"
echo " data filtered out 8 hosts with an interval of 10 min "
echo " filtered out 8 hosts interval(1h) "
echo " case 3 takes: "
printf " TDengine | %-4.5f Seconds \n" $TDQ3
echo "------------------------------------------------------"
echo " case 4: select the max(value) from random 1 hour data "
echo " data filtered out 8 hosts with an interval of 1 min "
echo " case 4 takes: "
printf " TDengine | %-4.5f Seconds \n" $TDQ4
echo "------------------------------------------------------"
fi
#bulk_query_gen/bulk_query_gen -format influx-http -query-type 1-host-1-hr -scale-var 10 -queries 1000 | query_benchmarker_influxdb/query_benchmarker_influxdb -urls="http://172.26.89.231:8086"
#bulk_query_gen/bulk_query_gen -format tdengine -query-type 1-host-1-hr -scale-var 10 -queries 1000 | query_benchmarker_tdengine/query_benchmarker_tdengine -urls="http://172.26.89.231:6020"
today=`date +"%Y%m%d"`
echo "${today}, ${TDWTM}, ${TDQ1}, ${TDQ2}, ${TDQ3}, ${TDQ4}" >> $RECORD_CSV_FILE
#!/usr/bin/gnuplot
reset
set terminal png
set title filename font ",20"
set ylabel "Time in Seconds"
set xdata time
set timefmt "%Y%m%d"
set format x "%Y-%m-%d"
set xlabel "Date"
set style data linespoints
set terminal pngcairo size 1024,768 enhanced font 'Segoe UI, 10'
set output filename . '.png'
set datafile separator ','
set key reverse Left outside
set grid
plot filename . '.csv' using 1:2 title "Request Per Second"
......@@ -112,8 +112,7 @@ class TDTestCase:
tdSql.query("select stb_t.ts, stb_t.dscrption, stb_t.temperature, stb_t.id, stb_p.dscrption, stb_p.pressure from stb_p, stb_t where stb_p.ts=stb_t.ts and stb_p.id = stb_t.id")
tdSql.checkRows(6)
tdSql.query("select stb_t.ts, stb_t.dscrption, stb_t.temperature, stb_t.pid, stb_p.id, stb_p.dscrption, stb_p.pressure,stb_v.velocity from stb_p, stb_t, stb_v where stb_p.ts=stb_t.ts and stb_p.ts=stb_v.ts and stb_p.id = stb_t.id")
tdSql.checkRows(2)
tdSql.error("select stb_t.ts, stb_t.dscrption, stb_t.temperature, stb_t.pid, stb_p.id, stb_p.dscrption, stb_p.pressure,stb_v.velocity from stb_p, stb_t, stb_v where stb_p.ts=stb_t.ts and stb_p.ts=stb_v.ts and stb_p.id = stb_t.id")
def stop(self):
tdSql.close()
......
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys
import taos
import threading
import time
from datetime import datetime
class MetadataQuery:
def initConnection(self):
self.tables = 100000
self.records = 10
self.numOfTherads = 10
self.ts = 1537146000000
self.host = "127.0.0.1"
self.user = "root"
self.password = "taosdata"
self.config = "/etc/taos"
def connectDB(self):
self.conn = taos.connect(
self.host,
self.user,
self.password,
self.config)
return self.conn.cursor()
def createStable(self):
print("================= Create stable meters =================")
cursor = self.connectDB()
cursor.execute("drop database if exists test")
cursor.execute("create database test")
cursor.execute("use test")
cursor.execute('''create table if not exists meters (ts timestamp, speed int) tags(
tgcol1 tinyint, tgcol2 smallint, tgcol3 int, tgcol4 bigint, tgcol5 float, tgcol6 double, tgcol7 bool, tgcol8 binary(20), tgcol9 nchar(20),
tgcol10 tinyint, tgcol11 smallint, tgcol12 int, tgcol13 bigint, tgcol14 float, tgcol15 double, tgcol16 bool, tgcol17 binary(20), tgcol18 nchar(20),
tgcol19 tinyint, tgcol20 smallint, tgcol21 int, tgcol22 bigint, tgcol23 float, tgcol24 double, tgcol25 bool, tgcol26 binary(20), tgcol27 nchar(20),
tgcol28 tinyint, tgcol29 smallint, tgcol30 int, tgcol31 bigint, tgcol32 float, tgcol33 double, tgcol34 bool, tgcol35 binary(20), tgcol36 nchar(20),
tgcol37 tinyint, tgcol38 smallint, tgcol39 int, tgcol40 bigint, tgcol41 float, tgcol42 double, tgcol43 bool, tgcol44 binary(20), tgcol45 nchar(20),
tgcol46 tinyint, tgcol47 smallint, tgcol48 int, tgcol49 bigint, tgcol50 float, tgcol51 double, tgcol52 bool, tgcol53 binary(20), tgcol54 nchar(20))''')
cursor.close()
self.conn.close()
def createTablesAndInsertData(self, threadID):
cursor = self.connectDB()
cursor.execute("use test")
base = threadID * self.tables
tablesPerThread = int (self.tables / self.numOfTherads)
for i in range(tablesPerThread):
cursor.execute(
'''create table t%d using meters tags(
%d, %d, %d, %d, %f, %f, %d, 'taosdata%d', '涛思数据%d',
%d, %d, %d, %d, %f, %f, %d, 'taosdata%d', '涛思数据%d',
%d, %d, %d, %d, %f, %f, %d, 'taosdata%d', '涛思数据%d',
%d, %d, %d, %d, %f, %f, %d, 'taosdata%d', '涛思数据%d',
%d, %d, %d, %d, %f, %f, %d, 'taosdata%d', '涛思数据%d',
%d, %d, %d, %d, %f, %f, %d, 'taosdata%d', '涛思数据%d')''' %
(base + i + 1,
(base + i) %100, (base + i) %10000, (base + i) %1000000, (base + i) %100000000, (base + i) %100 * 1.1, (base + i) %100 * 2.3, (base + i) %2, (base + i) %100, (base + i) %100,
(base + i) %100, (base + i) %10000, (base + i) %1000000, (base + i) %100000000, (base + i) %100 * 1.1, (base + i) %100 * 2.3, (base + i) %2, (base + i) %100, (base + i) %100,
(base + i) %100, (base + i) %10000, (base + i) %1000000, (base + i) %100000000, (base + i) %100 * 1.1, (base + i) %100 * 2.3, (base + i) %2, (base + i) %100, (base + i) %100,
(base + i) %100, (base + i) %10000, (base + i) %1000000, (base + i) %100000000, (base + i) %100 * 1.1, (base + i) %100 * 2.3, (base + i) %2, (base + i) %100, (base + i) %100,
(base + i) %100, (base + i) %10000, (base + i) %1000000, (base + i) %100000000, (base + i) %100 * 1.1, (base + i) %100 * 2.3, (base + i) %2, (base + i) %100, (base + i) %100,
(base + i) %100, (base + i) %10000, (base + i) %1000000, (base + i) %100000000, (base + i) %100 * 1.1, (base + i) %100 * 2.3, (base + i) %2, (base + i) %100, (base + i) %100))
for j in range(self.records):
cursor.execute(
"insert into t%d values(%d, %d)" %
(base + i + 1, self.ts + j, j))
cursor.close()
self.conn.close()
def queryData(self, query):
cursor = self.connectDB()
cursor.execute("use test")
print("================= query tag data =================")
startTime = datetime.now()
cursor.execute(query)
cursor.fetchall()
endTime = datetime.now()
print(
"Query time for the above query is %d seconds" %
(endTime - startTime).seconds)
cursor.close()
self.conn.close()
if __name__ == '__main__':
t = MetadataQuery()
t.initConnection()
t.createStable()
print(
"================= Create %d tables and insert %d records into each table =================" %
(t.tables, t.records))
startTime = datetime.now()
for i in range(t.numOfTherads):
thread = threading.Thread(
target=t.createTablesAndInsertData, args=(i,))
thread.start()
thread.join()
endTime = datetime.now()
diff = (endTime - startTime).seconds
print(
"spend %d seconds to create %d tables and insert %d records into each table" %
(diff, t.tables, t.records))
query = '''select tgcol1, tgcol2, tgcol3, tgcol4, tgcol5, tgcol6, tgcol7, tgcol8, tgcol9,
tgcol10, tgcol11, tgcol12, tgcol13, tgcol14, tgcol15, tgcol16, tgcol17, tgcol18,
tgcol19, tgcol20, tgcol21, tgcol22, tgcol23, tgcol24, tgcol25, tgcol26, tgcol27,
tgcol28, tgcol29, tgcol30, tgcol31, tgcol32, tgcol33, tgcol34, tgcol35, tgcol36,
tgcol37, tgcol38, tgcol39, tgcol40, tgcol41, tgcol42, tgcol43, tgcol44, tgcol45,
tgcol46, tgcol47, tgcol48, tgcol49, tgcol50, tgcol51, tgcol52, tgcol53, tgcol54
from meters where tgcol1 > 10 AND tgcol1 < 100 and tgcol2 > 100 and tgcol2 < 1000 or tgcol3 > 10000 or tgcol7 = true
or tgcol8 like '%2' and tgcol10 < 10'''
t.queryData(query)