diff --git a/tests/perftest-scripts/perftest-csv2png.gnuplot b/tests/perftest-scripts/perftest-csv2png.gnuplot
new file mode 100644
index 0000000000000000000000000000000000000000..9c34ebe403fe7317c963ab8693069025acc79a64
--- /dev/null
+++ b/tests/perftest-scripts/perftest-csv2png.gnuplot
@@ -0,0 +1,33 @@
+#!/usr/bin/gnuplot
+reset
+set terminal png
+
+set title "Performance Test Report" font ",20"
+
+set ylabel "Time in Seconds"
+
+set xdata time
+set timefmt "%Y%m%d"
+set format x "%Y-%m-%d"
+set xlabel "Date"
+
+set style data linespoints
+
+set terminal pngcairo size 1024,768 enhanced font 'Segoe UI, 10'
+set output filename . '.png'
+set datafile separator ','
+
+set key reverse Left outside
+set grid
+
+# plot 'perftest-influx-report.csv' using 1:2 title "InfluxDB Write", \
+#      "" using 1:3 title "InfluxDB Query case1", \
+#      "" using 1:4 title "InfluxDB Query case2", \
+#      "" using 1:5 title "InfluxDB Query case3", \
+#      "" using 1:6 title "InfluxDB Query case4"
+#
+plot filename . '.csv' using 1:2 title "TDengine Write", \
+     "" using 1:3 title "TDengine Query case1", \
+     "" using 1:4 title "TDengine Query case2", \
+     "" using 1:5 title "TDengine Query case3", \
+     "" using 1:6 title "TDengine Query case4"
diff --git a/tests/perftest-scripts/perftest-daily.sh b/tests/perftest-scripts/perftest-daily.sh
new file mode 100755
index 0000000000000000000000000000000000000000..894d9c7905170fdab1d6a7392f2170cf72e39fa3
--- /dev/null
+++ b/tests/perftest-scripts/perftest-daily.sh
@@ -0,0 +1,95 @@
+#!/bin/bash
+
+# Coloured Echoes #
+function red_echo { echo -e "\033[31m$@\033[0m"; } #
+function green_echo { echo -e "\033[32m$@\033[0m"; } #
+function yellow_echo { echo -e "\033[33m$@\033[0m"; } #
+function white_echo { echo -e "\033[1;37m$@\033[0m"; } #
+# Coloured Printfs #
+function red_printf { printf "\033[31m$@\033[0m"; } #
+function green_printf { printf "\033[32m$@\033[0m"; } #
+function yellow_printf { printf "\033[33m$@\033[0m"; } #
+function white_printf { printf "\033[1;37m$@\033[0m"; } #
+# Debugging Outputs #
+function white_brackets { local args="$@"; white_printf "["; printf "${args}"; white_printf "]"; } #
+function echoInfo { local args="$@"; white_brackets $(green_printf "INFO") && echo " ${args}"; } #
+function echoWarn { local args="$@"; echo "$(white_brackets "$(yellow_printf "WARN")" && echo " ${args}";)" 1>&2; } #
+function echoError { local args="$@"; echo "$(white_brackets "$(red_printf "ERROR")" && echo " ${args}";)" 1>&2; } #
+
+function set-Wal {
+    echo "/etc/taos/taos.cfg walLevel will be set to $1"
+    sed -i 's/^walLevel.*$/walLevel '"$1"'/g' /etc/taos/taos.cfg
+}
+
+function collectSysInfo {
+    rm sysinfo.log
+    grep model /proc/cpuinfo | tail -n1 | tee sysinfo.log
+    grep cores /proc/cpuinfo | tail -n1 | tee -a sysinfo.log
+    grep MemTotal /proc/meminfo | tee -a sysinfo.log
+    grep "^[^#;]" /etc/taos/taos.cfg | tee taos.cfg
+}
+
+function buildTDengine {
+    cd /root/TDengine
+    git pull
+    cd debug
+    rm -rf *
+    cmake ..
+    make > /dev/null
+    make install
+}
+
+function restartTaosd {
+    systemctl stop taosd
+    pkill -KILL -x taosd
+    sleep 10
+
+    rm -rf /mnt/var/log/taos/*
+    rm -rf /mnt/var/lib/taos/*
+
+    taosd > /dev/null 2>&1 &
+    sleep 10
+}
+
+function sendReport {
+    receiver="sdsang@taosdata.com, sangshuduo@gmail.com"
+    mimebody="MIME-Version: 1.0\nContent-Type: text/html; charset=utf-8\n"
+
+    echo -e "to: ${receiver}\nsubject: Perf test report ${today}\n" | \
+        (cat - && uuencode perftest-1d-$today.log perftest-1d-$today.log) | \
+        (cat - && uuencode perftest-1d-report.csv perftest-1d-report-$today.csv) | \
+        (cat - && uuencode perftest-1d-report.png perftest-1d-report-$today.png) | \
+        (cat - && uuencode perftest-13d-$today.log perftest-13d-$today.log) | \
+        (cat - && uuencode perftest-13d-report.csv perftest-13d-report-$today.csv) | \
+        (cat - && uuencode perftest-13d-report.png perftest-13d-report-$today.png) | \
+        (cat - && uuencode taosdemo-$today.log taosdemo-$today.log) | \
+        (cat - && uuencode taosdemo-report.csv taosdemo-report-$today.csv) | \
+        (cat - && uuencode taosdemo-report.png taosdemo-report-$today.png) | \
+        (cat - && uuencode sysinfo.log sysinfo.txt) | \
+        (cat - && uuencode taos.cfg taos-cfg-$today.txt) | \
+        ssmtp "${receiver}"
+}
+
+today=`date +"%Y%m%d"`
+cd /root
+echo -e "cron-ran-at-${today}" >> cron.log
+
+echoInfo "Build TDengine"
+buildTDengine
+
+set-Wal "2"
+
+cd /root
+./perftest-tsdb-compare-1d.sh
+
+cd /root
+./perftest-tsdb-compare-13d.sh
+
+cd /root
+./perftest-taosdemo.sh
+
+collectSysInfo
+
+echoInfo "Send Report"
+sendReport
+echoInfo "End of Test"
diff --git a/tests/perftest-scripts/perftest-taosdemo.sh b/tests/perftest-scripts/perftest-taosdemo.sh
new file mode 100755
index 0000000000000000000000000000000000000000..511ec22fec47a496b5e79b35c8d0b42d61d0a336
--- /dev/null
+++ b/tests/perftest-scripts/perftest-taosdemo.sh
@@ -0,0 +1,70 @@
+#!/bin/bash
+
+# Coloured Echoes #
+function red_echo { echo -e "\033[31m$@\033[0m"; } #
+function green_echo { echo -e "\033[32m$@\033[0m"; } #
+function yellow_echo { echo -e "\033[33m$@\033[0m"; } #
+function white_echo { echo -e "\033[1;37m$@\033[0m"; } #
+# Coloured Printfs #
+function red_printf { printf "\033[31m$@\033[0m"; } #
+function green_printf { printf "\033[32m$@\033[0m"; } #
+function yellow_printf { printf "\033[33m$@\033[0m"; } #
+function white_printf { printf "\033[1;37m$@\033[0m"; } #
+# Debugging Outputs #
+function white_brackets { local args="$@"; white_printf "["; printf "${args}"; white_printf "]"; } #
+function echoInfo { local args="$@"; white_brackets $(green_printf "INFO") && echo " ${args}"; } #
+function echoWarn { local args="$@"; echo "$(white_brackets "$(yellow_printf "WARN")" && echo " ${args}";)" 1>&2; } #
+function echoError { local args="$@"; echo "$(white_brackets "$(red_printf "ERROR")" && echo " ${args}";)" 1>&2; } #
+
+function restartTaosd {
+    systemctl stop taosd
+    pkill -KILL -x taosd
+    sleep 10
+
+    rm -rf /mnt/var/log/taos/*
+    rm -rf /mnt/var/lib/taos/*
+
+    taosd > /dev/null 2>&1 &
+    sleep 10
+}
+
+function runCreateTableOnly {
+    echoInfo "Restart Taosd"
+    restartTaosd
+
+    /usr/bin/time -f "Total: %e" -o totaltime.out bash -c "yes | taosdemo -n 0 2>&1 | tee taosdemo-$today.log"
+    demoTableOnly=`grep "Total:" totaltime.out|awk '{print $2}'`
+}
+
+function runCreateTableThenInsert {
+    echoInfo "Restart Taosd"
+    restartTaosd
+
+    /usr/bin/time -f "Total: %e" -o totaltime.out bash -c "yes | taosdemo 2>&1 | tee -a taosdemo-$today.log"
+    demoTableAndInsert=`grep "Total:" totaltime.out|awk '{print $2}'`
+    demoRPS=`grep "records\/second" taosdemo-$today.log | tail -n1 | awk '{print $13}'`
+}
+
+function generateTaosdemoPlot {
+    echo "${today}, demoTableOnly: ${demoTableOnly}, demoTableAndInsert: ${demoTableAndInsert}" | tee -a taosdemo-$today.log
+    echo "${today}, ${demoTableOnly}, ${demoTableAndInsert}, ${demoRPS}" >> taosdemo-report.csv
+
+    csvLines=`cat taosdemo-report.csv | wc -l`
+
+    if [ "$csvLines" -gt "10" ]; then
+        sed -i '1d' taosdemo-report.csv
+    fi
+
+    gnuplot -p taosdemo-csv2png.gnuplot
+}
+
+today=`date +"%Y%m%d"`
+
+cd /root
+echoInfo "Test Create Table Only"
+runCreateTableOnly
+echoInfo "Test Create Table then Insert data"
+runCreateTableThenInsert
+echoInfo "Generate plot for taosdemo"
+generateTaosdemoPlot
+echoInfo "End of TaosDemo Test"
diff --git a/tests/perftest-scripts/perftest-tsdb-compare-13d.sh b/tests/perftest-scripts/perftest-tsdb-compare-13d.sh
new file mode 100755
index 0000000000000000000000000000000000000000..4b3ed6818c433c96fdde2513a3e38bb3110dfdbf
--- /dev/null
+++ b/tests/perftest-scripts/perftest-tsdb-compare-13d.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+
+# Coloured Echoes #
+function red_echo { echo -e "\033[31m$@\033[0m"; } #
+function green_echo { echo -e "\033[32m$@\033[0m"; } #
+function yellow_echo { echo -e "\033[33m$@\033[0m"; } #
+function white_echo { echo -e "\033[1;37m$@\033[0m"; } #
+# Coloured Printfs #
+function red_printf { printf "\033[31m$@\033[0m"; } #
+function green_printf { printf "\033[32m$@\033[0m"; } #
+function yellow_printf { printf "\033[33m$@\033[0m"; } #
+function white_printf { printf "\033[1;37m$@\033[0m"; } #
+# Debugging Outputs #
+function white_brackets { local args="$@"; white_printf "["; printf "${args}"; white_printf "]"; } #
+function echoInfo { local args="$@"; white_brackets $(green_printf "INFO") && echo " ${args}"; } #
+function echoWarn { local args="$@"; echo "$(white_brackets "$(yellow_printf "WARN")" && echo " ${args}";)" 1>&2; } #
+function echoError { local args="$@"; echo "$(white_brackets "$(red_printf "ERROR")" && echo " ${args}";)" 1>&2; } #
+
+function restartTaosd {
+    systemctl stop taosd
+    pkill -KILL -x taosd
+    sleep 10
+
+    rm -rf /mnt/var/log/taos/*
+    rm -rf /mnt/var/lib/taos/*
+
+    taosd > /dev/null 2>&1 &
+    sleep 10
+}
+
+function runPerfTest13d {
+    echoInfo "Restart Taosd"
+    restartTaosd
+
+    cd /home/taos/tliu/timeseriesdatabase-comparisons/build/tsdbcompare
+    ./runreal-13d-csv.sh 2>&1 | tee /root/perftest-13d-$today.log
+}
+
+function generatePerfPlot13d {
+    cd /root
+
+    csvLines=`cat perftest-13d-report.csv | wc -l`
+
+    if [ "$csvLines" -gt "10" ]; then
+        sed -i '1d' perftest-13d-report.csv
+    fi
+
+    gnuplot -e "filename='perftest-13d-report'" -p perftest-csv2png.gnuplot
+}
+
+today=`date +"%Y%m%d"`
+cd /root
+
+echoInfo "run Performance Test with 13 days data"
+runPerfTest13d
+echoInfo "Generate plot of 13 days data"
+generatePerfPlot13d
+echoInfo "End of TSDB-Compare 13-days-data Test"
diff --git a/tests/perftest-scripts/perftest-tsdb-compare-1d.sh b/tests/perftest-scripts/perftest-tsdb-compare-1d.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ebe34cde72ce3d6af4ff03ac242c73efecb53475
--- /dev/null
+++ b/tests/perftest-scripts/perftest-tsdb-compare-1d.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+
+# Coloured Echoes #
+function red_echo { echo -e "\033[31m$@\033[0m"; } #
+function green_echo { echo -e "\033[32m$@\033[0m"; } #
+function yellow_echo { echo -e "\033[33m$@\033[0m"; } #
+function white_echo { echo -e "\033[1;37m$@\033[0m"; } #
+# Coloured Printfs #
+function red_printf { printf "\033[31m$@\033[0m"; } #
+function green_printf { printf "\033[32m$@\033[0m"; } #
+function yellow_printf { printf "\033[33m$@\033[0m"; } #
+function white_printf { printf "\033[1;37m$@\033[0m"; } #
+# Debugging Outputs #
+function white_brackets { local args="$@"; white_printf "["; printf "${args}"; white_printf "]"; } #
+function echoInfo { local args="$@"; white_brackets $(green_printf "INFO") && echo " ${args}"; } #
+function echoWarn { local args="$@"; echo "$(white_brackets "$(yellow_printf "WARN")" && echo " ${args}";)" 1>&2; } #
+function echoError { local args="$@"; echo "$(white_brackets "$(red_printf "ERROR")" && echo " ${args}";)" 1>&2; } #
+
+function restartTaosd {
+    systemctl stop taosd
+    pkill -KILL -x taosd
+    sleep 10
+
+    rm -rf /mnt/var/log/taos/*
+    rm -rf /mnt/var/lib/taos/*
+
+    taosd > /dev/null 2>&1 &
+    sleep 10
+}
+
+function runPerfTest1d {
+    echoInfo "Restart Taosd"
+    restartTaosd
+
+    cd /home/taos/tliu/timeseriesdatabase-comparisons/build/tsdbcompare
+    ./runreal-1d-csv.sh 2>&1 | tee /root/perftest-1d-$today.log
+}
+
+function generatePerfPlot1d {
+    cd /root
+
+    csvLines=`cat perftest-1d-report.csv | wc -l`
+
+    if [ "$csvLines" -gt "10" ]; then
+        sed -i '1d' perftest-1d-report.csv
+    fi
+
+    gnuplot -e "filename='perftest-1d-report'" -p perftest-csv2png.gnuplot
+}
+
+today=`date +"%Y%m%d"`
+cd /root
+
+echoInfo "run Performance Test with 1 day data"
+runPerfTest1d
+echoInfo "Generate plot of 1 day data"
+generatePerfPlot1d
+echoInfo "End of TSDB-Compare 1-day-data Test"
diff --git a/tests/perftest-scripts/run-csv.sh b/tests/perftest-scripts/run-csv.sh
new file mode 100755
index 0000000000000000000000000000000000000000..afa1d5df5de0c9780d65ff12ae680a323851444f
--- /dev/null
+++ b/tests/perftest-scripts/run-csv.sh
@@ -0,0 +1,209 @@
+#!/bin/bash
+
+
+# Color setting
+RED='\033[0;31m'
+GREEN='\033[1;32m'
+GREEN_DARK='\033[0;32m'
+GREEN_UNDERLINE='\033[4;32m'
+NC='\033[0m'
+docker rm -f `docker ps -a -q`
+#set -x
+echo
+echo "---------------Generating Data-----------------"
+echo
+echo "Prepare data for InfluxDB...."
+#bin/bulk_data_gen -seed 123 -format influx-bulk -scale-var 100 -use-case devops -timestamp-start "2018-01-01T00:00:00Z" -timestamp-end "2018-01-02T00:00:00Z" >data/influx.dat
+bin/bulk_data_gen -seed 123 -format influx-bulk -sampling-interval 1s -scale-var 10 -use-case devops -timestamp-start "2018-01-01T00:00:00Z" -timestamp-end "2018-01-02T00:00:00Z" >data/influx.dat
+
+echo
+echo "Prepare data for TDengine...."
+#bin/bulk_data_gen -seed 123 -format tdengine -tdschema-file config/TDengineSchema.toml -scale-var 100 -use-case devops -timestamp-start "2018-01-01T00:00:00Z" -timestamp-end "2018-01-02T00:00:00Z" > data/tdengine.dat
+bin/bulk_data_gen -seed 123 -format tdengine -sampling-interval 1s -tdschema-file config/TDengineSchema.toml -scale-var 10 -use-case devops -timestamp-start "2018-01-01T00:00:00Z" -timestamp-end "2018-01-02T00:00:00Z" > data/tdengine.dat
+
+
+
+docker network create --ip-range 172.15.1.255/24 --subnet 172.15.1.1/16 tsdbcomp >>/dev/null 2>&1
+
+
+TDENGINE=`docker run -d --net tsdbcomp --ip 172.15.1.6 -p 6030:6030 -p 6020:6020 -p 6031:6031 -p 6032:6032 -p 6033:6033 -p 6034:6034 -p 6035:6035 -p 6036:6036 -p 6037:6037 -p 6038:6038 -p 6039:6039 tdengine/tdengine:1.6.4.5`
+echo
+echo "------------------Writing Data-----------------"
+echo
+sleep 5
+echo
+echo -e "Start test TDengine, result in ${GREEN}Green line${NC}"
+
+TDENGINERES=`cat data/tdengine.dat |bin/bulk_load_tdengine --url 172.15.1.6:0 --batch-size 300 -do-load -report-tags n1 -workers 20 -fileout=false| grep loaded`
+#TDENGINERES=`cat data/tdengine.dat |gunzip|bin/bulk_load_tdengine --url 172.15.1.6:0 --batch-size 300 -do-load -report-tags n1 -workers 10 -fileout=false| grep loaded`
+echo
+echo -e "${GREEN}TDengine writing result:${NC}"
+echo -e "${GREEN}$TDENGINERES${NC}"
+DATA=`echo $TDENGINERES|awk '{print($2)}'`
+TMP=`echo $TDENGINERES|awk '{print($5)}'`
+TDWTM=`echo ${TMP%s*}`
+
+
+INFLUX=`docker run -d -p 8086:8086 --net tsdbcomp --ip 172.15.1.5 influxdb` >>/dev/null 2>&1
+sleep 10
+echo
+echo -e "Start test InfluxDB, result in ${GREEN}Green line${NC}"
+
+
+INFLUXRES=`cat data/influx.dat |bin/bulk_load_influx --batch-size=5000 --workers=20 --urls="http://172.15.1.5:8086" | grep loaded`
+
+
+echo
+echo -e "${GREEN}InfluxDB writing result:${NC}"
+echo -e "${GREEN}$INFLUXRES${NC}"
+
+TMP=`echo $INFLUXRES|awk '{print($5)}'`
+IFWTM=`echo ${TMP%s*}`
+
+echo
+echo "------------------Querying Data-----------------"
+echo
+
+sleep 10
+echo
+echo "start query test, query max from 8 hosts group by 1 hour, TDengine"
+echo
+
+#Test case 1
+#Over the whole data set, match 8 hostname tags and query the maximum of the usage_user metric in the simulated server CPU data for those 8 hosts.
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') ;
+# a,b,c,d,e,f,g,h are random 8 numbers.
+TDQS1=`bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-all -scale-var 10 -queries 1000 | bin/query_benchmarker_tdengine -urls="http://172.15.1.6:6020" -workers 50 -print-interval 0|grep wall`
+echo
+echo -e "${GREEN}TDengine query test case 1 result:${NC}"
+echo -e "${GREEN}$TDQS1${NC}"
+TMP=`echo $TDQS1|awk '{print($4)}'`
+TDQ1=`echo ${TMP%s*}`
+
+#Test case 2
+#Over the whole data set, match 8 hostname tags and query the maximum of the usage_user metric for those 8 hosts at 1-hour granularity (the max of every hour).
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') interval(1h);
+# a,b,c,d,e,f,g,h are random 8 numbers
+TDQS2=`bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-allbyhr -scale-var 10 -queries 1000 | bin/query_benchmarker_tdengine -urls="http://172.15.1.6:6020" -workers 50 -print-interval 0|grep wall`
+
+echo
+echo -e "${GREEN}TDengine query test case 2 result:${NC}"
+echo -e "${GREEN}$TDQS2${NC}"
+TMP=`echo $TDQS2|awk '{print($4)}'`
+TDQ2=`echo ${TMP%s*}`
+
+#Test case 3
+#Query a random 12-hour window, match 8 hostname tags and query the maximum of the usage_user metric for those 8 hosts at 10-minute granularity (the max of every 10 minutes).
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') and time > x and time < x+12h interval(10m);
+
+docker stop $INFLUX >>/dev/null 2>&1
+docker container rm -f $INFLUX >>/dev/null 2>&1
+docker stop $TDENGINE >>/dev/null 2>&1
+docker container rm -f $TDENGINE >>/dev/null 2>&1
+docker network rm tsdbcomp >>/dev/null 2>&1
+#bulk_query_gen/bulk_query_gen -format influx-http -query-type 1-host-1-hr -scale-var 10 -queries 1000 | query_benchmarker_influxdb/query_benchmarker_influxdb -urls="http://172.26.89.231:8086"
+#bulk_query_gen/bulk_query_gen -format tdengine -query-type 1-host-1-hr -scale-var 10 -queries 1000 | query_benchmarker_tdengine/query_benchmarker_tdengine -urls="http://172.26.89.231:6020"
+
+
+today=`date +"%Y%m%d"`
+echo "${today}, ${IFWTM}, ${IFQ1}, ${IFQ2}, ${IFQ3}, ${IFQ4}" >> /root/perftest-influx-report.csv
+
diff --git a/tests/perftest-scripts/runInfluxdb-13d-csv.sh b/tests/perftest-scripts/runInfluxdb-13d-csv.sh
new file mode 100755
index 0000000000000000000000000000000000000000..2eb36068688e8e180165c487bdeacbba9a9e7b73
--- /dev/null
+++ b/tests/perftest-scripts/runInfluxdb-13d-csv.sh
@@ -0,0 +1,126 @@
+#!/bin/bash
+
+
+# Color setting
+RED='\033[0;31m'
+GREEN='\033[1;32m'
+GREEN_DARK='\033[0;32m'
+GREEN_UNDERLINE='\033[4;32m'
+NC='\033[0m'
+docker rm -f `docker ps -a -q`
+#set -x
+echo
+echo "---------------Generating Data-----------------"
+echo
+echo "Prepare data for InfluxDB...."
+#bin/bulk_data_gen -seed 123 -format influx-bulk -scale-var 100 -use-case devops -timestamp-start "2018-01-01T00:00:00Z" -timestamp-end "2018-01-02T00:00:00Z" >data/influx.dat
+bin/bulk_data_gen -seed 123 -format influx-bulk -sampling-interval 1s -scale-var 10 -use-case devops -timestamp-start "2018-01-02T00:00:00Z" -timestamp-end "2018-01-15T00:00:00Z" > /mnt/data/influx.dat
+
+docker network create --ip-range 172.15.1.255/24 --subnet 172.15.1.1/16 tsdbcomp >>/dev/null 2>&1
+
+INFLUX=`docker run -d -p 8086:8086 --net tsdbcomp --ip 172.15.1.5 influxdb` >>/dev/null 2>&1
+sleep 10
+echo
+echo -e "Start test InfluxDB, result in ${GREEN}Green line${NC}"
+
+
+INFLUXRES=`cat /mnt/data/influx.dat |bin/bulk_load_influx --batch-size=5000 --workers=20 --urls="http://172.15.1.5:8086" | grep loaded`
+
+
+echo
+echo -e "${GREEN}InfluxDB writing result:${NC}"
+echo -e "${GREEN}$INFLUXRES${NC}"
+DATA=`echo $INFLUXRES|awk '{print($2)}'`
+TMP=`echo $INFLUXRES|awk '{print($5)}'`
+IFWTM=`echo ${TMP%s*}`
+
+echo
+echo "------------------Querying Data-----------------"
+echo
+
+sleep 10
+
+echo
+echo "start query test, query max from 8 hosts group by 1hour, Influxdb"
+echo
+#Test case 1
+#Over the whole data set, match 8 hostname tags and query the maximum of the usage_user metric in the simulated server CPU data for those 8 hosts.
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') ;
+# a,b,c,d,e,f,g,h are random 8 numbers.
+IFQS1=`bin/bulk_query_gen -seed 123 -format influx-http -query-type 8-host-all -scale-var 10 -queries 1000 | bin/query_benchmarker_influxdb -urls="http://172.15.1.5:8086" -workers 50 -print-interval 0|grep wall`
+echo -e "${GREEN}InfluxDB query test case 1 result:${NC}"
+echo -e "${GREEN}$IFQS1${NC}"
+TMP=`echo $IFQS1|awk '{print($4)}'`
+IFQ1=`echo ${TMP%s*}`
+#Test case 2
+#Over the whole data set, match 8 hostname tags and query the maximum of the usage_user metric for those 8 hosts at 1-hour granularity (the max of every hour).
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') interval(1h);
+# a,b,c,d,e,f,g,h are random 8 numbers
+IFQS2=`bin/bulk_query_gen -seed 123 -format influx-http -query-type 8-host-allbyhr -scale-var 10 -queries 1000 | bin/query_benchmarker_influxdb -urls="http://172.15.1.5:8086" -workers 50 -print-interval 0|grep wall`
+echo -e "${GREEN}InfluxDB query test case 2 result:${NC}"
+echo -e "${GREEN}$IFQS2${NC}"
+TMP=`echo $IFQS2|awk '{print($4)}'`
+IFQ2=`echo ${TMP%s*}`
+#Test case 3
+#Query a random 12-hour window, match 8 hostname tags and query the maximum of the usage_user metric for those 8 hosts at 10-minute granularity (the max of every 10 minutes).
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') and time > x and time < x+12h interval(10m);
+
+echo "${today}, ${IFWTM}, ${IFQ1}, ${IFQ2}, ${IFQ3}, ${IFQ4}" >> /root/perftest-influxdb-report-13d.csv
+
+docker stop $INFLUX >>/dev/null 2>&1
+docker container rm -f $INFLUX >>/dev/null 2>&1
+docker network rm tsdbcomp >>/dev/null 2>&1
+#bulk_query_gen/bulk_query_gen -format influx-http -query-type 1-host-1-hr -scale-var 10 -queries 1000 | query_benchmarker_influxdb/query_benchmarker_influxdb -urls="http://172.26.89.231:8086"
+#bulk_query_gen/bulk_query_gen -format tdengine -query-type 1-host-1-hr -scale-var 10 -queries 1000 | query_benchmarker_tdengine/query_benchmarker_tdengine -urls="http://172.26.89.231:6020"
+
diff --git a/tests/perftest-scripts/runreal-13d-csv.sh b/tests/perftest-scripts/runreal-13d-csv.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ff7ce41d4bd2f85d6911cf5f9d194a487fc4df07
--- /dev/null
+++ b/tests/perftest-scripts/runreal-13d-csv.sh
@@ -0,0 +1,149 @@
+#!/bin/bash
+
+
+# Color setting
+RED='\033[0;31m'
+GREEN='\033[1;32m'
+GREEN_DARK='\033[0;32m'
+GREEN_UNDERLINE='\033[4;32m'
+NC='\033[0m'
+#set -x
+echo
+echo "---------------Generating Data-----------------"
+echo
+
+echo
+echo "Prepare data for TDengine...."
+#bin/bulk_data_gen -seed 123 -format tdengine -tdschema-file config/TDengineSchema.toml -scale-var 100 -use-case devops -timestamp-start "2018-01-01T00:00:00Z" -timestamp-end "2018-01-02T00:00:00Z" > data/tdengine.dat
+bin/bulk_data_gen -seed 123 -format tdengine -sampling-interval 1s -tdschema-file config/TDengineSchema.toml -scale-var 10 -use-case devops -timestamp-start "2018-01-01T00:00:00Z" -timestamp-end "2018-01-14T00:00:00Z" > /mnt/data/tdengine.dat
+
+
+echo
+echo -e "Start test TDengine, result in ${GREEN}Green line${NC}"
+
+for i in {1..5}; do
+    TDENGINERES=`cat /mnt/data/tdengine.dat |bin/bulk_load_tdengine --url 127.0.0.1:0 --batch-size 5000 -do-load -report-tags n1 -workers 20 -fileout=false| grep loaded`
+#TDENGINERES=`cat data/tdengine.dat |gunzip|bin/bulk_load_tdengine --url 127.0.0.1:0 --batch-size 300 -do-load -report-tags n1 -workers 10 -fileout=false| grep loaded`
+    echo
+    echo -e "${GREEN}TDengine writing result:${NC}"
+    echo -e "${GREEN}$TDENGINERES${NC}"
+    DATA=`echo $TDENGINERES|awk '{print($2)}'`
+    TMP=`echo $TDENGINERES|awk '{print($5)}'`
+    TDWTM=`echo ${TMP%s*}`
+
+    [ -z "$TDWTM" ] || break
+done
+
+
+
+echo
+echo "------------------Querying Data-----------------"
+echo
+
+sleep 10
+echo
+echo "start query test, query max from 8 hosts group by 1 hour, TDengine"
+echo
+
+#Test case 1
+#Over the whole data set, match 8 hostname tags and query the maximum of the usage_user metric in the simulated server CPU data for those 8 hosts.
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') ;
+# a,b,c,d,e,f,g,h are random 8 numbers.
+for i in {1..5}; do
+    TDQS1=`bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-all -scale-var 10 -queries 1000 | bin/query_benchmarker_tdengine -urls="http://127.0.0.1:6020" -workers 50 -print-interval 0|grep wall`
+    echo
+    echo -e "${GREEN}TDengine query test case 1 result:${NC}"
+    echo -e "${GREEN}$TDQS1${NC}"
+    TMP=`echo $TDQS1|awk '{print($4)}'`
+    TDQ1=`echo ${TMP%s*}`
+
+    [ -z "$TDQ1" ] || break
+done
+
+#Test case 2
+#Over the whole data set, match 8 hostname tags and query the maximum of the usage_user metric for those 8 hosts at 1-hour granularity (the max of every hour).
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') interval(1h);
+# a,b,c,d,e,f,g,h are random 8 numbers
+for i in {1..5}; do
+    TDQS2=`bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-allbyhr -scale-var 10 -queries 1000 | bin/query_benchmarker_tdengine -urls="http://127.0.0.1:6020" -workers 50 -print-interval 0|grep wall`
+
+    echo
+    echo -e "${GREEN}TDengine query test case 2 result:${NC}"
+    echo -e "${GREEN}$TDQS2${NC}"
+    TMP=`echo $TDQS2|awk '{print($4)}'`
+    TDQ2=`echo ${TMP%s*}`
+
+    [ -z "$TDQ2" ] || break
+done
+
+#Test case 3
+#Query a random 12-hour window, match 8 hostname tags and query the maximum of the usage_user metric for those 8 hosts at 10-minute granularity (the max of every 10 minutes).
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') and time > x and time < x+12h interval(10m);
+
+echo "${today}, ${TDWTM}, ${TDQ1}, ${TDQ2}, ${TDQ3}, ${TDQ4}" >> /root/perftest-13d-report.csv
+
+#bulk_query_gen/bulk_query_gen -format influx-http -query-type 1-host-1-hr -scale-var 10 -queries 1000 | query_benchmarker_influxdb/query_benchmarker_influxdb -urls="http://172.26.89.231:8086"
+#bulk_query_gen/bulk_query_gen -format tdengine -query-type 1-host-1-hr -scale-var 10 -queries 1000 | query_benchmarker_tdengine/query_benchmarker_tdengine -urls="http://172.26.89.231:6020"
diff --git a/tests/perftest-scripts/runreal-1d-csv.sh b/tests/perftest-scripts/runreal-1d-csv.sh
new file mode 100755
index 0000000000000000000000000000000000000000..5cd113aadf1c05bb67a061b969b842872a163bca
--- /dev/null
+++ b/tests/perftest-scripts/runreal-1d-csv.sh
@@ -0,0 +1,149 @@
+#!/bin/bash
+
+
+# Color setting
+RED='\033[0;31m'
+GREEN='\033[1;32m'
+GREEN_DARK='\033[0;32m'
+GREEN_UNDERLINE='\033[4;32m'
+NC='\033[0m'
+#set -x
+echo
+echo "---------------Generating Data-----------------"
+echo
+
+echo
+echo "Prepare data for TDengine...."
+#bin/bulk_data_gen -seed 123 -format tdengine -tdschema-file config/TDengineSchema.toml -scale-var 100 -use-case devops -timestamp-start "2018-01-01T00:00:00Z" -timestamp-end "2018-01-02T00:00:00Z" > data/tdengine.dat
+bin/bulk_data_gen -seed 123 -format tdengine -sampling-interval 1s -tdschema-file config/TDengineSchema.toml -scale-var 10 -use-case devops -timestamp-start "2018-01-01T00:00:00Z" -timestamp-end "2018-01-02T00:00:00Z" > data/tdengine.dat
+
+
+echo
+echo -e "Start test TDengine, result in ${GREEN}Green line${NC}"
+
+for i in {1..5}; do
+    TDENGINERES=`cat data/tdengine.dat |bin/bulk_load_tdengine --url 127.0.0.1:0 --batch-size 300 -do-load -report-tags n1 -workers 20 -fileout=false| grep loaded`
+#TDENGINERES=`cat data/tdengine.dat |gunzip|bin/bulk_load_tdengine --url 127.0.0.1:0 --batch-size 300 -do-load -report-tags n1 -workers 10 -fileout=false| grep loaded`
+    echo
+    echo -e "${GREEN}TDengine writing result:${NC}"
+    echo -e "${GREEN}$TDENGINERES${NC}"
+    DATA=`echo $TDENGINERES|awk '{print($2)}'`
+    TMP=`echo $TDENGINERES|awk '{print($5)}'`
+    TDWTM=`echo ${TMP%s*}`
+
+    [ -z "$TDWTM" ] || break
+done
+
+
+
+echo
+echo "------------------Querying Data-----------------"
+echo
+
+sleep 10
+echo
+echo "start query test, query max from 8 hosts group by 1 hour, TDengine"
+echo
+
+#Test case 1
+#Over the whole data set, match 8 hostname tags and query the maximum of the usage_user metric in the simulated server CPU data for those 8 hosts.
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') ;
+# a,b,c,d,e,f,g,h are random 8 numbers.
+for i in {1..5}; do
+    TDQS1=`bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-all -scale-var 10 -queries 1000 | bin/query_benchmarker_tdengine -urls="http://127.0.0.1:6020" -workers 50 -print-interval 0|grep wall`
+    echo
+    echo -e "${GREEN}TDengine query test case 1 result:${NC}"
+    echo -e "${GREEN}$TDQS1${NC}"
+    TMP=`echo $TDQS1|awk '{print($4)}'`
+    TDQ1=`echo ${TMP%s*}`
+
+    [ -z "$TDQ1" ] || break
+done
+
+#Test case 2
+#Over the whole data set, match 8 hostname tags and query the maximum of the usage_user metric for those 8 hosts at 1-hour granularity (the max of every hour).
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') interval(1h);
+# a,b,c,d,e,f,g,h are random 8 numbers
+for i in {1..5}; do
+    TDQS2=`bin/bulk_query_gen -seed 123 -format tdengine -query-type 8-host-allbyhr -scale-var 10 -queries 1000 | bin/query_benchmarker_tdengine -urls="http://127.0.0.1:6020" -workers 50 -print-interval 0|grep wall`
+
+    echo
+    echo -e "${GREEN}TDengine query test case 2 result:${NC}"
+    echo -e "${GREEN}$TDQS2${NC}"
+    TMP=`echo $TDQS2|awk '{print($4)}'`
+    TDQ2=`echo ${TMP%s*}`
+
+    [ -z "$TDQ2" ] || break
+done
+
+#Test case 3
+#Query a random 12-hour window, match 8 hostname tags and query the maximum of the usage_user metric for those 8 hosts at 10-minute granularity (the max of every 10 minutes).
+#select max(usage_user) from cpu where(hostname='host_a' and hostname='host_b'and hostname='host_c'and hostname='host_d'and hostname='host_e'and hostname='host_f' and hostname='host_g'and hostname='host_h') and time > x and time < x+12h interval(10m);
+
+echo "${today}, ${TDWTM}, ${TDQ1}, ${TDQ2}, ${TDQ3}, ${TDQ4}" >> /root/perftest-1d-report.csv
+
+#bulk_query_gen/bulk_query_gen -format influx-http -query-type 1-host-1-hr -scale-var 10 -queries 1000 | query_benchmarker_influxdb/query_benchmarker_influxdb -urls="http://172.26.89.231:8086"
+#bulk_query_gen/bulk_query_gen -format tdengine -query-type 1-host-1-hr -scale-var 10 -queries 1000 | query_benchmarker_tdengine/query_benchmarker_tdengine -urls="http://172.26.89.231:6020"
diff --git a/tests/perftest-scripts/taosdemo-csv2png.gnuplot b/tests/perftest-scripts/taosdemo-csv2png.gnuplot
new file mode 100644
index 0000000000000000000000000000000000000000..9fcd4bb3d99cd64d97b3e2aa2257e384e19fc3ab
--- /dev/null
+++ b/tests/perftest-scripts/taosdemo-csv2png.gnuplot
@@ -0,0 +1,26 @@
+#!/usr/bin/gnuplot
+reset
+set terminal png
+
+set title "TaosDemo Performance Report" font ",20"
+
+set ylabel "Time in Seconds"
+
+set xdata time
+set timefmt "%Y%m%d"
+set format x "%Y-%m-%d"
+set xlabel "Date"
+
+set style data linespoints
+
+set terminal pngcairo size 1024,768 enhanced font 'Segoe UI, 10'
+set output 'taosdemo-report.png'
+set datafile separator ','
+
+set key reverse Left outside
+set grid
+
+
+plot 'taosdemo-report.csv' using 1:2 title "Create 10,000 Tables", \
+     "" using 1:3 title "Create 10,000 Tables and Insert 100,000 Records", \
+     "" using 1:4 title "Requests Per Second of Inserting 100,000 Records"
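For reference, the report CSVs consumed by perftest-csv2png.gnuplot are expected to hold one row per run: column 1 is the date in %Y%m%d form (matching `set timefmt "%Y%m%d"`), column 2 the write time in seconds, and columns 3-6 the four query cases. A minimal sketch of exercising that contract by hand, using the same filenames and invocation as the scripts above (the sample values are made up):

    # hypothetical sample row: date, write time, query case 1-4 (seconds)
    echo "20200101, 123.4, 1.2, 3.4, 5.6, 7.8" >> perftest-1d-report.csv
    # 'filename' selects both the input CSV and the output PNG (perftest-1d-report.csv / perftest-1d-report.png)
    gnuplot -e "filename='perftest-1d-report'" -p perftest-csv2png.gnuplot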