Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
taosdata
TDengine
提交
3b1a4c56
T
TDengine
项目概览
taosdata
/
TDengine
1 年多 前同步成功
通知
1185
Star
22016
Fork
4786
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
1
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
T
TDengine
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
1
Issue
1
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
未验证
提交
3b1a4c56
编写于
1月 06, 2022
作者:
H
Hui Li
提交者:
GitHub
1月 06, 2022
浏览文件
操作
浏览文件
下载
差异文件
Merge pull request #9566 from taosdata/test/td-12517
[TD-12517]<test>(other): add perf scripts for td-12517
上级
709ddd91
9d315f0b
变更
1
隐藏空白更改
内联
并排
Showing
1 changed file
with
137 additions
and
0 deletions
+137
-0
tests/perftest-scripts/HttpPerfCompare.py
tests/perftest-scripts/HttpPerfCompare.py
+137
-0
未找到文件。
tests/perftest-scripts/HttpPerfCompare.py
0 → 100644
浏览文件 @
3b1a4c56
from
loguru
import
logger
import
time
import
os
import
json
class HttpPerfCompard:
    """Compare TDengine insert/query performance over the RESTful (HTTP) interface.

    Generates JSON workload files consumed by the external ``perfMonitor`` and
    ``taosBenchmark`` binaries, drives them through shell commands, and records
    the results with loguru.  Intended to be run as a standalone script on a
    host that can reach the target TDengine cluster.

    NOTE(review): host names, ports and binary paths below are hard-coded for a
    specific benchmark machine ("vm85") — adjust before running elsewhere.
    """

    def __init__(self):
        # --- target cluster connection settings ---
        self.hostname = "vm85"
        self.taosc_port = 6030      # native (taosc) port
        self.http_port = 6041       # RESTful port
        self.database = "test"
        # --- query workload knobs ---
        self.query_times = 1        # repetitions per query
        self.concurrent = 1         # concurrent query clients
        # --- schema knobs for the insert workload ---
        self.column_count = 10
        self.tag_count = 10
        # --- external benchmark binaries ---
        self.perfMonitorBin = '/home/ubuntu/perfMonitor'
        self.taosBenchmarkBin = '/usr/local/bin/taosBenchmark'
        self.sleep_time = 20        # seconds to pause between workloads
        self.current_time = time.strftime("%Y-%m-%d-%H:%M:%S", time.localtime(time.time()))
        # All generated files (JSON configs, logs) live next to this script.
        self.current_dir = os.path.dirname(os.path.realpath(__file__))
        self.log_file = os.path.join(self.current_dir, './performance.log')
        logger.add(self.log_file)
        logger.info(f'init env success, log will be export to {self.log_file}')
        # BUG FIX: the original list was missing a comma after the seventh
        # statement, so Python's implicit string concatenation silently merged
        # the last two SQLs into one invalid query; the `select last(*)` query
        # was never run on its own.
        self.sql_list = [
            'select last_row(*) from test.stb;',
            'select * from test.stb limit 100000;',
            'select count(*) from test.stb interval (1d);',
            'select avg(c3), max(c4), min(c5) from test.stb interval (1d);',
            'select count(*) from test.stb where t1 = "shanghai" interval (1h);',
            'select avg(c3), max(c4), min(c5) from test.stb where t1 = "shanghai" interval (1d);',
            'select avg(c3), max(c4), min(c5) from test.stb where ts > "2021-01-01 00:00:00" and ts < "2021-01-31 00:00:00" interval (1d);',
            'select last(*) from test.stb;',
        ]

    def initLog(self):
        """Truncate the performance log file via the shell."""
        self.exec_local_cmd(f'echo "" > {self.log_file}')

    def exec_local_cmd(self, shell_cmd):
        """Run *shell_cmd* in a local shell and return its stripped stdout.

        NOTE(review): ``os.popen`` runs through the shell; *shell_cmd* must be
        trusted input (it is — all callers pass internally-built strings).
        """
        result = os.popen(shell_cmd).read().strip()
        return result

    def genQueryJsonFile(self, query_sql):
        """Write ``query.json`` (taosBenchmark query config) for *query_sql*.

        The file is placed in ``self.current_dir`` and configures a RESTful
        ("restful" query_mode) run of the single given SQL statement.
        """
        json_file = os.path.join(self.current_dir, './query.json')
        jdict = {
            "filetype": "query",
            "cfgdir": "/etc/taos",
            "host": self.hostname,
            "port": self.taosc_port,
            "user": "root",
            "password": "taosdata",
            "confirm_parameter_prompt": "no",
            "databases": self.database,
            "query_times": self.query_times,
            "query_mode": "restful",
            "specified_table_query": {
                "concurrent": self.concurrent,
                "sqls": [
                    {
                        "sql": query_sql,
                        "result": "./query_res0.txt",
                    }
                ],
            },
        }
        with open(json_file, "w", encoding="utf-8") as f_w:
            f_w.write(json.dumps(jdict))

    def genInsertJsonFile(self, thread_count, table_count, row_count, batch_size):
        """Write ``insert.json`` (insert workload config) into ``self.current_dir``.

        Args:
            thread_count: number of insert worker threads.
            table_count:  number of child tables under super table "stb".
            row_count:    rows inserted per child table.
            batch_size:   rows per insert request ("batch_rows").
        """
        json_file = os.path.join(self.current_dir, './insert.json')
        jdict = {
            "filetype": "insert",
            "cfgdir": "/etc/taos",
            "host": self.hostname,
            "rest_host": self.hostname,
            "port": self.taosc_port,
            "rest_port": self.http_port,
            "user": "root",
            "password": "taosdata",
            "thread_count": thread_count,
            "thread_count_create_tbl": 1,
            "result_file": self.log_file,
            "databases": [{
                "dbinfo": {
                    "name": self.database,
                    # recreate the database on every run
                    "drop": "yes",
                },
                "super_tables": [{
                    "name": "stb",
                    "childtable_count": table_count,
                    "childtable_prefix": "stb_",
                    "batch_create_tbl_num": 1,
                    "insert_mode": "rand",
                    # insert over the RESTful interface (the point of this test)
                    "insert_iface": "rest",
                    "insert_rows": row_count,
                    "insert_interval": 0,
                    "batch_rows": batch_size,
                    "max_sql_len": 1048576,
                    "timestamp_step": 3000,
                    "start_timestamp": "2021-01-01 00:00:00.000",
                    "tags_file": "",
                    "partical_col_num": 0,
                    "columns": [{"type": "INT", "count": self.column_count}],
                    "tags": [{"type": "BINARY", "len": 16, "count": self.tag_count}],
                }],
            }],
        }
        with open(json_file, "w", encoding="utf-8") as f_w:
            f_w.write(json.dumps(jdict))

    def runTest(self):
        """Run the full benchmark: two insert workloads, then every query in sql_list.

        Each query's per-run "Spent ... s" lines are averaged with an awk
        one-liner over taosBenchmark's output and logged.
        """
        self.initLog()
        # insert workload 1: many small requests (batch size 1)
        self.genInsertJsonFile(32, 100, 100000, 1)
        logger.info('result of insert_perf with 32 threads and 1 batch_size:')
        self.exec_local_cmd(f'{self.perfMonitorBin} -f insert.json')
        time.sleep(self.sleep_time)
        # insert workload 2: fewer, larger requests (batch size 1000)
        self.genInsertJsonFile(32, 500, 1000000, 1000)
        logger.info('result of insert_perf with 32 threads and 1000 batch_size:')
        self.exec_local_cmd(f'{self.perfMonitorBin} -f insert.json')
        time.sleep(self.sleep_time)
        for query_sql in self.sql_list:
            self.genQueryJsonFile(query_sql)
            self.exec_local_cmd(f'{self.taosBenchmarkBin} -f query.json > tmp.log')
            res = self.exec_local_cmd('grep -Eo \'\<Spent.+s\>\' tmp.log |grep -v \'total queries\' |awk \'{sum+=$2}END{print "Average=",sum/NR,"s"}\'')
            logger.info(query_sql)
            logger.info(res)
            time.sleep(self.sleep_time)
if __name__ == '__main__':
    # Script entry point: build the comparer and execute the full benchmark.
    perf_runner = HttpPerfCompard()
    perf_runner.runTest()
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录