Commit f16c1135
Authored on Dec 28, 2021 by jiajingbin
[TD-11800]<test>: add scripts for td-11800
Parent: e89b7559
Showing 1 changed file with 171 additions and 0 deletions
tests/perftest-scripts/specifyColsComparison.py    0 → 100644    +171 −0
from loguru import logger
import time
import os
import json
import sys
from fabric import Connection

# apt install -y sudo python3-pip
# pip3 install fabric loguru


class specifyColsCompared:
    def __init__(self):
        # remote server
        self.remote_hostname = "node2"
        self.remote_sshport = "22"
        self.remote_username = "root"
        self.remote_password = "1"

        # TDengine pkg path
        self.autoDeploy = True
        self.install_package = '/root/share/TDengine-server-2.4.0.0-Linux-amd64.tar.gz'

        # test element
        self.update_list = [1, 2]
        self.column_count_list = [100, 500, 2000]

        # perfMonitor config
        self.thread_count = 32
        self.taosc_port = 6030
        self.http_port = 6041
        self.database = "test"
        self.table_count = 10
        self.tag_count = 5
        self.col_count = 1000000
        self.batch_size = 1
        self.sleep_time = 20

        self.current_time = time.strftime("%Y-%m-%d-%H:%M:%S", time.localtime(time.time()))
        self.current_dir = os.path.dirname(os.path.realpath(__file__))
        self.log_file = os.path.join(self.current_dir, f'./performance.log')
        if self.remote_username == "root":
            self.remote_dir = "/root"
        else:
            self.remote_dir = f'/home/{self.remote_username}'

        self.conn = Connection(self.remote_hostname, user=self.remote_username, port=self.remote_sshport,
                               connect_timeout=120, connect_kwargs={"password": self.remote_password})
        logger.add(self.log_file)
        logger.info(f'init env success, log will be export to {self.log_file}')

    def initLog(self):
        # init log
        self.exec_local_cmd(f'echo "" > {self.log_file}')

    def exec_local_cmd(self, shell_cmd):
        # exec local cmd
        try:
            result = os.popen(shell_cmd).read().strip()
            return result
        except Exception as e:
            logger.error(f"exec cmd: {shell_cmd} failed----{e}")

    def checkStatus(self, process):
        # check process status
        try:
            process_count = self.conn.run(f'ps -ef | grep -w {process} | grep -v grep | wc -l', pty=False, warn=True, hide=False).stdout
            if int(process_count.strip()) > 0:
                logger.info(f'check {self.remote_hostname} {process} existed')
                return True
            else:
                logger.info(f'check {self.remote_hostname} {process} not exist')
                return False
        except Exception as e:
            logger.error(f"check status failed----{e}, please check by manual")

    def deployPerfMonitor(self):
        # deploy perfMonitor
        logger.info('deploying perfMonitor')
        if os.path.exists(f'{self.current_dir}/perfMonitor'):
            os.remove(f'{self.current_dir}/perfMonitor')
        self.exec_local_cmd(f'wget -P {self.current_dir} http://39.105.163.10:9000/perfMonitor && chmod +x {self.current_dir}/perfMonitor')
        package_name = self.install_package.split('/')[-1]
        package_dir = '-'.join(package_name.split("-", 3)[0:3])
        self.exec_local_cmd(f'tar -xvf {self.install_package} && cd {package_dir} && echo -e "\n" | ./install.sh')

    def uploadPkg(self):
        # upload TDengine pkg
        try:
            logger.info(f'uploading {self.install_package} to {self.remote_hostname}:{self.remote_dir}')
            self.conn.put(self.install_package, self.remote_dir)
        except Exception as e:
            logger.error(f"pkg send failed----{e}, please check by manual")

    def deployTDengine(self):
        # deploy TDengine
        try:
            package_name = self.install_package.split('/')[-1]
            package_dir = '-'.join(package_name.split("-", 3)[0:3])
            self.uploadPkg()
            self.conn.run(f'sudo rmtaos', pty=False, warn=True, hide=False)
            logger.info('installing TDengine')
            logger.info(self.conn.run(f'cd {self.remote_dir} && tar -xvf {self.remote_dir}/{package_name} && cd {package_dir} && echo -e "\n"|./install.sh', pty=False, warn=True, hide=False))
            logger.info('start TDengine')
            logger.info(self.conn.run('sudo systemctl start taosd', pty=False, warn=True, hide=False))
            for deploy_elm in ['taosd', 'taosadapter']:
                if self.checkStatus(deploy_elm):
                    logger.success(f'{self.remote_hostname}:{deploy_elm} deploy success')
                else:
                    logger.error(f'{self.remote_hostname}:{deploy_elm} deploy failed, please check by manual')
                    sys.exit(1)
        except Exception as e:
            logger.error(f"deploy TDengine failed----{e}, please check by manual")

    def genInsertJsonFile(self, thread_count, table_count, row_count, batch_size, column_count, partical_col_num, update):
        # gen json file
        json_file = os.path.join(self.current_dir, f'./insert.json')
        jdict = {
            "filetype": "insert",
            "cfgdir": "/etc/taos",
            "host": self.remote_hostname,
            "rest_host": self.remote_hostname,
            "port": self.taosc_port,
            "rest_port": self.http_port,
            "user": "root",
            "password": "taosdata",
            "thread_count": thread_count,
            "thread_count_create_tbl": 1,
            "result_file": self.log_file,
            "databases": [{
                "dbinfo": {
                    "name": self.database,
                    "drop": "yes",
                    "update": update
                },
                "super_tables": [{
                    "name": "stb",
                    "childtable_count": table_count,
                    "childtable_prefix": "stb_",
                    "batch_create_tbl_num": 1,
                    "insert_mode": "rand",
                    "insert_iface": "rest",
                    "insert_rows": row_count,
                    "insert_interval": 0,
                    "batch_rows": batch_size,
                    "max_sql_len": 1048576,
                    "timestamp_step": 1000,
                    "start_timestamp": "2021-01-01 00:00:00.000",
                    "tags_file": "",
                    "partical_col_num": partical_col_num,
                    "columns": [{"type": "INT", "count": column_count}],
                    "tags": [{"type": "BINARY", "len": 16, "count": self.tag_count}]
                }]
            }]
        }
        with open(json_file, "w", encoding="utf-8") as f_w:
            f_w.write(json.dumps(jdict))

    def runTest(self):
        self.initLog()
        self.deployPerfMonitor()
        if self.autoDeploy:
            self.deployTDengine()
        for update in self.update_list:
            for col_count in self.column_count_list:
                for partical_col_num in [int(col_count * 0), int(col_count * 0.1), int(col_count * 0.3)]:
                    logger.info(f'update: {update} || col_count: {col_count} || partical_col_num: {partical_col_num} test')
                    self.genInsertJsonFile(self.thread_count, self.table_count, self.col_count, self.batch_size, col_count, partical_col_num, update)
                    self.exec_local_cmd(f'{self.current_dir}/perfMonitor -f insert.json')
                    time.sleep(self.sleep_time)


if __name__ == '__main__':
    runPerf = specifyColsCompared()
    runPerf.runTest()
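For context, a minimal usage sketch under the assumptions hard-coded in the script above (the remote host "node2", the local TDengine package path, and the perfMonitor download URL). It installs the two Python dependencies, runs the full test matrix, and shows how genInsertJsonFile() could be called on its own to preview a single generated workload file; the parameter values mirror the defaults set in __init__.

# Minimal usage sketch (assumptions: fabric and loguru are installed, and the
# remote host "node2" configured in the script is reachable over SSH):
#
#   pip3 install fabric loguru
#   python3 tests/perftest-scripts/specifyColsComparison.py
#
# To only inspect one generated workload file, genInsertJsonFile() can be
# called directly; note that __init__ still opens the SSH connection.
from specifyColsComparison import specifyColsCompared

perf = specifyColsCompared()
perf.genInsertJsonFile(thread_count=32, table_count=10, row_count=1000000,
                       batch_size=1, column_count=100, partical_col_num=10,
                       update=1)
# insert.json is written next to the script and is the file fed to perfMonitor:
#   ./perfMonitor -f insert.json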