Commit 5c9752c9 (unverified)
Authored by Jason-Jia on Jun 27, 2022; committed via GitHub on Jun 27, 2022

Merge pull request #14282 from taosdata/cpwu/3.0
test: fix constant: add taos_keywords; add time_range_wise case

Parents: 88083c96, 496c2ff7
Showing 7 changed files with 542 additions and 107 deletions (+542 / -107):

    tests/pytest/util/constant.py                     +103    -4
    tests/pytest/util/sql.py                           +37    -2
    tests/system-test/1-insert/create_retentions.py    +41   -39
    tests/system-test/1-insert/time_range_wise.py     +358   -59
    tests/system-test/2-query/join.py                   +1    -1
    tests/system-test/2-query/join2.py                  +1    -1
    tests/system-test/2-query/substr.py                 +1    -1
tests/pytest/util/constant.py    (view file @ 5c9752c9)

# -*- coding: utf-8 -*-

# basic type
TAOS_DATA_TYPE = [
    "INT", "BIGINT", "SMALLINT", "TINYINT",
    "INT UNSIGNED", "BIGINT UNSIGNED", "SMALLINT UNSIGNED", "TINYINT UNSIGNED",
    "FLOAT", "DOUBLE",
    "BOOL",
    "BINARY", "NCHAR", "VARCHAR",
    "TIMESTAMP",
    # "MEDIUMBLOB", "BLOB", # add in 3.x
    # "DECIMAL", "NUMERIC", # add in 3.x
    "JSON",  # only for tag
]

TAOS_NUM_TYPE = [
    "INT", "BIGINT", "SMALLINT", "TINYINT",
    "INT UNSIGNED", "BIGINT UNSIGNED", "SMALLINT UNSIGNED", "TINYINT UNSIGNED",
    "FLOAT", "DOUBLE",
    # "DECIMAL", "NUMERIC", # add in 3.x
]
TAOS_CHAR_TYPE = ["BINARY", "NCHAR", "VARCHAR"]
TAOS_BOOL_TYPE = ["BOOL", ]
TAOS_TS_TYPE   = ["TIMESTAMP", ]
TAOS_BIN_TYPE  = ["MEDIUMBLOB", "BLOB", ]  # add in 3.x

TAOS_TIME_INIT = ["b", "u", "a", "s", "m", "h", "d", "w", "n", "y"]
TAOS_PRECISION = ["ms", "us", "ns"]
PRECISION_DEFAULT = "ms"
PRECISION = PRECISION_DEFAULT

TAOS_KEYWORDS = [
    "ABORT",       "CREATE",     "IGNORE",    "NULL",       "STAR",
    "ACCOUNT",     "CTIME",      "IMMEDIATE", "OF",         "STATE",
    "ACCOUNTS",    "DATABASE",   "IMPORT",    "OFFSET",     "STATEMENT",
    "ADD",         "DATABASES",  "IN",        "OR",         "STATE_WINDOW",
    "AFTER",       "DAYS",       "INITIALLY", "ORDER",      "STORAGE",
    "ALL",         "DBS",        "INSERT",    "PARTITIONS", "STREAM",
    "ALTER",       "DEFERRED",   "INSTEAD",   "PASS",       "STREAMS",
    "AND",         "DELIMITERS", "INT",       "PLUS",       "STRING",
    "AS",          "DESC",       "INTEGER",   "PPS",        "SYNCDB",
    "ASC",         "DESCRIBE",   "INTERVAL",  "PRECISION",  "TABLE",
    "ATTACH",      "DETACH",     "INTO",      "PREV",       "TABLES",
    "BEFORE",      "DISTINCT",   "IS",        "PRIVILEGE",  "TAG",
    "BEGIN",       "DIVIDE",     "ISNULL",    "QTIME",      "TAGS",
    "BETWEEN",     "DNODE",      "JOIN",      "QUERIES",    "TBNAME",
    "BIGINT",      "DNODES",     "KEEP",      "QUERY",      "TIMES",
    "BINARY",      "DOT",        "KEY",       "QUORUM",     "TIMESTAMP",
    "BITAND",      "DOUBLE",     "KILL",      "RAISE",      "TINYINT",
    "BITNOT",      "DROP",       "LE",        "REM",        "TOPIC",
    "BITOR",       "EACH",       "LIKE",      "REPLACE",    "TOPICS",
    "BLOCKS",      "END",        "LIMIT",     "REPLICA",    "TRIGGER",
    "BOOL",        "EQ",         "LINEAR",    "RESET",      "TSERIES",
    "BY",          "EXISTS",     "LOCAL",     "RESTRICT",   "UMINUS",
    "CACHE",       "EXPLAIN",    "LP",        "ROW",        "UNION",
    "CACHELAST",   "FAIL",       "LSHIFT",    "RP",         "UNSIGNED",
    "CASCADE",     "FILE",       "LT",        "RSHIFT",     "UPDATE",
    "CHANGE",      "FILL",       "MATCH",     "SCORES",     "UPLUS",
    "CLUSTER",     "FLOAT",      "MAXROWS",   "SELECT",     "USE",
    "COLON",       "FOR",        "MINROWS",   "SEMI",       "USER",
    "COLUMN",      "FROM",       "MINUS",     "SESSION",    "USERS",
    "COMMA",       "FSYNC",      "MNODES",    "SET",        "USING",
    "COMP",        "GE",         "MODIFY",    "SHOW",       "VALUES",
    "COMPACT",     "GLOB",       "MODULES",   "SLASH",      "VARIABLE",
    "CONCAT",      "GRANTS",     "NCHAR",     "SLIDING",    "VARIABLES",
    "CONFLICT",    "GROUP",      "NE",        "SLIMIT",     "VGROUPS",
    "CONNECTION",  "GT",         "NONE",      "SMALLINT",   "VIEW",
    "CONNECTIONS", "HAVING",     "NOT",       "SOFFSET",    "VNODES",
    "CONNS",       "ID",         "NOTNULL",   "STABLE",     "WAL",
    "COPY",        "IF",         "NOW",       "STABLES",    "WHERE",
]

# basic data type boundary
TINYINT_MAX = 127
TINYINT_MIN = -128
...
@@ -11,7 +82,7 @@ SMALLINT_MAX = 32767
 SMALLINT_MIN    = -32768
 SMALLINT_UN_MAX = 65535
-MALLINT_UN_MIN  = 0
+SMALLINT_UN_MIN = 0
 INT_MAX = 2147483647
 INT_MIN = -2147483648
...
@@ -33,8 +104,8 @@ DOUBLE_MIN = -1.7E+308
 # schema boundary
 BINARY_LENGTH_MAX = 16374
-NCAHR_LENGTH_MAX_  = 4093
+NCAHR_LENGTH_MAX   = 4093
-DBNAME_LENGTH_MAX_ = 64
+DBNAME_LENGTH_MAX  = 64
 STBNAME_LENGTH_MAX = 192
 STBNAME_LENGTH_MIN = 1
...
@@ -67,3 +138,31 @@ MNODE_SHM_SIZE_DEFAULT = 6292480
 VNODE_SHM_SIZE_MAX = 2147483647
 VNODE_SHM_SIZE_MIN = 6292480
 VNODE_SHM_SIZE_DEFAULT = 31458304

# time_init
TIME_MS = 1
TIME_US = TIME_MS / 1000
TIME_NS = TIME_US / 1000
TIME_S  = 1000 * TIME_MS
TIME_M  = 60 * TIME_S
TIME_H  = 60 * TIME_M
TIME_D  = 24 * TIME_H
TIME_W  = 7 * TIME_D
TIME_N  = 30 * TIME_D
TIME_Y  = 365 * TIME_D

# session parameters
INTERVAL_MIN = 1 * TIME_MS if PRECISION == PRECISION_DEFAULT else 1 * TIME_US

# streams and related agg-function
SMA_INDEX_FUNCTIONS = ["MIN", "MAX"]
ROLLUP_FUNCTIONS = ["AVG", "SUM", "MIN", "MAX", "LAST", "FIRST"]
SMA_WATMARK_MAXDELAY_INIT = ['a', "s", "m"]
WATERMARK_MAX = 900000
WATERMARK_MIN = 0
MAX_DELAY_MAX = 900000
MAX_DELAY_MIN = 1
\ No newline at end of file
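The new keyword list and time-unit constants are what the SMA index checks later in this commit lean on: index names must not collide with reserved words, and watermark/max_delay literals must stay inside the bounds above. A minimal usage sketch, not taken from the diff, assuming the test framework's util package is importable and using a made-up index name:

from util.constant import TAOS_KEYWORDS, TIME_M, WATERMARK_MAX

index_name = "interval"                      # hypothetical identifier under test
if index_name.upper() in TAOS_KEYWORDS:      # same membership test the SMA create-check uses
    print(f"{index_name} is a reserved TDengine keyword, reject it")

fifteen_minutes_ms = 15 * TIME_M             # 15 * 60000 = 900000 ms
assert fifteen_minutes_ms == WATERMARK_MAX   # upper bound for watermark / max_delay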
tests/pytest/util/sql.py    (view file @ 5c9752c9)

@@ -21,6 +21,7 @@ import psutil
 import shutil
 import pandas as pd
 from util.log import *
+from util.constant import *

 def _parse_datetime(timestr):
     try:
...
@@ -117,8 +118,7 @@ class TDSql:
             col_name_list = []
             col_type_list = []
             self.cursor.execute(sql)
-            self.queryCols = self.cursor.description
-            for query_col in self.queryCols:
+            for query_col in self.cursor.description:
                 col_name_list.append(query_col[0])
                 col_type_list.append(query_col[1])
         except Exception as e:
...
@@ -301,6 +301,41 @@ class TDSql:
             args = (caller.filename, caller.lineno, self.sql, elm, expect_elm)
             tdLog.exit("%s(%d) failed: sql:%s, elm:%s == expect_elm:%s" % args)

+    def get_times(self, time_str, precision="ms"):
+        caller = inspect.getframeinfo(inspect.stack()[1][0])
+        if time_str[-1] not in TAOS_TIME_INIT:
+            tdLog.exit(f"{caller.filename}({caller.lineno}) failed: {time_str} not a standard taos time init")
+        if precision not in TAOS_PRECISION:
+            tdLog.exit(f"{caller.filename}({caller.lineno}) failed: {precision} not a standard taos time precision")
+
+        if time_str[-1] == TAOS_TIME_INIT[0]:
+            times = int(time_str[:-1]) * TIME_NS
+        if time_str[-1] == TAOS_TIME_INIT[1]:
+            times = int(time_str[:-1]) * TIME_US
+        if time_str[-1] == TAOS_TIME_INIT[2]:
+            times = int(time_str[:-1]) * TIME_MS
+        if time_str[-1] == TAOS_TIME_INIT[3]:
+            times = int(time_str[:-1]) * TIME_S
+        if time_str[-1] == TAOS_TIME_INIT[4]:
+            times = int(time_str[:-1]) * TIME_M
+        if time_str[-1] == TAOS_TIME_INIT[5]:
+            times = int(time_str[:-1]) * TIME_H
+        if time_str[-1] == TAOS_TIME_INIT[6]:
+            times = int(time_str[:-1]) * TIME_D
+        if time_str[-1] == TAOS_TIME_INIT[7]:
+            times = int(time_str[:-1]) * TIME_W
+        if time_str[-1] == TAOS_TIME_INIT[8]:
+            times = int(time_str[:-1]) * TIME_N
+        if time_str[-1] == TAOS_TIME_INIT[9]:
+            times = int(time_str[:-1]) * TIME_Y
+
+        if precision == "ms":
+            return int(times)
+        elif precision == "us":
+            return int(times * 1000)
+        elif precision == "ns":
+            return int(times * 1000 * 1000)

     def taosdStatus(self, state):
         tdLog.sleep(5)
         pstate = 0
...
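A quick usage sketch, not part of the diff: assuming tdSql is the shared TDSql instance the test framework creates, get_times maps a TDengine duration literal onto a count of the requested precision unit, so "10m" becomes 600000 under the default millisecond precision.

# Illustrative only: values follow from TIME_M = 60000 ms in util/constant.py.
assert tdSql.get_times("10m") == 600000                     # 10 minutes in ms
assert tdSql.get_times("10m", precision="us") == 600000000  # same span in microseconds
# An unknown unit suffix such as "10x" aborts the test case via tdLog.exit().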
tests/system-test/1-insert/create_retentions.py    (view file @ 5c9752c9)

@@ -21,9 +21,9 @@ SINT_UN_COL = "c_sint_un"
 BINT_UN_COL = "c_bint_un"
 INT_UN_COL  = "c_int_un"
-BINARY_COL  = "c8"
+BINARY_COL  = "c_binary"
-NCHAR_COL   = "c9"
+NCHAR_COL   = "c_nchar"
-TS_COL      = "c10"
+TS_COL      = "c_ts"

 NUM_COL  = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
 CHAR_COL = [BINARY_COL, NCHAR_COL, ]
...
@@ -51,12 +51,28 @@ class DataSet:
     binary_data : List[str] = None
     nchar_data  : List[str] = None

+    def __post_init__(self):
+        self.ts_data      = []
+        self.int_data     = []
+        self.bint_data    = []
+        self.sint_data    = []
+        self.tint_data    = []
+        self.int_un_data  = []
+        self.bint_un_data = []
+        self.sint_un_data = []
+        self.tint_un_data = []
+        self.float_data   = []
+        self.double_data  = []
+        self.bool_data    = []
+        self.binary_data  = []
+        self.nchar_data   = []

 class TDTestCase:

     def init(self, conn, logSql):
         tdLog.debug(f"start to excute {__file__}")
-        tdSql.init(conn.cursor(), True)
+        tdSql.init(conn.cursor(), False)

     @property
     def create_databases_sql_err(self):
...
@@ -87,28 +103,28 @@ class TDTestCase:
     @property
     def create_stable_sql_err(self):
         return [
-            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(ceil) watermark 1s maxdelay 1m",
+            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(ceil) watermark 1s max_delay 1m",
             f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(count) watermark 1min",
-            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay -1s",
+            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay -1s",
             f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark -1m",
-            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) watermark 1m ",
+            # f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) watermark 1m ",
-            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) maxdelay 1m ",
+            # f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) max_delay 1m ",
             f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} binary(16)) tags (tag1 int) rollup(avg) watermark 1s",
-            f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) tags (tag1 int) rollup(avg) maxdelay 1m",
+            f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) tags (tag1 int) rollup(avg) max_delay 1m",
-            # f"create table ntb_1 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) rollup(avg) watermark 1s maxdelay 1s",
+            # f"create table ntb_1 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) rollup(avg) watermark 1s max_delay 1s",
             # f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) tags (tag1 int) " ,
             # f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) " ,
             # f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int) " ,
             # f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) " ,

-            # watermark, maxdelay: [0, 900000], [ms, s, m, ?]
+            # watermark, max_delay: [0, 900000], [ms, s, m, ?]
-            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay 1u",
+            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay 1u",
             f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark 1b",
             f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark 900001ms",
-            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay 16m",
+            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay 16m",
-            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay 901s",
+            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay 901s",
-            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay 1h",
+            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay 1h",
-            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay 0.2h",
+            f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay 0.2h",
             f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark 0.002d",
         ]
...
@@ -117,11 +133,11 @@ class TDTestCase:
     def create_stable_sql_current(self):
         return [
             f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(avg)",
-            f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark 5s maxdelay 1m",
+            f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark 5s max_delay 1m",
-            f"create stable stb3 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(max) watermark 5s maxdelay 1m",
+            f"create stable stb3 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(max) watermark 5s max_delay 1m",
-            f"create stable stb4 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(sum) watermark 5s maxdelay 1m",
+            f"create stable stb4 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(sum) watermark 5s max_delay 1m",
-            # f"create stable stb5 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(last) watermark 5s maxdelay 1m",
+            # f"create stable stb5 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(last) watermark 5s max_delay 1m",
-            # f"create stable stb6 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(first) watermark 5s maxdelay 1m",
+            # f"create stable stb6 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(first) watermark 5s max_delay 1m",
         ]

     def test_create_stb(self):
...
@@ -135,7 +151,7 @@ class TDTestCase:
         tdSql.checkRows(len(self.create_stable_sql_current))
         # tdSql.execute("use db") # because db is a noraml database, not a rollup database, should not be able to create a rollup database
-        # tdSql.error(f"create stable nor_db_rollup_stb ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) file_factor 5.0 ")
+        # tdSql.error(f"create stable nor_db_rollup_stb ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) watermark 5s max_delay 1m ")

     def test_create_databases(self):
...
@@ -177,21 +193,6 @@ class TDTestCase:
     def __data_set(self, rows):
         data_set = DataSet()
-        # neg_data_set = DataSet()
-        data_set.ts_data      = []
-        data_set.int_data     = []
-        data_set.bint_data    = []
-        data_set.sint_data    = []
-        data_set.tint_data    = []
-        data_set.int_un_data  = []
-        data_set.bint_un_data = []
-        data_set.sint_un_data = []
-        data_set.tint_un_data = []
-        data_set.float_data   = []
-        data_set.double_data  = []
-        data_set.bool_data    = []
-        data_set.binary_data  = []
-        data_set.nchar_data   = []

         for i in range(rows):
             data_set.ts_data.append(NOW + 1 * (rows - i))
...
@@ -226,6 +227,7 @@ class TDTestCase:
         return data_set

     def __insert_data(self):
+        tdLog.printNoPrefix("==========step: start inser data into tables now.....")
         data = self.__data_set(rows=self.rows)
         # now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
...
@@ -264,10 +266,10 @@ class TDTestCase:
     def run(self):
         self.rows = 10
         tdSql.prepare()

         tdLog.printNoPrefix("==========step0:all check")
-        # self.all_test()
+        self.all_test()

         tdLog.printNoPrefix("==========step1:create table in normal database")
         tdSql.prepare()
...
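The new __post_init__ replaces the per-call list setup that __data_set used to do by hand. A self-contained sketch, with the field list trimmed to two members, of why per-instance initialisation matters for a dataclass holding lists:

from dataclasses import dataclass
from typing import List

@dataclass
class MiniDataSet:
    ts_data: List[int] = None
    int_data: List[int] = None

    def __post_init__(self):
        # Give every instance its own fresh lists instead of sharing a
        # mutable default or re-initialising them at each call site.
        self.ts_data = []
        self.int_data = []

a, b = MiniDataSet(), MiniDataSet()
a.ts_data.append(1)
assert b.ts_data == []   # instances no longer share state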
tests/system-test/1-insert/time_range_wise.py    (view file @ 5c9752c9)

@@ -17,25 +17,34 @@ TINT_COL = "c_tint"
 FLOAT_COL  = "c_float"
 DOUBLE_COL = "c_double"
 BOOL_COL   = "c_bool"
-TINT_UN_COL = "c_tint_un"
+TINT_UN_COL = "c_utint"
-SINT_UN_COL = "c_sint_un"
+SINT_UN_COL = "c_usint"
-BINT_UN_COL = "c_bint_un"
+BINT_UN_COL = "c_ubint"
-INT_UN_COL  = "c_int_un"
+INT_UN_COL  = "c_uint"
 BINARY_COL = "c_binary"
 NCHAR_COL  = "c_nchar"
 TS_COL     = "c_ts"

 NUM_COL     = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
 CHAR_COL    = [BINARY_COL, NCHAR_COL, ]
 BOOLEAN_COL = [BOOL_COL, ]
 TS_TYPE_COL = [TS_COL, ]
+INT_TAG = "t_int"
+ALL_COL = [PRIMARY_COL, INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BINARY_COL, NCHAR_COL, BOOL_COL, TS_COL]
+TAG_COL = [INT_TAG]

 # insert data args:
 TIME_STEP = 10000
 NOW = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)

+# init db/table
+DBNAME  = "db"
+STBNAME = "stb1"
+CTBNAME = "ct1"
+NTBNAME = "nt1"

 @dataclass
 class DataSet:
...
@@ -78,14 +87,20 @@ class SMAschema:
     index_flag  : str   = "SMA INDEX"
     operator    : str   = "ON"
     tbname      : str   = None
-    watermark   : str   = None
+    watermark   : str   = "5s"
-    maxdelay    : str   = None
+    max_delay   : str   = "6m"
     func        : Tuple[str] = None
-    interval    : Tuple[str] = None
+    interval    : Tuple[str] = ("6m", "10s")
-    sliding     : str   = None
+    sliding     : str   = "6m"
     other       : Any   = None
     drop        : str   = "DROP"
     drop_flag   : str   = "INDEX"
+    querySmaOptimize : int = 1
+    show        : str   = "SHOW"
+    show_msg    : str   = "INDEXES"
+    show_oper   : str   = "FROM"
+    dbname      : str   = None
+    rollup_db   : bool  = False

     def __post_init__(self):
         if isinstance(self.other, dict):
...
@@ -111,8 +126,8 @@ class SMAschema:
             self.watermark = v
             del self.other[k]
-        if k.lower() == "maxdelay" and isinstance(v, str) and not self.maxdelay:
-            self.maxdelay = v
+        if k.lower() == "max_delay" and isinstance(v, str) and not self.max_delay:
+            self.max_delay = v
             del self.other[k]
         if k.lower() == "functions" and isinstance(v, tuple) and not self.func:
...
@@ -131,12 +146,36 @@ class SMAschema:
             self.drop_flag = v
             del self.other[k]
+        if k.lower() == "show_msg" and isinstance(v, str) and not self.show_msg:
+            self.show_msg = v
+            del self.other[k]
+        if k.lower() == "dbname" and isinstance(v, str) and not self.dbname:
+            self.dbname = v
+            del self.other[k]
+        if k.lower() == "show_oper" and isinstance(v, str) and not self.show_oper:
+            self.show_oper = v
+            del self.other[k]
+        if k.lower() == "rollup_db" and isinstance(v, bool) and not self.rollup_db:
+            self.rollup_db = v
+            del self.other[k]

+# from ...pytest.util.sql import *
+# from ...pytest.util.constant import *

 class TDTestCase:

+    updatecfgDict = {"querySmaOptimize": 1}

     def init(self, conn, logSql):
         tdLog.debug(f"start to excute {__file__}")
         tdSql.init(conn.cursor(), False)
+        self.precision = "ms"
+        self.sma_count = 0
+        self.sma_created_index = []

     """
         create sma index :
...
@@ -155,13 +194,17 @@ class TDTestCase:
         if sma.func:
             sql += f" function({', '.join(sma.func)})"
         if sma.interval:
-            sql += f" interval({', '.join(sma.interval)})"
+            interval, offset = self.__get_interval_offset(sma.interval)
+            if offset:
+                sql += f" interval({interval}, {offset})"
+            else:
+                sql += f" interval({interval})"
         if sma.sliding:
             sql += f" sliding({sma.sliding})"
         if sma.watermark:
             sql += f" watermark {sma.watermark}"
-        if sma.maxdelay:
-            sql += f" maxdelay {sma.maxdelay}"
+        if sma.max_delay:
+            sql += f" max_delay {sma.max_delay}"
         if isinstance(sma.other, dict):
             for k, v in sma.other.items():
                 if isinstance(v, tuple) or isinstance(v, list):
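For orientation, a stripped-down sketch of what the new interval/offset branch above produces; build_interval_clause is a stand-in helper for illustration, not a function from the diff:

def build_interval_clause(interval_arg):
    # Mirrors the branch above: a bare string keeps interval(X),
    # an (interval, offset) pair becomes interval(X, Y).
    if isinstance(interval_arg, (tuple, list)) and len(interval_arg) == 2:
        interval, offset = interval_arg
        return f" interval({interval}, {offset})"
    interval = interval_arg[0] if isinstance(interval_arg, (tuple, list)) else interval_arg
    return f" interval({interval})"

print(build_interval_clause(("6m", "10s")))   # " interval(6m, 10s)"
print(build_interval_clause("6m"))            # " interval(6m)"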
@@ -171,53 +214,305 @@ class TDTestCase:
         if isinstance(sma.other, tuple) or isinstance(sma.other, list):
             sql += " ".join(sma.other)
         if isinstance(sma.other, int) or isinstance(sma.other, float) or isinstance(sma.other, str):
-            sql += sma.other
+            sql += f"{sma.other}"
         return sql

     def __get_sma_func_col(self, func):
         cols = []
         if isinstance(func, str):
             cols.append(func.split("(")[-1].split(")")[0])
         elif isinstance(func, tuple) or isinstance(func, list):
             for func_col in func:
                 cols.append(func_col.split("(")[-1].split(")")[0])
         else:
             cols = []
         return cols

     def __check_sma_func(self, func: tuple):
         if not isinstance(func, str) and not isinstance(func, tuple) and not isinstance(func, list):
             return False
         if isinstance(func, str):
             if "(" not in func or ")" not in func:
                 return False
             if func.split("(")[0].upper() not in SMA_INDEX_FUNCTIONS:
                 return False
             if func.split("(")[1].split(")")[0] not in ALL_COL and func.split("(")[1].split(")")[0] not in TAG_COL:
                 return False
         if isinstance(func, tuple) or isinstance(func, list):
             for arg in func:
                 if not isinstance(arg, str):
                     return False
                 if "(" not in arg or ")" not in arg:
                     return False
                 if arg.split("(")[0].upper() not in SMA_INDEX_FUNCTIONS:
                     return False
                 if arg.split("(")[1].split(")")[0] not in ALL_COL and arg.split("(")[1].split(")")[0] not in TAG_COL:
                     return False
         return True

     def __check_sma_watermark(self, arg):
         if not arg:
             return False
         if not isinstance(arg, str):
             return False
         if arg[-1] not in SMA_WATMARK_MAXDELAY_INIT:
             return False
         if len(arg) == 1:
             return False
         if not arg[:-1].isdecimal():
             return False
         if tdSql.get_times(arg) > WATERMARK_MAX:
             return False
         if tdSql.get_times(arg) < WATERMARK_MIN:
             return False
         return True

     def __check_sma_max_delay(self, arg):
         if not self.__check_sma_watermark(arg):
             return False
         if tdSql.get_times(arg) < MAX_DELAY_MIN:
             return False
         return True

     def __check_sma_sliding(self, arg):
         if not isinstance(arg, str):
             return False
         if arg[-1] not in TAOS_TIME_INIT:
             return False
         if len(arg) == 1:
             return False
         if not arg[:-1].isdecimal():
             return False
         return True

     def __get_interval_offset(self, args):
         if isinstance(args, str):
             interval, offset = args, None
         elif isinstance(args, tuple) or isinstance(args, list):
             if len(args) == 1:
                 interval, offset = args[0], None
             elif len(args) == 2:
                 interval, offset = args
             else:
                 interval, offset = False, False
         else:
             interval, offset = False, False
         return interval, offset

     def __check_sma_interval(self, args):
         if not isinstance(args, tuple) and not isinstance(args, str):
             return False
         interval, offset = self.__get_interval_offset(args)
         if not interval:
             return False
         if not self.__check_sma_sliding(interval):
             return False
         if tdSql.get_times(interval) < INTERVAL_MIN:
             return False
         if offset:
             if not self.__check_sma_sliding(offset):
                 return False
             if tdSql.get_times(interval) <= tdSql.get_times(offset):
                 return False
         return True

     def __sma_create_check(self, sma: SMAschema):
         if self.updatecfgDict["querySmaOptimize"] == 0:
             return False
         # # TODO: if database is a rollup-db, can not create sma index
         # tdSql.query("select database()")
         # if sma.rollup_db :
         #     return False
         tdSql.query("show stables")
         if not sma.tbname:
             return False
         stb_in_list = False
         for row in tdSql.queryResult:
             if sma.tbname == row[0]:
                 stb_in_list = True
                 break
         if not stb_in_list:
             return False
         if not sma.creation or not isinstance(sma.creation, str) or sma.creation.upper() != "CREATE":
             return False
         if not sma.index_flag or not isinstance(sma.index_flag, str) or sma.index_flag.upper() != "SMA INDEX":
             return False
         if not sma.index_name or not isinstance(sma.index_name, str) or sma.index_name.upper() in TAOS_KEYWORDS:
             return False
         if not sma.operator or not isinstance(sma.operator, str) or sma.operator.upper() != "ON":
             return False
         if not sma.func or not self.__check_sma_func(sma.func):
             return False
         tdSql.query(f"desc {sma.tbname}")
         _col_list = []
         for col_row in tdSql.queryResult:
             _col_list.append(col_row[0])
         _sma_func_cols = self.__get_sma_func_col(sma.func)
         for _sma_func_col in _sma_func_cols:
             if _sma_func_col not in _col_list:
                 return False
         if sma.sliding and not self.__check_sma_sliding(sma.sliding):
             return False
         interval, _ = self.__get_interval_offset(sma.interval)
         if not sma.interval or not self.__check_sma_interval(sma.interval):
             return False
         if sma.sliding and tdSql.get_times(interval) < tdSql.get_times(sma.sliding):
             return False
         if sma.watermark and not self.__check_sma_watermark(sma.watermark):
             return False
         if sma.max_delay and not self.__check_sma_max_delay(sma.max_delay):
             return False
         if sma.other:
             return False
         return True

     def sma_create_check(self, sma: SMAschema):
         if self.__sma_create_check(sma):
             tdSql.query(self.__create_sma_index(sma))
             self.sma_count += 1
             self.sma_created_index.append(sma.index_name)
             tdSql.query("show streams")
             tdSql.checkRows(self.sma_count)
         else:
             tdSql.error(self.__create_sma_index(sma))

     def __drop_sma_index(self, sma: SMAschema):
         sql = f"{sma.drop} {sma.drop_flag} {sma.index_name}"
         return sql

     def __sma_drop_check(self, sma: SMAschema):
         if not sma.drop:
             return False
         if not sma.drop_flag:
             return False
         if not sma.index_name:
             return False
         return True

     def sma_drop_check(self, sma: SMAschema):
         if self.__sma_drop_check(sma):
             tdSql.query(self.__drop_sma_index(sma))
             print(self.__drop_sma_index(sma))
             self.sma_count -= 1
             self.sma_created_index = list(filter(lambda x: x != sma.index_name, self.sma_created_index))
             tdSql.query("show streams")
             tdSql.checkRows(self.sma_count)
         else:
             tdSql.error(self.__drop_sma_index(sma))

     def __show_sma_index(self, sma: SMAschema):
         sql = f"{sma.show} {sma.show_msg} {sma.show_oper} {sma.tbname}"
         return sql

     def __sma_show_check(self, sma: SMAschema):
         if not sma.show:
             return False
         if not sma.show_msg:
             return False
         if not sma.show_oper:
             return False
         if not sma.tbname:
             return False
         return True

     def sma_show_check(self, sma: SMAschema):
         if self.__sma_show_check(sma):
             tdSql.query(self.__show_sma_index(sma))
             tdSql.checkRows(self.sma_count)
         else:
             tdSql.error(self.__show_sma_index(sma))

     @property
     def __create_sma_sql(self):
         err_sqls = []
         cur_sqls = []
         # err_set
         # # case 1: required fields check
         err_sqls.append(SMAschema(creation="", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         err_sqls.append(SMAschema(index_name="", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         err_sqls.append(SMAschema(index_flag="", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         err_sqls.append(SMAschema(operator="", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         err_sqls.append(SMAschema(tbname="", func=(f"min({INT_COL})", f"max({INT_COL})")))
         err_sqls.append(SMAschema(func=("",), tbname=STBNAME))
         err_sqls.append(SMAschema(interval=(""), tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         # # case 2: err fields
         err_sqls.append(SMAschema(creation="show", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         err_sqls.append(SMAschema(creation="alter", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         err_sqls.append(SMAschema(creation="select", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         err_sqls.append(SMAschema(index_flag="SMA INDEXES", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         err_sqls.append(SMAschema(index_flag="SMA INDEX ,", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         err_sqls.append(SMAschema(index_name="tbname", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))

         # current_set
         cur_sqls.append(SMAschema(max_delay="", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         cur_sqls.append(SMAschema(watermark="", index_name="sma_index_2", tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))
         cur_sqls.append(SMAschema(sliding="", index_name='sma_index_3', tbname=STBNAME, func=(f"min({INT_COL})", f"max({INT_COL})")))

         return err_sqls, cur_sqls

     def test_create_sma(self):
         err_sqls, cur_sqls = self.__create_sma_sql
         for err_sql in err_sqls:
             self.sma_create_check(err_sql)
         for cur_sql in cur_sqls:
             self.sma_create_check(cur_sql)

     @property
     def __drop_sma_sql(self):
         err_sqls = []
         cur_sqls = []
         # err_set
         ## case 1: required fields check
         err_sqls.append(SMAschema(drop=""))
         err_sqls.append(SMAschema(drop_flag=""))
         err_sqls.append(SMAschema(index_name=""))
         for index in self.sma_created_index:
             cur_sqls.append(SMAschema(index_name=index))
         return err_sqls, cur_sqls

     def test_drop_sma(self):
         err_sqls, cur_sqls = self.__drop_sma_sql
         for err_sql in err_sqls:
             self.sma_drop_check(err_sql)
         # for cur_sql in cur_sqls:
         #     self.sma_drop_check(cur_sql)

     def all_test(self):
         self.test_create_sma()
         self.test_drop_sma()
         pass
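The watermark/max_delay validation above boils down to: an accepted unit suffix, a decimal magnitude, and a converted value inside [WATERMARK_MIN, WATERMARK_MAX]. A standalone sketch of that rule, assuming the test framework's util package is importable; check_watermark_literal is a stand-in name, not the case's private method:

from util.constant import SMA_WATMARK_MAXDELAY_INIT, TIME_S, TIME_M, WATERMARK_MIN, WATERMARK_MAX

_UNIT_MS = {"a": 1, "s": TIME_S, "m": TIME_M}   # the unit suffixes accepted here

def check_watermark_literal(arg: str) -> bool:
    # Same shape as the check above: unit suffix, decimal body, bounded value.
    if not arg or arg[-1] not in SMA_WATMARK_MAXDELAY_INIT or not arg[:-1].isdecimal():
        return False
    value_ms = int(arg[:-1]) * _UNIT_MS[arg[-1]]
    return WATERMARK_MIN <= value_ms <= WATERMARK_MAX

assert check_watermark_literal("5s")        # 5000 ms, inside the range
assert not check_watermark_literal("16m")   # 960000 ms > WATERMARK_MAX (900000)
assert not check_watermark_literal("5")     # missing unit suffix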
     def __create_tb(self):
         tdLog.printNoPrefix("==========step: create table")
-        create_stb_sql = f'''create table stb1(
+        create_stb_sql = f'''create table {STBNAME}(
                 ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
                 {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
                 {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp,
                 {TINT_UN_COL} tinyint unsigned, {SINT_UN_COL} smallint unsigned,
                 {INT_UN_COL} int unsigned, {BINT_UN_COL} bigint unsigned
-            ) tags (tag1 int)
+            ) tags ({INT_TAG} int)
             '''
-        create_ntb_sql = f'''create table t1(
+        create_ntb_sql = f'''create table {NTBNAME}(
                 ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
                 {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
                 {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp,
...
@@ -253,6 +548,7 @@ class TDTestCase:
         return data_set

     def __insert_data(self):
+        tdLog.printNoPrefix("==========step: start inser data into tables now.....")
         data = self.__data_set(rows=self.rows)
         # now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
...
@@ -278,7 +574,7 @@ class TDTestCase:
         tdSql.execute(
             f"insert into ct4 values ( {NOW - i * int(TIME_STEP * 0.8)}, {row_data} )")
         tdSql.execute(
-            f"insert into t1 values ( {NOW - i * int(TIME_STEP * 1.2)}, {row_data} )")
+            f"insert into {NTBNAME} values ( {NOW - i * int(TIME_STEP * 1.2)}, {row_data} )")
         tdSql.execute(
             f"insert into ct2 values ( {NOW + int(TIME_STEP * 0.6)}, {null_data} )")
...
@@ -295,28 +591,31 @@ class TDTestCase:
             f"insert into ct4 values ( {NOW - self.rows * int(TIME_STEP * 0.39)}, {null_data} )")
         tdSql.execute(
-            f"insert into t1 values ( {NOW + int(TIME_STEP * 1.2)}, {null_data} )")
+            f"insert into {NTBNAME} values ( {NOW + int(TIME_STEP * 1.2)}, {null_data} )")
         tdSql.execute(
-            f"insert into t1 values ( {NOW - (self.rows + 1) * int(TIME_STEP * 1.2)}, {null_data} )")
+            f"insert into {NTBNAME} values ( {NOW - (self.rows + 1) * int(TIME_STEP * 1.2)}, {null_data} )")
         tdSql.execute(
-            f"insert into t1 values ( {NOW - self.rows * int(TIME_STEP * 0.59)}, {null_data} )")
+            f"insert into {NTBNAME} values ( {NOW - self.rows * int(TIME_STEP * 0.59)}, {null_data} )")

     def run(self):
-        sma1 = SMAschema(func=("min(c1)", "max(c2)"))
-        sql1 = self.__create_sma_index(sma1)
-        print("================")
-        print(sql1)
-        # a = DataSet()
-        # return
         self.rows = 10

         tdLog.printNoPrefix("==========step0:all check")
         # self.all_test()

         tdLog.printNoPrefix("==========step1:create table in normal database")
         tdSql.prepare()
         self.__create_tb()
-        self.__insert_data()
+        # self.__insert_data()
+        self.all_test()

+        # drop databases, create same name db、stb and sma index
+        # tdSql.prepare()
+        # self.__create_tb()
+        # self.__insert_data()
+        # self.all_test()
         return

         tdLog.printNoPrefix("==========step2:create table in rollup database")
...
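For context on the generated data layout, not part of the diff: __insert_data steps backwards from NOW in multiples of TIME_STEP, scaled per table (for example 0.8x for ct4 and 1.2x for the normal table). A self-contained sketch of that timestamp pattern:

import datetime

TIME_STEP = 10000  # ms between generated rows, as defined at the top of the file
NOW = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)  # epoch ms

# Timestamps for a 10-row table at the unscaled step: NOW, NOW-10s, NOW-20s, ...
timestamps = [NOW - i * TIME_STEP for i in range(10)]
assert timestamps[0] - timestamps[-1] == 9 * TIME_STEP  # 90 seconds of history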
tests/system-test/2-query/join.py    (view file @ 5c9752c9)

@@ -28,7 +28,7 @@ class TDTestCase:
     def init(self, conn, logSql):
         tdLog.debug(f"start to excute {__file__}")
-        tdSql.init(conn.cursor(), True)
+        tdSql.init(conn.cursor(), False)

     def __query_condition(self, tbname):
         query_condition = []
...
tests/system-test/2-query/join2.py    (view file @ 5c9752c9)

@@ -28,7 +28,7 @@ class TDTestCase:
     def init(self, conn, logSql):
         tdLog.debug(f"start to excute {__file__}")
-        tdSql.init(conn.cursor(), True)
+        tdSql.init(conn.cursor(), False)

     def __query_condition(self, tbname):
         query_condition = []
...
tests/system-test/2-query/substr.py    (view file @ 5c9752c9)

@@ -31,7 +31,7 @@ class TDTestCase:
     def init(self, conn, logSql):
         tdLog.debug(f"start to excute {__file__}")
-        tdSql.init(conn.cursor())
+        tdSql.init(conn.cursor(), False)

     def __substr_condition(self):  # sourcery skip: extract-method
         substr_condition = []
...