Unverified commit 5c9752c9, authored by Jason-Jia, committed by GitHub

Merge pull request #14282 from taosdata/cpwu/3.0

test: fix constant: add taos_keywords; add time_range_wise case
# -*- coding: utf-8 -*-
# basic type
TAOS_DATA_TYPE = [
"INT", "BIGINT", "SMALLINT", "TINYINT", "INT UNSIGNED", "BIGINT UNSIGNED", "SMALLINT UNSIGNED", "TINYINT UNSIGNED",
"FLOAT", "DOUBLE",
"BOOL",
"BINARY", "NCHAR", "VARCHAR",
"TIMESTAMP",
# "MEDIUMBLOB", "BLOB", # add in 3.x
# "DECIMAL", "NUMERIC", # add in 3.x
"JSON", # only for tag
]
TAOS_NUM_TYPE = [
"INT", "BIGINT", "SMALLINT", "TINYINT", "INT UNSIGNED", "BIGINT UNSIGNED", "SMALLINT UNSIGNED", "TINYINT UNSIGNED", "FLOAT", "DOUBLE",
# "DECIMAL", "NUMERIC", # add in 3.x
]
TAOS_CHAR_TYPE = [
"BINARY", "NCHAR", "VARCHAR",
]
TAOS_BOOL_TYPE = ["BOOL",]
TAOS_TS_TYPE = ["TIMESTAMP",]
TAOS_BIN_TYPE = [
"MEDIUMBLOB", "BLOB", # add in 3.x
]
TAOS_TIME_INIT = ["b", "u", "a", "s", "m", "h", "d", "w", "n", "y"]
TAOS_PRECISION = ["ms", "us", "ns"]
PRECISION_DEFAULT = "ms"
PRECISION = PRECISION_DEFAULT
TAOS_KEYWORDS = [
"ABORT", "CREATE", "IGNORE", "NULL", "STAR",
"ACCOUNT", "CTIME", "IMMEDIATE", "OF", "STATE",
"ACCOUNTS", "DATABASE", "IMPORT", "OFFSET", "STATEMENT",
"ADD", "DATABASES", "IN", "OR", "STATE_WINDOW",
"AFTER", "DAYS", "INITIALLY", "ORDER", "STORAGE",
"ALL", "DBS", "INSERT", "PARTITIONS", "STREAM",
"ALTER", "DEFERRED", "INSTEAD", "PASS", "STREAMS",
"AND", "DELIMITERS", "INT", "PLUS", "STRING",
"AS", "DESC", "INTEGER", "PPS", "SYNCDB",
"ASC", "DESCRIBE", "INTERVAL", "PRECISION", "TABLE",
"ATTACH", "DETACH", "INTO", "PREV", "TABLES",
"BEFORE", "DISTINCT", "IS", "PRIVILEGE", "TAG",
"BEGIN", "DIVIDE", "ISNULL", "QTIME", "TAGS",
"BETWEEN", "DNODE", "JOIN", "QUERIES", "TBNAME",
"BIGINT", "DNODES", "KEEP", "QUERY", "TIMES",
"BINARY", "DOT", "KEY", "QUORUM", "TIMESTAMP",
"BITAND", "DOUBLE", "KILL", "RAISE", "TINYINT",
"BITNOT", "DROP", "LE", "REM", "TOPIC",
"BITOR", "EACH", "LIKE", "REPLACE", "TOPICS",
"BLOCKS", "END", "LIMIT", "REPLICA", "TRIGGER",
"BOOL", "EQ", "LINEAR", "RESET", "TSERIES",
"BY", "EXISTS", "LOCAL", "RESTRICT", "UMINUS",
"CACHE", "EXPLAIN", "LP", "ROW", "UNION",
"CACHELAST", "FAIL", "LSHIFT", "RP", "UNSIGNED",
"CASCADE", "FILE", "LT", "RSHIFT", "UPDATE",
"CHANGE", "FILL", "MATCH", "SCORES", "UPLUS",
"CLUSTER", "FLOAT", "MAXROWS", "SELECT", "USE",
"COLON", "FOR", "MINROWS", "SEMI", "USER",
"COLUMN", "FROM", "MINUS", "SESSION", "USERS",
"COMMA", "FSYNC", "MNODES", "SET", "USING",
"COMP", "GE", "MODIFY", "SHOW", "VALUES",
"COMPACT", "GLOB", "MODULES", "SLASH", "VARIABLE",
"CONCAT", "GRANTS", "NCHAR", "SLIDING", "VARIABLES",
"CONFLICT", "GROUP", "NE", "SLIMIT", "VGROUPS",
"CONNECTION", "GT", "NONE", "SMALLINT", "VIEW",
"CONNECTIONS", "HAVING", "NOT", "SOFFSET", "VNODES",
"CONNS", "ID", "NOTNULL", "STABLE", "WAL",
"COPY", "IF", "NOW", "STABLES", "WHERE",
]
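A hedged usage sketch (hypothetical helper, not part of this commit) of how a reserved-word list like this is typically consumed by the test utilities, e.g. to decide whether a candidate identifier should be expected to fail:

def is_reserved_keyword(name):
    # Case-insensitive membership test against TAOS_KEYWORDS above.
    return name.upper() in TAOS_KEYWORDS

# is_reserved_keyword("select") -> True, is_reserved_keyword("c_int") -> False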
# basic data type boundary
TINYINT_MAX = 127
TINYINT_MIN = -128
@@ -11,7 +82,7 @@ SMALLINT_MAX = 32767
SMALLINT_MIN = -32768
SMALLINT_UN_MAX = 65535
MALLINT_UN_MIN = 0
SMALLINT_UN_MIN = 0
INT_MAX = 2147483647
INT_MIN = -2147483648
@@ -33,8 +104,8 @@ DOUBLE_MIN = -1.7E+308
# schema boundary
BINARY_LENGTH_MAX = 16374
NCAHR_LENGTH_MAX_ = 4093
DBNAME_LENGTH_MAX_ = 64
NCAHR_LENGTH_MAX = 4093
DBNAME_LENGTH_MAX = 64
STBNAME_LENGTH_MAX = 192
STBNAME_LENGTH_MIN = 1
@@ -66,4 +137,32 @@ MNODE_SHM_SIZE_DEFAULT = 6292480
VNODE_SHM_SIZE_MAX = 2147483647
VNODE_SHM_SIZE_MIN = 6292480
VNODE_SHM_SIZE_DEFAULT = 31458304
\ No newline at end of file
VNODE_SHM_SIZE_DEFAULT = 31458304
# time_init
TIME_MS = 1
TIME_US = TIME_MS/1000
TIME_NS = TIME_US/1000
TIME_S = 1000 * TIME_MS
TIME_M = 60 * TIME_S
TIME_H = 60 * TIME_M
TIME_D = 24 * TIME_H
TIME_W = 7 * TIME_D
TIME_N = 30 * TIME_D
TIME_Y = 365 * TIME_D
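These factors express every unit in milliseconds (TIME_MS == 1), so converting a duration to the default "ms" precision is plain multiplication. A minimal sketch using only the constants above:

# Minimal sketch: 90 minutes in the default millisecond precision.
# TIME_M == 60 * 1000, so this evaluates to 5400000.
ninety_minutes_ms = 90 * TIME_M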
# session parameters
INTERVAL_MIN = 1 * TIME_MS if PRECISION == PRECISION_DEFAULT else 1 * TIME_US
# streams and related agg-function
SMA_INDEX_FUNCTIONS = ["MIN", "MAX"]
ROLLUP_FUNCTIONS = ["AVG", "SUM", "MIN", "MAX", "LAST", "FIRST"]
SMA_WATMARK_MAXDELAY_INIT = ['a', "s", "m"]
WATERMARK_MAX = 900000
WATERMARK_MIN = 0
MAX_DELAY_MAX = 900000
MAX_DELAY_MIN = 1
\ No newline at end of file
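The watermark/max_delay bounds above (0 to 900000 ms, i.e. at most 15 minutes, with only the unit suffixes in SMA_WATMARK_MAXDELAY_INIT accepted) are what the negative create-stable cases later in this diff probe with literals such as 901s and 16m. A hedged sketch, assuming the TIME_* factors and bounds from this file, of checking whether such a literal is in range:

# Hypothetical helper, not part of the commit: maps the accepted suffixes
# ('a' = ms, 's' = seconds, 'm' = minutes) to their millisecond factors.
_WM_FACTOR = {"a": TIME_MS, "s": TIME_S, "m": TIME_M}

def watermark_in_range(literal):
    value_ms = int(literal[:-1]) * _WM_FACTOR[literal[-1]]
    return WATERMARK_MIN <= value_ms <= WATERMARK_MAX

# watermark_in_range("900000a") -> True; watermark_in_range("901s") -> False; watermark_in_range("16m") -> False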
@@ -21,6 +21,7 @@ import psutil
import shutil
import pandas as pd
from util.log import *
from util.constant import *
def _parse_datetime(timestr):
try:
@@ -117,8 +118,7 @@ class TDSql:
col_name_list = []
col_type_list = []
self.cursor.execute(sql)
self.queryCols = self.cursor.description
for query_col in self.queryCols:
for query_col in self.cursor.description:
col_name_list.append(query_col[0])
col_type_list.append(query_col[1])
except Exception as e:
@@ -301,6 +301,41 @@ class TDSql:
args = (caller.filename, caller.lineno, self.sql, elm, expect_elm)
tdLog.exit("%s(%d) failed: sql:%s, elm:%s == expect_elm:%s" % args)
def get_times(self, time_str, precision="ms"):
caller = inspect.getframeinfo(inspect.stack()[1][0])
if time_str[-1] not in TAOS_TIME_INIT:
tdLog.exit(f"{caller.filename}({caller.lineno}) failed: {time_str} not a standard taos time init")
if precision not in TAOS_PRECISION:
tdLog.exit(f"{caller.filename}({caller.lineno}) failed: {precision} not a standard taos time precision")
if time_str[-1] == TAOS_TIME_INIT[0]:
times = int(time_str[:-1]) * TIME_NS
if time_str[-1] == TAOS_TIME_INIT[1]:
times = int(time_str[:-1]) * TIME_US
if time_str[-1] == TAOS_TIME_INIT[2]:
times = int(time_str[:-1]) * TIME_MS
if time_str[-1] == TAOS_TIME_INIT[3]:
times = int(time_str[:-1]) * TIME_S
if time_str[-1] == TAOS_TIME_INIT[4]:
times = int(time_str[:-1]) * TIME_M
if time_str[-1] == TAOS_TIME_INIT[5]:
times = int(time_str[:-1]) * TIME_H
if time_str[-1] == TAOS_TIME_INIT[6]:
times = int(time_str[:-1]) * TIME_D
if time_str[-1] == TAOS_TIME_INIT[7]:
times = int(time_str[:-1]) * TIME_W
if time_str[-1] == TAOS_TIME_INIT[8]:
times = int(time_str[:-1]) * TIME_N
if time_str[-1] == TAOS_TIME_INIT[9]:
times = int(time_str[:-1]) * TIME_Y
if precision == "ms":
return int(times)
elif precision == "us":
return int(times*1000)
elif precision == "ns":
return int(times*1000*1000)
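A brief usage sketch of the new helper (hypothetical calls, not part of the commit): the unit suffix is resolved through TAOS_TIME_INIT and the result is scaled to the requested precision.

# Hypothetical usage of get_times:
# tdSql.get_times("10m")                  -> 600000      (10 minutes in ms)
# tdSql.get_times("10m", precision="us")  -> 600000000   (same duration in microseconds)
# tdSql.get_times("2d")                   -> 172800000   (2 days in ms)
# an unknown suffix such as "10x" aborts the test via tdLog.exit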
def taosdStatus(self, state):
tdLog.sleep(5)
pstate = 0
......
@@ -21,9 +21,9 @@ SINT_UN_COL = "c_sint_un"
BINT_UN_COL = "c_bint_un"
INT_UN_COL = "c_int_un"
BINARY_COL = "c8"
NCHAR_COL = "c9"
TS_COL = "c10"
BINARY_COL = "c_binary"
NCHAR_COL = "c_nchar"
TS_COL = "c_ts"
NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
@@ -51,12 +51,28 @@ class DataSet:
binary_data : List[str] = None
nchar_data : List[str] = None
def __post_init__(self):
self.ts_data = []
self.int_data = []
self.bint_data = []
self.sint_data = []
self.tint_data = []
self.int_un_data = []
self.bint_un_data = []
self.sint_un_data = []
self.tint_un_data = []
self.float_data = []
self.double_data = []
self.bool_data = []
self.binary_data = []
self.nchar_data = []
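The __post_init__ above gives every DataSet instance its own empty lists, since mutable defaults cannot go directly in the field declarations. An equivalent, hedged sketch (illustrative only, showing a subset of the fields) using dataclasses.field with default_factory:

from dataclasses import dataclass, field
from typing import List

@dataclass
class DataSetSketch:
    # default_factory yields a fresh list per instance, replacing __post_init__.
    ts_data: List[int] = field(default_factory=list)
    int_data: List[int] = field(default_factory=list)
    binary_data: List[str] = field(default_factory=list)
    nchar_data: List[str] = field(default_factory=list)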
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to execute {__file__}")
tdSql.init(conn.cursor(), True)
tdSql.init(conn.cursor(), False)
@property
def create_databases_sql_err(self):
@@ -87,28 +103,28 @@ class TDTestCase:
@property
def create_stable_sql_err(self):
return [
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(ceil) watermark 1s maxdelay 1m",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(ceil) watermark 1s max_delay 1m",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(count) watermark 1min",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay -1s",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay -1s",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark -1m",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) watermark 1m ",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) maxdelay 1m ",
# f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) watermark 1m ",
# f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) max_delay 1m ",
f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} binary(16)) tags (tag1 int) rollup(avg) watermark 1s",
f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) tags (tag1 int) rollup(avg) maxdelay 1m",
# f"create table ntb_1 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) rollup(avg) watermark 1s maxdelay 1s",
f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) tags (tag1 int) rollup(avg) max_delay 1m",
# f"create table ntb_1 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) rollup(avg) watermark 1s max_delay 1s",
# f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) tags (tag1 int) " ,
# f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) " ,
# f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int) " ,
# f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int, {BINARY_COL} nchar(16)) " ,
# watermark, maxdelay: [0, 900000], [ms, s, m, ?]
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay 1u",
# watermark, max_delay: [0, 900000], [ms, s, m, ?]
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay 1u",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark 1b",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark 900001ms",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay 16m",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay 901s",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay 1h",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) maxdelay 0.2h",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay 16m",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay 901s",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay 1h",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) max_delay 0.2h",
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark 0.002d",
]
@@ -117,11 +133,11 @@ class TDTestCase:
def create_stable_sql_current(self):
return [
f"create stable stb1 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(avg)",
f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark 5s maxdelay 1m",
f"create stable stb3 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(max) watermark 5s maxdelay 1m",
f"create stable stb4 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(sum) watermark 5s maxdelay 1m",
# f"create stable stb5 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(last) watermark 5s maxdelay 1m",
# f"create stable stb6 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(first) watermark 5s maxdelay 1m",
f"create stable stb2 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(min) watermark 5s max_delay 1m",
f"create stable stb3 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(max) watermark 5s max_delay 1m",
f"create stable stb4 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(sum) watermark 5s max_delay 1m",
# f"create stable stb5 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(last) watermark 5s max_delay 1m",
# f"create stable stb6 ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) rollup(first) watermark 5s max_delay 1m",
]
def test_create_stb(self):
@@ -135,7 +151,7 @@ class TDTestCase:
tdSql.checkRows(len(self.create_stable_sql_current))
# tdSql.execute("use db") # because db is a normal database, not a rollup database, it should not be possible to create a rollup stable in it
# tdSql.error(f"create stable nor_db_rollup_stb ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) file_factor 5.0")
# tdSql.error(f"create stable nor_db_rollup_stb ({PRIMARY_COL} timestamp, {INT_COL} int) tags (tag1 int) watermark 5s max_delay 1m")
def test_create_databases(self):
@@ -177,21 +193,6 @@ class TDTestCase:
def __data_set(self, rows):
data_set = DataSet()
# neg_data_set = DataSet()
data_set.ts_data = []
data_set.int_data = []
data_set.bint_data = []
data_set.sint_data = []
data_set.tint_data = []
data_set.int_un_data = []
data_set.bint_un_data = []
data_set.sint_un_data = []
data_set.tint_un_data = []
data_set.float_data = []
data_set.double_data = []
data_set.bool_data = []
data_set.binary_data = []
data_set.nchar_data = []
for i in range(rows):
data_set.ts_data.append(NOW + 1 * (rows - i))
@@ -226,6 +227,7 @@ class TDTestCase:
return data_set
def __insert_data(self):
tdLog.printNoPrefix("==========step: start insert data into tables now.....")
data = self.__data_set(rows=self.rows)
# now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
@@ -264,10 +266,10 @@ class TDTestCase:
def run(self):
self.rows = 10
tdSql.prepare()
tdLog.printNoPrefix("==========step0:all check")
# self.all_test()
self.all_test()
tdLog.printNoPrefix("==========step1:create table in normal database")
tdSql.prepare()
......
@@ -28,7 +28,7 @@ class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to execute {__file__}")
tdSql.init(conn.cursor(), True)
tdSql.init(conn.cursor(), False)
def __query_condition(self,tbname):
query_condition = []
......
@@ -28,7 +28,7 @@ class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to execute {__file__}")
tdSql.init(conn.cursor(), True)
tdSql.init(conn.cursor(), False)
def __query_condition(self,tbname):
query_condition = []
......
@@ -31,7 +31,7 @@ class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to execute {__file__}")
tdSql.init(conn.cursor())
tdSql.init(conn.cursor(),False)
def __substr_condition(self): # sourcery skip: extract-method
substr_condition = []
......