Commit e4a9d815 authored by H Hongze Cheng

Merge branch '3.0' of https://github.com/taosdata/TDengine into fix/hzcheng_3.0

@@ -85,6 +85,11 @@ bool spreadFunctionSetup(SqlFunctionCtx *pCtx, SResultRowEntryInfo* pResultInfo)
int32_t spreadFunction(SqlFunctionCtx* pCtx);
int32_t spreadFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock);
bool getElapsedFuncEnv(struct SFunctionNode* pFunc, SFuncExecEnv* pEnv);
bool elapsedFunctionSetup(SqlFunctionCtx *pCtx, SResultRowEntryInfo* pResultInfo);
int32_t elapsedFunction(SqlFunctionCtx* pCtx);
int32_t elapsedFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock);
bool getHistogramFuncEnv(struct SFunctionNode* pFunc, SFuncExecEnv* pEnv);
bool histogramFunctionSetup(SqlFunctionCtx *pCtx, SResultRowEntryInfo* pResultInfo);
int32_t histogramFunction(SqlFunctionCtx* pCtx);
......
@@ -226,6 +226,27 @@ static int32_t translateSpread(SFunctionNode* pFunc, char* pErrBuf, int32_t len)
return TSDB_CODE_SUCCESS;
}
static int32_t translateElapsed(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
int32_t paraNum = LIST_LENGTH(pFunc->pParameterList);
if (1 != paraNum && 2 != paraNum) {
return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName);
}
SNode* pPara = nodesListGetNode(pFunc->pParameterList, 0);
if (QUERY_NODE_COLUMN != nodeType(pPara)) {
return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR,
"The input parameter of ELAPSED function can only be column");
}
uint8_t paraType = ((SExprNode*)nodesListGetNode(pFunc->pParameterList, 0))->resType.type;
if (TSDB_DATA_TYPE_TIMESTAMP != paraType) {
return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName);
}
pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE};
return TSDB_CODE_SUCCESS;
}
static int32_t translateLeastSQR(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList);
if (3 != numOfParams) {
@@ -794,6 +815,17 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = {
.processFunc = spreadFunction,
.finalizeFunc = spreadFinalize
},
{
.name = "elapsed",
.type = FUNCTION_TYPE_ELAPSED,
.classification = FUNC_MGT_AGG_FUNC,
.dataRequiredFunc = statisDataRequired,
.translateFunc = translateElapsed,
.getEnvFunc = getElapsedFuncEnv,
.initFunc = elapsedFunctionSetup,
.processFunc = elapsedFunction,
.finalizeFunc = elapsedFinalize
},
{
.name = "last_row",
.type = FUNCTION_TYPE_LAST_ROW,
......
@@ -112,6 +112,13 @@ typedef struct SSpreadInfo {
double max;
} SSpreadInfo;
typedef struct SElapsedInfo {
double result;
TSKEY min;
TSKEY max;
int64_t timeUnit;
} SElapsedInfo;
typedef struct SHistoFuncBin {
double lower;
double upper;
@@ -2494,6 +2501,116 @@ int32_t spreadFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock) {
return functionFinalize(pCtx, pBlock);
}
bool getElapsedFuncEnv(SFunctionNode* UNUSED_PARAM(pFunc), SFuncExecEnv* pEnv) {
pEnv->calcMemSize = sizeof(SElapsedInfo);
return true;
}
bool elapsedFunctionSetup(SqlFunctionCtx *pCtx, SResultRowEntryInfo* pResultInfo) {
if (!functionSetup(pCtx, pResultInfo)) {
return false;
}
SElapsedInfo* pInfo = GET_ROWCELL_INTERBUF(pResultInfo);
pInfo->result = 0;
pInfo->min = MAX_TS_KEY;
pInfo->max = 0;
if (pCtx->numOfParams == 3) {
pInfo->timeUnit = pCtx->param[1].param.i;
} else {
pInfo->timeUnit = 1;
}
return true;
}
int32_t elapsedFunction(SqlFunctionCtx *pCtx) {
int32_t numOfElems = 0;
// Only the pre-computed column summary is loaded; the actual data block is not loaded
SInputColumnInfoData* pInput = &pCtx->input;
SColumnDataAgg *pAgg = pInput->pColumnDataAgg[0];
SElapsedInfo* pInfo = GET_ROWCELL_INTERBUF(GET_RES_INFO(pCtx));
numOfElems = pInput->numOfRows; // this is the primary timestamp column, so there is no need to exclude NULL values
if (numOfElems == 0) {
goto _elapsed_over;
}
if (pInput->colDataAggIsSet) {
if (pInfo->min == MAX_TS_KEY) {
pInfo->min = GET_INT64_VAL(&pAgg->min);
pInfo->max = GET_INT64_VAL(&pAgg->max);
} else {
if (pCtx->order == TSDB_ORDER_ASC) {
pInfo->max = GET_INT64_VAL(&pAgg->max);
} else {
pInfo->min = GET_INT64_VAL(&pAgg->min);
}
}
} else { // computing based on the true data block
if (0 == pCtx->size) {
if (pCtx->order == TSDB_ORDER_DESC) {
if (pCtx->end.key != INT64_MIN) {
pInfo->min = pCtx->end.key;
}
} else {
if (pCtx->end.key != INT64_MIN) {
pInfo->max = pCtx->end.key + 1;
}
}
goto _elapsed_over;
}
SColumnInfoData* pCol = pInput->pData[0];
int32_t start = pInput->startRowIndex;
TSKEY* ptsList = (TSKEY*)colDataGetData(pCol, start);
if (pCtx->order == TSDB_ORDER_DESC) {
if (pCtx->start.key == INT64_MIN) {
pInfo->max = (pInfo->max < ptsList[pCtx->size - 1]) ? ptsList[pCtx->size - 1] : pInfo->max;
} else {
pInfo->max = pCtx->start.key + 1;
}
if (pCtx->end.key != INT64_MIN) {
pInfo->min = pCtx->end.key;
} else {
pInfo->min = ptsList[0];
}
} else {
if (pCtx->start.key == INT64_MIN) {
pInfo->min = (pInfo->min > ptsList[0]) ? ptsList[0] : pInfo->min;
} else {
pInfo->min = pCtx->start.key;
}
if (pCtx->end.key != INT64_MIN) {
pInfo->max = pCtx->end.key + 1;
} else {
pInfo->max = ptsList[pCtx->size - 1];
}
}
}
_elapsed_over:
// if the data checked above are all null, no output is produced
SET_VAL(GET_RES_INFO(pCtx), numOfElems, 1);
return TSDB_CODE_SUCCESS;
}
int32_t elapsedFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock) {
SElapsedInfo* pInfo = GET_ROWCELL_INTERBUF(GET_RES_INFO(pCtx));
double result = (double)pInfo->max - (double)pInfo->min;
result = (result >= 0) ? result : -result;
pInfo->result = result / pInfo->timeUnit;
return functionFinalize(pCtx, pBlock);
}
bool getHistogramFuncEnv(SFunctionNode* UNUSED_PARAM(pFunc), SFuncExecEnv* pEnv) {
pEnv->calcMemSize = sizeof(SHistoFuncInfo) + HISTOGRAM_MAX_BINS_NUM * sizeof(SHistoFuncBin);
return true;
......
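The ELAPSED aggregate added above tracks the smallest and largest primary timestamps it encounters (using the pre-computed block summary when available) and, in elapsedFinalize, reports |max - min| divided by the optional time unit. A minimal Python sketch of that semantics, for illustration only; it ignores the block-boundary adjustments around pCtx->start.key / pCtx->end.key, and the name elapsed_span is hypothetical:

def elapsed_span(timestamps, time_unit=1):
    # Mirror of the finalize step: the span between the smallest and largest
    # primary timestamps, scaled by the requested time unit.
    if not timestamps:
        return 0.0
    lo, hi = min(timestamps), max(timestamps)
    return abs(hi - lo) / time_unit

# e.g. rows at 1000, 1500 and 3000 (ms precision), reported in the default unit
print(elapsed_span([1000, 1500, 3000]))  # 2000.0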
@@ -35,53 +35,26 @@ class TDTestCase:
for char_col in CHAR_COL:
query_condition.extend(
(
f"{tbname}.{char_col}",
f"upper( {tbname}.{char_col} )",
f"char_length( {tbname}.{char_col} )",
f"concat( {tbname}.{char_col}, {tbname}.{char_col} )",
f"concat_ws( '_', {tbname}.{char_col}, {tbname}.{char_col} )",
f"length( {tbname}.{char_col} )",
f"lower( {tbname}.{char_col} )",
f"ltrim( {tbname}.{char_col} )",
f"rtrim( {tbname}.{char_col} )",
f"substr( {tbname}.{char_col}, 1 )",
f"count( {tbname}.{char_col} )",
f"cast( {tbname}.{char_col} as nchar(3) )",
f"cast( {tbname}.{char_col} as nchar(8) )",
)
)
query_condition.extend( f"cast( {tbname}.{un_char_col} as binary(16) ) " for un_char_col in NUM_COL)
query_condition.extend( f"cast( {tbname}.{char_col} + {tbname}.{char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL )
query_condition.extend( f"cast( {tbname}.{char_col} + {tbname}.{un_char_col} as binary(32) ) " for un_char_col in NUM_COL )
for num_col in NUM_COL:
query_condition.extend(
(
f"{tbname}.{num_col}",
f"ceil( {tbname}.{num_col} )",
f"abs( {tbname}.{num_col} )",
f"acos( {tbname}.{num_col} )",
f"asin( {tbname}.{num_col} )",
f"atan( {tbname}.{num_col} )",
f"cos( {tbname}.{num_col} )",
f"floor( {tbname}.{num_col} )",
f"log( {tbname}.{num_col}, {tbname}.{num_col})",
f"sin( {tbname}.{num_col} )",
f"sqrt( {tbname}.{num_col} )",
f"tan( {tbname}.{num_col} )",
f"round( {tbname}.{num_col} )",
f"max( {tbname}.{num_col} )",
f"sum( {tbname}.{num_col} )",
f"count( {tbname}.{num_col} )",
f"min( {tbname}.{num_col} )",
)
)
query_condition.extend( f"{tbname}.{num_col} + {tbname}.{num_col_2}" for num_col_2 in NUM_COL )
query_condition.extend( f"{tbname}.{num_col} + {tbname}.{char_col} " for char_col in CHAR_COL )
query_condition.extend(
(
''' "test1234!@#$%^&*():'><?/.,][}{" ''',
''' "test12" ''',
# 1010,
)
@@ -208,8 +181,8 @@ class TDTestCase:
)
)
return filter(None, sqls)
# return list(filter(None, sqls))
# return filter(None, sqls)
return list(filter(None, sqls))
def __get_type(self, col):
if tdSql.cursor.istype(col, "BOOL"):
@@ -245,34 +218,55 @@ class TDTestCase:
def union_check(self):
sqls = self.sql_list()
for sql1 in sqls:
tdSql.query(sql1)
for i in range(len(sqls)):
tdSql.query(sqls[i])
res1_type = self.__get_type(0)
for sql2 in sqls:
tdSql.query(sql2)
union_type = False
for j in range(len(sqls[i:])):
tdSql.query(sqls[j+i])
order_union_type = False
rev_order_type = False
all_union_type = False
res2_type = self.__get_type(0)
if res1_type in ( "BIGINT" , "NCHAR" ):
union_type = True
elif res2_type == res1_type:
union_type = True
if res2_type == res1_type:
all_union_type = True
elif res1_type in ( "BIGINT" , "NCHAR" ) and res2_type in ("BIGINT" , "NCHAR"):
all_union_type = True
elif res1_type in ("BIGINT", "NCHAR"):
order_union_type = True
elif res2_type in ("BIGINT", "NCHAR"):
rev_order_type = True
elif res1_type == "TIMESAMP" and res2_type not in ("BINARY", "NCHAR"):
union_type = True
order_union_type = True
elif res2_type == "TIMESAMP" and res1_type not in ("BINARY", "NCHAR"):
rev_order_type = True
elif res1_type == "BINARY" and res2_type != "NCHAR":
union_type = True
order_union_type = True
elif res2_type == "BINARY" and res1_type != "NCHAR":
rev_order_type = True
if union_type:
tdSql.query(f"{sql1} union {sql2}")
if all_union_type:
tdSql.query(f"{sqls[i]} union {sqls[j+i]}")
tdSql.query(f"{sqls[j+i]} union {sqls[i]}")
tdSql.checkCols(1)
tdSql.query(f"{sql1} union all {sql2}")
tdSql.query(f"{sqls[i]} union all {sqls[j+i]}")
tdSql.query(f"{sqls[j+i]} union all {sqls[i]}")
tdSql.checkCols(1)
elif order_union_type:
tdSql.query(f"{sqls[i]} union {sqls[j+i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[i]} union all {sqls[j+i]}")
tdSql.checkCols(1)
elif rev_order_type:
tdSql.query(f"{sqls[j+i]} union {sqls[i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[j+i]} union all {sqls[i]}")
tdSql.checkCols(1)
else:
tdSql.error(f"{sql1} union {sql2}")
tdSql.error(f"{sqls[i]} union {sqls[j+i]}")
def __test_error(self):
tdSql.error( "show tables union show tables" )
tdSql.error( "create table errtb1 union all create table errtb2" )
tdSql.error( "drop table ct1 union all drop table ct3" )
......
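The reworked union_check above replaces the single union_type flag with three flags: all_union_type (both orders of the UNION are expected to succeed), order_union_type (only sqls[i] UNION sqls[j+i] is expected to succeed), and rev_order_type (only the reverse order). A small self-contained sketch of that decision chain, using the same type names the test compares against (illustration only; the helper name union_compat is hypothetical):

def union_compat(t1, t2):
    # Returns (both_ways, forward_only, reverse_only) for "t1 UNION t2",
    # following the same elif order as union_check.
    widen = ("BIGINT", "NCHAR")
    if t1 == t2 or (t1 in widen and t2 in widen):
        return (True, False, False)
    if t1 in widen:
        return (False, True, False)
    if t2 in widen:
        return (False, False, True)
    if t1 == "TIMESTAMP" and t2 not in ("BINARY", "NCHAR"):
        return (False, True, False)
    if t2 == "TIMESTAMP" and t1 not in ("BINARY", "NCHAR"):
        return (False, False, True)
    if t1 == "BINARY" and t2 != "NCHAR":
        return (False, True, False)
    if t2 == "BINARY" and t1 != "NCHAR":
        return (False, False, True)
    return (False, False, False)  # neither order works; the test expects an error

print(union_compat("INT", "INT"))     # (True, False, False)
print(union_compat("BIGINT", "INT"))  # (False, True, False)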
import datetime
from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *
PRIMARY_COL = "ts"
INT_COL = "c1"
BINT_COL = "c2"
SINT_COL = "c3"
TINT_COL = "c4"
FLOAT_COL = "c5"
DOUBLE_COL = "c6"
BOOL_COL = "c7"
BINARY_COL = "c8"
NCHAR_COL = "c9"
TS_COL = "c10"
NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
BOOLEAN_COL = [ BOOL_COL, ]
TS_TYPE_COL = [ TS_COL, ]
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to excute {__file__}")
tdSql.init(conn.cursor())
def __query_condition(self,tbname):
query_condition = []
for char_col in CHAR_COL:
query_condition.extend(
(
f"{tbname}.{char_col}",
f"upper( {tbname}.{char_col} )",
f"char_length( {tbname}.{char_col} )",
f"concat( {tbname}.{char_col}, {tbname}.{char_col} )",
)
)
for num_col in NUM_COL:
query_condition.extend(
(
f"{tbname}.{num_col}",
f"ceil( {tbname}.{num_col} )",
f"abs( {tbname}.{num_col} )",
f"acos( {tbname}.{num_col} )",
f"max( {tbname}.{num_col} )",
)
)
query_condition.extend(
(
# 1010,
)
)
return query_condition
def __join_condition(self, tb_list, filter=PRIMARY_COL, INNER=False):
table_reference = tb_list[0]
join_condition = table_reference
join = "inner join" if INNER else "join"
for i in range(len(tb_list[1:])):
join_condition += f" {join} {tb_list[i+1]} on {table_reference}.{filter}={tb_list[i+1]}.{filter}"
return join_condition
def __where_condition(self, col=None, tbname=None, query_conditon=None):
if query_conditon and isinstance(query_conditon, str):
if query_conditon.startswith("count"):
query_conditon = query_conditon[6:-1]
elif query_conditon.startswith("max"):
query_conditon = query_conditon[4:-1]
elif query_conditon.startswith("sum"):
query_conditon = query_conditon[4:-1]
elif query_conditon.startswith("min"):
query_conditon = query_conditon[4:-1]
if query_conditon:
return f" where {query_conditon} is not null"
if col in NUM_COL:
return f" where abs( {tbname}.{col} ) >= 0"
if col in CHAR_COL:
return f" where lower( {tbname}.{col} ) like 'bina%' or lower( {tbname}.{col} ) like '_cha%' "
if col in BOOLEAN_COL:
return f" where {tbname}.{col} in (false, true) "
if col in TS_TYPE_COL or col in PRIMARY_COL:
return f" where cast( {tbname}.{col} as binary(16) ) is not null "
return ""
def __group_condition(self, col, having = None):
if isinstance(col, str):
if col.startswith("count"):
col = col[6:-1]
elif col.startswith("max"):
col = col[4:-1]
elif col.startswith("sum"):
col = col[4:-1]
elif col.startswith("min"):
col = col[4:-1]
return f" group by {col} having {having}" if having else f" group by {col} "
def __single_sql(self, select_clause, from_clause, where_condition="", group_condition=""):
if isinstance(select_clause, str) and "on" not in from_clause and select_clause.split(".")[0] != from_clause.split(".")[0]:
return
return f"select {select_clause} from {from_clause} {where_condition} {group_condition}"
@property
def __join_tblist(self):
return [
["ct1", "ct2"],
["ct1", "ct4"],
["ct1", "t1"],
["ct2", "ct4"],
["ct2", "t1"],
["ct4", "t1"],
# ["ct1", "ct2", "ct4"],
# ["ct1", "ct2", "t1"],
# ["ct1", "ct4", "t1"],
# ["ct2", "ct4", "t1"],
# ["ct1", "ct2", "ct4", "t1"],
]
@property
def __tb_liast(self):
return [
"ct1",
"ct2",
"ct4",
"t1",
]
def sql_list(self):
sqls = []
__join_tblist = self.__join_tblist
for join_tblist in __join_tblist:
for join_tb in join_tblist:
select_claus_list = self.__query_condition(join_tb)
for select_claus in select_claus_list:
group_claus = self.__group_condition( col=select_claus)
where_claus = self.__where_condition(query_conditon=select_claus)
having_claus = self.__group_condition( col=select_claus, having=f"{select_claus} is not null")
sqls.extend(
(
self.__single_sql(select_claus, join_tb, where_claus, group_claus),
self.__single_sql(select_claus, join_tb, where_claus, having_claus),
self.__single_sql(select_claus, self.__join_condition(join_tblist), where_claus, having_claus),
self.__single_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, having_claus),
self.__single_sql(select_claus, join_tb, where_claus),
self.__single_sql(select_claus, join_tb, having_claus),
self.__single_sql(select_claus, join_tb, group_claus),
self.__single_sql(select_claus, join_tb),
)
)
__no_join_tblist = self.__tb_liast
for tb in __no_join_tblist:
select_claus_list = self.__query_condition(tb)
for select_claus in select_claus_list:
group_claus = self.__group_condition(col=select_claus)
where_claus = self.__where_condition(query_conditon=select_claus)
having_claus = self.__group_condition(col=select_claus, having=f"{select_claus} is not null")
sqls.extend(
(
self.__single_sql(select_claus, tb, where_claus, group_claus),
self.__single_sql(select_claus, tb, where_claus, having_claus),
self.__single_sql(select_claus, tb, where_claus),
self.__single_sql(select_claus, tb, group_claus),
self.__single_sql(select_claus, tb, having_claus),
self.__single_sql(select_claus, tb),
)
)
# return filter(None, sqls)
return list(filter(None, sqls))
def __get_type(self, col):
if tdSql.cursor.istype(col, "BOOL"):
return "BOOL"
if tdSql.cursor.istype(col, "INT"):
return "INT"
if tdSql.cursor.istype(col, "BIGINT"):
return "BIGINT"
if tdSql.cursor.istype(col, "TINYINT"):
return "TINYINT"
if tdSql.cursor.istype(col, "SMALLINT"):
return "SMALLINT"
if tdSql.cursor.istype(col, "FLOAT"):
return "FLOAT"
if tdSql.cursor.istype(col, "DOUBLE"):
return "DOUBLE"
if tdSql.cursor.istype(col, "BINARY"):
return "BINARY"
if tdSql.cursor.istype(col, "NCHAR"):
return "NCHAR"
if tdSql.cursor.istype(col, "TIMESTAMP"):
return "TIMESTAMP"
if tdSql.cursor.istype(col, "JSON"):
return "JSON"
if tdSql.cursor.istype(col, "TINYINT UNSIGNED"):
return "TINYINT UNSIGNED"
if tdSql.cursor.istype(col, "SMALLINT UNSIGNED"):
return "SMALLINT UNSIGNED"
if tdSql.cursor.istype(col, "INT UNSIGNED"):
return "INT UNSIGNED"
if tdSql.cursor.istype(col, "BIGINT UNSIGNED"):
return "BIGINT UNSIGNED"
def union_check(self):
sqls = self.sql_list()
for i in range(len(sqls)):
tdSql.query(sqls[i])
res1_type = self.__get_type(0)
for j in range(len(sqls[i:])):
tdSql.query(sqls[j+i])
order_union_type = False
rev_order_type = False
all_union_type = False
res2_type = self.__get_type(0)
if res2_type == res1_type:
all_union_type = True
elif res1_type in ( "BIGINT" , "NCHAR" ) and res2_type in ("BIGINT" , "NCHAR"):
all_union_type = True
elif res1_type in ("BIGINT", "NCHAR"):
order_union_type = True
elif res2_type in ("BIGINT", "NCHAR"):
rev_order_type = True
elif res1_type == "TIMESAMP" and res2_type not in ("BINARY", "NCHAR"):
order_union_type = True
elif res2_type == "TIMESAMP" and res1_type not in ("BINARY", "NCHAR"):
rev_order_type = True
elif res1_type == "BINARY" and res2_type != "NCHAR":
order_union_type = True
elif res2_type == "BINARY" and res1_type != "NCHAR":
rev_order_type = True
if all_union_type:
tdSql.query(f"{sqls[i]} union {sqls[j+i]}")
tdSql.query(f"{sqls[j+i]} union {sqls[i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[i]} union all {sqls[j+i]}")
tdSql.query(f"{sqls[j+i]} union all {sqls[i]}")
tdSql.checkCols(1)
elif order_union_type:
tdSql.query(f"{sqls[i]} union {sqls[j+i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[i]} union all {sqls[j+i]}")
tdSql.checkCols(1)
elif rev_order_type:
tdSql.query(f"{sqls[j+i]} union {sqls[i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[j+i]} union all {sqls[i]}")
tdSql.checkCols(1)
else:
tdSql.error(f"{sqls[i]} union {sqls[j+i]}")
def __test_error(self):
tdSql.error( "show tables union show tables" )
tdSql.error( "create table errtb1 union all create table errtb2" )
tdSql.error( "drop table ct1 union all drop table ct3" )
tdSql.error( "select c1 from ct1 union all drop table ct3" )
tdSql.error( "select c1 from ct1 union all '' " )
tdSql.error( " '' union all select c1 from ct1 " )
tdSql.error( "select c1 from ct1 union select c1 from ct2 union select c1 from ct4 ")
def all_test(self):
self.__test_error()
self.union_check()
def __create_tb(self):
tdLog.printNoPrefix("==========step1:create table")
create_stb_sql = f'''create table stb1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
) tags (t1 int)
'''
create_ntb_sql = f'''create table t1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
)
'''
tdSql.execute(create_stb_sql)
tdSql.execute(create_ntb_sql)
for i in range(4):
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
def __insert_data(self, rows):
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
for i in range(rows):
tdSql.execute(
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f'''insert into ct1 values
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } )
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } )
'''
)
tdSql.execute(
f'''insert into ct4 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000}
)
(
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000}
)
'''
)
tdSql.execute(
f'''insert into ct2 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
)
(
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
)
'''
)
for i in range(rows):
insert_data = f'''insert into t1 values
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
"binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } )
'''
tdSql.execute(insert_data)
tdSql.execute(
f'''insert into t1 values
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
"binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
)
(
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
"binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
)
'''
)
def run(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
self.__create_tb()
tdLog.printNoPrefix("==========step2:insert data")
self.rows = 10
self.__insert_data(self.rows)
tdLog.printNoPrefix("==========step3:all check")
self.all_test()
tdDnodes.stop(1)
tdDnodes.start(1)
tdSql.execute("use db")
tdLog.printNoPrefix("==========step4:after wal, all check again ")
self.all_test()
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())
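Each of the test files in this commit builds its SQL corpus the same way: __query_condition yields select expressions, __where_condition and __group_condition derive matching predicates, and __single_sql glues the pieces together while skipping select/from mismatches. A stand-alone sketch of that composition step, assuming the ct1 table created by the test (illustration only; single_sql is a simplified stand-in for the method above):

def single_sql(select_clause, from_clause, where="", group=""):
    # Skip combinations where the selected column's table prefix does not
    # match the FROM clause and the FROM clause is not a join.
    if "on" not in from_clause and select_clause.split(".")[0] != from_clause.split(".")[0]:
        return None
    return f"select {select_clause} from {from_clause} {where} {group}"

print(single_sql("ct1.c1", "ct1", " where ct1.c1 is not null"))
# -> 'select ct1.c1 from ct1  where ct1.c1 is not null ' (extra spaces come from the empty group clause)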
import datetime
from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *
PRIMARY_COL = "ts"
INT_COL = "c1"
BINT_COL = "c2"
SINT_COL = "c3"
TINT_COL = "c4"
FLOAT_COL = "c5"
DOUBLE_COL = "c6"
BOOL_COL = "c7"
BINARY_COL = "c8"
NCHAR_COL = "c9"
TS_COL = "c10"
NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
BOOLEAN_COL = [ BOOL_COL, ]
TS_TYPE_COL = [ TS_COL, ]
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to excute {__file__}")
tdSql.init(conn.cursor())
def __query_condition(self,tbname):
query_condition = []
for char_col in CHAR_COL:
query_condition.extend(
(
f"concat_ws( '_', {tbname}.{char_col}, {tbname}.{char_col} )",
f"length( {tbname}.{char_col} )",
f"lower( {tbname}.{char_col} )",
f"ltrim( {tbname}.{char_col} )",
)
)
for num_col in NUM_COL:
query_condition.extend(
(
f"asin( {tbname}.{num_col} )",
f"atan( {tbname}.{num_col} )",
f"cos( {tbname}.{num_col} )",
f"sum( {tbname}.{num_col} )",
)
)
query_condition.extend( f"{tbname}.{num_col} + {tbname}.{num_col_2}" for num_col_2 in NUM_COL )
query_condition.extend(
(
''' "test1234!@#$%^&*():'><?/.,][}{" ''',
# 1010,
)
)
return query_condition
def __join_condition(self, tb_list, filter=PRIMARY_COL, INNER=False):
table_reference = tb_list[0]
join_condition = table_reference
join = "inner join" if INNER else "join"
for i in range(len(tb_list[1:])):
join_condition += f" {join} {tb_list[i+1]} on {table_reference}.{filter}={tb_list[i+1]}.{filter}"
return join_condition
def __where_condition(self, col=None, tbname=None, query_conditon=None):
if query_conditon and isinstance(query_conditon, str):
if query_conditon.startswith("count"):
query_conditon = query_conditon[6:-1]
elif query_conditon.startswith("max"):
query_conditon = query_conditon[4:-1]
elif query_conditon.startswith("sum"):
query_conditon = query_conditon[4:-1]
elif query_conditon.startswith("min"):
query_conditon = query_conditon[4:-1]
if query_conditon:
return f" where {query_conditon} is not null"
if col in NUM_COL:
return f" where abs( {tbname}.{col} ) >= 0"
if col in CHAR_COL:
return f" where lower( {tbname}.{col} ) like 'bina%' or lower( {tbname}.{col} ) like '_cha%' "
if col in BOOLEAN_COL:
return f" where {tbname}.{col} in (false, true) "
if col in TS_TYPE_COL or col in PRIMARY_COL:
return f" where cast( {tbname}.{col} as binary(16) ) is not null "
return ""
def __group_condition(self, col, having = None):
if isinstance(col, str):
if col.startswith("count"):
col = col[6:-1]
elif col.startswith("max"):
col = col[4:-1]
elif col.startswith("sum"):
col = col[4:-1]
elif col.startswith("min"):
col = col[4:-1]
return f" group by {col} having {having}" if having else f" group by {col} "
def __single_sql(self, select_clause, from_clause, where_condition="", group_condition=""):
if isinstance(select_clause, str) and "on" not in from_clause and select_clause.split(".")[0] != from_clause.split(".")[0]:
return
return f"select {select_clause} from {from_clause} {where_condition} {group_condition}"
@property
def __join_tblist(self):
return [
["ct1", "ct2"],
["ct1", "ct4"],
["ct1", "t1"],
["ct2", "ct4"],
["ct2", "t1"],
["ct4", "t1"],
# ["ct1", "ct2", "ct4"],
# ["ct1", "ct2", "t1"],
# ["ct1", "ct4", "t1"],
# ["ct2", "ct4", "t1"],
# ["ct1", "ct2", "ct4", "t1"],
]
@property
def __tb_liast(self):
return [
"ct1",
"ct2",
"ct4",
"t1",
]
def sql_list(self):
sqls = []
__join_tblist = self.__join_tblist
for join_tblist in __join_tblist:
for join_tb in join_tblist:
select_claus_list = self.__query_condition(join_tb)
for select_claus in select_claus_list:
group_claus = self.__group_condition( col=select_claus)
where_claus = self.__where_condition(query_conditon=select_claus)
having_claus = self.__group_condition( col=select_claus, having=f"{select_claus} is not null")
sqls.extend(
(
self.__single_sql(select_claus, join_tb, where_claus, group_claus),
self.__single_sql(select_claus, join_tb, where_claus, having_claus),
self.__single_sql(select_claus, self.__join_condition(join_tblist), where_claus, having_claus),
self.__single_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, having_claus),
self.__single_sql(select_claus, join_tb, where_claus),
self.__single_sql(select_claus, join_tb, having_claus),
self.__single_sql(select_claus, join_tb, group_claus),
self.__single_sql(select_claus, join_tb),
)
)
__no_join_tblist = self.__tb_liast
for tb in __no_join_tblist:
select_claus_list = self.__query_condition(tb)
for select_claus in select_claus_list:
group_claus = self.__group_condition(col=select_claus)
where_claus = self.__where_condition(query_conditon=select_claus)
having_claus = self.__group_condition(col=select_claus, having=f"{select_claus} is not null")
sqls.extend(
(
self.__single_sql(select_claus, tb, where_claus, group_claus),
self.__single_sql(select_claus, tb, where_claus, having_claus),
self.__single_sql(select_claus, tb, where_claus),
self.__single_sql(select_claus, tb, group_claus),
self.__single_sql(select_claus, tb, having_claus),
self.__single_sql(select_claus, tb),
)
)
# return filter(None, sqls)
return list(filter(None, sqls))
def __get_type(self, col):
if tdSql.cursor.istype(col, "BOOL"):
return "BOOL"
if tdSql.cursor.istype(col, "INT"):
return "INT"
if tdSql.cursor.istype(col, "BIGINT"):
return "BIGINT"
if tdSql.cursor.istype(col, "TINYINT"):
return "TINYINT"
if tdSql.cursor.istype(col, "SMALLINT"):
return "SMALLINT"
if tdSql.cursor.istype(col, "FLOAT"):
return "FLOAT"
if tdSql.cursor.istype(col, "DOUBLE"):
return "DOUBLE"
if tdSql.cursor.istype(col, "BINARY"):
return "BINARY"
if tdSql.cursor.istype(col, "NCHAR"):
return "NCHAR"
if tdSql.cursor.istype(col, "TIMESTAMP"):
return "TIMESTAMP"
if tdSql.cursor.istype(col, "JSON"):
return "JSON"
if tdSql.cursor.istype(col, "TINYINT UNSIGNED"):
return "TINYINT UNSIGNED"
if tdSql.cursor.istype(col, "SMALLINT UNSIGNED"):
return "SMALLINT UNSIGNED"
if tdSql.cursor.istype(col, "INT UNSIGNED"):
return "INT UNSIGNED"
if tdSql.cursor.istype(col, "BIGINT UNSIGNED"):
return "BIGINT UNSIGNED"
def union_check(self):
sqls = self.sql_list()
for i in range(len(sqls)):
tdSql.query(sqls[i])
res1_type = self.__get_type(0)
for j in range(len(sqls[i:])):
tdSql.query(sqls[j+i])
order_union_type = False
rev_order_type = False
all_union_type = False
res2_type = self.__get_type(0)
if res2_type == res1_type:
all_union_type = True
elif res1_type in ( "BIGINT" , "NCHAR" ) and res2_type in ("BIGINT" , "NCHAR"):
all_union_type = True
elif res1_type in ("BIGINT", "NCHAR"):
order_union_type = True
elif res2_type in ("BIGINT", "NCHAR"):
rev_order_type = True
elif res1_type == "TIMESAMP" and res2_type not in ("BINARY", "NCHAR"):
order_union_type = True
elif res2_type == "TIMESAMP" and res1_type not in ("BINARY", "NCHAR"):
rev_order_type = True
elif res1_type == "BINARY" and res2_type != "NCHAR":
order_union_type = True
elif res2_type == "BINARY" and res1_type != "NCHAR":
rev_order_type = True
if all_union_type:
tdSql.query(f"{sqls[i]} union {sqls[j+i]}")
tdSql.query(f"{sqls[j+i]} union {sqls[i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[i]} union all {sqls[j+i]}")
tdSql.query(f"{sqls[j+i]} union all {sqls[i]}")
tdSql.checkCols(1)
elif order_union_type:
tdSql.query(f"{sqls[i]} union {sqls[j+i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[i]} union all {sqls[j+i]}")
tdSql.checkCols(1)
elif rev_order_type:
tdSql.query(f"{sqls[j+i]} union {sqls[i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[j+i]} union all {sqls[i]}")
tdSql.checkCols(1)
else:
tdSql.error(f"{sqls[i]} union {sqls[j+i]}")
def __test_error(self):
tdSql.error( "show tables union show tables" )
tdSql.error( "create table errtb1 union all create table errtb2" )
tdSql.error( "drop table ct1 union all drop table ct3" )
tdSql.error( "select c1 from ct1 union all drop table ct3" )
tdSql.error( "select c1 from ct1 union all '' " )
tdSql.error( " '' union all select c1 from ct1 " )
tdSql.error( "select c1 from ct1 union select c1 from ct2 union select c1 from ct4 ")
def all_test(self):
self.__test_error()
self.union_check()
def __create_tb(self):
tdLog.printNoPrefix("==========step1:create table")
create_stb_sql = f'''create table stb1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
) tags (t1 int)
'''
create_ntb_sql = f'''create table t1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
)
'''
tdSql.execute(create_stb_sql)
tdSql.execute(create_ntb_sql)
for i in range(4):
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
def __insert_data(self, rows):
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
for i in range(rows):
tdSql.execute(
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f'''insert into ct1 values
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } )
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } )
'''
)
tdSql.execute(
f'''insert into ct4 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000}
)
(
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000}
)
'''
)
tdSql.execute(
f'''insert into ct2 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
)
(
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
)
'''
)
for i in range(rows):
insert_data = f'''insert into t1 values
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
"binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } )
'''
tdSql.execute(insert_data)
tdSql.execute(
f'''insert into t1 values
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
"binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
)
(
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
"binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
)
'''
)
def run(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
self.__create_tb()
tdLog.printNoPrefix("==========step2:insert data")
self.rows = 10
self.__insert_data(self.rows)
tdLog.printNoPrefix("==========step3:all check")
self.all_test()
tdDnodes.stop(1)
tdDnodes.start(1)
tdSql.execute("use db")
tdLog.printNoPrefix("==========step4:after wal, all check again ")
self.all_test()
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())
import datetime
from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *
PRIMARY_COL = "ts"
INT_COL = "c1"
BINT_COL = "c2"
SINT_COL = "c3"
TINT_COL = "c4"
FLOAT_COL = "c5"
DOUBLE_COL = "c6"
BOOL_COL = "c7"
BINARY_COL = "c8"
NCHAR_COL = "c9"
TS_COL = "c10"
NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
BOOLEAN_COL = [ BOOL_COL, ]
TS_TYPE_COL = [ TS_COL, ]
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to excute {__file__}")
tdSql.init(conn.cursor())
def __query_condition(self,tbname):
query_condition = []
for char_col in CHAR_COL:
query_condition.extend(
(
f"cast( {tbname}.{char_col} as nchar(8) )",
)
)
query_condition.extend( f"cast( {tbname}.{un_char_col} as binary(16) ) " for un_char_col in NUM_COL)
query_condition.extend( f"cast( {tbname}.{char_col} + {tbname}.{char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL )
query_condition.extend( f"cast( {tbname}.{char_col} + {tbname}.{un_char_col} as binary(32) ) " for un_char_col in NUM_COL )
for num_col in NUM_COL:
query_condition.extend(
(
f"tan( {tbname}.{num_col} )",
f"round( {tbname}.{num_col} )",
f"count( {tbname}.{num_col} )",
f"min( {tbname}.{num_col} )",
)
)
query_condition.extend( f"{tbname}.{num_col} + {tbname}.{char_col} " for char_col in CHAR_COL )
query_condition.extend(
(
''' "test12" ''',
# 1010,
)
)
return query_condition
def __join_condition(self, tb_list, filter=PRIMARY_COL, INNER=False):
table_reference = tb_list[0]
join_condition = table_reference
join = "inner join" if INNER else "join"
for i in range(len(tb_list[1:])):
join_condition += f" {join} {tb_list[i+1]} on {table_reference}.{filter}={tb_list[i+1]}.{filter}"
return join_condition
def __where_condition(self, col=None, tbname=None, query_conditon=None):
if query_conditon and isinstance(query_conditon, str):
if query_conditon.startswith("count"):
query_conditon = query_conditon[6:-1]
elif query_conditon.startswith("max"):
query_conditon = query_conditon[4:-1]
elif query_conditon.startswith("sum"):
query_conditon = query_conditon[4:-1]
elif query_conditon.startswith("min"):
query_conditon = query_conditon[4:-1]
if query_conditon:
return f" where {query_conditon} is not null"
if col in NUM_COL:
return f" where abs( {tbname}.{col} ) >= 0"
if col in CHAR_COL:
return f" where lower( {tbname}.{col} ) like 'bina%' or lower( {tbname}.{col} ) like '_cha%' "
if col in BOOLEAN_COL:
return f" where {tbname}.{col} in (false, true) "
if col in TS_TYPE_COL or col in PRIMARY_COL:
return f" where cast( {tbname}.{col} as binary(16) ) is not null "
return ""
def __group_condition(self, col, having = None):
if isinstance(col, str):
if col.startswith("count"):
col = col[6:-1]
elif col.startswith("max"):
col = col[4:-1]
elif col.startswith("sum"):
col = col[4:-1]
elif col.startswith("min"):
col = col[4:-1]
return f" group by {col} having {having}" if having else f" group by {col} "
def __single_sql(self, select_clause, from_clause, where_condition="", group_condition=""):
if isinstance(select_clause, str) and "on" not in from_clause and select_clause.split(".")[0] != from_clause.split(".")[0]:
return
return f"select {select_clause} from {from_clause} {where_condition} {group_condition}"
@property
def __join_tblist(self):
return [
["ct1", "ct2"],
["ct1", "ct4"],
["ct1", "t1"],
["ct2", "ct4"],
["ct2", "t1"],
["ct4", "t1"],
# ["ct1", "ct2", "ct4"],
# ["ct1", "ct2", "t1"],
# ["ct1", "ct4", "t1"],
# ["ct2", "ct4", "t1"],
# ["ct1", "ct2", "ct4", "t1"],
]
@property
def __tb_liast(self):
return [
"ct1",
"ct2",
"ct4",
"t1",
]
def sql_list(self):
sqls = []
__join_tblist = self.__join_tblist
for join_tblist in __join_tblist:
for join_tb in join_tblist:
select_claus_list = self.__query_condition(join_tb)
for select_claus in select_claus_list:
group_claus = self.__group_condition( col=select_claus)
where_claus = self.__where_condition(query_conditon=select_claus)
having_claus = self.__group_condition( col=select_claus, having=f"{select_claus} is not null")
sqls.extend(
(
self.__single_sql(select_claus, join_tb, where_claus, group_claus),
self.__single_sql(select_claus, join_tb, where_claus, having_claus),
self.__single_sql(select_claus, self.__join_condition(join_tblist), where_claus, having_claus),
self.__single_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, having_claus),
self.__single_sql(select_claus, join_tb, where_claus),
self.__single_sql(select_claus, join_tb, having_claus),
self.__single_sql(select_claus, join_tb, group_claus),
self.__single_sql(select_claus, join_tb),
)
)
__no_join_tblist = self.__tb_liast
for tb in __no_join_tblist:
select_claus_list = self.__query_condition(tb)
for select_claus in select_claus_list:
group_claus = self.__group_condition(col=select_claus)
where_claus = self.__where_condition(query_conditon=select_claus)
having_claus = self.__group_condition(col=select_claus, having=f"{select_claus} is not null")
sqls.extend(
(
self.__single_sql(select_claus, tb, where_claus, group_claus),
self.__single_sql(select_claus, tb, where_claus, having_claus),
self.__single_sql(select_claus, tb, where_claus),
self.__single_sql(select_claus, tb, group_claus),
self.__single_sql(select_claus, tb, having_claus),
self.__single_sql(select_claus, tb),
)
)
# return filter(None, sqls)
return list(filter(None, sqls))
def __get_type(self, col):
if tdSql.cursor.istype(col, "BOOL"):
return "BOOL"
if tdSql.cursor.istype(col, "INT"):
return "INT"
if tdSql.cursor.istype(col, "BIGINT"):
return "BIGINT"
if tdSql.cursor.istype(col, "TINYINT"):
return "TINYINT"
if tdSql.cursor.istype(col, "SMALLINT"):
return "SMALLINT"
if tdSql.cursor.istype(col, "FLOAT"):
return "FLOAT"
if tdSql.cursor.istype(col, "DOUBLE"):
return "DOUBLE"
if tdSql.cursor.istype(col, "BINARY"):
return "BINARY"
if tdSql.cursor.istype(col, "NCHAR"):
return "NCHAR"
if tdSql.cursor.istype(col, "TIMESTAMP"):
return "TIMESTAMP"
if tdSql.cursor.istype(col, "JSON"):
return "JSON"
if tdSql.cursor.istype(col, "TINYINT UNSIGNED"):
return "TINYINT UNSIGNED"
if tdSql.cursor.istype(col, "SMALLINT UNSIGNED"):
return "SMALLINT UNSIGNED"
if tdSql.cursor.istype(col, "INT UNSIGNED"):
return "INT UNSIGNED"
if tdSql.cursor.istype(col, "BIGINT UNSIGNED"):
return "BIGINT UNSIGNED"
def union_check(self):
sqls = self.sql_list()
for i in range(len(sqls)):
tdSql.query(sqls[i])
res1_type = self.__get_type(0)
for j in range(len(sqls[i:])):
tdSql.query(sqls[j+i])
order_union_type = False
rev_order_type = False
all_union_type = False
res2_type = self.__get_type(0)
if res2_type == res1_type:
all_union_type = True
elif res1_type in ( "BIGINT" , "NCHAR" ) and res2_type in ("BIGINT" , "NCHAR"):
all_union_type = True
elif res1_type in ("BIGINT", "NCHAR"):
order_union_type = True
elif res2_type in ("BIGINT", "NCHAR"):
rev_order_type = True
elif res1_type == "TIMESAMP" and res2_type not in ("BINARY", "NCHAR"):
order_union_type = True
elif res2_type == "TIMESAMP" and res1_type not in ("BINARY", "NCHAR"):
rev_order_type = True
elif res1_type == "BINARY" and res2_type != "NCHAR":
order_union_type = True
elif res2_type == "BINARY" and res1_type != "NCHAR":
rev_order_type = True
if all_union_type:
tdSql.query(f"{sqls[i]} union {sqls[j+i]}")
tdSql.query(f"{sqls[j+i]} union {sqls[i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[i]} union all {sqls[j+i]}")
tdSql.query(f"{sqls[j+i]} union all {sqls[i]}")
tdSql.checkCols(1)
elif order_union_type:
tdSql.query(f"{sqls[i]} union {sqls[j+i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[i]} union all {sqls[j+i]}")
tdSql.checkCols(1)
elif rev_order_type:
tdSql.query(f"{sqls[j+i]} union {sqls[i]}")
tdSql.checkCols(1)
tdSql.query(f"{sqls[j+i]} union all {sqls[i]}")
tdSql.checkCols(1)
else:
tdSql.error(f"{sqls[i]} union {sqls[j+i]}")
def __test_error(self):
tdSql.error( "show tables union show tables" )
tdSql.error( "create table errtb1 union all create table errtb2" )
tdSql.error( "drop table ct1 union all drop table ct3" )
tdSql.error( "select c1 from ct1 union all drop table ct3" )
tdSql.error( "select c1 from ct1 union all '' " )
tdSql.error( " '' union all select c1 from ct1 " )
tdSql.error( "select c1 from ct1 union select c1 from ct2 union select c1 from ct4 ")
def all_test(self):
self.__test_error()
self.union_check()
def __create_tb(self):
tdLog.printNoPrefix("==========step1:create table")
create_stb_sql = f'''create table stb1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
) tags (t1 int)
'''
create_ntb_sql = f'''create table t1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
)
'''
tdSql.execute(create_stb_sql)
tdSql.execute(create_ntb_sql)
for i in range(4):
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
def __insert_data(self, rows):
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
for i in range(rows):
tdSql.execute(
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f'''insert into ct1 values
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } )
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } )
'''
)
tdSql.execute(
f'''insert into ct4 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000}
)
(
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000}
)
'''
)
tdSql.execute(
f'''insert into ct2 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
)
(
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
)
'''
)
for i in range(rows):
insert_data = f'''insert into t1 values
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
"binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } )
'''
tdSql.execute(insert_data)
tdSql.execute(
f'''insert into t1 values
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
"binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
)
(
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
"binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
)
'''
)
def run(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
self.__create_tb()
tdLog.printNoPrefix("==========step2:insert data")
self.rows = 10
self.__insert_data(self.rows)
tdLog.printNoPrefix("==========step3:all check")
self.all_test()
tdDnodes.stop(1)
tdDnodes.start(1)
tdSql.execute("use db")
tdLog.printNoPrefix("==========step4:after wal, all check again ")
self.all_test()
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())
@@ -22,8 +22,12 @@ python3 ./test.py -f 2-query/upper.py
python3 ./test.py -f 2-query/lower.py
python3 ./test.py -f 2-query/join.py
python3 ./test.py -f 2-query/cast.py
# python3 ./test.py -f 2-query/concat.py # after wal ,crash occured
# python3 ./test.py -f 2-query/concat_ws.py
python3 ./test.py -f 2-query/concat.py
python3 ./test.py -f 2-query/concat_ws.py
# python3 ./test.py -f 2-query/union.py
# python3 ./test.py -f 2-query/union2.py
# python3 ./test.py -f 2-query/union3.py
# python3 ./test.py -f 2-query/union4.py
python3 ./test.py -f 2-query/timezone.py
python3 ./test.py -f 2-query/Now.py
@@ -58,4 +62,3 @@ python3 ./test.py -f 2-query/nestedQuery.py
python3 ./test.py -f 7-tmq/basic5.py
python3 ./test.py -f 7-tmq/subscribeDb.py