diff --git a/.travis.yml b/.travis.yml index 39fddc20c9f1c5c2b1369f248a3859c2bf6165cb..7df3a7d7fece1e039a17a3b6bf0e4490b51f455e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -46,10 +46,10 @@ matrix: pip3 install --user ${TRAVIS_BUILD_DIR}/src/connector/python/linux/python3/ cd ${TRAVIS_BUILD_DIR}/tests - ./test-all.sh || travis_terminate $? + ./test-all.sh $TRAVIS_EVENT_TYPE || travis_terminate $? cd ${TRAVIS_BUILD_DIR}/tests/pytest - ./simpletest.sh -g 2>&1 | tee mem-error-out.txt + ./smoketest.sh -g 2>&1 | tee mem-error-out.txt sleep 1 # Color setting @@ -86,13 +86,12 @@ matrix: addons: coverity_scan: - # GitHub project metadata # ** specific to your project ** project: name: TDengine version: 2.x - description: taosdata/TDengine + description: TDengine # Where email notification of build analysis results will be sent notification_email: sdsang@taosdata.com diff --git a/src/client/inc/tscUtil.h b/src/client/inc/tscUtil.h index d46c32d73d823d52c739fc01d1d11c0a59f27168..718dfcf475136aab944c0b9c889a35d0f48c2bba 100644 --- a/src/client/inc/tscUtil.h +++ b/src/client/inc/tscUtil.h @@ -25,6 +25,7 @@ extern "C" { */ #include "os.h" #include "tbuffer.h" +#include "exception.h" #include "qextbuffer.h" #include "taosdef.h" #include "tscSecondaryMerge.h" @@ -177,7 +178,7 @@ bool tscValidateColumnId(STableMetaInfo* pTableMetaInfo, int32_t colId); // get starter position of metric query condition (query on tags) in SSqlCmd.payload SCond* tsGetSTableQueryCond(STagCond* pCond, uint64_t uid); -void tsSetSTableQueryCond(STagCond* pTagCond, uint64_t uid, SBuffer* pBuf); +void tsSetSTableQueryCond(STagCond* pTagCond, uint64_t uid, SBufferWriter* bw); void tscTagCondCopy(STagCond* dest, const STagCond* src); void tscTagCondRelease(STagCond* pCond); diff --git a/src/client/src/tscSQLParser.c b/src/client/src/tscSQLParser.c index 6e16606695430c7d66bb23733a80dcf94248cea6..5590ac5a01aee5f8b49b1a35a543efc063417ed1 100644 --- a/src/client/src/tscSQLParser.c +++ 
b/src/client/src/tscSQLParser.c @@ -1185,10 +1185,18 @@ int32_t parseSelectClause(SSqlCmd* pCmd, int32_t clauseIndex, tSQLExprList* pSel return invalidSqlErrMsg(pQueryInfo->msg, "invalid arithmetic expression in select clause"); } - SBuffer buf = exprTreeToBinary(pNode); + SBufferWriter bw = tbufInitWriter(NULL, false); + + TRY(0) { + exprTreeToBinary(&bw, pNode); + } CATCH(code) { + tbufCloseWriter(&bw); + UNUSED(code); + // TODO: other error handling + } END_TRY - size_t len = tbufTell(&buf); - char* c = tbufGetData(&buf, true); + size_t len = tbufTell(&bw); + char* c = tbufGetData(&bw, true); // set the serialized binary string as the parameter of arithmetic expression addExprParams(pExpr, c, TSDB_DATA_TYPE_BINARY, len, index.tableIndex); @@ -3751,7 +3759,15 @@ static int32_t getTagQueryCondExpr(SQueryInfo* pQueryInfo, SCondExpr* pCondExpr, SArray* colList = taosArrayInit(10, sizeof(SColIndex)); ret = exprTreeFromSqlExpr(&p, p1, NULL, pQueryInfo, colList); - SBuffer buf = exprTreeToBinary(p); + SBufferWriter bw = tbufInitWriter(NULL, false); + + TRY(0) { + exprTreeToBinary(&bw, p); + } CATCH(code) { + tbufCloseWriter(&bw); + UNUSED(code); + // TODO: more error handling + } END_TRY // add to source column list STableMetaInfo* pTableMetaInfo = tscGetMetaInfo(pQueryInfo, i); @@ -3765,7 +3781,7 @@ static int32_t getTagQueryCondExpr(SQueryInfo* pQueryInfo, SCondExpr* pCondExpr, addRequiredTagColumn(pTableMetaInfo, &index); } - tsSetSTableQueryCond(&pQueryInfo->tagCond, uid, &buf); + tsSetSTableQueryCond(&pQueryInfo->tagCond, uid, &bw); doCompactQueryExpr(pExpr); tSQLExprDestroy(p1); diff --git a/src/client/src/tscUtil.c b/src/client/src/tscUtil.c index 6b8b2b38b408ba05a7a1008dd99b04c3987a5c69..88ce13e560a1e2ecdab9566222f282d978b9801f 100644 --- a/src/client/src/tscUtil.c +++ b/src/client/src/tscUtil.c @@ -47,18 +47,18 @@ SCond* tsGetSTableQueryCond(STagCond* pTagCond, uint64_t uid) { return NULL; } -void tsSetSTableQueryCond(STagCond* pTagCond, uint64_t uid, SBuffer* 
pBuf) { - if (tbufTell(pBuf) == 0) { +void tsSetSTableQueryCond(STagCond* pTagCond, uint64_t uid, SBufferWriter* bw) { + if (tbufTell(bw) == 0) { return; } SCond cond = { .uid = uid, - .len = tbufTell(pBuf), + .len = tbufTell(bw), .cond = NULL, }; - cond.cond = tbufGetData(pBuf, true); + cond.cond = tbufGetData(bw, true); if (pTagCond->pCond == NULL) { pTagCond->pCond = taosArrayInit(3, sizeof(SCond)); diff --git a/src/query/inc/qast.h b/src/query/inc/qast.h index 903d54a18f7370b4a3d896a5c071eb9618cb78c4..6c997d5a36dd3ba379f0d490c7178c02b2340df0 100644 --- a/src/query/inc/qast.h +++ b/src/query/inc/qast.h @@ -90,9 +90,10 @@ void tSQLBinaryExprTrv(tExprNode *pExprs, SArray* res); uint8_t getBinaryExprOptr(SSQLToken *pToken); -SBuffer exprTreeToBinary(tExprNode* pExprTree); +void tExprNodeDestroy(tExprNode *pNode, void (*fp)(void *)); +void exprTreeToBinary(SBufferWriter* bw, tExprNode* pExprTree); -tExprNode* exprTreeFromBinary(const void* pBuf, size_t size); +tExprNode* exprTreeFromBinary(const void* data, size_t size); tExprNode* exprTreeFromTableName(const char* tbnameCond); #ifdef __cplusplus diff --git a/src/query/src/qast.c b/src/query/src/qast.c index fdcbeeeac0315ba29466a9c1d8a81969d4713a31..500a5f1e496c5ac30c3f3d6011098dca0c840be6 100644 --- a/src/query/src/qast.c +++ b/src/query/src/qast.c @@ -31,6 +31,7 @@ #include "tskiplist.h" #include "queryLog.h" #include "tsdbMain.h" +#include "exception.h" /* * @@ -44,7 +45,6 @@ * */ static tExprNode *tExprNodeCreate(SSchema *pSchema, int32_t numOfCols, SSQLToken *pToken); -static void tExprNodeDestroy(tExprNode *pNode, void (*fp)(void *)); static tExprNode *createSyntaxTree(SSchema *pSchema, int32_t numOfCols, char *str, int32_t *i); static void destroySyntaxTree(tExprNode *); @@ -428,7 +428,7 @@ void tSQLBinaryExprToString(tExprNode *pExpr, char *dst, int32_t *len) { static void UNUSED_FUNC destroySyntaxTree(tExprNode *pNode) { tExprNodeDestroy(pNode, NULL); } -static void tExprNodeDestroy(tExprNode *pNode, void 
(*fp)(void *)) { +void tExprNodeDestroy(tExprNode *pNode, void (*fp)(void *)) { if (pNode == NULL) { return; } @@ -1023,104 +1023,116 @@ void tSQLBinaryExprTrv(tExprNode *pExprs, SArray* res) { } } -static void exprTreeToBinaryImpl(tExprNode* pExprTree, SBuffer* pBuf) { - tbufWrite(pBuf, &pExprTree->nodeType, sizeof(pExprTree->nodeType)); +static void exprTreeToBinaryImpl(SBufferWriter* bw, tExprNode* expr) { + tbufWriteUint8(bw, expr->nodeType); - if (pExprTree->nodeType == TSQL_NODE_VALUE) { - tVariant* pVal = pExprTree->pVal; + if (expr->nodeType == TSQL_NODE_VALUE) { + tVariant* pVal = expr->pVal; - tbufWrite(pBuf, &pVal->nType, sizeof(pVal->nType)); + tbufWriteUint32(bw, pVal->nType); if (pVal->nType == TSDB_DATA_TYPE_BINARY) { - tbufWrite(pBuf, &pVal->nLen, sizeof(pVal->nLen)); - tbufWrite(pBuf, pVal->pz, pVal->nLen); + tbufWriteInt32(bw, pVal->nLen); + tbufWrite(bw, pVal->pz, pVal->nLen); } else { - tbufWrite(pBuf, &pVal->pz, sizeof(pVal->i64Key)); + tbufWriteInt64(bw, pVal->i64Key); } - } else if (pExprTree->nodeType == TSQL_NODE_COL) { - SSchema* pSchema = pExprTree->pSchema; - tbufWrite(pBuf, &pSchema->colId, sizeof(pSchema->colId)); - tbufWrite(pBuf, &pSchema->bytes, sizeof(pSchema->bytes)); - tbufWrite(pBuf, &pSchema->type, sizeof(pSchema->type)); + } else if (expr->nodeType == TSQL_NODE_COL) { + SSchema* pSchema = expr->pSchema; + tbufWriteInt16(bw, pSchema->colId); + tbufWriteInt16(bw, pSchema->bytes); + tbufWriteUint8(bw, pSchema->type); + tbufWriteString(bw, pSchema->name); - int32_t len = strlen(pSchema->name); - tbufWriteStringLen(pBuf, pSchema->name, len); - - } else if (pExprTree->nodeType == TSQL_NODE_EXPR) { - tbufWrite(pBuf, &pExprTree->_node.optr, sizeof(pExprTree->_node.optr)); - tbufWrite(pBuf, &pExprTree->_node.hasPK, sizeof(pExprTree->_node.hasPK)); - - exprTreeToBinaryImpl(pExprTree->_node.pLeft, pBuf); - exprTreeToBinaryImpl(pExprTree->_node.pRight, pBuf); + } else if (expr->nodeType == TSQL_NODE_EXPR) { + tbufWriteUint8(bw, 
expr->_node.optr); + tbufWriteUint8(bw, expr->_node.hasPK); + exprTreeToBinaryImpl(bw, expr->_node.pLeft); + exprTreeToBinaryImpl(bw, expr->_node.pRight); } } -SBuffer exprTreeToBinary(tExprNode* pExprTree) { - SBuffer buf = {0}; - if (pExprTree == NULL) { - return buf; +void exprTreeToBinary(SBufferWriter* bw, tExprNode* expr) { + if (expr != NULL) { + exprTreeToBinaryImpl(bw, expr); } - - int32_t code = tbufBeginWrite(&buf); - if (code != 0) { - return buf; +} + +// TODO: these three functions should be made global +static void* exception_calloc(size_t nmemb, size_t size) { + void* p = calloc(nmemb, size); + if (p == NULL) { + THROW(TSDB_CODE_SERV_OUT_OF_MEMORY); } - - exprTreeToBinaryImpl(pExprTree, &buf); - return buf; + return p; +} + +static void* exception_malloc(size_t size) { + void* p = malloc(size); + if (p == NULL) { + THROW(TSDB_CODE_SERV_OUT_OF_MEMORY); + } + return p; } -static tExprNode* exprTreeFromBinaryImpl(SBuffer* pBuf) { - tExprNode* pExpr = calloc(1, sizeof(tExprNode)); - pExpr->nodeType = tbufReadUint8(pBuf); +static char* exception_strdup(const char* str) { + char* p = strdup(str); + if (p == NULL) { + THROW(TSDB_CODE_SERV_OUT_OF_MEMORY); + } + return p; +} + + +static tExprNode* exprTreeFromBinaryImpl(SBufferReader* br) { + int32_t anchor = CLEANUP_GET_ANCHOR(); + + tExprNode* pExpr = exception_calloc(1, sizeof(tExprNode)); + CLEANUP_PUSH_VOID_PTR_PTR(true, tExprNodeDestroy, pExpr, NULL); + + pExpr->nodeType = tbufReadUint8(br); if (pExpr->nodeType == TSQL_NODE_VALUE) { - tVariant* pVal = calloc(1, sizeof(tVariant)); - if (pVal == NULL) { - // TODO: - } + tVariant* pVal = exception_calloc(1, sizeof(tVariant)); pExpr->pVal = pVal; - pVal->nType = tbufReadUint32(pBuf); + pVal->nType = tbufReadUint32(br); if (pVal->nType == TSDB_DATA_TYPE_BINARY) { - tbufReadToBuffer(pBuf, &pVal->nLen, sizeof(pVal->nLen)); + tbufReadToBuffer(br, &pVal->nLen, sizeof(pVal->nLen)); pVal->pz = calloc(1, pVal->nLen + 1); - tbufReadToBuffer(pBuf, pVal->pz, 
pVal->nLen); + tbufReadToBuffer(br, pVal->pz, pVal->nLen); } else { - pVal->i64Key = tbufReadInt64(pBuf); + pVal->i64Key = tbufReadInt64(br); } } else if (pExpr->nodeType == TSQL_NODE_COL) { - SSchema* pSchema = calloc(1, sizeof(SSchema)); - if (pSchema == NULL) { - // TODO: - } + SSchema* pSchema = exception_calloc(1, sizeof(SSchema)); pExpr->pSchema = pSchema; - pSchema->colId = tbufReadInt16(pBuf); - pSchema->bytes = tbufReadInt16(pBuf); - pSchema->type = tbufReadUint8(pBuf); - tbufReadToString(pBuf, pSchema->name, TSDB_COL_NAME_LEN); + pSchema->colId = tbufReadInt16(br); + pSchema->bytes = tbufReadInt16(br); + pSchema->type = tbufReadUint8(br); + tbufReadToString(br, pSchema->name, TSDB_COL_NAME_LEN); } else if (pExpr->nodeType == TSQL_NODE_EXPR) { - pExpr->_node.optr = tbufReadUint8(pBuf); - pExpr->_node.hasPK = tbufReadUint8(pBuf); - pExpr->_node.pLeft = exprTreeFromBinaryImpl(pBuf); - pExpr->_node.pRight = exprTreeFromBinaryImpl(pBuf); + pExpr->_node.optr = tbufReadUint8(br); + pExpr->_node.hasPK = tbufReadUint8(br); + pExpr->_node.pLeft = exprTreeFromBinaryImpl(br); + pExpr->_node.pRight = exprTreeFromBinaryImpl(br); assert(pExpr->_node.pLeft != NULL && pExpr->_node.pRight != NULL); } + CLEANUP_EXECUTE_TO(anchor, false); return pExpr; } -tExprNode* exprTreeFromBinary(const void* pBuf, size_t size) { +tExprNode* exprTreeFromBinary(const void* data, size_t size) { if (size == 0) { return NULL; } - SBuffer rbuf = {0}; - tbufBeginRead(&rbuf, pBuf, size); - return exprTreeFromBinaryImpl(&rbuf); + SBufferReader br = tbufInitReader(data, size, false); + return exprTreeFromBinaryImpl(&br); } tExprNode* exprTreeFromTableName(const char* tbnameCond) { @@ -1128,23 +1140,18 @@ tExprNode* exprTreeFromTableName(const char* tbnameCond) { return NULL; } - tExprNode* expr = calloc(1, sizeof(tExprNode)); - if (expr == NULL) { - // TODO: - } + int32_t anchor = CLEANUP_GET_ANCHOR(); + + tExprNode* expr = exception_calloc(1, sizeof(tExprNode)); + CLEANUP_PUSH_VOID_PTR_PTR(true, 
tExprNodeDestroy, expr, NULL); + expr->nodeType = TSQL_NODE_EXPR; - tExprNode* left = calloc(1, sizeof(tExprNode)); - if (left == NULL) { - // TODO: - } + tExprNode* left = exception_calloc(1, sizeof(tExprNode)); expr->_node.pLeft = left; left->nodeType = TSQL_NODE_COL; - SSchema* pSchema = calloc(1, sizeof(SSchema)); - if (pSchema == NULL) { - // TODO: - } + SSchema* pSchema = exception_calloc(1, sizeof(SSchema)); left->pSchema = pSchema; pSchema->type = TSDB_DATA_TYPE_BINARY; @@ -1152,36 +1159,24 @@ tExprNode* exprTreeFromTableName(const char* tbnameCond) { strcpy(pSchema->name, TSQL_TBNAME_L); pSchema->colId = -1; - tExprNode* right = calloc(1, sizeof(tExprNode)); - if (right == NULL) { - // TODO - } + tExprNode* right = exception_calloc(1, sizeof(tExprNode)); expr->_node.pRight = right; if (strncmp(tbnameCond, QUERY_COND_REL_PREFIX_LIKE, QUERY_COND_REL_PREFIX_LIKE_LEN) == 0) { right->nodeType = TSQL_NODE_VALUE; expr->_node.optr = TSDB_RELATION_LIKE; - tVariant* pVal = calloc(1, sizeof(tVariant)); - if (pVal == NULL) { - // TODO: - } + tVariant* pVal = exception_calloc(1, sizeof(tVariant)); right->pVal = pVal; - pVal->nType = TSDB_DATA_TYPE_BINARY; size_t len = strlen(tbnameCond + QUERY_COND_REL_PREFIX_LIKE_LEN) + 1; - pVal->pz = malloc(len); - if (pVal->pz == NULL) { - // TODO: - } + pVal->pz = exception_malloc(len); memcpy(pVal->pz, tbnameCond + QUERY_COND_REL_PREFIX_LIKE_LEN, len); + pVal->nType = TSDB_DATA_TYPE_BINARY; pVal->nLen = (int32_t)len; } else if (strncmp(tbnameCond, QUERY_COND_REL_PREFIX_IN, QUERY_COND_REL_PREFIX_IN_LEN) == 0) { right->nodeType = TSQL_NODE_VALUE; expr->_node.optr = TSDB_RELATION_IN; - tVariant* pVal = calloc(1, sizeof(tVariant)); - if (pVal == NULL) { - // TODO: - } + tVariant* pVal = exception_calloc(1, sizeof(tVariant)); right->pVal = pVal; pVal->nType = TSDB_DATA_TYPE_ARRAY; pVal->arr = taosArrayInit(2, sizeof(char*)); @@ -1192,7 +1187,7 @@ tExprNode* exprTreeFromTableName(const char* tbnameCond) { cond = e + 1; } else if (*e == 
',') { size_t len = e - cond + 1; - char* p = malloc( len ); + char* p = exception_malloc( len ); memcpy(p, cond, len); p[len - 1] = 0; cond += len; @@ -1201,12 +1196,13 @@ tExprNode* exprTreeFromTableName(const char* tbnameCond) { } if (*cond != 0) { - char* p = strdup( cond ); + char* p = exception_strdup( cond ); taosArrayPush(pVal->arr, &p); } taosArraySortString(pVal->arr); } + CLEANUP_EXECUTE_TO(anchor, false); return expr; } \ No newline at end of file diff --git a/src/query/tests/astTest.cpp b/src/query/tests/astTest.cpp index 6a78cfbe539e96a80d3fb26048552055e7561b23..dee85ef63002b91e08cf32d7f1eb6417c1afac7c 100644 --- a/src/query/tests/astTest.cpp +++ b/src/query/tests/astTest.cpp @@ -550,11 +550,12 @@ tExprNode* createExpr2() { void exprSerializeTest1() { tExprNode* p1 = createExpr1(); - SBuffer buf = exprTreeToBinary(p1); + SBufferWriter bw = tbufInitWriter(NULL, false); + exprTreeToBinary(&bw, p1); - size_t size = tbufTell(&buf); + size_t size = tbufTell(&bw); ASSERT_TRUE(size > 0); - char* b = tbufGetData(&buf, false); + char* b = tbufGetData(&bw, false); tExprNode* p2 = exprTreeFromBinary(b, size); ASSERT_EQ(p1->nodeType, p2->nodeType); @@ -581,16 +582,17 @@ void exprSerializeTest1() { tExprTreeDestroy(&p1, nullptr); tExprTreeDestroy(&p2, nullptr); - tbufClose(&buf, false); + tbufClose(&bw); } void exprSerializeTest2() { tExprNode* p1 = createExpr2(); - SBuffer buf = exprTreeToBinary(p1); + SBufferWriter bw = tbufInitWriter(NULL, false); + exprTreeToBinary(&bw, p1); - size_t size = tbufTell(&buf); + size_t size = tbufTell(&bw); ASSERT_TRUE(size > 0); - char* b = tbufGetData(&buf, false); + char* b = tbufGetData(&bw, false); tExprNode* p2 = exprTreeFromBinary(b, size); ASSERT_EQ(p1->nodeType, p2->nodeType); @@ -625,7 +627,7 @@ void exprSerializeTest2() { tExprTreeDestroy(&p1, nullptr); tExprTreeDestroy(&p2, nullptr); - tbufClose(&buf, false); + tbufClose(&bw); } } // namespace TEST(testCase, astTest) { diff --git a/src/tsdb/src/tsdbRead.c 
b/src/tsdb/src/tsdbRead.c index eb35be5383e1e82761a47de6fcd54c223687234b..bc9220dbc72b197785b5fa227340def9e1839dbb 100644 --- a/src/tsdb/src/tsdbRead.c +++ b/src/tsdb/src/tsdbRead.c @@ -18,6 +18,7 @@ #include "talgo.h" #include "tutil.h" #include "tcompare.h" +#include "exception.h" #include "../../../query/inc/qast.h" // todo move to common module #include "../../../query/inc/tlosertree.h" // todo move to util module @@ -1473,21 +1474,35 @@ int32_t tsdbQueryByTagsCond( } int32_t ret = TSDB_CODE_SUCCESS; + tExprNode* expr = NULL; - tExprNode* expr = exprTreeFromTableName(tbnameCond); - tExprNode* tagExpr = exprTreeFromBinary(pTagCond, len); - if (tagExpr != NULL) { + TRY(32) { + expr = exprTreeFromTableName(tbnameCond); if (expr == NULL) { - expr = tagExpr; + expr = exprTreeFromBinary(pTagCond, len); } else { - tExprNode* tbnameExpr = expr; - expr = calloc(1, sizeof(tExprNode)); - expr->nodeType = TSQL_NODE_EXPR; - expr->_node.optr = tagNameRelType; - expr->_node.pLeft = tagExpr; - expr->_node.pRight = tbnameExpr; + CLEANUP_PUSH_VOID_PTR_PTR(true, tExprNodeDestroy, expr, NULL); + tExprNode* tagExpr = exprTreeFromBinary(pTagCond, len); + if (tagExpr != NULL) { + CLEANUP_PUSH_VOID_PTR_PTR(true, tExprNodeDestroy, tagExpr, NULL); + tExprNode* tbnameExpr = expr; + expr = calloc(1, sizeof(tExprNode)); + if (expr == NULL) { + THROW( TSDB_CODE_SERV_OUT_OF_MEMORY ); + } + expr->nodeType = TSQL_NODE_EXPR; + expr->_node.optr = tagNameRelType; + expr->_node.pLeft = tagExpr; + expr->_node.pRight = tbnameExpr; + } } - } + CLEANUP_EXECUTE(); + + } CATCH( code ) { + CLEANUP_EXECUTE(); + ret = code; + // TODO: more error handling + } END_TRY doQueryTableList(pSTable, res, expr); pGroupInfo->numOfTables = taosArrayGetSize(res); diff --git a/src/util/inc/exception.h b/src/util/inc/exception.h new file mode 100644 index 0000000000000000000000000000000000000000..41f01d68dd5fddd800c20b5d23c6b20fa1bb7b73 --- /dev/null +++ b/src/util/inc/exception.h @@ -0,0 +1,124 @@ +/* + * Copyright (c) 
2020 TAOS Data, Inc. + * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +#ifndef TDENGINE_EXCEPTION_H +#define TDENGINE_EXCEPTION_H + +#include +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * cleanup actions + */ +typedef struct SCleanupAction { + bool failOnly; + uint8_t wrapper; + uint16_t reserved; + void* func; + union { + void* Ptr; + bool Bool; + char Char; + int8_t Int8; + uint8_t Uint8; + int16_t Int16; + uint16_t Uint16; + int Int; + unsigned int Uint; + int32_t Int32; + uint32_t Uint32; + int64_t Int64; + uint64_t Uint64; + float Float; + double Double; + } arg1, arg2; +} SCleanupAction; + + +/* + * exception hander registration + */ +typedef struct SExceptionNode { + struct SExceptionNode* prev; + jmp_buf jb; + int32_t code; + int32_t maxCleanupAction; + int32_t numCleanupAction; + SCleanupAction* cleanupActions; +} SExceptionNode; + +//////////////////////////////////////////////////////////////////////////////// +// functions & macros for auto-cleanup + +void cleanupPush_void_ptr_ptr ( bool failOnly, void* func, void* arg1, void* arg2 ); +void cleanupPush_void_ptr_bool ( bool failOnly, void* func, void* arg1, bool arg2 ); +void cleanupPush_void_ptr ( bool failOnly, void* func, void* arg ); +void cleanupPush_int_int ( bool failOnly, void* func, int arg ); +void cleanupPush_void ( bool failOnly, void* func ); + +int32_t cleanupGetActionCount(); +void cleanupExecuteTo( int32_t anchor, bool failed ); +void cleanupExecute( 
SExceptionNode* node, bool failed ); + +#define CLEANUP_PUSH_VOID_PTR_PTR( failOnly, func, arg1, arg2 ) cleanupPush_void_ptr_ptr( (failOnly), (void*)(func), (void*)(arg1), (void*)(arg2) ) +#define CLEANUP_PUSH_VOID_PTR_BOOL( failOnly, func, arg1, arg2 ) cleanupPush_void_ptr_bool( (failOnly), (void*)(func), (void*)(arg1), (bool)(arg2) ) +#define CLEANUP_PUSH_VOID_PTR( failOnly, func, arg ) cleanupPush_void_ptr( (failOnly), (void*)(func), (void*)(arg) ) +#define CLEANUP_PUSH_INT_INT( failOnly, func, arg ) cleanupPush_void_ptr( (failOnly), (void*)(func), (int)(arg) ) +#define CLEANUP_PUSH_VOID( failOnly, func ) cleanupPush_void( (failOnly), (void*)(func) ) +#define CLEANUP_PUSH_FREE( failOnly, arg ) cleanupPush_void_ptr( (failOnly), free, (void*)(arg) ) +#define CLEANUP_PUSH_CLOSE( failOnly, arg ) cleanupPush_int_int( (failOnly), close, (int)(arg) ) + +#define CLEANUP_GET_ANCHOR() cleanupGetActionCount() +#define CLEANUP_EXECUTE_TO( anchor, failed ) cleanupExecuteTo( (anchor), (failed) ) + + +//////////////////////////////////////////////////////////////////////////////// +// functions & macros for exception handling + +void exceptionPushNode( SExceptionNode* node ); +int32_t exceptionPopNode(); +void exceptionThrow( int code ); + +#define TRY(maxCleanupActions) do { \ + SExceptionNode exceptionNode = { 0 }; \ + SCleanupAction cleanupActions[(maxCleanupActions) > 0 ? (maxCleanupActions) : 1]; \ + exceptionNode.maxCleanupAction = (maxCleanupActions) > 0 ? 
(maxCleanupActions) : 1; \ + exceptionNode.cleanupActions = cleanupActions; \ + exceptionPushNode( &exceptionNode ); \ + int caughtException = setjmp( exceptionNode.jb ); \ + if( caughtException == 0 ) + +#define CATCH( code ) int code = exceptionPopNode(); \ + if( caughtException == 1 ) + +#define FINALLY( code ) int code = exceptionPopNode(); + +#define END_TRY } while( 0 ); + +#define THROW( x ) exceptionThrow( (x) ) +#define CAUGHT_EXCEPTION() ((bool)(caughtException == 1)) +#define CLEANUP_EXECUTE() cleanupExecute( &exceptionNode, CAUGHT_EXCEPTION() ) + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/util/inc/tbuffer.h b/src/util/inc/tbuffer.h index 9bc0fd9f43eb9122aa50a438a3eb1053312c7d6e..e2bdb815d7ac8ecd09b62a3a62897b78ebf3c4b7 100644 --- a/src/util/inc/tbuffer.h +++ b/src/util/inc/tbuffer.h @@ -16,122 +16,163 @@ #ifndef TDENGINE_TBUFFER_H #define TDENGINE_TBUFFER_H -#include "setjmp.h" -#include "os.h" +#include +#include #ifdef __cplusplus extern "C" { #endif +//////////////////////////////////////////////////////////////////////////////// +// usage example /* -SBuffer can be used to read or write a buffer, but cannot be used for both -read & write at a same time. 
Below is an example: - -int main(int argc, char** argv) { - //--------------------- write ------------------------ - SBuffer wbuf; - int32_t code = tbufBeginWrite(&wbuf); - if (code != 0) { - // handle errors - return 0; - } - - // reserve 1024 bytes for the buffer to improve performance - tbufEnsureCapacity(&wbuf, 1024); - - // write 5 integers to the buffer - for (int i = 0; i < 5; i++) { - tbufWriteInt32(&wbuf, i); - } - - // write a string to the buffer - tbufWriteString(&wbuf, "this is a string.\n"); - - // acquire the result and close the write buffer - size_t size = tbufTell(&wbuf); - char* data = tbufGetData(&wbuf, true); - tbufClose(&wbuf, true); - - - //------------------------ read ----------------------- - SBuffer rbuf; - code = tbufBeginRead(&rbuf, data, size); - if (code != 0) { - printf("you will see this message after print out 5 integers and a string.\n"); - tbufClose(&rbuf, false); - return 0; - } - - // read & print out 5 integers - for (int i = 0; i < 5; i++) { - printf("%d\n", tbufReadInt32(&rbuf)); - } - - // read & print out a string - printf(tbufReadString(&rbuf, NULL)); - - // try read another integer, this result in an error as there no this integer - tbufReadInt32(&rbuf); - - printf("you should not see this message.\n"); - tbufClose(&rbuf, false); - +#include +#include "exception.h" + +int main( int argc, char** argv ) { + SBufferWriter bw = tbufInitWriter( NULL, false ); + + TRY( 1 ) { + //--------------------- write ------------------------ + // reserve 1024 bytes for the buffer to improve performance + tbufEnsureCapacity( &bw, 1024 ); + + // reserve space for the interger count + size_t pos = tbufReserve( &bw, sizeof(int32_t) ); + // write 5 integers to the buffer + for( int i = 0; i < 5; i++) { + tbufWriteInt32( &bw, i ); + } + // write the integer count to buffer at reserved position + tbufWriteInt32At( &bw, pos, 5 ); + + // write a string to the buffer + tbufWriteString( &bw, "this is a string.\n" ); + // acquire the result and 
close the write buffer + size_t size = tbufTell( &bw ); + char* data = tbufGetData( &bw, false ); + + //------------------------ read ----------------------- + SBufferReader br = tbufInitReader( data, size, false ); + // read & print out all integers + int32_t count = tbufReadInt32( &br ); + for( int i = 0; i < count; i++ ) { + printf( "%d\n", tbufReadInt32(&br) ); + } + // read & print out a string + puts( tbufReadString(&br, NULL) ); + // try read another integer, this result in an error as there no this integer + tbufReadInt32( &br ); + printf( "you should not see this message.\n" ); + } CATCH( code ) { + printf( "exception code is: %d, you will see this message after print out 5 integers and a string.\n", code ); + } END_TRY + + tbufCloseWriter( &bw ); return 0; } */ + typedef struct { - jmp_buf jb; - char* data; - size_t pos; - size_t size; -} SBuffer; - -// common functions can be used in both read & write -#define tbufThrowError(buf, code) longjmp((buf)->jb, (code)) -size_t tbufTell(SBuffer* buf); -size_t tbufSeekTo(SBuffer* buf, size_t pos); -size_t tbufSkip(SBuffer* buf, size_t size); -void tbufClose(SBuffer* buf, bool keepData); - -// basic read functions -#define tbufBeginRead(buf, _data, len) ((buf)->data = (char*)(_data), ((buf)->pos = 0), ((buf)->size = ((_data) == NULL) ? 
0 : (len)), setjmp((buf)->jb)) -char* tbufRead(SBuffer* buf, size_t size); -void tbufReadToBuffer(SBuffer* buf, void* dst, size_t size); -const char* tbufReadString(SBuffer* buf, size_t* len); -size_t tbufReadToString(SBuffer* buf, char* dst, size_t size); - -// basic write functions -#define tbufBeginWrite(buf) ((buf)->data = NULL, ((buf)->pos = 0), ((buf)->size = 0), setjmp((buf)->jb)) -void tbufEnsureCapacity(SBuffer* buf, size_t size); -char* tbufGetData(SBuffer* buf, bool takeOver); -void tbufWrite(SBuffer* buf, const void* data, size_t size); -void tbufWriteAt(SBuffer* buf, size_t pos, const void* data, size_t size); -void tbufWriteStringLen(SBuffer* buf, const char* str, size_t len); -void tbufWriteString(SBuffer* buf, const char* str); - -// read & write function for primitive types -#ifndef TBUFFER_DEFINE_FUNCTION -#define TBUFFER_DEFINE_FUNCTION(type, name) \ - type tbufRead##name(SBuffer* buf); \ - void tbufWrite##name(SBuffer* buf, type data); \ - void tbufWrite##name##At(SBuffer* buf, size_t pos, type data); -#endif + bool endian; + const char* data; + size_t pos; + size_t size; +} SBufferReader; -TBUFFER_DEFINE_FUNCTION(bool, Bool) -TBUFFER_DEFINE_FUNCTION(char, Char) -TBUFFER_DEFINE_FUNCTION(int8_t, Int8) -TBUFFER_DEFINE_FUNCTION(uint8_t, Uint8) -TBUFFER_DEFINE_FUNCTION(int16_t, Int16) -TBUFFER_DEFINE_FUNCTION(uint16_t, Uint16) -TBUFFER_DEFINE_FUNCTION(int32_t, Int32) -TBUFFER_DEFINE_FUNCTION(uint32_t, Uint32) -TBUFFER_DEFINE_FUNCTION(int64_t, Int64) -TBUFFER_DEFINE_FUNCTION(uint64_t, Uint64) -TBUFFER_DEFINE_FUNCTION(float, Float) -TBUFFER_DEFINE_FUNCTION(double, Double) +typedef struct { + bool endian; + char* data; + size_t pos; + size_t size; + void* (*allocator)( void*, size_t ); +} SBufferWriter; + +//////////////////////////////////////////////////////////////////////////////// +// common functions & macros for both reader & writer + +#define tbufTell( buf ) ((buf)->pos) + + 
+//////////////////////////////////////////////////////////////////////////////// +// reader functions & macros + +// *Endian*, if true, reader functions of primitive types will do 'ntoh' automatically +#define tbufInitReader( Data, Size, Endian ) {.endian = (Endian), .data = (Data), .pos = 0, .size = ((Data) == NULL ? 0 :(Size))} + +size_t tbufSkip( SBufferReader* buf, size_t size ); + +const char* tbufRead( SBufferReader* buf, size_t size ); +void tbufReadToBuffer( SBufferReader* buf, void* dst, size_t size ); +const char* tbufReadString( SBufferReader* buf, size_t* len ); +size_t tbufReadToString( SBufferReader* buf, char* dst, size_t size ); +const char* tbufReadBinary( SBufferReader* buf, size_t *len ); +size_t tbufReadToBinary( SBufferReader* buf, void* dst, size_t size ); + +bool tbufReadBool( SBufferReader* buf ); +char tbufReadChar( SBufferReader* buf ); +int8_t tbufReadInt8( SBufferReader* buf ); +uint8_t tbufReadUint8( SBufferReader* buf ); +int16_t tbufReadInt16( SBufferReader* buf ); +uint16_t tbufReadUint16( SBufferReader* buf ); +int32_t tbufReadInt32( SBufferReader* buf ); +uint32_t tbufReadUint32( SBufferReader* buf ); +int64_t tbufReadInt64( SBufferReader* buf ); +uint64_t tbufReadUint64( SBufferReader* buf ); +float tbufReadFloat( SBufferReader* buf ); +double tbufReadDouble( SBufferReader* buf ); + + +//////////////////////////////////////////////////////////////////////////////// +// writer functions & macros + +// *Allocator*, function to allocate memory, will use 'realloc' if NULL +// *Endian*, if true, writer functions of primitive types will do 'hton' automatically +#define tbufInitWriter( Allocator, Endian ) {.endian = (Endian), .data = NULL, .pos = 0, .size = 0, .allocator = ((Allocator) == NULL ? 
realloc : (Allocator))} +void tbufCloseWriter( SBufferWriter* buf ); + +void tbufEnsureCapacity( SBufferWriter* buf, size_t size ); +size_t tbufReserve( SBufferWriter* buf, size_t size ); +char* tbufGetData( SBufferWriter* buf, bool takeOver ); + +void tbufWrite( SBufferWriter* buf, const void* data, size_t size ); +void tbufWriteAt( SBufferWriter* buf, size_t pos, const void* data, size_t size ); +void tbufWriteStringLen( SBufferWriter* buf, const char* str, size_t len ); +void tbufWriteString( SBufferWriter* buf, const char* str ); +// the prototype of tbufWriteBinary and tbufWrite are identical +// the difference is: tbufWriteBinary writes the length of the data to the buffer +// first, then the actual data, which means the reader don't need to know data +// size before read. Write only write the data itself, which means the reader +// need to know data size before read. +void tbufWriteBinary( SBufferWriter* buf, const void* data, size_t len ); + +void tbufWriteBool( SBufferWriter* buf, bool data ); +void tbufWriteBoolAt( SBufferWriter* buf, size_t pos, bool data ); +void tbufWriteChar( SBufferWriter* buf, char data ); +void tbufWriteCharAt( SBufferWriter* buf, size_t pos, char data ); +void tbufWriteInt8( SBufferWriter* buf, int8_t data ); +void tbufWriteInt8At( SBufferWriter* buf, size_t pos, int8_t data ); +void tbufWriteUint8( SBufferWriter* buf, uint8_t data ); +void tbufWriteUint8At( SBufferWriter* buf, size_t pos, uint8_t data ); +void tbufWriteInt16( SBufferWriter* buf, int16_t data ); +void tbufWriteInt16At( SBufferWriter* buf, size_t pos, int16_t data ); +void tbufWriteUint16( SBufferWriter* buf, uint16_t data ); +void tbufWriteUint16At( SBufferWriter* buf, size_t pos, uint16_t data ); +void tbufWriteInt32( SBufferWriter* buf, int32_t data ); +void tbufWriteInt32At( SBufferWriter* buf, size_t pos, int32_t data ); +void tbufWriteUint32( SBufferWriter* buf, uint32_t data ); +void tbufWriteUint32At( SBufferWriter* buf, size_t pos, uint32_t data ); +void 
tbufWriteInt64( SBufferWriter* buf, int64_t data ); +void tbufWriteInt64At( SBufferWriter* buf, size_t pos, int64_t data ); +void tbufWriteUint64( SBufferWriter* buf, uint64_t data ); +void tbufWriteUint64At( SBufferWriter* buf, size_t pos, uint64_t data ); +void tbufWriteFloat( SBufferWriter* buf, float data ); +void tbufWriteFloatAt( SBufferWriter* buf, size_t pos, float data ); +void tbufWriteDouble( SBufferWriter* buf, double data ); +void tbufWriteDoubleAt( SBufferWriter* buf, size_t pos, double data ); #ifdef __cplusplus } #endif -#endif \ No newline at end of file +#endif diff --git a/src/util/src/exception.c b/src/util/src/exception.c new file mode 100644 index 0000000000000000000000000000000000000000..7f8f91c784b7a07078a2801a373d608693178db7 --- /dev/null +++ b/src/util/src/exception.c @@ -0,0 +1,132 @@ +#include "exception.h" + + +static _Thread_local SExceptionNode* expList; + +void exceptionPushNode( SExceptionNode* node ) { + node->prev = expList; + expList = node; +} + +int32_t exceptionPopNode() { + SExceptionNode* node = expList; + expList = node->prev; + return node->code; +} + +void exceptionThrow( int code ) { + expList->code = code; + longjmp( expList->jb, 1 ); +} + + + +static void cleanupWrapper_void_ptr_ptr( SCleanupAction* ca ) { + void (*func)( void*, void* ) = ca->func; + func( ca->arg1.Ptr, ca->arg2.Ptr ); +} + +static void cleanupWrapper_void_ptr_bool( SCleanupAction* ca ) { + void (*func)( void*, bool ) = ca->func; + func( ca->arg1.Ptr, ca->arg2.Bool ); +} + +static void cleanupWrapper_void_ptr( SCleanupAction* ca ) { + void (*func)( void* ) = ca->func; + func( ca->arg1.Ptr ); +} + +static void cleanupWrapper_int_int( SCleanupAction* ca ) { + int (*func)( int ) = ca->func; + func( (int)(intptr_t)(ca->arg1.Int) ); +} + +static void cleanupWrapper_void_void( SCleanupAction* ca ) { + void (*func)() = ca->func; + func(); +} + +typedef void (*wrapper)(SCleanupAction*); +static wrapper wrappers[] = { + cleanupWrapper_void_ptr_ptr, + 
cleanupWrapper_void_ptr_bool, + cleanupWrapper_void_ptr, + cleanupWrapper_int_int, + cleanupWrapper_void_void, +}; + + +void cleanupPush_void_ptr_ptr( bool failOnly, void* func, void* arg1, void* arg2 ) { + assert( expList->numCleanupAction < expList->maxCleanupAction ); + + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction++; + ca->wrapper = 0; + ca->failOnly = failOnly; + ca->func = func; + ca->arg1.Ptr = arg1; + ca->arg2.Ptr = arg2; +} + +void cleanupPush_void_ptr_bool( bool failOnly, void* func, void* arg1, bool arg2 ) { + assert( expList->numCleanupAction < expList->maxCleanupAction ); + + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction++; + ca->wrapper = 1; + ca->failOnly = failOnly; + ca->func = func; + ca->arg1.Ptr = arg1; + ca->arg2.Bool = arg2; +} + +void cleanupPush_void_ptr( bool failOnly, void* func, void* arg ) { + assert( expList->numCleanupAction < expList->maxCleanupAction ); + + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction++; + ca->wrapper = 2; + ca->failOnly = failOnly; + ca->func = func; + ca->arg1.Ptr = arg; +} + +void cleanupPush_int_int( bool failOnly, void* func, int arg ) { + assert( expList->numCleanupAction < expList->maxCleanupAction ); + + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction++; + ca->wrapper = 3; + ca->failOnly = failOnly; + ca->func = func; + ca->arg1.Int = arg; +} + +void cleanupPush_void( bool failOnly, void* func ) { + assert( expList->numCleanupAction < expList->maxCleanupAction ); + + SCleanupAction *ca = expList->cleanupActions + expList->numCleanupAction++; + ca->wrapper = 4; + ca->failOnly = failOnly; + ca->func = func; +} + + + +int32_t cleanupGetActionCount() { + return expList->numCleanupAction; +} + + +static void doExecuteCleanup( SExceptionNode* node, int32_t anchor, bool failed ) { + while( node->numCleanupAction > anchor ) { + --node->numCleanupAction; + SCleanupAction *ca = node->cleanupActions + 
node->numCleanupAction; + if( failed || !(ca->failOnly) ) + wrappers[ca->wrapper]( ca ); + } +} + +void cleanupExecuteTo( int32_t anchor, bool failed ) { + doExecuteCleanup( expList, anchor, failed ); +} + +void cleanupExecute( SExceptionNode* node, bool failed ) { + doExecuteCleanup( node, 0, failed ); +} \ No newline at end of file diff --git a/src/util/src/tbuffer.c b/src/util/src/tbuffer.c index a83d7dddb0d8e987bfbb670f8f3e4b413539dec2..3b4cc74cc3717c365b3a1e0e36b63d5b4098e995 100644 --- a/src/util/src/tbuffer.c +++ b/src/util/src/tbuffer.c @@ -16,150 +16,384 @@ #include #include #include +#include +#include "tbuffer.h" +#include "exception.h" +#include + +//////////////////////////////////////////////////////////////////////////////// +// reader functions -#define TBUFFER_DEFINE_FUNCTION(type, name) \ - type tbufRead##name(SBuffer* buf) { \ - type ret; \ - tbufReadToBuffer(buf, &ret, sizeof(type)); \ - return ret; \ - }\ - void tbufWrite##name(SBuffer* buf, type data) {\ - tbufWrite(buf, &data, sizeof(data));\ - }\ - void tbufWrite##name##At(SBuffer* buf, size_t pos, type data) {\ - tbufWriteAt(buf, pos, &data, sizeof(data));\ +size_t tbufSkip(SBufferReader* buf, size_t size) { + if( (buf->pos + size) > buf->size ) { + THROW( TSDB_CODE_MEMORY_CORRUPTED ); } + size_t old = buf->pos; + buf->pos += size; + return old; +} -#include "tbuffer.h" +const char* tbufRead( SBufferReader* buf, size_t size ) { + const char* ret = buf->data + buf->pos; + tbufSkip( buf, size ); + return ret; +} +void tbufReadToBuffer( SBufferReader* buf, void* dst, size_t size ) { + assert( dst != NULL ); + // always using memcpy, leave optimization to compiler + memcpy( dst, tbufRead(buf, size), size ); +} -//////////////////////////////////////////////////////////////////////////////// -// common functions +static size_t tbufReadLength( SBufferReader* buf ) { + // maximum length is 65535, if larger length is required + // this function and the corresponding write function need to be + // 
revised. + uint16_t l = tbufReadUint16( buf ); + return l; +} -size_t tbufTell(SBuffer* buf) { - return buf->pos; +const char* tbufReadString( SBufferReader* buf, size_t* len ) { + size_t l = tbufReadLength( buf ); + const char* ret = buf->data + buf->pos; + tbufSkip( buf, l + 1 ); + if( ret[l] != 0 ) { + THROW( TSDB_CODE_MEMORY_CORRUPTED ); + } + if( len != NULL ) { + *len = l; + } + return ret; } -size_t tbufSeekTo(SBuffer* buf, size_t pos) { - if (pos > buf->size) { - // TODO: update error code, other tbufThrowError need to be changed too - tbufThrowError(buf, 1); +size_t tbufReadToString( SBufferReader* buf, char* dst, size_t size ) { + assert( dst != NULL ); + size_t len; + const char* str = tbufReadString( buf, &len ); + if (len >= size) { + len = size - 1; } - size_t old = buf->pos; - buf->pos = pos; - return old; + memcpy( dst, str, len ); + dst[len] = 0; + return len; } -size_t tbufSkip(SBuffer* buf, size_t size) { - return tbufSeekTo(buf, buf->pos + size); +const char* tbufReadBinary( SBufferReader* buf, size_t *len ) { + size_t l = tbufReadLength( buf ); + const char* ret = buf->data + buf->pos; + tbufSkip( buf, l ); + if( len != NULL ) { + *len = l; + } + return ret; } -void tbufClose(SBuffer* buf, bool keepData) { - if (!keepData) { - free(buf->data); +size_t tbufReadToBinary( SBufferReader* buf, void* dst, size_t size ) { + assert( dst != NULL ); + size_t len; + const char* data = tbufReadBinary( buf, &len ); + if( len >= size ) { + len = size; } - buf->data = NULL; - buf->pos = 0; - buf->size = 0; + memcpy( dst, data, len ); + return len; } -//////////////////////////////////////////////////////////////////////////////// -// read functions +bool tbufReadBool( SBufferReader* buf ) { + bool ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + return ret; +} -char* tbufRead(SBuffer* buf, size_t size) { - char* ret = buf->data + buf->pos; - tbufSkip(buf, size); +char tbufReadChar( SBufferReader* buf ) { + char ret; + tbufReadToBuffer( buf, &ret, 
sizeof(ret) ); return ret; } -void tbufReadToBuffer(SBuffer* buf, void* dst, size_t size) { - assert(dst != NULL); - // always using memcpy, leave optimization to compiler - memcpy(dst, tbufRead(buf, size), size); +int8_t tbufReadInt8( SBufferReader* buf ) { + int8_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + return ret; } -const char* tbufReadString(SBuffer* buf, size_t* len) { - uint16_t l = tbufReadUint16(buf); - char* ret = buf->data + buf->pos; - tbufSkip(buf, l + 1); - ret[l] = 0; // ensure the string end with '\0' - if (len != NULL) { - *len = l; +uint8_t tbufReadUint8( SBufferReader* buf ) { + uint8_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + return ret; +} + +int16_t tbufReadInt16( SBufferReader* buf ) { + int16_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return (int16_t)ntohs( ret ); } return ret; } -size_t tbufReadToString(SBuffer* buf, char* dst, size_t size) { - assert(dst != NULL); - size_t len; - const char* str = tbufReadString(buf, &len); - if (len >= size) { - len = size - 1; +uint16_t tbufReadUint16( SBufferReader* buf ) { + uint16_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return ntohs( ret ); } - memcpy(dst, str, len); - dst[len] = 0; - return len; + return ret; +} + +int32_t tbufReadInt32( SBufferReader* buf ) { + int32_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return (int32_t)ntohl( ret ); + } + return ret; +} + +uint32_t tbufReadUint32( SBufferReader* buf ) { + uint32_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return ntohl( ret ); + } + return ret; +} + +int64_t tbufReadInt64( SBufferReader* buf ) { + int64_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + return (int64_t)htobe64( ret ); // TODO: ntohll + } + return ret; +} + +uint64_t tbufReadUint64( SBufferReader* buf ) { + uint64_t ret; + tbufReadToBuffer( buf, &ret, sizeof(ret) ); + if( buf->endian ) { + 
return htobe64( ret ); // TODO: ntohll + } + return ret; +} + +float tbufReadFloat( SBufferReader* buf ) { + uint32_t ret = tbufReadUint32( buf ); + return *(float*)( &ret ); } +double tbufReadDouble(SBufferReader* buf) { + uint64_t ret = tbufReadUint64( buf ); + return *(double*)( &ret ); +} //////////////////////////////////////////////////////////////////////////////// -// write functions +// writer functions -void tbufEnsureCapacity(SBuffer* buf, size_t size) { +void tbufCloseWriter( SBufferWriter* buf ) { + (*buf->allocator)( buf->data, 0 ); + buf->data = NULL; + buf->pos = 0; + buf->size = 0; +} + +void tbufEnsureCapacity( SBufferWriter* buf, size_t size ) { size += buf->pos; - if (size > buf->size) { + if( size > buf->size ) { size_t nsize = size + buf->size; - char* data = realloc(buf->data, nsize); - if (data == NULL) { - tbufThrowError(buf, 2); + char* data = (*buf->allocator)( buf->data, nsize ); + // TODO: the exception should be thrown by the allocator function + if( data == NULL ) { + THROW( TSDB_CODE_SERV_OUT_OF_MEMORY ); } buf->data = data; buf->size = nsize; } } -char* tbufGetData(SBuffer* buf, bool takeOver) { +size_t tbufReserve( SBufferWriter* buf, size_t size ) { + tbufEnsureCapacity( buf, size ); + size_t old = buf->pos; + buf->pos += size; + return old; +} + +char* tbufGetData( SBufferWriter* buf, bool takeOver ) { char* ret = buf->data; - if (takeOver) { + if( takeOver ) { buf->pos = 0; buf->size = 0; buf->data = NULL; } - return ret; } -void tbufEndWrite(SBuffer* buf) { - free(buf->data); - buf->data = NULL; - buf->pos = 0; - buf->size = 0; -} - -void tbufWrite(SBuffer* buf, const void* data, size_t size) { - assert(data != NULL); - tbufEnsureCapacity(buf, size); - memcpy(buf->data + buf->pos, data, size); +void tbufWrite( SBufferWriter* buf, const void* data, size_t size ) { + assert( data != NULL ); + tbufEnsureCapacity( buf, size ); + memcpy( buf->data + buf->pos, data, size ); buf->pos += size; } -void tbufWriteAt(SBuffer* buf, size_t 
pos, const void* data, size_t size) { - assert(data != NULL); +void tbufWriteAt( SBufferWriter* buf, size_t pos, const void* data, size_t size ) { + assert( data != NULL ); // this function can only be called to fill the gap on previous writes, // so 'pos + size <= buf->pos' must be true - assert(pos + size <= buf->pos); - memcpy(buf->data + pos, data, size); + assert( pos + size <= buf->pos ); + memcpy( buf->data + pos, data, size ); } -void tbufWriteStringLen(SBuffer* buf, const char* str, size_t len) { - // maximum string length is 65535, if longer string is required +static void tbufWriteLength( SBufferWriter* buf, size_t len ) { + // maximum length is 65535, if larger length is required // this function and the corresponding read function need to be // revised. - assert(len <= 0xffff); - tbufWriteUint16(buf, (uint16_t)len); - tbufWrite(buf, str, len + 1); + assert( len <= 0xffff ); + tbufWriteUint16( buf, (uint16_t)len ); +} + +void tbufWriteStringLen( SBufferWriter* buf, const char* str, size_t len ) { + tbufWriteLength( buf, len ); + tbufWrite( buf, str, len ); + tbufWriteChar( buf, '\0' ); +} + +void tbufWriteString( SBufferWriter* buf, const char* str ) { + tbufWriteStringLen( buf, str, strlen(str) ); +} + +void tbufWriteBinary( SBufferWriter* buf, const void* data, size_t len ) { + tbufWriteLength( buf, len ); + tbufWrite( buf, data, len ); +} + +void tbufWriteBool( SBufferWriter* buf, bool data ) { + tbufWrite( buf, &data, sizeof(data) ); +} + +void tbufWriteBoolAt( SBufferWriter* buf, size_t pos, bool data ) { + tbufWriteAt( buf, pos, &data, sizeof(data) ); +} + +void tbufWriteChar( SBufferWriter* buf, char data ) { + tbufWrite( buf, &data, sizeof(data) ); +} + +void tbufWriteCharAt( SBufferWriter* buf, size_t pos, char data ) { + tbufWriteAt( buf, pos, &data, sizeof(data) ); +} + +void tbufWriteInt8( SBufferWriter* buf, int8_t data ) { + tbufWrite( buf, &data, sizeof(data) ); +} + +void tbufWriteInt8At( SBufferWriter* buf, size_t pos, int8_t data ) { + 
tbufWriteAt( buf, pos, &data, sizeof(data) ); +} + +void tbufWriteUint8( SBufferWriter* buf, uint8_t data ) { + tbufWrite( buf, &data, sizeof(data) ); +} + +void tbufWriteUint8At( SBufferWriter* buf, size_t pos, uint8_t data ) { + tbufWriteAt( buf, pos, &data, sizeof(data) ); +} + +void tbufWriteInt16( SBufferWriter* buf, int16_t data ) { + if( buf->endian ) { + data = (int16_t)htons( data ); + } + tbufWrite( buf, &data, sizeof(data) ); +} + +void tbufWriteInt16At( SBufferWriter* buf, size_t pos, int16_t data ) { + if( buf->endian ) { + data = (int16_t)htons( data ); + } + tbufWriteAt( buf, pos, &data, sizeof(data) ); +} + +void tbufWriteUint16( SBufferWriter* buf, uint16_t data ) { + if( buf->endian ) { + data = htons( data ); + } + tbufWrite( buf, &data, sizeof(data) ); +} + +void tbufWriteUint16At( SBufferWriter* buf, size_t pos, uint16_t data ) { + if( buf->endian ) { + data = htons( data ); + } + tbufWriteAt( buf, pos, &data, sizeof(data) ); +} + +void tbufWriteInt32( SBufferWriter* buf, int32_t data ) { + if( buf->endian ) { + data = (int32_t)htonl( data ); + } + tbufWrite( buf, &data, sizeof(data) ); +} + +void tbufWriteInt32At( SBufferWriter* buf, size_t pos, int32_t data ) { + if( buf->endian ) { + data = (int32_t)htonl( data ); + } + tbufWriteAt( buf, pos, &data, sizeof(data) ); +} + +void tbufWriteUint32( SBufferWriter* buf, uint32_t data ) { + if( buf->endian ) { + data = htonl( data ); + } + tbufWrite( buf, &data, sizeof(data) ); +} + +void tbufWriteUint32At( SBufferWriter* buf, size_t pos, uint32_t data ) { + if( buf->endian ) { + data = htonl( data ); + } + tbufWriteAt( buf, pos, &data, sizeof(data) ); +} + +void tbufWriteInt64( SBufferWriter* buf, int64_t data ) { + if( buf->endian ) { + data = (int64_t)htobe64( data ); + } + tbufWrite( buf, &data, sizeof(data) ); +} + +void tbufWriteInt64At( SBufferWriter* buf, size_t pos, int64_t data ) { + if( buf->endian ) { + data = (int64_t)htobe64( data ); + } + tbufWriteAt( buf, pos, &data, sizeof(data) ); 
+} + +void tbufWriteUint64( SBufferWriter* buf, uint64_t data ) { + if( buf->endian ) { + data = htobe64( data ); + } + tbufWrite( buf, &data, sizeof(data) ); +} + +void tbufWriteUint64At( SBufferWriter* buf, size_t pos, uint64_t data ) { + if( buf->endian ) { + data = htobe64( data ); + } + tbufWriteAt( buf, pos, &data, sizeof(data) ); +} + +void tbufWriteFloat( SBufferWriter* buf, float data ) { + tbufWriteUint32( buf, *(uint32_t*)(&data) ); +} + +void tbufWriteFloatAt( SBufferWriter* buf, size_t pos, float data ) { + tbufWriteUint32At( buf, pos, *(uint32_t*)(&data) ); +} + +void tbufWriteDouble( SBufferWriter* buf, double data ) { + tbufWriteUint64( buf, *(uint64_t*)(&data) ); } -void tbufWriteString(SBuffer* buf, const char* str) { - tbufWriteStringLen(buf, str, strlen(str)); +void tbufWriteDoubleAt( SBufferWriter* buf, size_t pos, double data ) { + tbufWriteUint64At( buf, pos, *(uint64_t*)(&data) ); } diff --git a/tests/pytest/fulltest.sh b/tests/pytest/fulltest.sh new file mode 100755 index 0000000000000000000000000000000000000000..ec06eb38ef038eab2d8cfa798070be84e7f7623e --- /dev/null +++ b/tests/pytest/fulltest.sh @@ -0,0 +1,77 @@ +#!/bin/bash +python3 ./test.py $1 -f insert/basic.py +python3 ./test.py $1 -f insert/int.py +python3 ./test.py $1 -f insert/float.py +python3 ./test.py $1 -f insert/bigint.py +python3 ./test.py $1 -f insert/bool.py +python3 ./test.py $1 -f insert/double.py +python3 ./test.py $1 -f insert/smallint.py +python3 ./test.py $1 -f insert/tinyint.py +python3 ./test.py $1 -f insert/date.py +python3 ./test.py $1 -f insert/binary.py +python3 ./test.py $1 -f import_merge/importBlock1HO.py +python3 ./test.py $1 -f import_merge/importBlock1HPO.py +python3 ./test.py $1 -f import_merge/importBlock1H.py +python3 ./test.py $1 -f import_merge/importBlock1S.py +python3 ./test.py $1 -f import_merge/importBlock1Sub.py +python3 ./test.py $1 -f import_merge/importBlock1TO.py +python3 ./test.py $1 -f import_merge/importBlock1TPO.py +python3 ./test.py $1 
-f import_merge/importBlock1T.py +python3 ./test.py $1 -f import_merge/importBlock2HO.py +python3 ./test.py $1 -f import_merge/importBlock2HPO.py +python3 ./test.py $1 -f import_merge/importBlock2H.py +python3 ./test.py $1 -f import_merge/importBlock2S.py +python3 ./test.py $1 -f import_merge/importBlock2Sub.py +python3 ./test.py $1 -f import_merge/importBlock2TO.py +python3 ./test.py $1 -f import_merge/importBlock2TPO.py +python3 ./test.py $1 -f import_merge/importBlock2T.py +python3 ./test.py $1 -f import_merge/importBlockbetween.py +python3 ./test.py $1 -f import_merge/importCacheFileHO.py +python3 ./test.py $1 -f import_merge/importCacheFileHPO.py +python3 ./test.py $1 -f import_merge/importCacheFileH.py +python3 ./test.py $1 -f import_merge/importCacheFileS.py +python3 ./test.py $1 -f import_merge/importCacheFileSub.py +python3 ./test.py $1 -f import_merge/importCacheFileTO.py +python3 ./test.py $1 -f import_merge/importCacheFileTPO.py +python3 ./test.py $1 -f import_merge/importCacheFileT.py +python3 ./test.py $1 -f import_merge/importDataH2.py +python3 ./test.py $1 -f import_merge/importDataHO2.py +python3 ./test.py $1 -f import_merge/importDataHO.py +python3 ./test.py $1 -f import_merge/importDataHPO.py +python3 ./test.py $1 -f import_merge/importDataLastHO.py +python3 ./test.py $1 -f import_merge/importDataLastHPO.py +python3 ./test.py $1 -f import_merge/importDataLastH.py +python3 ./test.py $1 -f import_merge/importDataLastS.py +python3 ./test.py $1 -f import_merge/importDataLastSub.py +python3 ./test.py $1 -f import_merge/importDataLastTO.py +python3 ./test.py $1 -f import_merge/importDataLastTPO.py +python3 ./test.py $1 -f import_merge/importDataLastT.py +python3 ./test.py $1 -f import_merge/importDataS.py +python3 ./test.py $1 -f import_merge/importDataSub.py +python3 ./test.py $1 -f import_merge/importDataTO.py +python3 ./test.py $1 -f import_merge/importDataTPO.py +python3 ./test.py $1 -f import_merge/importDataT.py +python3 ./test.py $1 -f 
import_merge/importHeadOverlap.py +python3 ./test.py $1 -f import_merge/importHeadPartOverlap.py +python3 ./test.py $1 -f import_merge/importHead.py +python3 ./test.py $1 -f import_merge/importHORestart.py +python3 ./test.py $1 -f import_merge/importHPORestart.py +python3 ./test.py $1 -f import_merge/importHRestart.py +python3 ./test.py $1 -f import_merge/importLastHO.py +python3 ./test.py $1 -f import_merge/importLastHPO.py +python3 ./test.py $1 -f import_merge/importLastH.py +python3 ./test.py $1 -f import_merge/importLastS.py +python3 ./test.py $1 -f import_merge/importLastSub.py +python3 ./test.py $1 -f import_merge/importLastTO.py +python3 ./test.py $1 -f import_merge/importLastTPO.py +python3 ./test.py $1 -f import_merge/importLastT.py +python3 ./test.py $1 -f import_merge/importSpan.py +python3 ./test.py $1 -f import_merge/importSRestart.py +python3 ./test.py $1 -f import_merge/importSubRestart.py +python3 ./test.py $1 -f import_merge/importTailOverlap.py +python3 ./test.py $1 -f import_merge/importTailPartOverlap.py +python3 ./test.py $1 -f import_merge/importTail.py +python3 ./test.py $1 -f import_merge/importToCommit.py +python3 ./test.py $1 -f import_merge/importTORestart.py +python3 ./test.py $1 -f import_merge/importTPORestart.py +python3 ./test.py $1 -f import_merge/importTRestart.py diff --git a/tests/pytest/import_merge/__init__.py b/tests/pytest/import_merge/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/tests/pytest/import_merge/importBlock1H.py b/tests/pytest/import_merge/importBlock1H.py new file mode 100644 index 0000000000000000000000000000000000000000..a1ba905b173b52a835488a6f0151587729d9e1d7 --- /dev/null +++ b/tests/pytest/import_merge/importBlock1H.py @@ -0,0 +1,72 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. 
+# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(39) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1HO.py 
b/tests/pytest/import_merge/importBlock1HO.py new file mode 100644 index 0000000000000000000000000000000000000000..73aec07a9038f3e10350ef10e3da1d2acfbc2a4f --- /dev/null +++ b/tests/pytest/import_merge/importBlock1HO.py @@ -0,0 +1,75 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 10 data before with overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + 
for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(43) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1HPO.py b/tests/pytest/import_merge/importBlock1HPO.py new file mode 100644 index 0000000000000000000000000000000000000000..ad224e5c65ea4703ef54e06bffa33a65f817d705 --- /dev/null +++ b/tests/pytest/import_merge/importBlock1HPO.py @@ -0,0 +1,77 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential 
data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(15, 43): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(47) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1S.py b/tests/pytest/import_merge/importBlock1S.py new file mode 100644 index 0000000000000000000000000000000000000000..37c2ad663190622520a6e9c4c9a2d7af59c7be44 --- /dev/null +++ b/tests/pytest/import_merge/importBlock1S.py @@ -0,0 +1,75 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 50 data covering existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 51): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(50) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git 
a/tests/pytest/import_merge/importBlock1Sub.py b/tests/pytest/import_merge/importBlock1Sub.py new file mode 100644 index 0000000000000000000000000000000000000000..5228563651883ccca70e4132edee055912cf8bbd --- /dev/null +++ b/tests/pytest/import_merge/importBlock1Sub.py @@ -0,0 +1,75 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 10 data totally repetitive") + startTime = 
self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1T.py b/tests/pytest/import_merge/importBlock1T.py new file mode 100644 index 0000000000000000000000000000000000000000..75f41b98cfbee136d8f147736f8765fa4b82eddb --- /dev/null +++ b/tests/pytest/import_merge/importBlock1T.py @@ -0,0 +1,72 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + 
tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime = self.startTime + 38 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 39)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(39) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1TO.py b/tests/pytest/import_merge/importBlock1TO.py new file mode 100644 index 0000000000000000000000000000000000000000..b43428da27900e57754f2095d001ea2f80d67acb --- /dev/null +++ b/tests/pytest/import_merge/importBlock1TO.py @@ -0,0 +1,75 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 10 data later with overlap") + startTime = self.startTime + 30 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(40) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock1TPO.py 
b/tests/pytest/import_merge/importBlock1TPO.py new file mode 100644 index 0000000000000000000000000000000000000000..913ca1cc0230d56c7970b1648a7cdde80cff612e --- /dev/null +++ b/tests/pytest/import_merge/importBlock1TPO.py @@ -0,0 +1,77 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 38 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 31): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(35, 43): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(38) + + tdLog.info("================= step4") + tdLog.info("import 30 data later with 
partly overlap") + startTime = self.startTime + 25 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 31): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(55) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2H.py b/tests/pytest/import_merge/importBlock2H.py new file mode 100644 index 0000000000000000000000000000000000000000..bacd88cbe793d0c0a1139b3ae72f6b294a37b997 --- /dev/null +++ b/tests/pytest/import_merge/importBlock2H.py @@ -0,0 +1,72 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 
records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, 0)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(77) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2HO.py b/tests/pytest/import_merge/importBlock2HO.py new file mode 100644 index 0000000000000000000000000000000000000000..01c0f622b62cf25eb9f66b7f95f1e2b829fd6030 --- /dev/null +++ b/tests/pytest/import_merge/importBlock2HO.py @@ -0,0 +1,75 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 10 data before with overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(81) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2HPO.py 
b/tests/pytest/import_merge/importBlock2HPO.py new file mode 100644 index 0000000000000000000000000000000000000000..ee8d580dfe90a5d4ad0cf3f4e198ba63b050325b --- /dev/null +++ b/tests/pytest/import_merge/importBlock2HPO.py @@ -0,0 +1,77 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(15, 81): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 20 data before with 
partly overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(85) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2S.py b/tests/pytest/import_merge/importBlock2S.py new file mode 100644 index 0000000000000000000000000000000000000000..d85074bfeb0e34b81a222fa52c3ebd06c5bd16ab --- /dev/null +++ b/tests/pytest/import_merge/importBlock2S.py @@ -0,0 +1,75 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 
records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 90 data covering existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 91): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(90) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2Sub.py b/tests/pytest/import_merge/importBlock2Sub.py new file mode 100644 index 0000000000000000000000000000000000000000..deb1dc83373fdcd03ec758aeb91ff6aab3b17a4d --- /dev/null +++ b/tests/pytest/import_merge/importBlock2Sub.py @@ -0,0 +1,75 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2T.py 
b/tests/pytest/import_merge/importBlock2T.py new file mode 100644 index 0000000000000000000000000000000000000000..ded698d28c282aa5b304c088e2b11cefb1efcbed --- /dev/null +++ b/tests/pytest/import_merge/importBlock2T.py @@ -0,0 +1,72 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime = self.startTime + 76 + tdSql.execute('import into tb1 values(%ld, %d)' % 
(startTime + 1, 77)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(77) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2TO.py b/tests/pytest/import_merge/importBlock2TO.py new file mode 100644 index 0000000000000000000000000000000000000000..ffc88c2c999b51e83d6024c6b05e6b36f87ffc14 --- /dev/null +++ b/tests/pytest/import_merge/importBlock2TO.py @@ -0,0 +1,75 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1,
77): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 10 data later with overlap") + startTime = self.startTime + 70 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(80) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlock2TPO.py b/tests/pytest/import_merge/importBlock2TPO.py new file mode 100644 index 0000000000000000000000000000000000000000..8b6c70c32be0190b32cc5ccf2f8df64906522b2e --- /dev/null +++ b/tests/pytest/import_merge/importBlock2TPO.py @@ -0,0 +1,77 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 61): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(65, 81): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import 30 data later with partly overlap") + startTime = self.startTime + 55 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 31): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(85) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) 
+tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importBlockbetween.py b/tests/pytest/import_merge/importBlockbetween.py new file mode 100644 index 0000000000000000000000000000000000000000..c3482b37763a12b8a36e406421cffe22ea724f3b --- /dev/null +++ b/tests/pytest/import_merge/importBlockbetween.py @@ -0,0 +1,78 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("one block can import 38 records") + + tdLog.info("================= step2") + tdLog.info("import 76 sequential data with gap between 2 blocks") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 39): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + for rid in range(40, 78): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + 
tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(76) + + tdLog.info("================= step4") + tdLog.info("import data into the gap between 2 blocks") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(39, 40): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(77) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileH.py b/tests/pytest/import_merge/importCacheFileH.py new file mode 100644 index 0000000000000000000000000000000000000000..cd2b3a73f10ca63b65bd14559b487ec2da5510e6 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileH.py @@ -0,0 +1,88 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, 0)) + 
tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(21) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileHO.py b/tests/pytest/import_merge/importCacheFileHO.py new file mode 100644 index 0000000000000000000000000000000000000000..2e65c337b7c2a77f25726ca5806bd0ca02a55a53 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileHO.py @@ -0,0 +1,91 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" 
".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 10 data before with overlap") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(25) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileHPO.py b/tests/pytest/import_merge/importCacheFileHPO.py new file mode 100644 index 0000000000000000000000000000000000000000..f01ebd0d60dc5ef1bbcfa4e2e8ea45d75454d627 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileHPO.py @@ -0,0 +1,95 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 9 sequential data with gap") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + if (rid == 5): + continue + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(9) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 9 data again with gap") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + if (rid == 5): + continue + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(18) + + tdLog.info("================= step7") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 3 + sqlcmd 
= ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(23) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileS.py b/tests/pytest/import_merge/importCacheFileS.py new file mode 100644 index 0000000000000000000000000000000000000000..0bb9107562e72fc2dded3cf918f904920529b94d --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileS.py @@ -0,0 +1,91 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = 
self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 30 data covering existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 31): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(30) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileSub.py b/tests/pytest/import_merge/importCacheFileSub.py new file mode 100644 index 0000000000000000000000000000000000000000..cd5d2509687a9be414b9328d1f66749ffc5138e6 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileSub.py @@ -0,0 +1,91 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, 
%d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileT.py b/tests/pytest/import_merge/importCacheFileT.py new file mode 100644 index 0000000000000000000000000000000000000000..be79e26bc7786387a2f04dadb79616f641812de2 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileT.py @@ -0,0 +1,88 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + 
 sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 1 data later") + startTime = self.startTime + 20 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(21) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileTO.py b/tests/pytest/import_merge/importCacheFileTO.py new file mode 100644 index 0000000000000000000000000000000000000000..dd17de3adf9fd9bf640c92fd6c53e492467747c6 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileTO.py @@ -0,0 +1,91 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data again") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step7") + tdLog.info("import 10 data later with overlap") + startTime = self.startTime + 15 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + 
sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(25) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importCacheFileTPO.py b/tests/pytest/import_merge/importCacheFileTPO.py new file mode 100644 index 0000000000000000000000000000000000000000..948b99ed21d971ea99f347bc7e83adcb63973df8 --- /dev/null +++ b/tests/pytest/import_merge/importCacheFileTPO.py @@ -0,0 +1,95 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 9 sequential data with gap") + startTime = self.startTime + sqlcmd = ['import into tb1 
values'] + for rid in range(1, 11): + if (rid == 7): + continue + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(9) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 9 data again with gap") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + if (rid == 7): + continue + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(18) + + tdLog.info("================= step7") + tdLog.info("import 20 data later with partly overlap") + startTime = self.startTime + 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step8") + tdSql.query('select * from tb1') + tdSql.checkRows(25) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataH2.py b/tests/pytest/import_merge/importDataH2.py new file mode 100644 index 0000000000000000000000000000000000000000..d49abff374b864e80079996a9cb5c81be248d4ba --- /dev/null +++ b/tests/pytest/import_merge/importDataH2.py @@ -0,0 +1,94 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
 +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) + + tdLog.info("================= step2") + tdLog.info("import %d sequential data" % (self.rows // 2)) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, self.rows // 2 + 1): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(self.rows // 2) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step7") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rows // 2 + 1) + + tdLog.info("================= step8") + 
tdLog.info("import 10 data in batch before") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime - rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step9") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rows / 2 + 11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataHO.py b/tests/pytest/import_merge/importDataHO.py new file mode 100644 index 0000000000000000000000000000000000000000..0483e6844c083766a36441926ec096c3219a9130 --- /dev/null +++ b/tests/pytest/import_merge/importDataHO.py @@ -0,0 +1,84 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + self.rowsPerTable = 20 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) + + tdLog.info("================= step2") + tdLog.info("import %d sequential data" % self.rowsPerTable) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, self.rowsPerTable + 1): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, self.rowsPerTable) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 6 data before with overlap") + startTime = self.startTime - 3 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 7): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1 order 
by ts desc') + tdSql.checkRows(self.rowsPerTable + 3) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataHO2.py b/tests/pytest/import_merge/importDataHO2.py new file mode 100644 index 0000000000000000000000000000000000000000..ab7044d2a745822e27f507b5fb20f1d058fa65f6 --- /dev/null +++ b/tests/pytest/import_merge/importDataHO2.py @@ -0,0 +1,84 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + self.rowsPerTable = 100 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) + + tdLog.info("================= step2") + tdLog.info("import %d sequential data" % self.rowsPerTable) + startTime = self.startTime + sqlcmd = 
['import into tb1 values'] + for rid in range(1, self.rowsPerTable + 1): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, self.rowsPerTable) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 6 data before with overlap") + startTime = self.startTime - 3 + sqlcmd = ['import into tb1 values'] + for rid in range(6, 0, -1): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(self.rowsPerTable + 3) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataHPO.py b/tests/pytest/import_merge/importDataHPO.py new file mode 100644 index 0000000000000000000000000000000000000000..f165bd7b5a734bc21e5005e9413a095de5a202c4 --- /dev/null +++ b/tests/pytest/import_merge/importDataHPO.py @@ -0,0 +1,86 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + self.rowsPerTable = 20 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) + + tdLog.info("================= step2") + tdLog.info("import %d sequential data" % self.rowsPerTable) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 10): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(14, self.rowsPerTable + 5): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, self.rowsPerTable) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 4 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" 
".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(self.rowsPerTable + 8) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastH.py b/tests/pytest/import_merge/importDataLastH.py new file mode 100644 index 0000000000000000000000000000000000000000..319fd40677d96e447e167079c4a5cf19fdc63ba5 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastH.py @@ -0,0 +1,80 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) + + tdLog.info("================= step2") + 
tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(206) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastHO.py b/tests/pytest/import_merge/importDataLastHO.py new file mode 100644 index 0000000000000000000000000000000000000000..5a71c5db654a8b40ba1cee6f435b15d3ba6297e2 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastHO.py @@ -0,0 +1,83 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 5 data before with overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(207) + + def stop(self): + tdSql.close() + 
tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastHPO.py b/tests/pytest/import_merge/importDataLastHPO.py new file mode 100644 index 0000000000000000000000000000000000000000..f2c95cbd4d735d1cf1648cd07754bc940a99523e --- /dev/null +++ b/tests/pytest/import_merge/importDataLastHPO.py @@ -0,0 +1,85 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, 
rid)) + for rid in range(14, 209): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data before with partly overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(210) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastS.py b/tests/pytest/import_merge/importDataLastS.py new file mode 100644 index 0000000000000000000000000000000000000000..929e02dd1e5f134f229c101db1de9c5c7526ede1 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastS.py @@ -0,0 +1,83 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 250 data covering the existing data") + startTime = self.startTime - 15 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 251): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(250) + + def stop(self): + 
tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastSub.py b/tests/pytest/import_merge/importDataLastSub.py new file mode 100644 index 0000000000000000000000000000000000000000..158fa0fb3c7950360979a16d5916719462b841a9 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastSub.py @@ -0,0 +1,83 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % 
(startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastT.py b/tests/pytest/import_merge/importDataLastT.py new file mode 100644 index 0000000000000000000000000000000000000000..9bc90a8275d842e9b64de79e9c8c143e2d048ed2 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastT.py @@ -0,0 +1,76 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data later") + startTime = self.startTime + 205 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(206) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git 
a/tests/pytest/import_merge/importDataLastTO.py b/tests/pytest/import_merge/importDataLastTO.py new file mode 100644 index 0000000000000000000000000000000000000000..0c93ac430b4fa90f9d535071869eb22ad122cb18 --- /dev/null +++ b/tests/pytest/import_merge/importDataLastTO.py @@ -0,0 +1,79 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 206): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) 
+ tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 5 data later with overlap") + startTime = self.startTime + 203 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(208) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataLastTPO.py b/tests/pytest/import_merge/importDataLastTPO.py new file mode 100644 index 0000000000000000000000000000000000000000..188e93e0dbc2a4742308cb5a4a03fb7accfdba4e --- /dev/null +++ b/tests/pytest/import_merge/importDataLastTPO.py @@ -0,0 +1,81 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than %d rows less than %d rows will go to data and last file" % + (self.rows, 10 + self.rows)) + + tdLog.info("================= step2") + tdLog.info("import 205 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 196): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(200, 210): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(205) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data later with partly overlap") + startTime = self.startTime + 192 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(212) + + def 
stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataS.py b/tests/pytest/import_merge/importDataS.py new file mode 100644 index 0000000000000000000000000000000000000000..65d4087c3d79ebf266048d1971e1f4cac513cba4 --- /dev/null +++ b/tests/pytest/import_merge/importDataS.py @@ -0,0 +1,79 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) + + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= 
step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 30 data covering the existing data") + startTime = self.startTime - 5 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 31): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(30) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataSub.py b/tests/pytest/import_merge/importDataSub.py new file mode 100644 index 0000000000000000000000000000000000000000..4bf85f2bdd39f1734d6e78ad8b9c813ce361b4fb --- /dev/null +++ b/tests/pytest/import_merge/importDataSub.py @@ -0,0 +1,84 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) + + tdLog.info("================= step2") + tdLog.info("import %d sequential data" % (self.rows / 2)) + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, self.rows / 2 + 1): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(self.rows / 2) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 10 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 11): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step9") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts 
desc') + tdSql.checkRows(self.rows / 2) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataT.py b/tests/pytest/import_merge/importDataT.py new file mode 100644 index 0000000000000000000000000000000000000000..66016c5555f2b1dcc3fcad8735d8142ae4575263 --- /dev/null +++ b/tests/pytest/import_merge/importDataT.py @@ -0,0 +1,76 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) + + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" 
".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data later") + startTime = self.startTime + 20 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(21) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataTO.py b/tests/pytest/import_merge/importDataTO.py new file mode 100644 index 0000000000000000000000000000000000000000..a3c17b2846c2dbdfaa115212223568a802d458e4 --- /dev/null +++ b/tests/pytest/import_merge/importDataTO.py @@ -0,0 +1,79 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) + + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 6 data later with overlap") + startTime = self.startTime + 18 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 7): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(24) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, 
TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importDataTPO.py b/tests/pytest/import_merge/importDataTPO.py new file mode 100644 index 0000000000000000000000000000000000000000..20eb41cc08755ad09237568fc1f4973e55d354c8 --- /dev/null +++ b/tests/pytest/import_merge/importDataTPO.py @@ -0,0 +1,82 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info( + "More than 10 rows less than %d rows will go to data file" % + self.rows) + + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 18): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(22, 25): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * 
from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + tdLog.sleep(5) + + tdLog.info("================= step5") + tdLog.info("import 20 data later with partly overlap") + startTime = self.startTime + 15 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(35) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHORestart.py b/tests/pytest/import_merge/importHORestart.py new file mode 100644 index 0000000000000000000000000000000000000000..cfbfa61c90ec1521d3db507f7a485b0f609e858d --- /dev/null +++ b/tests/pytest/import_merge/importHORestart.py @@ -0,0 +1,72 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdSql.prepare() + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 5 data before with overlap") + startTime = self.startTime - 2 + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(12) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHPORestart.py b/tests/pytest/import_merge/importHPORestart.py new file mode 100644 index 0000000000000000000000000000000000000000..7e96d44a1a3390a351a45620ae29f02fdff06d51 --- /dev/null +++ b/tests/pytest/import_merge/importHPORestart.py @@ -0,0 +1,77 
@@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdSql.prepare() + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1, 4): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + for rid in range(6, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + tdLog.info("import 8 data before with partly overlap") + startTime = self.startTime - 2 + for rid in range(1, 9): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(12) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, 
TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHRestart.py b/tests/pytest/import_merge/importHRestart.py new file mode 100644 index 0000000000000000000000000000000000000000..aa1783977ee833418c35803fa4069af9e448a6aa --- /dev/null +++ b/tests/pytest/import_merge/importHRestart.py @@ -0,0 +1,69 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdSql.prepare() + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * 
from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHead.py b/tests/pytest/import_merge/importHead.py new file mode 100644 index 0000000000000000000000000000000000000000..6971986ebc9bedd41326985ef0848dabc39aa260 --- /dev/null +++ b/tests/pytest/import_merge/importHead.py @@ -0,0 +1,70 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + 
tdLog.info("================= step4") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHeadOverlap.py b/tests/pytest/import_merge/importHeadOverlap.py new file mode 100644 index 0000000000000000000000000000000000000000..df5f07b5a250116b37e9d67f1e5b1dae28c7242c --- /dev/null +++ b/tests/pytest/import_merge/importHeadOverlap.py @@ -0,0 +1,65 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdSql.prepare() + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + 
tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 10 data before with overlap") + startTime = self.startTime - 2 + for rid in range(1, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(12) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importHeadPartOverlap.py b/tests/pytest/import_merge/importHeadPartOverlap.py new file mode 100644 index 0000000000000000000000000000000000000000..8c9885e22ff9b83ca98207d9dc862b9a7e994cbb --- /dev/null +++ b/tests/pytest/import_merge/importHeadPartOverlap.py @@ -0,0 +1,69 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdSql.prepare() + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1, 4): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + startTime += 2 + for rid in range(6, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + tdLog.info("import 8 data before with partly overlap") + startTime = self.startTime - 2 + for rid in range(1, 9): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(12) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastH.py b/tests/pytest/import_merge/importLastH.py new file mode 100644 index 0000000000000000000000000000000000000000..c69f453971eb036382ad78cfc6c451dc6c8fad57 --- /dev/null +++ 
b/tests/pytest/import_merge/importLastH.py @@ -0,0 +1,74 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data before") + startTime = self.startTime - 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + 
tdSql.checkRows(6) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastHO.py b/tests/pytest/import_merge/importLastHO.py new file mode 100644 index 0000000000000000000000000000000000000000..ec930d1807c3ed9cf1d9b3b4096921c9389f79be --- /dev/null +++ b/tests/pytest/import_merge/importLastHO.py @@ -0,0 +1,77 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + 
tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 4 data before with overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 5): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(7) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastHPO.py b/tests/pytest/import_merge/importLastHPO.py new file mode 100644 index 0000000000000000000000000000000000000000..9603a7b8529d2d435a1e5e174f1d3df7bc41fe30 --- /dev/null +++ b/tests/pytest/import_merge/importLastHPO.py @@ -0,0 +1,79 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 6 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 4): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(6, 9): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(6) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 8 data before with partly overlap") + startTime = self.startTime - 2 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 9): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % 
__file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastS.py b/tests/pytest/import_merge/importLastS.py new file mode 100644 index 0000000000000000000000000000000000000000..7dbe74e2ca0f802861c7b66c63e83fba296d0f14 --- /dev/null +++ b/tests/pytest/import_merge/importLastS.py @@ -0,0 +1,76 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + 
tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 20 data covering existing data") + startTime = self.startTime - 10; sqlcmd = ['import into tb1 values'] + for rid in range(1, 21): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastSub.py b/tests/pytest/import_merge/importLastSub.py new file mode 100644 index 0000000000000000000000000000000000000000..f028ba5fd77603a283bd7e5daf02ffb5fb738813 --- /dev/null +++ b/tests/pytest/import_merge/importLastSub.py @@ -0,0 +1,77 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 3 data totally repetitive") + startTime = self.startTime + 1 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 4): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, 
TDTestCase()) diff --git a/tests/pytest/import_merge/importLastT.py b/tests/pytest/import_merge/importLastT.py new file mode 100644 index 0000000000000000000000000000000000000000..3fe4e0006c80958822e4ccd7dd1f14c562308820 --- /dev/null +++ b/tests/pytest/import_merge/importLastT.py @@ -0,0 +1,78 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + 
tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 1 data later") + startTime = self.startTime + 5 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime + 1, 1)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(6) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastTO.py b/tests/pytest/import_merge/importLastTO.py new file mode 100644 index 0000000000000000000000000000000000000000..76e5016bdbf87d850caf9b39b9aa4b4277080b5d --- /dev/null +++ b/tests/pytest/import_merge/importLastTO.py @@ -0,0 +1,81 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + 
tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 5 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 6): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(5) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 4 data later with overlap") + startTime = self.startTime + 3 + sqlcmd = ['import into tb1 values'] + for rid in range(1, 5): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(7) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importLastTPO.py b/tests/pytest/import_merge/importLastTPO.py new file mode 100644 index 0000000000000000000000000000000000000000..08f416806328b0f3ba391bff350bd6aa954fb7f9 --- /dev/null +++ b/tests/pytest/import_merge/importLastTPO.py @@ -0,0 +1,82 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + self.rows = 200 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db rows %d' % self.rows) + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + tdLog.info("less than 10 rows will go to last file") + + tdLog.info("================= step2") + tdLog.info("import 6 sequential data") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(1, 4): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + for rid in range(6, 9): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(6) + + tdLog.info("================= step4") + tdDnodes.stop(1) + tdLog.sleep(5) + tdDnodes.start(1) + + tdLog.info("================= step5") + tdLog.info("import 8 data later with partly overlap") + startTime = self.startTime + 2; sqlcmd = ['import into tb1 values'] + for rid in range(1, 9): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + def stop(self): + tdSql.close() + tdLog.success("%s 
successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importSRestart.py b/tests/pytest/import_merge/importSRestart.py new file mode 100644 index 0000000000000000000000000000000000000000..0771b8bf9c3b9d758363f3089dcdee0c8830f4f3 --- /dev/null +++ b/tests/pytest/import_merge/importSRestart.py @@ -0,0 +1,79 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 20 data cover existing data") 
+ startTime = self.startTime - 5 + for rid in range(1, 21): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importSpan.py b/tests/pytest/import_merge/importSpan.py new file mode 100644 index 0000000000000000000000000000000000000000..736c4bad64038ff92704cbf5a55e922cd99cfe29 --- /dev/null +++ b/tests/pytest/import_merge/importSpan.py @@ -0,0 +1,74 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 20 data cover existing data") + startTime = self.startTime - 5 + for rid in range(1, 21): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importSubRestart.py b/tests/pytest/import_merge/importSubRestart.py new file mode 100644 index 
0000000000000000000000000000000000000000..f7f33d32c1a9f149bbcef7174bca0f087caadc40 --- /dev/null +++ b/tests/pytest/import_merge/importSubRestart.py @@ -0,0 +1,79 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 20 sequential data") + startTime = self.startTime + for rid in range(1, 21): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + tdLog.info("================= step4") + tdLog.info("import 10 data totally repetitive") + startTime = self.startTime + 5 + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + 
tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(20) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTORestart.py b/tests/pytest/import_merge/importTORestart.py new file mode 100644 index 0000000000000000000000000000000000000000..194756cd1291f3a9ef2f6c13e896be4e053fc479 --- /dev/null +++ b/tests/pytest/import_merge/importTORestart.py @@ -0,0 +1,79 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= 
step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 6 data after with overlap") + startTime = self.startTime + 8 + for rid in range(1, 7): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(14) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTPORestart.py b/tests/pytest/import_merge/importTPORestart.py new file mode 100644 index 0000000000000000000000000000000000000000..36d4b64390807c11a51beb36368d2aafa841d3af --- /dev/null +++ b/tests/pytest/import_merge/importTPORestart.py @@ -0,0 +1,83 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1, 6): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + for rid in range(8, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + tdLog.info("import 8 data after with partly overlap") + startTime = self.startTime + 3 + for rid in range(1, 9): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, 
TDTestCase()) diff --git a/tests/pytest/import_merge/importTRestart.py b/tests/pytest/import_merge/importTRestart.py new file mode 100644 index 0000000000000000000000000000000000000000..9308518d8c442c1e18ca39133a352e1659bfed72 --- /dev/null +++ b/tests/pytest/import_merge/importTRestart.py @@ -0,0 +1,76 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime = self.startTime + 11 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + 
tdLog.info("================= step5") + tdDnodes.forcestop(1) + tdDnodes.start(1) + tdLog.sleep(10) + + tdLog.info("================= step6") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTail.py b/tests/pytest/import_merge/importTail.py new file mode 100644 index 0000000000000000000000000000000000000000..a80db730a0d0c02bb30d402986a7e0e355f9d046 --- /dev/null +++ b/tests/pytest/import_merge/importTail.py @@ -0,0 +1,70 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute('import 
into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 1 data after") + startTime += 1 + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTailOverlap.py b/tests/pytest/import_merge/importTailOverlap.py new file mode 100644 index 0000000000000000000000000000000000000000..98596d2f77ce679b782a288bc2be1ce0103a070d --- /dev/null +++ b/tests/pytest/import_merge/importTailOverlap.py @@ -0,0 +1,72 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 10 sequential data") + startTime = self.startTime + for rid in range(1, 11): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(10) + + tdLog.info("================= step4") + tdLog.info("import 6 data after with overlap") + startTime = self.startTime + 8 + for rid in range(1, 7): + tdSql.execute('import into tb1 values(%ld, %d)' % (startTime, rid)) + startTime += 1 + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(14) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importTailPartOverlap.py b/tests/pytest/import_merge/importTailPartOverlap.py new file mode 100644 index 
0000000000000000000000000000000000000000..0263114a25c3e9e5d56329ad6d52502bf8fcbb9b --- /dev/null +++ b/tests/pytest/import_merge/importTailPartOverlap.py @@ -0,0 +1,78 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, speed int)') + + tdLog.info("================= step2") + tdLog.info("import 8 sequential data with gap") + startTime = self.startTime + for rid in range(1, 6): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + for rid in range(8, 11): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + (startTime + rid, rid)) + + tdLog.info("================= step3") + tdSql.query('select * from tb1') + tdSql.checkRows(8) + + tdLog.info("================= step4") + tdLog.info("import 8 data after with partly overlap") + startTime = self.startTime + 3 + for rid in range(1, 9): + tdSql.execute( + 'import into tb1 values(%ld, %d)' % + 
(startTime + rid, rid)) + + tdLog.info("================= step5") + tdSql.query('select * from tb1') + tdSql.checkRows(11) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/import_merge/importToCommit.py b/tests/pytest/import_merge/importToCommit.py new file mode 100644 index 0000000000000000000000000000000000000000..7a408bcdce52f8c699e751057554eb26fe70c4dc --- /dev/null +++ b/tests/pytest/import_merge/importToCommit.py @@ -0,0 +1,85 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + self.ntables = 1 + self.startTime = 1520000010000 + + tdDnodes.stop(1) + tdDnodes.deploy(1) + tdDnodes.start(1) + tdSql.execute('reset query cache') + tdSql.execute('drop database if exists db') + tdSql.execute('create database db cache 512 tables 10') + tdSql.execute('use db') + + tdLog.info("================= step1") + tdLog.info("create 1 table") + tdSql.execute('create table tb1 (ts timestamp, i int)') + tdLog.info( + "one block can import 38 records and totally there are 40 blocks") + + tdLog.info("================= step2") + tdLog.info('insert data until the first commit') + dnodesDir = 
tdDnodes.getDnodesRootDir() + dataDir = dnodesDir + '/dnode1/data/data' + startTime = self.startTime + rid0 = 1 + while (True): + sqlcmd = 'insert into tb1 values(%ld, %d)' % ( + startTime + rid0 * 2, rid0) + tdSql.execute(sqlcmd) + rid0 += 1 + vnodes = os.listdir(dataDir) + if (len(vnodes) > 0): + tdLog.info("data is committed, stop inserting") + break + + tdLog.info("================= step5") + tdLog.info("import 1 data before ") + startTime = self.startTime + sqlcmd = ['import into tb1 values'] + for rid in range(3, 4): + sqlcmd.append('(%ld, %d)' % (startTime + rid, rid)) + tdSql.execute(" ".join(sqlcmd)) + + tdLog.info("================= step6") + tdSql.execute('reset query cache') + tdSql.query('select * from tb1 order by ts desc') + tdSql.checkRows(rid0 - 1 + 1) + + tdLog.info("================= step7") + tdSql.execute('reset query cache') + tdSql.query('select count(*) from tb1') + tdSql.checkData(0, 0, rid0 - 1 + 1) + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/insert/binary.py b/tests/pytest/insert/binary.py new file mode 100644 index 0000000000000000000000000000000000000000..9989865f96af21d5d806c42660a79f1be62b5b4c --- /dev/null +++ b/tests/pytest/insert/binary.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- + +import sys +from util.log import * +from util.cases import * +from util.sql import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + tdSql.prepare() + + + tdLog.info('=============== step1') + tdLog.info('create table tb (ts timestamp, speed binary(5))') + tdSql.execute('create table tb (ts timestamp, speed binary(5))') + tdLog.info("insert into tb values (now, ) -x step1") + tdSql.error("insert into tb values (now, )") + tdLog.info('=============== step2') + tdLog.info("insert into tb values 
(now+1a, '1234')") + tdSql.execute("insert into tb values (now+1a, '1234')") + tdLog.info('select speed from tb order by ts desc') + tdSql.query('select speed from tb order by ts desc') + tdLog.info('tdSql.checkRow(1)') + tdSql.checkRows(1) + tdLog.info('tdSql.checkData(0, 0, 1234)') + tdSql.checkData(0, 0, 1234) + tdLog.info('=============== step3') + tdLog.info("insert into tb values (now+2a, '23456')") + tdSql.execute("insert into tb values (now+2a, '23456')") + tdLog.info('select speed from tb order by ts desc') + tdSql.query('select speed from tb order by ts desc') + tdLog.info('tdSql.checkRow(2)') + tdSql.checkRows(2) + tdLog.info('==> $data00') + tdLog.info('tdSql.checkData(0, 0, 23456)') + tdSql.checkData(0, 0, 23456) + tdLog.info('=============== step4') + tdLog.info("insert into tb values (now+3a, '345678')") + tdSql.error("insert into tb values (now+3a, '345678')") + tdLog.info("insert into tb values (now+3a, '34567')") + tdSql.execute("insert into tb values (now+3a, '34567')") + tdLog.info('select speed from tb order by ts desc') + tdSql.query('select speed from tb order by ts desc') + tdLog.info('tdSql.checkRow(3)') + tdSql.checkRows(3) + tdLog.info('==> $data00') + tdLog.info('tdSql.checkData(0, 0, 34567)') + tdSql.checkData(0, 0, 34567) + tdLog.info('drop database db') + tdSql.execute('drop database db') + tdLog.info('show databases') + tdSql.query('show databases') + tdLog.info('tdSql.checkRow(0)') + tdSql.checkRows(0) +# convert end + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/insert/date.py b/tests/pytest/insert/date.py new file mode 100644 index 0000000000000000000000000000000000000000..716d799cb0c05057bd87b8721491ad9e0e9f3683 --- /dev/null +++ b/tests/pytest/insert/date.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- + +import sys +from util.log import * +from util.cases import * +from 
util.sql import * + + +class TDTestCase: + def init(self, conn): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor()) + + def run(self): + tdSql.prepare() + + # TSIM: system sh/stop_dnodes.sh + # TSIM: + # TSIM: system sh/ip.sh -i 1 -s up + # TSIM: system sh/deploy.sh -n dnode1 -m 192.168.0.1 -i 192.168.0.1 + # TSIM: system sh/cfg.sh -n dnode1 -c commitLog -v 0 + # TSIM: system sh/exec.sh -n dnode1 -s start + # TSIM: + # TSIM: sleep 3000 + # TSIM: sql connect + # TSIM: + # TSIM: $i = 0 + # TSIM: $dbPrefix = lm_da_db + # TSIM: $tbPrefix = lm_da_tb + # TSIM: $db = $dbPrefix . $i + # TSIM: $tb = $tbPrefix . $i + # TSIM: + # TSIM: print =============== step1 + tdLog.info('=============== step1') + # TSIM: sql create database $db + # TSIM: sql use $db + # TSIM: + # TSIM: sql create table $tb (ts timestamp, speed int) + tdLog.info("create table tb0 (ts timestamp, speed int)") + tdSql.execute('create table tb0 (ts timestamp, speed int)') + # TSIM: sql insert into $tb values ('2017-01-01 08:00:00.001', 1) + tdLog.info("insert into tb0 values ('2017-01-01 08:00:00.001', 1)") + tdSql.execute("insert into tb0 values ('2017-01-01 08:00:00.001', 1)") + # TSIM: sql select ts from $tb + tdLog.info('select ts from tb0') + tdSql.query('select ts from tb0') + # TSIM: if $rows != 1 then + tdLog.info('tdSql.checkRow(1)') + tdSql.checkRows(1) + # TSIM: return -1 + # TSIM: endi + # TSIM: if $data00 != @17-01-01 08:00:00.001@ then + tdLog.info("tdSql.checkData(0, 0, 17-01-01 08:00:00.001)") + expectedData = datetime.datetime.strptime( + "17-01-01 08:00:00.001", "%y-%m-%d %H:%M:%S.%f") + tdSql.checkData(0, 0, expectedData) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: print =============== step2 + tdLog.info('=============== step2') + # TSIM: sql insert into $tb values ('2017-08-28 00:23:46.429+ 1a', 2) + tdLog.info("insert into tb0 values ('2017-08-28 00:23:46.429+ 1a', 2)") + tdSql.execute( + "insert into tb0 values ('2017-08-28 00:23:46.429+ 1a', 2)") + 
# TSIM: #sql insert into $tb values ('2017-08-28 00:23:46cd .429', 2) + # TSIM: sql select ts from $tb + tdLog.info('select ts from tb0') + tdSql.query('select ts from tb0') + # TSIM: if $rows != 2 then + tdLog.info('tdSql.checkRow(2)') + tdSql.checkRows(2) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: print =============== step3 + tdLog.info('=============== step3') + # TSIM: #sql insert into $tb values ('1970-01-01 08:00:00.000', 3) + # TSIM: #sql insert into $tb values ('1970-01-01 08:00:00.000', 3) + # TSIM: sql select ts from $tb + tdLog.info('select ts from tb0') + tdSql.query('select ts from tb0') + # TSIM: if $rows != 2 then + tdLog.info('tdSql.checkRow(2)') + tdSql.checkRows(2) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: print =============== step4 + tdLog.info('=============== step4') + # TSIM: sql insert into $tb values(now, 4); + tdLog.info("insert into tb0 values(now, 4);") + tdSql.execute("insert into tb0 values(now, 4);") + # TSIM: sql insert into $tb values(now+1a, 5); + tdLog.info("insert into tb0 values(now+1a, 5);") + tdSql.execute("insert into tb0 values(now+1a, 5);") + # TSIM: sql insert into $tb values(now+1s, 6); + tdLog.info("insert into tb0 values(now+1s, 6);") + tdSql.execute("insert into tb0 values(now+1s, 6);") + # TSIM: sql insert into $tb values(now+1m, 7); + tdLog.info("insert into tb0 values(now+1m, 7);") + tdSql.execute("insert into tb0 values(now+1m, 7);") + # TSIM: sql insert into $tb values(now+1h, 8); + tdLog.info("insert into tb0 values(now+1h, 8);") + tdSql.execute("insert into tb0 values(now+1h, 8);") + # TSIM: sql insert into $tb values(now+1d, 9); + tdLog.info("insert into tb0 values(now+1d, 9);") + tdSql.execute("insert into tb0 values(now+1d, 9);") + # TSIM: sql_error insert into $tb values(now+3w, 10); + tdLog.info("insert into tb0 values(now+3w, 10);") + tdSql.error("insert into tb0 values(now+3w, 10);") + # TSIM: sql_error insert into $tb values(now+1n, 11); + tdLog.info("insert into tb0 
values(now+1n, 11);") + tdSql.error("insert into tb0 values(now+1n, 11);") + # TSIM: sql_error insert into $tb values(now+1y, 12); + tdLog.info("insert into tb0 values(now+1y, 12);") + tdSql.error("insert into tb0 values(now+1y, 12);") + # TSIM: + # TSIM: print =============== step5 + tdLog.info('=============== step5') + # TSIM: sql_error insert into $tb values ('9999-12-31 213:59:59.999', + # 13) + tdLog.info("insert into tb0 values ('9999-12-31 213:59:59.999', 13)") + tdSql.error("insert into tb0 values ('9999-12-31 213:59:59.999', 13)") + # TSIM: sql select ts from $tb + tdLog.info('select ts from tb0') + tdSql.query('select ts from tb0') + # TSIM: print $rows + tdLog.info('$rows') + # TSIM: if $rows != 8 then + tdLog.info('tdSql.checkRow(8)') + tdSql.checkRows(8) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: print =============== step6 + tdLog.info('=============== step6') + # TSIM: sql_error insert into $tb values ('9999-12-99 23:59:59.999', + # 13) + tdLog.info("insert into tb0 values ('9999-12-99 23:59:59.999', 13)") + tdSql.error("insert into tb0 values ('9999-12-99 23:59:59.999', 13)") + # TSIM: + # TSIM: sql select ts from $tb + tdLog.info('select ts from tb0') + tdSql.query('select ts from tb0') + # TSIM: if $rows != 8 then + tdLog.info('tdSql.checkRow(8)') + tdSql.checkRows(8) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: print =============== step7 + tdLog.info('=============== step7') + # TSIM: $i = 1 + # TSIM: $tb = $tbPrefix . 
$i + # TSIM: sql create table $tb (ts timestamp, ts2 timestamp) + tdLog.info("create table tb1 (ts timestamp, ts2 timestamp)") + tdSql.execute('create table tb1 (ts timestamp, ts2 timestamp)') + # TSIM: + # TSIM: print =============== step8 + tdLog.info('=============== step8') + # TSIM: sql insert into $tb values (now, now) + tdLog.info("insert into tb1 values (now, now)") + tdSql.execute("insert into tb1 values (now, now)") + # TSIM: sql select * from $tb + tdLog.info('select * from tb1') + tdSql.query('select * from tb1') + # TSIM: if $rows != 1 then + tdLog.info('tdSql.checkRow(1)') + tdSql.checkRows(1) + # TSIM: return -1 + # TSIM: endi + # TSIM: + # TSIM: sql drop database $db + tdLog.info('drop database db') + tdSql.execute('drop database db') + # TSIM: sql show databases + tdLog.info('show databases') + tdSql.query('show databases') + # TSIM: if $rows != 0 then + tdLog.info('tdSql.checkRow(0)') + tdSql.checkRows(0) + # TSIM: return -1 + # TSIM: endi +# convert end + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/query/tbname.py b/tests/pytest/query/tbname.py index d2799efa2501c0a22d79497756c2adb87e3089f0..5ea89fff82a1ef4af0b6f8d9a14671fb97180cd5 100644 --- a/tests/pytest/query/tbname.py +++ b/tests/pytest/query/tbname.py @@ -26,12 +26,14 @@ class TDTestCase: def run(self): tdSql.prepare() - tdSql.execute('create table cars (ts timestamp, speed int) tags(id int)') + tdSql.execute( + 'create table cars (ts timestamp, speed int) tags(id int)') tdSql.execute("create table carzero using cars tags(0)") tdSql.execute("create table carone using cars tags(1)") tdSql.execute("create table cartwo using cars tags(2)") - tdSql.execute("insert into carzero values(now, 100) carone values(now, 110)") + tdSql.execute( + "insert into carzero values(now, 100) carone values(now, 110)") tdSql.query("select * from cars 
where tbname in ('carzero', 'carone')") tdSql.checkRows(2) @@ -39,13 +41,16 @@ class TDTestCase: tdSql.query("select * from cars where tbname in ('carzero', 'cartwo')") tdSql.checkRows(1) - tdSql.query("select * from cars where id=1 or tbname in ('carzero', 'cartwo')") + tdSql.query( + "select * from cars where id=1 or tbname in ('carzero', 'cartwo')") tdSql.checkRows(2) - tdSql.query("select * from cars where id=1 and tbname in ('carzero', 'cartwo')") + tdSql.query( + "select * from cars where id=1 and tbname in ('carzero', 'cartwo')") tdSql.checkRows(0) - tdSql.query("select * from cars where id=0 and tbname in ('carzero', 'cartwo')") + tdSql.query( + "select * from cars where id=0 and tbname in ('carzero', 'cartwo')") tdSql.checkRows(1) """ diff --git a/tests/pytest/simpletest.sh b/tests/pytest/simpletest.sh deleted file mode 100755 index 73b25d2056d55f4650930b53aa78c53cd181f374..0000000000000000000000000000000000000000 --- a/tests/pytest/simpletest.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -python3 ./test.py $1 -f insert/basic.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/int.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/float.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/bigint.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/bool.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/double.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/smallint.py -python3 ./test.py -s $1 -sleep 1 -python3 ./test.py $1 -f insert/tinyint.py -python3 ./test.py -s $1 -sleep 1 diff --git a/tests/pytest/smoketest.sh b/tests/pytest/smoketest.sh new file mode 100755 index 0000000000000000000000000000000000000000..71d19df5c0d6dbecf2dbd693f80cfc04335507c3 --- /dev/null +++ b/tests/pytest/smoketest.sh @@ -0,0 +1,55 @@ +#!/bin/bash +python3 ./test.py $1 -f insert/basic.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/int.py +python3 
./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/float.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/bigint.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/bool.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/double.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/smallint.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f insert/tinyint.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importDataLastTO.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importDataLastT.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importDataTO.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importDataT.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importHeadOverlap.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importHeadPartOverlap.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importHORestart.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importHPORestart.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importHRestart.py +python3 ./test.py -s $1 +sleep 1 +python3 ./test.py $1 -f import_merge/importLastSub.py +python3 ./test.py -s $1 +sleep 1 diff --git a/tests/pytest/util/sql.py b/tests/pytest/util/sql.py index edfee4ddb11337b45357022fd195063d3bcf134b..a1f7dd2f6468be7dee1d23e3d64d6070e7540e28 100644 --- a/tests/pytest/util/sql.py +++ b/tests/pytest/util/sql.py @@ -44,7 +44,7 @@ class TDSql: except BaseException: expectErrNotOccured = False if expectErrNotOccured: - tdLog.exit("sql:%.40s, expect error not occured" % (sql)) + tdLog.exit("failed: sql:%.40s, expect error not occured" % (sql)) else: tdLog.info("sql:%.40s, expect error occured" % (sql)) @@ -71,48 +71,51 @@ class TDSql: def checkData(self, row, col, data): if row < 0: 
tdLog.exit( - "sql:%.40s, row:%d is smaller than zero" % + "failed: sql:%.40s, row:%d is smaller than zero" % (self.sql, row)) if col < 0: tdLog.exit( - "sql:%.40s, col:%d is smaller than zero" % + "failed: sql:%.40s, col:%d is smaller than zero" % (self.sql, col)) if row >= self.queryRows: tdLog.exit( - "sql:%.40s, row:%d is larger than queryRows:%d" % + "failed: sql:%.40s, row:%d is larger than queryRows:%d" % (self.sql, row, self.queryRows)) if col >= self.queryCols: tdLog.exit( - "sql:%.40s, col:%d is larger than queryRows:%d" % + "failed: sql:%.40s, col:%d is larger than queryRows:%d" % (self.sql, col, self.queryCols)) if self.queryResult[row][col] != data: tdLog.exit( - "sql:%.40s row:%d col:%d data:%s != expect:%s" % + "failed: sql:%.40s row:%d col:%d data:%s != expect:%s" % (self.sql, row, col, self.queryResult[row][col], data)) if data is None: tdLog.info("sql:%.40s, row:%d col:%d data:%s == expect:%s" % - (self.sql, row, col, self.queryResult[row][col], data)) + (self.sql, row, col, self.queryResult[row][col], data)) + elif isinstance(data, datetime.date): + tdLog.info("sql:%.40s, row:%d col:%d data:%s == expect:%s" % + (self.sql, row, col, self.queryResult[row][col], data)) else: tdLog.info("sql:%.40s, row:%d col:%d data:%s == expect:%d" % - (self.sql, row, col, self.queryResult[row][col], data)) + (self.sql, row, col, self.queryResult[row][col], data)) def getData(self, row, col): if row < 0: tdLog.exit( - "sql:%.40s, row:%d is smaller than zero" % + "failed: sql:%.40s, row:%d is smaller than zero" % (self.sql, row)) if col < 0: tdLog.exit( - "sql:%.40s, col:%d is smaller than zero" % + "failed: sql:%.40s, col:%d is smaller than zero" % (self.sql, col)) if row >= self.queryRows: tdLog.exit( - "sql:%.40s, row:%d is larger than queryRows:%d" % + "failed: sql:%.40s, row:%d is larger than queryRows:%d" % (self.sql, row, self.queryRows)) if col >= self.queryCols: tdLog.exit( - "sql:%.40s, col:%d is larger than queryRows:%d" % + "failed: sql:%.40s, col:%d is 
larger than queryRows:%d" % (self.sql, col, self.queryCols)) return self.queryResult[row][col] @@ -131,7 +134,7 @@ class TDSql: def checkAffectedRows(self, expectAffectedRows): if self.affectedRows != expectAffectedRows: - tdLog.exit("sql:%.40s, affectedRows:%d != expect:%d" % + tdLog.exit("failed: sql:%.40s, affectedRows:%d != expect:%d" % (self.sql, self.affectedRows, expectAffectedRows)) tdLog.info("sql:%.40s, affectedRows:%d == expect:%d" % (self.sql, self.affectedRows, expectAffectedRows)) diff --git a/tests/test-all.sh b/tests/test-all.sh index 6943dd47a780c1de5cf431e9f45789af19412c09..907ef4bedd2617d8a61d661d41019cc10a290f8a 100755 --- a/tests/test-all.sh +++ b/tests/test-all.sh @@ -25,7 +25,12 @@ if [ "$totalFailed" -ne "0" ]; then fi cd ../pytest -./simpletest.sh 2>&1 | tee pytest-out.txt + +if [ "$1" == "cron" ]; then + ./fulltest.sh 2>&1 | tee pytest-out.txt +else + ./smoketest.sh 2>&1 | tee pytest-out.txt +fi totalPySuccess=`grep 'successfully executed' pytest-out.txt | wc -l` if [ "$totalPySuccess" -gt "0" ]; then