diff --git a/.gitignore b/.gitignore
index da47590a2fac52879c470e3c6b89dffaa57d775a..50f4251320abc80358b67eab22c02672d5f26bd6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -68,6 +68,8 @@ CMakeError.log
 *.o
 version.c
 taos.rc
+src/connector/jdbc/.classpath
+src/connector/jdbc/.project
 src/connector/jdbc/.settings/
 tests/comparisonTest/cassandra/cassandratest/.classpath
 tests/comparisonTest/cassandra/cassandratest/.project
diff --git a/CMakeLists.txt b/CMakeLists.txt
index fe2cc6b42ebe6661bdf942428757e8a19aef25ba..093731f190a380539cca3db8f8c12793d4b6557c 100755
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,4 +1,3 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
 IF (CMAKE_VERSION VERSION_LESS 3.0)
 PROJECT(TDengine CXX)
 SET(PROJECT_VERSION_MAJOR "${LIB_MAJOR_VERSION}")
@@ -10,6 +9,12 @@ ELSE ()
 PROJECT(TDengine VERSION "${LIB_VERSION_STRING}" LANGUAGES CXX)
 ENDIF ()
 
+IF (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
+ CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
+ELSE ()
+ CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
+ENDIF ()
+
 SET(TD_ACCOUNT FALSE)
 SET(TD_ADMIN FALSE)
 SET(TD_GRANT FALSE)
diff --git a/README-CN.md b/README-CN.md
index d5586c78b7ef4d4652fbe57b4d17af30a8bc8d36..a9bc814e8d6f6bef0ad94e29588f62e2e4c0e7f1 100644
--- a/README-CN.md
+++ b/README-CN.md
@@ -185,9 +185,10 @@ cmake .. && cmake --build .
 
 # 安装
 
-如果你不想安装,可以直接在shell中运行。生成完成后,安装 TDengine:
+生成完成后,安装 TDengine(下文给出的指令以 Linux 为例,如果是在 Windows 下,那么对应的指令会是 `nmake install`):
+
 ```bash
-make install
+sudo make install
 ```
 
 用户可以在[文件目录结构](https://www.taosdata.com/cn/documentation/administrator#directories)中了解更多在操作系统中生成的目录或文件。
@@ -195,7 +196,7 @@ make install
 安装成功后,在终端中启动 TDengine 服务:
 
 ```bash
-taosd
+sudo systemctl start taosd
 ```
 
 用户可以使用 TDengine Shell 来连接 TDengine 服务,在终端中,输入:
@@ -208,7 +209,7 @@ taos
 
 ## 快速运行
 
-TDengine 生成后,在终端执行以下命令:
+如果不希望以服务方式运行 TDengine,也可以在终端中直接运行它。也即在生成完成后,执行以下命令(在 Windows 下,生成的可执行文件会带有 .exe 后缀,例如会名为 taosd.exe ):
 
 ```bash
 ./build/bin/taosd -c test/cfg
diff --git a/README.md b/README.md
index 89e35f6e630ea3db2ddab2bc0187a62f9793ac32..2dea05f09d268b0d78de15ab98f3584df055c353 100644
--- a/README.md
+++ b/README.md
@@ -175,7 +175,7 @@ cmake .. && cmake --build .
 
 # Installing
 
-After building successfully, TDengine can be installed by:
+After building successfully, TDengine can be installed by the following command (on Windows, the corresponding command is `nmake install`):
 ```bash
 sudo make install
 ```
@@ -197,7 +197,7 @@ If TDengine shell connects the server successfully, welcome messages and version
 
 ## Quick Run
 
-If you don't want to run TDengine as a service, you can run it in current shell. For example, to quickly start a TDengine server after building, run the command below in terminal:
+If you don't want to run TDengine as a service, you can run it in the current shell. For example, to quickly start a TDengine server after building, run the command below in a terminal (we take Linux as an example; on Windows the executable is `taosd.exe`):
 ```bash
 ./build/bin/taosd -c test/cfg
 ```
diff --git a/cmake/define.inc b/cmake/define.inc
index c4f9396cf1ccac443505d957eb53b1d1ea567745..96a406cfb47a969fe99eb73eeb5c1bf7ecdb1e7c 100755
--- a/cmake/define.inc
+++ b/cmake/define.inc
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
 PROJECT(TDengine)
 
 IF (TD_ACCOUNT)
diff --git a/cmake/env.inc b/cmake/env.inc
index fa15ec6aee01a619139417fceb21b3a71bd96364..2ceaecc2d9e486c249931ae45089e6a820e475b9 100755
--- a/cmake/env.inc
+++ b/cmake/env.inc
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
 PROJECT(TDengine)
 
 SET(CMAKE_C_STANDARD 11)
diff --git a/cmake/input.inc b/cmake/input.inc
index 04160b47756b420247bfa7477e2fb67d0346ff3f..06a23ecb148d3eb77828b8ba6d02fa975ff94a6c 100755
--- a/cmake/input.inc
+++ b/cmake/input.inc
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
 PROJECT(TDengine)
 
 IF (${ACCOUNT} MATCHES "true")
diff --git a/cmake/platform.inc b/cmake/platform.inc
index bdc54c2384e7da2272c11f869bb6d8a89c0afebf..a78082a1fc62a8ad66c54dcf005e3e15edf5f5f0 100755
--- a/cmake/platform.inc
+++ b/cmake/platform.inc
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
 PROJECT(TDengine)
 
 #
diff --git a/cmake/version.inc b/cmake/version.inc
index 7338b4593631550861774dbc12633a260830c3ea..7c646c67b65b6f52542cc155938f5396c589ae7a 100755
--- a/cmake/version.inc
+++ b/cmake/version.inc
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
 PROJECT(TDengine)
 
 IF (DEFINED VERNUMBER)
diff --git a/deps/CMakeLists.txt b/deps/CMakeLists.txt
index 62c3b643c0ac42536da5f966f03ddde1569f6025..eb22459d342da5f726d8688a74b4a5efde2ac5ec 100644
--- a/deps/CMakeLists.txt
+++ b/deps/CMakeLists.txt
@@ -1,6 +1,11 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
 PROJECT(TDengine)
 
+IF (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
+ CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
+ELSE ()
+ CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
+ENDIF ()
+
 ADD_SUBDIRECTORY(zlib-1.2.11)
 ADD_SUBDIRECTORY(pthread)
 ADD_SUBDIRECTORY(regex)
@@ -11,9 +16,7 @@ ADD_SUBDIRECTORY(wepoll)
 ADD_SUBDIRECTORY(MsvcLibX)
 ADD_SUBDIRECTORY(rmonotonic)
 
-IF (TD_LINUX_64)
- ADD_SUBDIRECTORY(lua)
-ENDIF ()
+ADD_SUBDIRECTORY(lua)
 
 IF (TD_LINUX AND TD_MQTT)
 ADD_SUBDIRECTORY(MQTT-C)
diff --git a/deps/MQTT-C/CMakeLists.txt b/deps/MQTT-C/CMakeLists.txt
index 15b35525210ec90e6e2efbdcd0e6128cb4d34f91..37959140e70d4808c845e3ca6e415ce8bdecf3ac 100644
--- a/deps/MQTT-C/CMakeLists.txt
+++ b/deps/MQTT-C/CMakeLists.txt
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
 
 # MQTT-C build options
 option(MQTT_C_OpenSSL_SUPPORT "Build MQTT-C with OpenSSL support?"
OFF) diff --git a/deps/MsvcLibX/CMakeLists.txt b/deps/MsvcLibX/CMakeLists.txt index 4428579e1c098425c9d72d7d58a5fda15cd34c93..4197f502b131b8dc7ae289fd822e15f8a6522cbf 100644 --- a/deps/MsvcLibX/CMakeLists.txt +++ b/deps/MsvcLibX/CMakeLists.txt @@ -1,6 +1,11 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) PROJECT(TDengine) +IF (${CMAKE_SYSTEM_NAME} MATCHES "Darwin") + CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) +ELSE () + CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +ENDIF () + IF (TD_WINDOWS) INCLUDE_DIRECTORIES(include) AUX_SOURCE_DIRECTORY(src SRC) diff --git a/deps/iconv/CMakeLists.txt b/deps/iconv/CMakeLists.txt index 286070fa9071f8fcd1949850cec87c1ced3245d7..ab5fa1a5d1f409496118dc6212fb6f1512b51bb2 100644 --- a/deps/iconv/CMakeLists.txt +++ b/deps/iconv/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) IF (TD_WINDOWS) diff --git a/deps/lua/CMakeLists.txt b/deps/lua/CMakeLists.txt index 8981350c3b419ff3c8611b0e2d72041ec179fd4d..3e34d447750046843ad328f0dc3edecde07c385c 100644 --- a/deps/lua/CMakeLists.txt +++ b/deps/lua/CMakeLists.txt @@ -2,3 +2,6 @@ AUX_SOURCE_DIRECTORY(${CMAKE_CURRENT_SOURCE_DIR}/src SOURCE_LIST) ADD_LIBRARY(lua ${SOURCE_LIST}) TARGET_INCLUDE_DIRECTORIES(lua PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/inc) +SET_SOURCE_FILES_PROPERTIES(./src/lgc.c PROPERTIES COMPILE_FLAGS -w) +SET_SOURCE_FILES_PROPERTIES(./src/ltable.c PROPERTIES COMPILE_FLAGS -w) +SET_SOURCE_FILES_PROPERTIES(./src/loslib.c PROPERTIES COMPILE_FLAGS -w) diff --git a/deps/pthread/CMakeLists.txt b/deps/pthread/CMakeLists.txt index 04e5be7472a9b8cbdb384348697b919bf2dd0ece..16d03f3590bf933c383dd1294b1117fd9f95ad7a 100644 --- a/deps/pthread/CMakeLists.txt +++ b/deps/pthread/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) IF (TD_WINDOWS) diff --git a/deps/regex/CMakeLists.txt b/deps/regex/CMakeLists.txt index 054b093d07c386d7ff9b0ffc4c05909d79b33129..05d01f02efa4c731bb67f6f5f654b499f6f2be03 100644 --- a/deps/regex/CMakeLists.txt +++ b/deps/regex/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) IF (TD_WINDOWS) diff --git a/deps/wepoll/CMakeLists.txt b/deps/wepoll/CMakeLists.txt index a81fd782bbc4b05a1158273a7fcc6701bc4d980d..e9b7749d82e381e7002f7bca65dc6d5a4e1a7740 100644 --- a/deps/wepoll/CMakeLists.txt +++ b/deps/wepoll/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) IF (TD_WINDOWS) diff --git a/deps/zlib-1.2.11/CMakeLists.txt b/deps/zlib-1.2.11/CMakeLists.txt index f83aa70085491fb6575c0a6bf93252192cddd040..1220cc4246b4cef9b0709e2f14dec46ba787c4cc 100644 --- a/deps/zlib-1.2.11/CMakeLists.txt +++ b/deps/zlib-1.2.11/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) IF (TD_WINDOWS) diff --git a/packaging/tools/makeclient.sh b/packaging/tools/makeclient.sh index d0eeffc86a980269a72cf3ceab59f9bca85b4d53..0ae573359902680107d92d43a9bac38c6995c04f 100755 --- a/packaging/tools/makeclient.sh +++ b/packaging/tools/makeclient.sh @@ -69,6 +69,39 @@ mkdir -p ${install_dir}/inc && cp ${header_files} ${install_dir}/inc mkdir -p ${install_dir}/cfg && cp ${cfg_dir}/taos.cfg ${install_dir}/cfg/taos.cfg mkdir -p ${install_dir}/bin && cp ${bin_files} ${install_dir}/bin && chmod a+x ${install_dir}/bin/* +if [ -f ${build_dir}/bin/jemalloc-config ]; then + 
mkdir -p ${install_dir}/jemalloc/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3} + cp ${build_dir}/bin/jemalloc-config ${install_dir}/jemalloc/bin + if [ -f ${build_dir}/bin/jemalloc.sh ]; then + cp ${build_dir}/bin/jemalloc.sh ${install_dir}/jemalloc/bin + fi + if [ -f ${build_dir}/bin/jeprof ]; then + cp ${build_dir}/bin/jeprof ${install_dir}/jemalloc/bin + fi + if [ -f ${build_dir}/include/jemalloc/jemalloc.h ]; then + cp ${build_dir}/include/jemalloc/jemalloc.h ${install_dir}/jemalloc/include/jemalloc + fi + if [ -f ${build_dir}/lib/libjemalloc.so.2 ]; then + cp ${build_dir}/lib/libjemalloc.so.2 ${install_dir}/jemalloc/lib + ln -sf libjemalloc.so.2 ${install_dir}/jemalloc/lib/libjemalloc.so + fi + if [ -f ${build_dir}/lib/libjemalloc.a ]; then + cp ${build_dir}/lib/libjemalloc.a ${install_dir}/jemalloc/lib + fi + if [ -f ${build_dir}/lib/libjemalloc_pic.a ]; then + cp ${build_dir}/lib/libjemalloc_pic.a ${install_dir}/jemalloc/lib + fi + if [ -f ${build_dir}/lib/pkgconfig/jemalloc.pc ]; then + cp ${build_dir}/lib/pkgconfig/jemalloc.pc ${install_dir}/jemalloc/lib/pkgconfig + fi + if [ -f ${build_dir}/share/doc/jemalloc/jemalloc.html ]; then + cp ${build_dir}/share/doc/jemalloc/jemalloc.html ${install_dir}/jemalloc/share/doc/jemalloc + fi + if [ -f ${build_dir}/share/man/man3/jemalloc.3 ]; then + cp ${build_dir}/share/man/man3/jemalloc.3 ${install_dir}/jemalloc/share/man/man3 + fi +fi + cd ${install_dir} if [ "$osType" != "Darwin" ]; then diff --git a/packaging/tools/makeclient_power.sh b/packaging/tools/makeclient_power.sh index 8241319e4f03a3f7a4953513df2ca3301541a16b..6d10245b4f45761dee268dbf48a0679285d43342 100755 --- a/packaging/tools/makeclient_power.sh +++ b/packaging/tools/makeclient_power.sh @@ -91,6 +91,39 @@ else fi chmod a+x ${install_dir}/bin/* || : +if [ -f ${build_dir}/bin/jemalloc-config ]; then + mkdir -p ${install_dir}/jemalloc/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3} + cp ${build_dir}/bin/jemalloc-config ${install_dir}/jemalloc/bin + if [ -f ${build_dir}/bin/jemalloc.sh ]; then + cp ${build_dir}/bin/jemalloc.sh ${install_dir}/jemalloc/bin + fi + if [ -f ${build_dir}/bin/jeprof ]; then + cp ${build_dir}/bin/jeprof ${install_dir}/jemalloc/bin + fi + if [ -f ${build_dir}/include/jemalloc/jemalloc.h ]; then + cp ${build_dir}/include/jemalloc/jemalloc.h ${install_dir}/jemalloc/include/jemalloc + fi + if [ -f ${build_dir}/lib/libjemalloc.so.2 ]; then + cp ${build_dir}/lib/libjemalloc.so.2 ${install_dir}/jemalloc/lib + ln -sf libjemalloc.so.2 ${install_dir}/jemalloc/lib/libjemalloc.so + fi + if [ -f ${build_dir}/lib/libjemalloc.a ]; then + cp ${build_dir}/lib/libjemalloc.a ${install_dir}/jemalloc/lib + fi + if [ -f ${build_dir}/lib/libjemalloc_pic.a ]; then + cp ${build_dir}/lib/libjemalloc_pic.a ${install_dir}/jemalloc/lib + fi + if [ -f ${build_dir}/lib/pkgconfig/jemalloc.pc ]; then + cp ${build_dir}/lib/pkgconfig/jemalloc.pc ${install_dir}/jemalloc/lib/pkgconfig + fi + if [ -f ${build_dir}/share/doc/jemalloc/jemalloc.html ]; then + cp ${build_dir}/share/doc/jemalloc/jemalloc.html ${install_dir}/jemalloc/share/doc/jemalloc + fi + if [ -f ${build_dir}/share/man/man3/jemalloc.3 ]; then + cp ${build_dir}/share/man/man3/jemalloc.3 ${install_dir}/jemalloc/share/man/man3 + fi +fi + cd ${install_dir} if [ "$osType" != "Darwin" ]; then diff --git a/packaging/tools/makeclient_tq.sh b/packaging/tools/makeclient_tq.sh index 51fd064e1b769191c2baaf27c3a45f73a475cabd..03d9b13059daadfdc7207c78b6f89cae321f25ac 100755 --- 
a/packaging/tools/makeclient_tq.sh +++ b/packaging/tools/makeclient_tq.sh @@ -91,6 +91,39 @@ else fi chmod a+x ${install_dir}/bin/* || : +if [ -f ${build_dir}/bin/jemalloc-config ]; then + mkdir -p ${install_dir}/jemalloc/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3} + cp ${build_dir}/bin/jemalloc-config ${install_dir}/jemalloc/bin + if [ -f ${build_dir}/bin/jemalloc.sh ]; then + cp ${build_dir}/bin/jemalloc.sh ${install_dir}/jemalloc/bin + fi + if [ -f ${build_dir}/bin/jeprof ]; then + cp ${build_dir}/bin/jeprof ${install_dir}/jemalloc/bin + fi + if [ -f ${build_dir}/include/jemalloc/jemalloc.h ]; then + cp ${build_dir}/include/jemalloc/jemalloc.h ${install_dir}/jemalloc/include/jemalloc + fi + if [ -f ${build_dir}/lib/libjemalloc.so.2 ]; then + cp ${build_dir}/lib/libjemalloc.so.2 ${install_dir}/jemalloc/lib + ln -sf libjemalloc.so.2 ${install_dir}/jemalloc/lib/libjemalloc.so + fi + if [ -f ${build_dir}/lib/libjemalloc.a ]; then + cp ${build_dir}/lib/libjemalloc.a ${install_dir}/jemalloc/lib + fi + if [ -f ${build_dir}/lib/libjemalloc_pic.a ]; then + cp ${build_dir}/lib/libjemalloc_pic.a ${install_dir}/jemalloc/lib + fi + if [ -f ${build_dir}/lib/pkgconfig/jemalloc.pc ]; then + cp ${build_dir}/lib/pkgconfig/jemalloc.pc ${install_dir}/jemalloc/lib/pkgconfig + fi + if [ -f ${build_dir}/share/doc/jemalloc/jemalloc.html ]; then + cp ${build_dir}/share/doc/jemalloc/jemalloc.html ${install_dir}/jemalloc/share/doc/jemalloc + fi + if [ -f ${build_dir}/share/man/man3/jemalloc.3 ]; then + cp ${build_dir}/share/man/man3/jemalloc.3 ${install_dir}/jemalloc/share/man/man3 + fi +fi + cd ${install_dir} if [ "$osType" != "Darwin" ]; then diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 8cc5cee3b51675b2d42ad62c442b2b030e802cbb..f480996cefeb95b67c3b2b46a97f41899f8e0583 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) # Base compile diff --git a/src/balance/CMakeLists.txt b/src/balance/CMakeLists.txt index 967635e52ce20761dbd674a380563deeeb9af189..bffa415deb7cc3ebe15082051ebd22a81e45c899 100644 --- a/src/balance/CMakeLists.txt +++ b/src/balance/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/mnode/inc) diff --git a/src/client/CMakeLists.txt b/src/client/CMakeLists.txt index 6ff36c33c46a16b5b5df86f390003a7e31876bdb..bdd42cb457875fe2d554fc4c65636910806af382 100644 --- a/src/client/CMakeLists.txt +++ b/src/client/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(inc) diff --git a/src/client/src/tscParseInsert.c b/src/client/src/tscParseInsert.c index 137a7be7c7443d071acdb81543ef7f6402196074..cd56153bc4cbf0a6bf5748fc5f6e6a7390159377 100644 --- a/src/client/src/tscParseInsert.c +++ b/src/client/src/tscParseInsert.c @@ -961,6 +961,10 @@ static int32_t tscCheckIfCreateTable(char **sqlstr, SSqlObj *pSql, char** boundC break; } + if (sToken.n == 0 || sToken.type == TK_SEMI || index == 0) { + return tscSQLSyntaxErrMsg(pCmd->payload, "unexpected token", sql); + } + sql += index; ++numOfColsAfterTags; } diff --git a/src/client/src/tscServer.c b/src/client/src/tscServer.c index ac4243d5b4e3302d500cd54bba5a160c94b26a93..d30ee32d67c2a9294b3a3aa0b2bc9144829a364c 100644 --- a/src/client/src/tscServer.c +++ b/src/client/src/tscServer.c @@ 
-2005,7 +2005,6 @@ static SVgroupsInfo* createVgroupInfoFromMsg(char* pMsg, int32_t* size, uint64_t SVgroupMsg *vmsg = &pVgroupMsg->vgroups[j]; vmsg->vgId = htonl(vmsg->vgId); - vmsg->numOfEps = vmsg->numOfEps; for (int32_t k = 0; k < vmsg->numOfEps; ++k) { vmsg->epAddr[k].port = htons(vmsg->epAddr[k].port); } diff --git a/src/client/src/tscUtil.c b/src/client/src/tscUtil.c index d653489e29396766dea39608be90223c3fbc593c..173db05e10a53316566db4edb661c4466cfc5f2f 100644 --- a/src/client/src/tscUtil.c +++ b/src/client/src/tscUtil.c @@ -634,7 +634,9 @@ static void setResRawPtrImpl(SSqlRes* pRes, SInternalField* pInfo, int32_t i, bo void tscSetResRawPtr(SSqlRes* pRes, SQueryInfo* pQueryInfo) { assert(pRes->numOfCols > 0); - + if (pRes->numOfRows == 0) { + return; + } int32_t offset = 0; for (int32_t i = 0; i < pQueryInfo->fieldsInfo.numOfOutput; ++i) { SInternalField* pInfo = (SInternalField*)TARRAY_GET_ELEM(pQueryInfo->fieldsInfo.internalField, i); @@ -746,6 +748,37 @@ typedef struct SJoinOperatorInfo { SRspResultInfo resultInfo; // todo refactor, add this info for each operator } SJoinOperatorInfo; +static void converNcharFilterColumn(SSingleColumnFilterInfo* pFilterInfo, int32_t numOfFilterCols, int32_t rows, bool *gotNchar) { + for (int32_t i = 0; i < numOfFilterCols; ++i) { + if (pFilterInfo[i].info.type == TSDB_DATA_TYPE_NCHAR) { + pFilterInfo[i].pData2 = pFilterInfo[i].pData; + pFilterInfo[i].pData = malloc(rows * pFilterInfo[i].info.bytes); + int32_t bufSize = pFilterInfo[i].info.bytes - VARSTR_HEADER_SIZE; + for (int32_t j = 0; j < rows; ++j) { + char* dst = (char *)pFilterInfo[i].pData + j * pFilterInfo[i].info.bytes; + char* src = (char *)pFilterInfo[i].pData2 + j * pFilterInfo[i].info.bytes; + int32_t len = 0; + taosMbsToUcs4(varDataVal(src), varDataLen(src), varDataVal(dst), bufSize, &len); + varDataLen(dst) = len; + } + *gotNchar = true; + } + } +} + +static void freeNcharFilterColumn(SSingleColumnFilterInfo* pFilterInfo, int32_t numOfFilterCols) { + for (int32_t i = 0; i < numOfFilterCols; ++i) { + if (pFilterInfo[i].info.type == TSDB_DATA_TYPE_NCHAR) { + if (pFilterInfo[i].pData2) { + tfree(pFilterInfo[i].pData); + pFilterInfo[i].pData = pFilterInfo[i].pData2; + pFilterInfo[i].pData2 = NULL; + } + } + } +} + + static void doSetupSDataBlock(SSqlRes* pRes, SSDataBlock* pBlock, SSingleColumnFilterInfo* pFilterInfo, int32_t numOfFilterCols) { int32_t offset = 0; char* pData = pRes->data; @@ -764,8 +797,13 @@ static void doSetupSDataBlock(SSqlRes* pRes, SSDataBlock* pBlock, SSingleColumnF // filter data if needed if (numOfFilterCols > 0) { doSetFilterColumnInfo(pFilterInfo, numOfFilterCols, pBlock); + bool gotNchar = false; + converNcharFilterColumn(pFilterInfo, numOfFilterCols, pBlock->info.rows, &gotNchar); int8_t* p = calloc(pBlock->info.rows, sizeof(int8_t)); bool all = doFilterDataBlock(pFilterInfo, numOfFilterCols, pBlock->info.rows, p); + if (gotNchar) { + freeNcharFilterColumn(pFilterInfo, numOfFilterCols); + } if (!all) { doCompactSDataBlock(pBlock, pBlock->info.rows, p); } diff --git a/src/client/tests/CMakeLists.txt b/src/client/tests/CMakeLists.txt index 1a6c45aadef1989253c661c21a1d39f0f30fd1be..24bfb44ac90e11e01ba99423aa68bd5a9511f746 100644 --- a/src/client/tests/CMakeLists.txt +++ b/src/client/tests/CMakeLists.txt @@ -1,10 +1,11 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) FIND_PATH(HEADER_GTEST_INCLUDE_DIR gtest.h /usr/include/gtest /usr/local/include/gtest) -FIND_LIBRARY(LIB_GTEST_STATIC_DIR libgtest.a 
/usr/lib/ /usr/local/lib) +FIND_LIBRARY(LIB_GTEST_STATIC_DIR libgtest.a /usr/lib/ /usr/local/lib /usr/lib64) +FIND_LIBRARY(LIB_GTEST_SHARED_DIR libgtest.so /usr/lib/ /usr/local/lib /usr/lib64) -IF (HEADER_GTEST_INCLUDE_DIR AND LIB_GTEST_STATIC_DIR) +IF (HEADER_GTEST_INCLUDE_DIR AND (LIB_GTEST_STATIC_DIR OR LIB_GTEST_SHARED_DIR)) MESSAGE(STATUS "gTest library found, build unit test") # GoogleTest requires at least C++11 @@ -17,4 +18,4 @@ IF (HEADER_GTEST_INCLUDE_DIR AND LIB_GTEST_STATIC_DIR) ADD_EXECUTABLE(cliTest ${SOURCE_LIST}) TARGET_LINK_LIBRARIES(cliTest taos tutil common gtest pthread) -ENDIF() \ No newline at end of file +ENDIF() diff --git a/src/common/CMakeLists.txt b/src/common/CMakeLists.txt index 0da7bda994db83882e36e9d52a7983635ad85330..4dce63e54f5db2b56c569bf6564899236c24a421 100644 --- a/src/common/CMakeLists.txt +++ b/src/common/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(inc) diff --git a/src/connector/jdbc/CMakeLists.txt b/src/connector/jdbc/CMakeLists.txt index 9642b0490f5ba7168f62c75cd4c06b63168eed9f..81af0ec1444cb309c74c84be67c060000f104748 100644 --- a/src/connector/jdbc/CMakeLists.txt +++ b/src/connector/jdbc/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) diff --git a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/TSDBPreparedStatement.java b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/TSDBPreparedStatement.java index d9042fb729d9d746e16bded46e780a83913f778b..22fb0c4ae4987ade0a406fe5628bf80d975f3ae5 100644 --- a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/TSDBPreparedStatement.java +++ b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/TSDBPreparedStatement.java @@ -57,8 +57,8 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat parameterCnt++; } } - parameters = new Object[parameterCnt]; } + parameters = new Object[parameterCnt]; if (parameterCnt > 1) { // the table name is also a parameter, so ignore it. diff --git a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulPreparedStatement.java b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulPreparedStatement.java index 1eaddce1d636f5c74c1226f0df0e66e322600345..f2abbd24454ff13f708636b61bcf93be2e0d47b5 100644 --- a/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulPreparedStatement.java +++ b/src/connector/jdbc/src/main/java/com/taosdata/jdbc/rs/RestfulPreparedStatement.java @@ -22,16 +22,16 @@ public class RestfulPreparedStatement extends RestfulStatement implements Prepar super(conn, database); this.rawSql = sql; + int parameterCnt = 0; if (sql.contains("?")) { - int parameterCnt = 0; for (int i = 0; i < sql.length(); i++) { if ('?' 
== sql.charAt(i)) { parameterCnt++; } } - parameters = new Object[parameterCnt]; this.isPrepared = true; } + parameters = new Object[parameterCnt]; // build parameterMetaData this.parameterMetaData = new RestfulParameterMetaData(parameters); diff --git a/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulPreparedStatementTest.java b/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulPreparedStatementTest.java index 244e8afd117868432638c771b21048fae3b1367c..4760a723e4b4e662326987290c2c630803f8f470 100644 --- a/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulPreparedStatementTest.java +++ b/src/connector/jdbc/src/test/java/com/taosdata/jdbc/rs/RestfulPreparedStatementTest.java @@ -15,6 +15,8 @@ public class RestfulPreparedStatementTest { private static PreparedStatement pstmt_insert; private static final String sql_select = "select * from t1 where ts > ? and ts <= ? and f1 >= ?"; private static PreparedStatement pstmt_select; + private static final String sql_without_parameters = "select count(*) from t1"; + private static PreparedStatement pstmt_without_parameters; @Test public void executeQuery() throws SQLException { @@ -237,6 +239,7 @@ public class RestfulPreparedStatementTest { @Test public void clearParameters() throws SQLException { pstmt_insert.clearParameters(); + pstmt_without_parameters.clearParameters(); } @Test @@ -382,6 +385,7 @@ public class RestfulPreparedStatementTest { pstmt_insert = conn.prepareStatement(sql_insert); pstmt_select = conn.prepareStatement(sql_select); + pstmt_without_parameters = conn.prepareStatement(sql_without_parameters); } catch (SQLException e) { e.printStackTrace(); } @@ -394,6 +398,8 @@ public class RestfulPreparedStatementTest { pstmt_insert.close(); if (pstmt_select != null) pstmt_select.close(); + if (pstmt_without_parameters != null) + pstmt_without_parameters.close(); if (conn != null) conn.close(); } catch (SQLException e) { diff --git a/src/connector/nodejs/nodetaos/cinterface.js b/src/connector/nodejs/nodetaos/cinterface.js index 1c2685b8cb7ec14d66c97e04e32aa86ae94759f4..03d27e5593ccb15d8ff47cd3c3dedba765d14fc1 100644 --- a/src/connector/nodejs/nodetaos/cinterface.js +++ b/src/connector/nodejs/nodetaos/cinterface.js @@ -15,36 +15,18 @@ const { NULL_POINTER } = require('ref-napi'); module.exports = CTaosInterface; -function convertMillisecondsToDatetime(time) { - return new TaosObjects.TaosTimestamp(time); -} -function convertMicrosecondsToDatetime(time) { - return new TaosObjects.TaosTimestamp(time * 0.001, true); -} - -function convertTimestamp(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { - timestampConverter = convertMillisecondsToDatetime; - if (micro == true) { - timestampConverter = convertMicrosecondsToDatetime; - } +function convertTimestamp(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); let res = []; let currOffset = 0; while (currOffset < data.length) { - let queue = []; - let time = 0; - for (let i = currOffset; i < currOffset + nbytes; i++) { - queue.push(data[i]); - } - for (let i = queue.length - 1; i >= 0; i--) { - time += queue[i] * Math.pow(16, i * 2); - } + let time = data.readInt64LE(currOffset); currOffset += nbytes; - res.push(timestampConverter(time)); + res.push(new TaosObjects.TaosTimestamp(time, precision)); } return res; } -function convertBool(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { +function convertBool(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { data = 
ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); let res = new Array(data.length); for (let i = 0; i < data.length; i++) { @@ -60,7 +42,7 @@ function convertBool(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { } return res; } -function convertTinyint(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { +function convertTinyint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); let res = []; let currOffset = 0; @@ -71,7 +53,7 @@ function convertTinyint(data, num_of_rows, nbytes = 0, offset = 0, micro = false } return res; } -function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { +function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); let res = []; let currOffset = 0; @@ -82,7 +64,7 @@ function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, micro = fals } return res; } -function convertInt(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { +function convertInt(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); let res = []; let currOffset = 0; @@ -93,7 +75,7 @@ function convertInt(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { } return res; } -function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { +function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); let res = []; let currOffset = 0; @@ -104,7 +86,7 @@ function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, micro = false) } return res; } -function convertFloat(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { +function convertFloat(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); let res = []; let currOffset = 0; @@ -115,7 +97,7 @@ function convertFloat(data, num_of_rows, nbytes = 0, offset = 0, micro = false) } return res; } -function convertDouble(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { +function convertDouble(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); let res = []; let currOffset = 0; @@ -127,7 +109,7 @@ function convertDouble(data, num_of_rows, nbytes = 0, offset = 0, micro = false) return res; } -function convertNchar(data, num_of_rows, nbytes = 0, offset = 0, micro = false) { +function convertNchar(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); let res = []; @@ -272,7 +254,7 @@ CTaosInterface.prototype.config = function config() { CTaosInterface.prototype.connect = function connect(host = null, user = "root", password = "taosdata", db = null, port = 0) { let _host, _user, _password, _db, _port; try { - _host = host != null ? ref.allocCString(host) : ref.alloc(ref.types.char_ptr, ref.NULL); + _host = host != null ? ref.allocCString(host) : ref.NULL; } catch (err) { throw "Attribute Error: host is expected as a str"; @@ -290,7 +272,7 @@ CTaosInterface.prototype.connect = function connect(host = null, user = "root", throw "Attribute Error: password is expected as a str"; } try { - _db = db != null ? ref.allocCString(db) : ref.alloc(ref.types.char_ptr, ref.NULL); + _db = db != null ? 
ref.allocCString(db) : ref.NULL; } catch (err) { throw "Attribute Error: db is expected as a str"; @@ -345,8 +327,7 @@ CTaosInterface.prototype.fetchBlock = function fetchBlock(result, fields) { } var fieldL = this.libtaos.taos_fetch_lengths(result); - - let isMicro = (this.libtaos.taos_result_precision(result) == FieldTypes.C_TIMESTAMP_MICRO); + let precision = this.libtaos.taos_result_precision(result); var fieldlens = []; @@ -373,7 +354,7 @@ CTaosInterface.prototype.fetchBlock = function fetchBlock(result, fields) { if (!convertFunctions[fields[i]['type']]) { throw new errors.DatabaseError("Invalid data type returned from database"); } - blocks[i] = convertFunctions[fields[i]['type']](pdata, num_of_rows, fieldlens[i], offset, isMicro); + blocks[i] = convertFunctions[fields[i]['type']](pdata, num_of_rows, fieldlens[i], offset, precision); } } return { blocks: blocks, num_of_rows } @@ -423,7 +404,7 @@ CTaosInterface.prototype.fetch_rows_a = function fetch_rows_a(result, callback, let row = cti.libtaos.taos_fetch_row(result2); let fields = cti.fetchFields_a(result2); - let isMicro = (cti.libtaos.taos_result_precision(result2) == FieldTypes.C_TIMESTAMP_MICRO); + let precision = cti.libtaos.taos_result_precision(result2); let blocks = new Array(fields.length); blocks.fill(null); numOfRows2 = Math.abs(numOfRows2); @@ -449,7 +430,7 @@ CTaosInterface.prototype.fetch_rows_a = function fetch_rows_a(result, callback, let prow = ref.reinterpret(row, 8, i * 8); prow = prow.readPointer(); prow = ref.ref(prow); - blocks[i] = convertFunctions[fields[i]['type']](prow, 1, fieldlens[i], offset, isMicro); + blocks[i] = convertFunctions[fields[i]['type']](prow, 1, fieldlens[i], offset, precision); //offset += fields[i]['bytes'] * numOfRows2; } } @@ -572,7 +553,7 @@ CTaosInterface.prototype.openStream = function openStream(connection, sql, callb var cti = this; let asyncCallbackWrapper = function (param2, result2, row) { let fields = cti.fetchFields_a(result2); - let isMicro = (cti.libtaos.taos_result_precision(result2) == FieldTypes.C_TIMESTAMP_MICRO); + let precision = cti.libtaos.taos_result_precision(result2); let blocks = new Array(fields.length); blocks.fill(null); let numOfRows2 = 1; @@ -582,7 +563,7 @@ CTaosInterface.prototype.openStream = function openStream(connection, sql, callb if (!convertFunctions[fields[i]['type']]) { throw new errors.DatabaseError("Invalid data type returned from database"); } - blocks[i] = convertFunctions[fields[i]['type']](row, numOfRows2, fields[i]['bytes'], offset, isMicro); + blocks[i] = convertFunctions[fields[i]['type']](row, numOfRows2, fields[i]['bytes'], offset, precision); offset += fields[i]['bytes'] * numOfRows2; } } diff --git a/src/connector/nodejs/nodetaos/taosobjects.js b/src/connector/nodejs/nodetaos/taosobjects.js index 809d17a016ac5aafc504c71f6417858e9d00821b..0fc8dc8ef1a057c7e410956a2b68072e65cbb613 100644 --- a/src/connector/nodejs/nodetaos/taosobjects.js +++ b/src/connector/nodejs/nodetaos/taosobjects.js @@ -1,5 +1,5 @@ const FieldTypes = require('./constants'); - +const util = require('util'); /** * Various objects such as TaosRow and TaosColumn that help make parsing data easier * @module TaosObjects @@ -14,7 +14,7 @@ const FieldTypes = require('./constants'); * var trow = new TaosRow(row); * console.log(trow.data); */ -function TaosRow (row) { +function TaosRow(row) { this.data = row; this.length = row.length; return this; @@ -29,10 +29,10 @@ function TaosRow (row) { */ function TaosField(field) { - this._field = field; - this.name = field.name; - 
this.type = FieldTypes.getType(field.type); - return this; + this._field = field; + this.name = field.name; + this.type = FieldTypes.getType(field.type); + return this; } /** @@ -42,39 +42,110 @@ function TaosField(field) { * @param {Date} date - A Javascript date time object or the time in milliseconds past 1970-1-1 00:00:00.000 */ class TaosTimestamp extends Date { - constructor(date, micro = false) { - super(date); - this._type = 'TaosTimestamp'; - if (micro) { - this.microTime = date - Math.floor(date); + constructor(date, precision = 0) { + if (precision === 1) { + super(Math.floor(date / 1000)); + this.precisionExtras = date % 1000; + } else if (precision === 2) { + super(parseInt(date / 1000000)); + // use BigInt to fix: 1625801548423914405 % 1000000 = 914496 which not expected (914405) + this.precisionExtras = parseInt(BigInt(date) % 1000000n); + } else { + super(parseInt(date)); + } + this.precision = precision; + } + + /** + * TDengine raw timestamp. + * @returns raw taos timestamp (int64) + */ + taosTimestamp() { + if (this.precision == 1) { + return (this * 1000 + this.precisionExtras); + } else if (this.precision == 2) { + return (this * 1000000 + this.precisionExtras); + } else { + return Math.floor(this); + } + } + + /** + * Gets the microseconds of a Date. + * @return {Int} A microseconds integer + */ + getMicroseconds() { + if (this.precision == 1) { + return this.getMilliseconds() * 1000 + this.precisionExtras; + } else if (this.precision == 2) { + return this.getMilliseconds() * 1000 + this.precisionExtras / 1000; + } else { + return 0; + } + } + /** + * Gets the nanoseconds of a TaosTimestamp. + * @return {Int} A nanoseconds integer + */ + getNanoseconds() { + if (this.precision == 1) { + return this.getMilliseconds() * 1000000 + this.precisionExtras * 1000; + } else if (this.precision == 2) { + return this.getMilliseconds() * 1000000 + this.precisionExtras; + } else { + return 0; + } + } + + /** + * @returns {String} a string for timestamp string format + */ + _precisionExtra() { + if (this.precision == 1) { + return String(this.precisionExtras).padStart(3, '0'); + } else if (this.precision == 2) { + return String(this.precisionExtras).padStart(6, '0'); + } else { + return ''; } } /** * @function Returns the date into a string usable by TDengine * @return {string} A Taos Timestamp String */ - toTaosString(){ + toTaosString() { var tzo = -this.getTimezoneOffset(), - dif = tzo >= 0 ? '+' : '-', - pad = function(num) { - var norm = Math.floor(Math.abs(num)); - return (norm < 10 ? '0' : '') + norm; - }, - pad2 = function(num) { - var norm = Math.floor(Math.abs(num)); - if (norm < 10) return '00' + norm; - if (norm < 100) return '0' + norm; - if (norm < 1000) return norm; - }; + dif = tzo >= 0 ? '+' : '-', + pad = function (num) { + var norm = Math.floor(Math.abs(num)); + return (norm < 10 ? '0' : '') + norm; + }, + pad2 = function (num) { + var norm = Math.floor(Math.abs(num)); + if (norm < 10) return '00' + norm; + if (norm < 100) return '0' + norm; + if (norm < 1000) return norm; + }; return this.getFullYear() + - '-' + pad(this.getMonth() + 1) + - '-' + pad(this.getDate()) + - ' ' + pad(this.getHours()) + - ':' + pad(this.getMinutes()) + - ':' + pad(this.getSeconds()) + - '.' + pad2(this.getMilliseconds()) + - '' + (this.microTime ? pad2(Math.round(this.microTime * 1000)) : ''); + '-' + pad(this.getMonth() + 1) + + '-' + pad(this.getDate()) + + ' ' + pad(this.getHours()) + + ':' + pad(this.getMinutes()) + + ':' + pad(this.getSeconds()) + + '.' 
+ pad2(this.getMilliseconds()) + + '' + this._precisionExtra(); + } + + /** + * Custom console.log + * @returns {String} string format for debug + */ + [util.inspect.custom](depth, opts) { + return this.toTaosString() + JSON.stringify({ precision: this.precision, precisionExtras: this.precisionExtras }, opts); + } + toString() { + return this.toTaosString(); } } -module.exports = {TaosRow, TaosField, TaosTimestamp} +module.exports = { TaosRow, TaosField, TaosTimestamp } diff --git a/src/connector/nodejs/package.json b/src/connector/nodejs/package.json index b57d4c635c05ba423bf1ea58f5dd64f70752f8bf..db37318a164c6207432ebb64defb608381d2cb49 100644 --- a/src/connector/nodejs/package.json +++ b/src/connector/nodejs/package.json @@ -1,13 +1,13 @@ { "name": "td2.0-connector", - "version": "2.0.8", + "version": "2.0.9", "description": "A Node.js connector for TDengine.", "main": "tdengine.js", "directories": { "test": "test" }, "scripts": { - "test": "node test/test.js" + "test": "node test/test.js && node test/testMicroseconds.js && node test/testNanoseconds.js" }, "repository": { "type": "git", diff --git a/src/connector/nodejs/tdengine.js b/src/connector/nodejs/tdengine.js index aa296279d5e20f3d049d478ea2af44ea47a2b8e3..047c744a4fc90c6306e851eaa529a7f9f578fe12 100644 --- a/src/connector/nodejs/tdengine.js +++ b/src/connector/nodejs/tdengine.js @@ -1,4 +1,4 @@ var TDengineConnection = require('./nodetaos/connection.js') -module.exports.connect = function (connection=null) { +module.exports.connect = function (connection={}) { return new TDengineConnection(connection); } diff --git a/src/connector/nodejs/test/test.js b/src/connector/nodejs/test/test.js index bf4bb2c54188d3eb0f9c7fb5306912effc7b0760..caf05955da4c960ebedc872f400c17d18be767dd 100644 --- a/src/connector/nodejs/test/test.js +++ b/src/connector/nodejs/test/test.js @@ -1,5 +1,5 @@ const taos = require('../tdengine'); -var conn = taos.connect({host:"127.0.0.1", user:"root", password:"taosdata", config:"/etc/taos",port:10}); +var conn = taos.connect(); var c1 = conn.cursor(); let stime = new Date(); let interval = 1000; diff --git a/src/connector/nodejs/test/testMicroseconds.js b/src/connector/nodejs/test/testMicroseconds.js new file mode 100644 index 0000000000000000000000000000000000000000..cc65b3d919f92b3b4d7e0e216c6c8ac64a294d7f --- /dev/null +++ b/src/connector/nodejs/test/testMicroseconds.js @@ -0,0 +1,49 @@ +const taos = require('../tdengine'); +var conn = taos.connect(); +var c1 = conn.cursor(); +let stime = new Date(); +let interval = 1000; + +function convertDateToTS(date) { + let tsArr = date.toISOString().split("T") + return "\"" + tsArr[0] + " " + tsArr[1].substring(0, tsArr[1].length - 1) + "\""; +} +function R(l, r) { + return Math.random() * (r - l) - r; +} +function randomBool() { + if (Math.random() < 0.5) { + return true; + } + return false; +} + +// Initialize +//c1.execute('drop database td_connector_test;'); +const dbname = 'nodejs_test_us'; +c1.execute('create database if not exists ' + dbname + ' precision "us"'); +c1.execute('use ' + dbname) +c1.execute('create table if not exists tstest (ts timestamp, _int int);'); +c1.execute('insert into tstest values(1625801548423914, 0)'); +// Select +console.log('select * from tstest'); +c1.execute('select * from tstest'); + +var d = c1.fetchall(); +console.log(c1.fields); +let ts = d[0][0]; +console.log(ts); + +if (ts.taosTimestamp() != 1625801548423914) { + throw "microseconds not match!"; +} +if (ts.getMicroseconds() % 1000 !== 914) { + throw "micronsecond precision 
error"; +} +setTimeout(function () { + c1.query('drop database nodejs_us_test;'); +}, 200); + +setTimeout(function () { + conn.close(); +}, 2000); diff --git a/src/connector/nodejs/test/testNanoseconds.js b/src/connector/nodejs/test/testNanoseconds.js new file mode 100644 index 0000000000000000000000000000000000000000..85a7600b01f2c908f22e621488f22678083149ea --- /dev/null +++ b/src/connector/nodejs/test/testNanoseconds.js @@ -0,0 +1,49 @@ +const taos = require('../tdengine'); +var conn = taos.connect(); +var c1 = conn.cursor(); +let stime = new Date(); +let interval = 1000; + +function convertDateToTS(date) { + let tsArr = date.toISOString().split("T") + return "\"" + tsArr[0] + " " + tsArr[1].substring(0, tsArr[1].length - 1) + "\""; +} +function R(l, r) { + return Math.random() * (r - l) - r; +} +function randomBool() { + if (Math.random() < 0.5) { + return true; + } + return false; +} + +// Initialize +//c1.execute('drop database td_connector_test;'); +const dbname = 'nodejs_test_ns'; +c1.execute('create database if not exists ' + dbname + ' precision "ns"'); +c1.execute('use ' + dbname) +c1.execute('create table if not exists tstest (ts timestamp, _int int);'); +c1.execute('insert into tstest values(1625801548423914405, 0)'); +// Select +console.log('select * from tstest'); +c1.execute('select * from tstest'); + +var d = c1.fetchall(); +console.log(c1.fields); +let ts = d[0][0]; +console.log(ts); + +if (ts.taosTimestamp() != 1625801548423914405) { + throw "nanosecond not match!"; +} +if (ts.getNanoseconds() % 1000000 !== 914405) { + throw "nanosecond precision error"; +} +setTimeout(function () { + c1.query('drop database nodejs_ns_test;'); +}, 200); + +setTimeout(function () { + conn.close(); +}, 2000); diff --git a/src/connector/odbc/CMakeLists.txt b/src/connector/odbc/CMakeLists.txt index 5a93ac3f7e2934fd8383c5a18f22c24845793f1a..87746f23ae3796f4d0ab20257f90599860430568 100644 --- a/src/connector/odbc/CMakeLists.txt +++ b/src/connector/odbc/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) IF (TD_LINUX_64) @@ -20,8 +20,8 @@ IF (TD_LINUX_64) if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND CMAKE_C_COMPILER_VERSION VERSION_LESS 5.0.0) message(WARNING "gcc 4.8.0 will complain too much about flex-generated code, we just bypass building ODBC driver in such case") else () - SET(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wconversion") - SET(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} -Wconversion") + SET(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} ") + SET(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} ") ADD_SUBDIRECTORY(src) ADD_SUBDIRECTORY(tools) ADD_SUBDIRECTORY(examples) diff --git a/src/connector/odbc/src/CMakeLists.txt b/src/connector/odbc/src/CMakeLists.txt index f0e50415e2e4f14e1c247b834e1e52a2c2fd2868..e990647e1aadcafb8b3306ee7e43a4d3ac285c94 100644 --- a/src/connector/odbc/src/CMakeLists.txt +++ b/src/connector/odbc/src/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) add_subdirectory(base) diff --git a/src/connector/odbc/src/base/CMakeLists.txt b/src/connector/odbc/src/base/CMakeLists.txt index fa13f3e07737bb6c2a36e5296a49a9f282346e3b..e34091360900a3a856d9fe56bb9fec994f4ba321 100644 --- a/src/connector/odbc/src/base/CMakeLists.txt +++ b/src/connector/odbc/src/base/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) aux_source_directory(. 
SRC) diff --git a/src/cq/CMakeLists.txt b/src/cq/CMakeLists.txt index e9ed2996c74e2c59d56245e6fc1e932ebb07dfb0..f01ccb8728eb9a2a4695a8a0c133422e3134b8e2 100644 --- a/src/cq/CMakeLists.txt +++ b/src/cq/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(inc) diff --git a/src/cq/test/CMakeLists.txt b/src/cq/test/CMakeLists.txt index cd124567afd8766173cf07e7a6191ab473be1714..d713dd7401c4f2d791ee0b4de1216b6ede558507 100644 --- a/src/cq/test/CMakeLists.txt +++ b/src/cq/test/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) LIST(APPEND CQTEST_SRC ./cqtest.c) diff --git a/src/dnode/CMakeLists.txt b/src/dnode/CMakeLists.txt index f8d8f88438429f1c0be405825cef4ab9c1b130bc..e7ac1be5b1160df447271eeec78e2939923b6d53 100644 --- a/src/dnode/CMakeLists.txt +++ b/src/dnode/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/query/inc) diff --git a/src/kit/CMakeLists.txt b/src/kit/CMakeLists.txt index 66e8cf73988ab25db7544b9a52215d2279630c63..7053052007c5e00a5ac001d72b64029dc08ddf8b 100644 --- a/src/kit/CMakeLists.txt +++ b/src/kit/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) ADD_SUBDIRECTORY(shell) diff --git a/src/kit/shell/CMakeLists.txt b/src/kit/shell/CMakeLists.txt index d9049454352efbd9344eae3c776f10c8f37fe090..794ca5e2de1820035524cc4180558b9f290c22c6 100644 --- a/src/kit/shell/CMakeLists.txt +++ b/src/kit/shell/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/client/inc) diff --git a/src/kit/shell/inc/shellCommand.h b/src/kit/shell/inc/shellCommand.h index 3094bdb9ddb1ccd9debdbca88a34197385deb367..a08c1f48d11a8cd0e51fa5fb2d05a16da96d38c9 100644 --- a/src/kit/shell/inc/shellCommand.h +++ b/src/kit/shell/inc/shellCommand.h @@ -45,7 +45,7 @@ extern void updateBuffer(Command *cmd); extern int isReadyGo(Command *cmd); extern void resetCommand(Command *cmd, const char s[]); -int countPrefixOnes(char c); +int countPrefixOnes(unsigned char c); void clearScreen(int ecmd_pos, int cursor_pos); void printChar(char c, int times); void positionCursor(int step, int direction); diff --git a/src/kit/shell/src/shellCommand.c b/src/kit/shell/src/shellCommand.c index 9173ab0efdae7e5900218b2ab256993df71b21dd..e1a3dfe10205715d8c5cf8677a5be60c1a478b05 100644 --- a/src/kit/shell/src/shellCommand.c +++ b/src/kit/shell/src/shellCommand.c @@ -26,7 +26,7 @@ typedef struct { char widthOnScreen; } UTFCodeInfo; -int countPrefixOnes(char c) { +int countPrefixOnes(unsigned char c) { unsigned char mask = 127; mask = ~mask; int ret = 0; @@ -48,7 +48,7 @@ void getPrevCharSize(const char *str, int pos, int *size, int *width) { while (--pos >= 0) { *size += 1; - if (str[pos] > 0 || countPrefixOnes(str[pos]) > 1) break; + if (str[pos] > 0 || countPrefixOnes((unsigned char )str[pos]) > 1) break; } int rc = mbtowc(&wc, str + pos, MB_CUR_MAX); diff --git a/src/kit/taosdemo/CMakeLists.txt b/src/kit/taosdemo/CMakeLists.txt index 091eecfe270a20987a48f7223f57d2400169ac6d..584de340947035457abd985ac93697ed51c305af 100644 --- a/src/kit/taosdemo/CMakeLists.txt +++ b/src/kit/taosdemo/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 
2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/client/inc) diff --git a/src/kit/taosdemo/taosdemo.c b/src/kit/taosdemo/taosdemo.c index 6513f3e214d4c16ebf86ecdbb9ff1ca3debe0f59..e5945f4eac375585687008d9d2169bb253885e01 100644 --- a/src/kit/taosdemo/taosdemo.c +++ b/src/kit/taosdemo/taosdemo.c @@ -3216,13 +3216,6 @@ static int readTagFromCsvFileToMem(SSuperTable * superTblInfo) { return 0; } -#if 0 -int readSampleFromJsonFileToMem(SSuperTable * superTblInfo) { - // TODO - return 0; -} -#endif - /* Read 10000 lines at most. If more than 10000 lines, continue to read after using */ @@ -5122,13 +5115,13 @@ static int32_t generateStbDataTail( } else { lenOfRow = getRowDataFromSample( data, - remainderBufLen < MAX_DATA_SIZE ? remainderBufLen : MAX_DATA_SIZE, + (remainderBufLen < MAX_DATA_SIZE)?remainderBufLen:MAX_DATA_SIZE, startTime + superTblInfo->timeStampStep * k, superTblInfo, pSamplePos); } - if (lenOfRow > remainderBufLen) { + if ((lenOfRow + 1) > remainderBufLen) { break; } @@ -5338,7 +5331,7 @@ static int64_t generateInterlaceDataWithoutStb( #if STMT_IFACE_ENABLED == 1 static int32_t prepareStmtBindArrayByType(TAOS_BIND *bind, - char *dataType, int32_t dataLen, char **ptr) + char *dataType, int32_t dataLen, char **ptr, char *value) { if (0 == strncasecmp(dataType, "BINARY", strlen("BINARY"))) { @@ -5348,12 +5341,18 @@ static int32_t prepareStmtBindArrayByType(TAOS_BIND *bind, return -1; } char *bind_binary = (char *)*ptr; - rand_string(bind_binary, dataLen); bind->buffer_type = TSDB_DATA_TYPE_BINARY; - bind->buffer_length = dataLen; - bind->buffer = bind_binary; + if (value) { + strncpy(bind_binary, value, strlen(value)); + bind->buffer_length = strlen(bind_binary); + } else { + rand_string(bind_binary, dataLen); + bind->buffer_length = dataLen; + } + bind->length = &bind->buffer_length; + bind->buffer = bind_binary; bind->is_null = NULL; *ptr += bind->buffer_length; @@ -5365,9 +5364,14 @@ static int32_t prepareStmtBindArrayByType(TAOS_BIND *bind, return -1; } char *bind_nchar = (char *)*ptr; - rand_string(bind_nchar, dataLen); bind->buffer_type = TSDB_DATA_TYPE_NCHAR; + if (value) { + strncpy(bind_nchar, value, strlen(value)); + } else { + rand_string(bind_nchar, dataLen); + } + bind->buffer_length = strlen(bind_nchar); bind->buffer = bind_nchar; bind->length = &bind->buffer_length; @@ -5378,7 +5382,11 @@ static int32_t prepareStmtBindArrayByType(TAOS_BIND *bind, "INT", strlen("INT"))) { int32_t *bind_int = (int32_t *)*ptr; - *bind_int = rand_int(); + if (value) { + *bind_int = atoi(value); + } else { + *bind_int = rand_int(); + } bind->buffer_type = TSDB_DATA_TYPE_INT; bind->buffer_length = sizeof(int32_t); bind->buffer = bind_int; @@ -5390,7 +5398,11 @@ static int32_t prepareStmtBindArrayByType(TAOS_BIND *bind, "BIGINT", strlen("BIGINT"))) { int64_t *bind_bigint = (int64_t *)*ptr; - *bind_bigint = rand_bigint(); + if (value) { + *bind_bigint = atoll(value); + } else { + *bind_bigint = rand_bigint(); + } bind->buffer_type = TSDB_DATA_TYPE_BIGINT; bind->buffer_length = sizeof(int64_t); bind->buffer = bind_bigint; @@ -5402,7 +5414,11 @@ static int32_t prepareStmtBindArrayByType(TAOS_BIND *bind, "FLOAT", strlen("FLOAT"))) { float *bind_float = (float *) *ptr; - *bind_float = rand_float(); + if (value) { + *bind_float = (float)atof(value); + } else { + *bind_float = rand_float(); + } bind->buffer_type = TSDB_DATA_TYPE_FLOAT; bind->buffer_length = sizeof(float); bind->buffer = bind_float; @@ -5414,7 +5430,11 @@ static int32_t 
prepareStmtBindArrayByType(TAOS_BIND *bind, "DOUBLE", strlen("DOUBLE"))) { double *bind_double = (double *)*ptr; - *bind_double = rand_double(); + if (value) { + *bind_double = atof(value); + } else { + *bind_double = rand_double(); + } bind->buffer_type = TSDB_DATA_TYPE_DOUBLE; bind->buffer_length = sizeof(double); bind->buffer = bind_double; @@ -5426,7 +5446,11 @@ static int32_t prepareStmtBindArrayByType(TAOS_BIND *bind, "SMALLINT", strlen("SMALLINT"))) { int16_t *bind_smallint = (int16_t *)*ptr; - *bind_smallint = rand_smallint(); + if (value) { + *bind_smallint = (int16_t)atoi(value); + } else { + *bind_smallint = rand_smallint(); + } bind->buffer_type = TSDB_DATA_TYPE_SMALLINT; bind->buffer_length = sizeof(int16_t); bind->buffer = bind_smallint; @@ -5438,7 +5462,11 @@ static int32_t prepareStmtBindArrayByType(TAOS_BIND *bind, "TINYINT", strlen("TINYINT"))) { int8_t *bind_tinyint = (int8_t *)*ptr; - *bind_tinyint = rand_tinyint(); + if (value) { + *bind_tinyint = (int8_t)atoi(value); + } else { + *bind_tinyint = rand_tinyint(); + } bind->buffer_type = TSDB_DATA_TYPE_TINYINT; bind->buffer_length = sizeof(int8_t); bind->buffer = bind_tinyint; @@ -5461,7 +5489,11 @@ static int32_t prepareStmtBindArrayByType(TAOS_BIND *bind, "TIMESTAMP", strlen("TIMESTAMP"))) { int64_t *bind_ts2 = (int64_t *) *ptr; - *bind_ts2 = rand_bigint(); + if (value) { + *bind_ts2 = atoll(value); + } else { + *bind_ts2 = rand_bigint(); + } bind->buffer_type = TSDB_DATA_TYPE_TIMESTAMP; bind->buffer_length = sizeof(int64_t); bind->buffer = bind_ts2; @@ -5527,12 +5559,13 @@ static int32_t prepareStmtWithoutStb( ptr += bind->buffer_length; for (int i = 0; i < g_args.num_of_CPR; i ++) { - bind = (TAOS_BIND *)((char *)bindArray + (sizeof(TAOS_BIND) * (i + 1))); + bind = (TAOS_BIND *)((char *)bindArray + + (sizeof(TAOS_BIND) * (i + 1))); if ( -1 == prepareStmtBindArrayByType( bind, data_type[i], g_args.len_of_binary, - &ptr)) { + &ptr, NULL)) { return -1; } } @@ -5551,12 +5584,14 @@ static int32_t prepareStmtWithoutStb( return k; } -static int32_t prepareStbStmt(SSuperTable *stbInfo, +static int32_t prepareStbStmt( + SSuperTable *stbInfo, TAOS_STMT *stmt, char *tableName, uint32_t batch, uint64_t insertRows, uint64_t recordFrom, - int64_t startTime, char *buffer) + int64_t startTime, + int64_t *pSamplePos) { int ret = taos_stmt_set_tbname(stmt, tableName); if (ret != 0) { @@ -5567,16 +5602,24 @@ static int32_t prepareStbStmt(SSuperTable *stbInfo, char *bindArray = malloc(sizeof(TAOS_BIND) * (stbInfo->columnCount + 1)); if (bindArray == NULL) { - errorPrint("Failed to allocate %d bind params\n", - (stbInfo->columnCount + 1)); + errorPrint("%s() LN%d, Failed to allocate %d bind params\n", + __func__, __LINE__, (stbInfo->columnCount + 1)); return -1; } - bool tsRand; + bool sourceRand; if (0 == strncasecmp(stbInfo->dataSource, "rand", strlen("rand"))) { - tsRand = true; + sourceRand = true; } else { - tsRand = false; + sourceRand = false; // from sample data file + } + + char *bindBuffer = malloc(g_args.len_of_binary); + if (bindBuffer == NULL) { + errorPrint("%s() LN%d, Failed to allocate %d bind buffer\n", + __func__, __LINE__, g_args.len_of_binary); + free(bindArray); + return -1; } uint32_t k; @@ -5592,7 +5635,7 @@ static int32_t prepareStbStmt(SSuperTable *stbInfo, bind_ts = (int64_t *)ptr; bind->buffer_type = TSDB_DATA_TYPE_TIMESTAMP; - if (tsRand) { + if (sourceRand) { *bind_ts = startTime + getTSRandTail( stbInfo->timeStampStep, k, stbInfo->disorderRatio, @@ -5607,14 +5650,46 @@ static int32_t 
prepareStbStmt(SSuperTable *stbInfo, ptr += bind->buffer_length; + int cursor = 0; for (int i = 0; i < stbInfo->columnCount; i ++) { bind = (TAOS_BIND *)((char *)bindArray + (sizeof(TAOS_BIND) * (i + 1))); - if ( -1 == prepareStmtBindArrayByType( - bind, - stbInfo->columns[i].dataType, - stbInfo->columns[i].dataLen, - &ptr)) { - return -1; + + if (sourceRand) { + if ( -1 == prepareStmtBindArrayByType( + bind, + stbInfo->columns[i].dataType, + stbInfo->columns[i].dataLen, + &ptr, + NULL)) { + free(bindArray); + free(bindBuffer); + return -1; + } + } else { + char *restStr = stbInfo->sampleDataBuf + cursor; + int lengthOfRest = strlen(restStr); + + int index = 0; + for (index = 0; index < lengthOfRest; index ++) { + if (restStr[index] == ',') { + break; + } + } + + memset(bindBuffer, 0, g_args.len_of_binary); + strncpy(bindBuffer, restStr, index); + cursor += index + 1; // skip ',' too + + if ( -1 == prepareStmtBindArrayByType( + bind, + stbInfo->columns[i].dataType, + stbInfo->columns[i].dataLen, + &ptr, + bindBuffer)) { + free(bindArray); + free(bindBuffer); + return -1; + } } } taos_stmt_bind_param(stmt, (TAOS_BIND *)bindArray); @@ -5623,11 +5698,16 @@ static int32_t prepareStbStmt(SSuperTable *stbInfo, k++; recordFrom ++; + + if (!sourceRand) { + (*pSamplePos) ++; + } if (recordFrom >= insertRows) { break; } } + free(bindBuffer); free(bindArray); return k; } @@ -5820,13 +5900,14 @@ static void* syncWriteInterlace(threadInfo *pThreadInfo) { if (superTblInfo) { if (superTblInfo->iface == STMT_IFACE) { #if STMT_IFACE_ENABLED == 1 - generated = prepareStbStmt(superTblInfo, + generated = prepareStbStmt( + superTblInfo, pThreadInfo->stmt, tableName, batchPerTbl, insertRows, i, startTime, - pThreadInfo->buffer); + &(pThreadInfo->samplePos)); #else generated = -1; #endif @@ -6051,7 +6132,8 @@ static void* syncWriteProgressive(threadInfo *pThreadInfo) { pThreadInfo->stmt, tableName, g_args.num_of_RPR, - insertRows, i, start_time, pstr); + insertRows, i, start_time, + &(pThreadInfo->samplePos)); #else generated = -1; #endif @@ -7332,6 +7414,7 @@ static void *superSubscribe(void *sarg) { TAOS_RES* res = NULL; uint64_t st = 0, et = 0; + while ((g_queryInfo.superQueryInfo.endAfterConsume == -1) || (g_queryInfo.superQueryInfo.endAfterConsume > consumed[pThreadInfo->end_table_to diff --git a/src/kit/taosdump/CMakeLists.txt b/src/kit/taosdump/CMakeLists.txt index 58897b89e95743c802755c0476f3b2843a244a59..51f4748eab462c8e883e83cd5923f38dd7fb9b5a 100644 --- a/src/kit/taosdump/CMakeLists.txt +++ b/src/kit/taosdump/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/client/inc) diff --git a/src/kit/taosdump/taosdump.c b/src/kit/taosdump/taosdump.c index 94a89bcba6b5c64f6dbaa777bf458573e1787124..b03d55730941ccb106284be294fc1aa82cb417d4 100644 --- a/src/kit/taosdump/taosdump.c +++ b/src/kit/taosdump/taosdump.c @@ -27,6 +27,8 @@ #include "tutil.h" #include +#define TSDB_SUPPORT_NANOSECOND 1 + #define MAX_FILE_NAME_LEN 256 // max file name length on linux is 255 #define COMMAND_SIZE 65536 #define MAX_RECORDS_PER_REQ 32766 @@ -228,7 +230,11 @@ static struct argp_option options[] = { {"avro", 'V', 0, 0, "Dump apache avro format data file. By default, dump sql command sequence.", 2}, {"start-time", 'S', "START_TIME", 0, "Start time to dump. Either epoch or ISO8601/RFC3339 format is acceptable. 
ISO8601 format example: 2017-10-01T18:00:00.000+0800 or 2017-10-0100:00:00.000+0800 or '2017-10-01 00:00:00.000+0800'", 4}, {"end-time", 'E', "END_TIME", 0, "End time to dump. Either epoch or ISO8601/RFC3339 format is acceptable. ISO8601 format example: 2017-10-01T18:00:00.000+0800 or 2017-10-0100:00:00.000+0800 or '2017-10-01 00:00:00.000+0800'", 5}, +#if TSDB_SUPPORT_NANOSECOND == 1 {"precision", 'C', "PRECISION", 0, "Epoch precision. Valid value is one of ms, us, and ns. Default is ms.", 6}, +#else + {"precision", 'C', "PRECISION", 0, "Epoch precision. Valid value is one of ms and us. Default is ms.", 6}, +#endif {"data-batch", 'B', "DATA_BATCH", 0, "Number of data point per insert statement. Max value is 32766. Default is 1.", 3}, {"max-sql-len", 'L', "SQL_LEN", 0, "Max length of one sql. Default is 65480.", 3}, {"table-batch", 't', "TABLE_BATCH", 0, "Number of table dumpout into one output file. Default is 1.", 3}, @@ -527,7 +533,10 @@ static void parse_precision_first( } if ((0 != strncasecmp(tmp, "ms", strlen("ms"))) && (0 != strncasecmp(tmp, "us", strlen("us"))) - && (0 != strncasecmp(tmp, "ns", strlen("ns")))) { +#if TSDB_SUPPORT_NANOSECOND == 1 + && (0 != strncasecmp(tmp, "ns", strlen("ns"))) +#endif + ) { // errorPrint("input precision: %s is invalid value\n", tmp); free(tmp); @@ -564,9 +573,11 @@ static void parse_timestamp( } else if (0 == strncasecmp(arguments->precision, "us", strlen("us"))) { timePrec = TSDB_TIME_PRECISION_MICRO; +#if TSDB_SUPPORT_NANOSECOND == 1 } else if (0 == strncasecmp(arguments->precision, "ns", strlen("ns"))) { timePrec = TSDB_TIME_PRECISION_NANO; +#endif } else { errorPrint("Invalid time precision: %s", arguments->precision); diff --git a/src/mnode/CMakeLists.txt b/src/mnode/CMakeLists.txt index 2df4708c239515febafc7a4f3ab3f63bd9e434e8..a7fc54d87786f430f913980f089d29d969b01fce 100644 --- a/src/mnode/CMakeLists.txt +++ b/src/mnode/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/query/inc) diff --git a/src/os/CMakeLists.txt b/src/os/CMakeLists.txt index 4472c683c70f0e4463ae46c63aaff7c7c7ba0fd6..a64c9d79dd6af511448ad0f9b186f6e50d59c728 100644 --- a/src/os/CMakeLists.txt +++ b/src/os/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) IF (TD_LINUX) diff --git a/src/os/src/darwin/CMakeLists.txt b/src/os/src/darwin/CMakeLists.txt index 259e1a7a0b56a02b7d67825acc85caef5b598089..ed75cac03da112348ff153005d5330786f6386ac 100644 --- a/src/os/src/darwin/CMakeLists.txt +++ b/src/os/src/darwin/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) AUX_SOURCE_DIRECTORY(. SRC) diff --git a/src/os/src/detail/CMakeLists.txt b/src/os/src/detail/CMakeLists.txt index 5c49df24c1c7b1e88c0ba206f2d100fe90ed21c6..ac68cf4cd8cbd217da8aa2d4a41a5aa159562868 100644 --- a/src/os/src/detail/CMakeLists.txt +++ b/src/os/src/detail/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(.) 
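The taosdemo.c hunks above give prepareStmtBindArrayByType an extra value argument: each column slot is now filled either from a token parsed out of the sample data file or, when no token is supplied, from the existing random generators. The standalone sketch below illustrates only that fill-from-sample-or-random pattern; the helper names (fill_double_slot, demo_rand_double) are hypothetical stand-ins and this is not the actual taosdemo code.

#include <stdio.h>
#include <stdlib.h>

/* Stand-in for taosdemo's random generators; an assumption, not the real helper. */
static double demo_rand_double(void) {
    return (double)rand() / (double)RAND_MAX;
}

/* Fill one DOUBLE slot: parse the sample token when present, otherwise randomize. */
static void fill_double_slot(double *slot, const char *value) {
    if (value) {
        *slot = atof(value);        /* token taken from the sample CSV line */
    } else {
        *slot = demo_rand_double(); /* "rand" data source: keep the old behaviour */
    }
}

int main(void) {
    double col;
    fill_double_slot(&col, "3.14");  /* sample-file path */
    printf("from sample: %f\n", col);
    fill_double_slot(&col, NULL);    /* rand path */
    printf("from rand:   %f\n", col);
    return 0;
}

The same branch structure is repeated per type (SMALLINT, TINYINT, TIMESTAMP, ...) in the patch, with atoi/atoll doing the parsing instead of atof.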
diff --git a/src/os/src/linux/CMakeLists.txt b/src/os/src/linux/CMakeLists.txt index 08b696ba1aedb80ecc17997811591fea1209f1ae..d1d95b0096124e122cd630df89ef1057def10373 100644 --- a/src/os/src/linux/CMakeLists.txt +++ b/src/os/src/linux/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) AUX_SOURCE_DIRECTORY(. SRC) diff --git a/src/os/src/windows/CMakeLists.txt b/src/os/src/windows/CMakeLists.txt index e5472e1abd618f27d119efbb926a959bf8c737c6..83012d6e3e5a2e11655f4a1c0742cdd25cccddf2 100644 --- a/src/os/src/windows/CMakeLists.txt +++ b/src/os/src/windows/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) AUX_SOURCE_DIRECTORY(. SRC) diff --git a/src/os/tests/CMakeLists.txt b/src/os/tests/CMakeLists.txt index b00f0ebdc85a24974fd36228ff69ced16a32b76d..3c477641899994bf34237e93122c3d83f0365fad 100644 --- a/src/os/tests/CMakeLists.txt +++ b/src/os/tests/CMakeLists.txt @@ -1,10 +1,11 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) FIND_PATH(HEADER_GTEST_INCLUDE_DIR gtest.h /usr/include/gtest /usr/local/include/gtest) -FIND_LIBRARY(LIB_GTEST_STATIC_DIR libgtest.a /usr/lib/ /usr/local/lib) +FIND_LIBRARY(LIB_GTEST_STATIC_DIR libgtest.a /usr/lib/ /usr/local/lib /usr/lib64) +FIND_LIBRARY(LIB_GTEST_SHARED_DIR libgtest.so /usr/lib/ /usr/local/lib /usr/lib64) -IF (HEADER_GTEST_INCLUDE_DIR AND LIB_GTEST_STATIC_DIR) +IF (HEADER_GTEST_INCLUDE_DIR AND (LIB_GTEST_STATIC_DIR OR LIB_GTEST_SHARED_DIR)) MESSAGE(STATUS "gTest library found, build unit test") # GoogleTest requires at least C++11 @@ -17,4 +18,4 @@ IF (HEADER_GTEST_INCLUDE_DIR AND LIB_GTEST_STATIC_DIR) ADD_EXECUTABLE(osTest ${SOURCE_LIST}) TARGET_LINK_LIBRARIES(osTest taos os tutil common gtest pthread) -ENDIF() \ No newline at end of file +ENDIF() diff --git a/src/plugins/CMakeLists.txt b/src/plugins/CMakeLists.txt index 7dcaaf27e615ead75e83630788288a27e938b0a9..320445f7f784884f8aa009e37182fc57a38bb96f 100644 --- a/src/plugins/CMakeLists.txt +++ b/src/plugins/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) ADD_SUBDIRECTORY(monitor) diff --git a/src/plugins/http/CMakeLists.txt b/src/plugins/http/CMakeLists.txt index bfb47ad12e8b1ef7099109ecf5849ec3575caf5f..57fc2ee3a2692c239d7fa36d6e55ddae738a2720 100644 --- a/src/plugins/http/CMakeLists.txt +++ b/src/plugins/http/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/deps/zlib-1.2.11/inc) diff --git a/src/plugins/monitor/CMakeLists.txt b/src/plugins/monitor/CMakeLists.txt index 28c62a099c0f2bea8b33a57c577bc89c7fb15aaa..8a05d63e141facfe34e740887384fec0337534d4 100644 --- a/src/plugins/monitor/CMakeLists.txt +++ b/src/plugins/monitor/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(inc) diff --git a/src/plugins/mqtt/CMakeLists.txt b/src/plugins/mqtt/CMakeLists.txt index 50b0bbe8af4faeab41a7b041d6aa51747f0aab3e..081512138505ab7e7a54a8bbe770aa293adec0be 100644 --- a/src/plugins/mqtt/CMakeLists.txt +++ b/src/plugins/mqtt/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(inc) diff --git 
a/src/query/CMakeLists.txt b/src/query/CMakeLists.txt index f23ac7dd86932ba42dde7c2891865f7dff546a00..d2411d47b52bac65e86852ef06116066e9b18774 100644 --- a/src/query/CMakeLists.txt +++ b/src/query/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/tsdb/inc) diff --git a/src/query/inc/qExecutor.h b/src/query/inc/qExecutor.h index bc934647ec4827faea4814dfbfb46acb34033668..ed9494a443f32e5c837211fd0f7558c3779eb444 100644 --- a/src/query/inc/qExecutor.h +++ b/src/query/inc/qExecutor.h @@ -73,14 +73,14 @@ typedef struct SResultRowPool { typedef struct SResultRow { int32_t pageId; // pageId & rowId is the position of current result in disk-based output buffer - int32_t offset:29; // row index in buffer page + int32_t offset:29; // row index in buffer page bool startInterp; // the time window start timestamp has done the interpolation already. bool endInterp; // the time window end timestamp has done the interpolation already. bool closed; // this result status: closed or opened uint32_t numOfRows; // number of rows of current time window SResultRowCellInfo* pCellInfo; // For each result column, there is a resultInfo - STimeWindow win; - char* key; // start key of current result row + STimeWindow win; + char *key; // start key of current result row } SResultRow; typedef struct SGroupResInfo { @@ -105,7 +105,7 @@ typedef struct SResultRowInfo { int16_t type:8; // data type for hash key int32_t size:24; // number of result set int32_t capacity; // max capacity - int32_t curIndex; // current start active index + SResultRow* current; // current start active index int64_t prevSKey; // previous (not completed) sliding window start key } SResultRowInfo; @@ -118,6 +118,7 @@ typedef struct SColumnFilterElem { typedef struct SSingleColumnFilterInfo { void* pData; + void* pData2; //used for nchar column int32_t numOfFilters; SColumnInfo info; SColumnFilterElem* pFilters; @@ -276,6 +277,7 @@ typedef struct SQueryRuntimeEnv { bool enableGroupData; SDiskbasedResultBuf* pResultBuf; // query result buffer based on blocked-wised disk file SHashObj* pResultRowHashTable; // quick locate the window object for each result + SHashObj* pResultRowListSet; // used to check if current ResultRowInfo has ResultRow object or not char* keyBuf; // window key buffer SResultRowPool* pool; // window result object pool char** prevRow; diff --git a/src/query/src/qExecutor.c b/src/query/src/qExecutor.c index ef88f8bc0641b50438fd4e72e335ccb7828e124e..557d38d7f108606e387dca6c85fd1938e0a97905 100644 --- a/src/query/src/qExecutor.c +++ b/src/query/src/qExecutor.c @@ -410,10 +410,25 @@ static void prepareResultListBuffer(SResultRowInfo* pResultRowInfo, SQueryRuntim pResultRowInfo->capacity = (int32_t)newCapacity; } -static SResultRow *doPrepareResultRowFromKey(SQueryRuntimeEnv *pRuntimeEnv, SResultRowInfo *pResultRowInfo, char *pData, - int16_t bytes, bool masterscan, uint64_t uid) { +//static int32_t ascResultRowCompareFn(const void* p1, const void* p2) { +// SResultRow* pRow1 = *(SResultRow**)p1; +// SResultRow* pRow2 = *(SResultRow**)p2; +// +// if (pRow1 == pRow2) { +// return 0; +// } else { +// return pRow1->win.skey < pRow2->win.skey? 
-1:1; +// } +//} + +//static int32_t descResultRowCompareFn(const void* p1, const void* p2) { +// return -ascResultRowCompareFn(p1, p2); +//} + +static SResultRow *doPrepareResultRowFromKey(SQueryRuntimeEnv *pRuntimeEnv, SResultRowInfo *pResultRowInfo, int64_t tid, char *pData, + int16_t bytes, bool masterscan, uint64_t tableGroupId) { bool existed = false; - SET_RES_WINDOW_KEY(pRuntimeEnv->keyBuf, pData, bytes, uid); + SET_RES_WINDOW_KEY(pRuntimeEnv->keyBuf, pData, bytes, tableGroupId); SResultRow **p1 = (SResultRow **)taosHashGet(pRuntimeEnv->pResultRowHashTable, pRuntimeEnv->keyBuf, GET_RES_WINDOW_KEY_LEN(bytes)); @@ -425,16 +440,26 @@ static SResultRow *doPrepareResultRowFromKey(SQueryRuntimeEnv *pRuntimeEnv, SRes } if (p1 != NULL) { - for(int32_t i = pResultRowInfo->size - 1; i >= 0; --i) { - if (pResultRowInfo->pResult[i] == (*p1)) { - pResultRowInfo->curIndex = i; - existed = true; - break; - } + pResultRowInfo->current = (*p1); + + if (pResultRowInfo->size == 0) { + existed = false; + } else if (pResultRowInfo->size == 1) { + existed = (pResultRowInfo->pResult[0] == (*p1)); + } else { // check if current pResultRowInfo contains the existed pResultRow + SET_RES_WINDOW_KEY(pRuntimeEnv->keyBuf, pData, bytes, tid); + void* ptr = taosHashGet(pRuntimeEnv->pResultRowListSet, pRuntimeEnv->keyBuf, GET_RES_WINDOW_KEY_LEN(bytes)); + existed = (ptr != NULL); +// __compar_fn_t fn = QUERY_IS_ASC_QUERY(pRuntimeEnv->pQueryAttr)? ascResultRowCompareFn:descResultRowCompareFn; +// void* ptr = taosbsearch(p1, pResultRowInfo->pResult, pResultRowInfo->size, POINTER_BYTES, fn, TD_EQ); +// if (ptr != NULL) { +// existed = true; +// } } } } else { - if (p1 != NULL) { // group by column query + // In case of group by column query, the required SResultRow object must be existed in the pResultRowInfo object. 
+ if (p1 != NULL) { return *p1; } } @@ -456,8 +481,12 @@ static SResultRow *doPrepareResultRowFromKey(SQueryRuntimeEnv *pRuntimeEnv, SRes pResult = *p1; } - pResultRowInfo->pResult[pResultRowInfo->size] = pResult; - pResultRowInfo->curIndex = pResultRowInfo->size++; + pResultRowInfo->pResult[pResultRowInfo->size++] = pResult; + pResultRowInfo->current = pResult; + + int64_t dummyVal = 0; + SET_RES_WINDOW_KEY(pRuntimeEnv->keyBuf, pData, bytes, tid); + taosHashPut(pRuntimeEnv->pResultRowListSet, pRuntimeEnv->keyBuf, GET_RES_WINDOW_KEY_LEN(bytes), &dummyVal, POINTER_BYTES); } // too many time window in query @@ -465,7 +494,7 @@ static SResultRow *doPrepareResultRowFromKey(SQueryRuntimeEnv *pRuntimeEnv, SRes longjmp(pRuntimeEnv->env, TSDB_CODE_QRY_TOO_MANY_TIMEWINDOW); } - return getResultRow(pResultRowInfo, pResultRowInfo->curIndex); + return pResultRowInfo->current; } static void getInitialStartTimeWindow(SQueryAttr* pQueryAttr, TSKEY ts, STimeWindow* w) { @@ -496,7 +525,7 @@ static void getInitialStartTimeWindow(SQueryAttr* pQueryAttr, TSKEY ts, STimeWin static STimeWindow getActiveTimeWindow(SResultRowInfo * pResultRowInfo, int64_t ts, SQueryAttr *pQueryAttr) { STimeWindow w = {0}; - if (pResultRowInfo->curIndex == -1) { // the first window, from the previous stored value + if (pResultRowInfo->current == NULL) { // the first window, from the previous stored value if (pResultRowInfo->prevSKey == TSKEY_INITIAL_VAL) { getInitialStartTimeWindow(pQueryAttr, ts, &w); pResultRowInfo->prevSKey = w.skey; @@ -510,8 +539,9 @@ static STimeWindow getActiveTimeWindow(SResultRowInfo * pResultRowInfo, int64_t w.ekey = w.skey + pQueryAttr->interval.interval - 1; } } else { - int32_t slot = curTimeWindowIndex(pResultRowInfo); - SResultRow* pWindowRes = getResultRow(pResultRowInfo, slot); +// int32_t slot = curTimeWindowIndex(pResultRowInfo); +// SResultRow* pWindowRes = getResultRow(pResultRowInfo, slot); + SResultRow* pWindowRes = pResultRowInfo->current; w = pWindowRes->win; } @@ -592,13 +622,13 @@ static int32_t addNewWindowResultBuf(SResultRow *pWindowRes, SDiskbasedResultBuf return 0; } -static int32_t setWindowOutputBufByKey(SQueryRuntimeEnv *pRuntimeEnv, SResultRowInfo *pResultRowInfo, STimeWindow *win, - bool masterscan, SResultRow **pResult, int64_t groupId, SQLFunctionCtx* pCtx, +static int32_t setWindowOutputBufByKey(SQueryRuntimeEnv *pRuntimeEnv, SResultRowInfo *pResultRowInfo, int64_t tid, STimeWindow *win, + bool masterscan, SResultRow **pResult, int64_t tableGroupId, SQLFunctionCtx* pCtx, int32_t numOfOutput, int32_t* rowCellInfoOffset) { assert(win->skey <= win->ekey); SDiskbasedResultBuf *pResultBuf = pRuntimeEnv->pResultBuf; - SResultRow *pResultRow = doPrepareResultRowFromKey(pRuntimeEnv, pResultRowInfo, (char *)&win->skey, TSDB_KEYSIZE, masterscan, groupId); + SResultRow *pResultRow = doPrepareResultRowFromKey(pRuntimeEnv, pResultRowInfo, tid, (char *)&win->skey, TSDB_KEYSIZE, masterscan, tableGroupId); if (pResultRow == NULL) { *pResult = NULL; return TSDB_CODE_SUCCESS; @@ -606,7 +636,7 @@ static int32_t setWindowOutputBufByKey(SQueryRuntimeEnv *pRuntimeEnv, SResultRow // not assign result buffer yet, add new result buffer if (pResultRow->pageId == -1) { - int32_t ret = addNewWindowResultBuf(pResultRow, pResultBuf, (int32_t) groupId, pRuntimeEnv->pQueryAttr->intermediateResultRowSize); + int32_t ret = addNewWindowResultBuf(pResultRow, pResultBuf, (int32_t) tableGroupId, pRuntimeEnv->pQueryAttr->intermediateResultRowSize); if (ret != TSDB_CODE_SUCCESS) { return -1; } @@ -697,7 +727,12 @@ 
static void doUpdateResultRowIndex(SResultRowInfo*pResultRowInfo, TSKEY lastKey, // all result rows are closed, set the last one to be the skey if (skey == TSKEY_INITIAL_VAL) { - pResultRowInfo->curIndex = pResultRowInfo->size - 1; + if (pResultRowInfo->size == 0) { +// assert(pResultRowInfo->current == NULL); + pResultRowInfo->current = NULL; + } else { + pResultRowInfo->current = pResultRowInfo->pResult[pResultRowInfo->size - 1]; + } } else { for (i = pResultRowInfo->size - 1; i >= 0; --i) { @@ -708,12 +743,12 @@ static void doUpdateResultRowIndex(SResultRowInfo*pResultRowInfo, TSKEY lastKey, } if (i == pResultRowInfo->size - 1) { - pResultRowInfo->curIndex = i; + pResultRowInfo->current = pResultRowInfo->pResult[i]; } else { - pResultRowInfo->curIndex = i + 1; // current not closed result object + pResultRowInfo->current = pResultRowInfo->pResult[i + 1]; // current not closed result object } - pResultRowInfo->prevSKey = pResultRowInfo->pResult[pResultRowInfo->curIndex]->win.skey; + pResultRowInfo->prevSKey = pResultRowInfo->current->win.skey; } } @@ -721,7 +756,7 @@ static void updateResultRowInfoActiveIndex(SResultRowInfo* pResultRowInfo, SQuer bool ascQuery = QUERY_IS_ASC_QUERY(pQueryAttr); if ((lastKey > pQueryAttr->window.ekey && ascQuery) || (lastKey < pQueryAttr->window.ekey && (!ascQuery))) { closeAllResultRows(pResultRowInfo); - pResultRowInfo->curIndex = pResultRowInfo->size - 1; + pResultRowInfo->current = pResultRowInfo->pResult[pResultRowInfo->size - 1]; } else { int32_t step = ascQuery ? 1 : -1; doUpdateResultRowIndex(pResultRowInfo, lastKey - step, ascQuery, pQueryAttr->timeWindowInterpo); @@ -1220,7 +1255,7 @@ static void doWindowBorderInterpolation(SOperatorInfo* pOperatorInfo, SSDataBloc } } -static void hashIntervalAgg(SOperatorInfo* pOperatorInfo, SResultRowInfo* pResultRowInfo, SSDataBlock* pSDataBlock, int32_t groupId) { +static void hashIntervalAgg(SOperatorInfo* pOperatorInfo, SResultRowInfo* pResultRowInfo, SSDataBlock* pSDataBlock, int32_t tableGroupId) { STableIntervalOperatorInfo* pInfo = (STableIntervalOperatorInfo*) pOperatorInfo->info; SQueryRuntimeEnv* pRuntimeEnv = pOperatorInfo->pRuntimeEnv; @@ -1230,7 +1265,8 @@ static void hashIntervalAgg(SOperatorInfo* pOperatorInfo, SResultRowInfo* pResul int32_t step = GET_FORWARD_DIRECTION_FACTOR(pQueryAttr->order.order); bool ascQuery = QUERY_IS_ASC_QUERY(pQueryAttr); - int32_t prevIndex = curTimeWindowIndex(pResultRowInfo); + SResultRow* prevRow = pResultRowInfo->current; +// int32_t prevIndex = curTimeWindowIndex(pResultRowInfo); TSKEY* tsCols = NULL; if (pSDataBlock->pDataBlock != NULL) { @@ -1247,7 +1283,7 @@ static void hashIntervalAgg(SOperatorInfo* pOperatorInfo, SResultRowInfo* pResul bool masterScan = IS_MASTER_SCAN(pRuntimeEnv); SResultRow* pResult = NULL; - int32_t ret = setWindowOutputBufByKey(pRuntimeEnv, pResultRowInfo, &win, masterScan, &pResult, groupId, pInfo->pCtx, + int32_t ret = setWindowOutputBufByKey(pRuntimeEnv, pResultRowInfo, pSDataBlock->info.tid, &win, masterScan, &pResult, tableGroupId, pInfo->pCtx, numOfOutput, pInfo->rowCellInfoOffset); if (ret != TSDB_CODE_SUCCESS || pResult == NULL) { longjmp(pRuntimeEnv->env, TSDB_CODE_QRY_OUT_OF_MEMORY); @@ -1259,18 +1295,24 @@ static void hashIntervalAgg(SOperatorInfo* pOperatorInfo, SResultRowInfo* pResul getNumOfRowsInTimeWindow(pRuntimeEnv, &pSDataBlock->info, tsCols, startPos, ekey, binarySearchForKey, true); // prev time window not interpolation yet. 
- int32_t curIndex = curTimeWindowIndex(pResultRowInfo); - if (prevIndex != -1 && prevIndex < curIndex && pQueryAttr->timeWindowInterpo) { - for (int32_t j = prevIndex; j < curIndex; ++j) { // previous time window may be all closed already. +// int32_t curIndex = curTimeWindowIndex(pResultRowInfo); +// if (prevIndex != -1 && prevIndex < curIndex && pQueryAttr->timeWindowInterpo) { +// for (int32_t j = prevIndex; j < curIndex; ++j) { // previous time window may be all closed already. + if (prevRow != NULL && prevRow != pResultRowInfo->current && pQueryAttr->timeWindowInterpo) { + int32_t j = 0; + while(pResultRowInfo->pResult[j] != prevRow) { + j++; + } + + for(; pResultRowInfo->pResult[j] != pResultRowInfo->current; ++j) { SResultRow* pRes = pResultRowInfo->pResult[j]; if (pRes->closed) { - assert(resultRowInterpolated(pRes, RESULT_ROW_START_INTERP) && - resultRowInterpolated(pRes, RESULT_ROW_END_INTERP)); + assert(resultRowInterpolated(pRes, RESULT_ROW_START_INTERP) && resultRowInterpolated(pRes, RESULT_ROW_END_INTERP)); continue; } STimeWindow w = pRes->win; - ret = setWindowOutputBufByKey(pRuntimeEnv, pResultRowInfo, &w, masterScan, &pResult, groupId, pInfo->pCtx, + ret = setWindowOutputBufByKey(pRuntimeEnv, pResultRowInfo, pSDataBlock->info.tid, &w, masterScan, &pResult, tableGroupId, pInfo->pCtx, numOfOutput, pInfo->rowCellInfoOffset); if (ret != TSDB_CODE_SUCCESS) { longjmp(pRuntimeEnv->env, TSDB_CODE_QRY_OUT_OF_MEMORY); @@ -1288,7 +1330,7 @@ static void hashIntervalAgg(SOperatorInfo* pOperatorInfo, SResultRowInfo* pResul } // restore current time window - ret = setWindowOutputBufByKey(pRuntimeEnv, pResultRowInfo, &win, masterScan, &pResult, groupId, pInfo->pCtx, + ret = setWindowOutputBufByKey(pRuntimeEnv, pResultRowInfo, pSDataBlock->info.tid, &win, masterScan, &pResult, tableGroupId, pInfo->pCtx, numOfOutput, pInfo->rowCellInfoOffset); if (ret != TSDB_CODE_SUCCESS) { longjmp(pRuntimeEnv->env, TSDB_CODE_QRY_OUT_OF_MEMORY); @@ -1308,7 +1350,7 @@ static void hashIntervalAgg(SOperatorInfo* pOperatorInfo, SResultRowInfo* pResul } // null data, failed to allocate more memory buffer - int32_t code = setWindowOutputBufByKey(pRuntimeEnv, pResultRowInfo, &nextWin, masterScan, &pResult, groupId, + int32_t code = setWindowOutputBufByKey(pRuntimeEnv, pResultRowInfo, pSDataBlock->info.tid, &nextWin, masterScan, &pResult, tableGroupId, pInfo->pCtx, numOfOutput, pInfo->rowCellInfoOffset); if (code != TSDB_CODE_SUCCESS || pResult == NULL) { longjmp(pRuntimeEnv->env, TSDB_CODE_QRY_OUT_OF_MEMORY); @@ -1449,7 +1491,7 @@ static void doSessionWindowAggImpl(SOperatorInfo* pOperator, SSWindowOperatorInf SResultRow* pResult = NULL; pInfo->curWindow.ekey = pInfo->curWindow.skey; - int32_t ret = setWindowOutputBufByKey(pRuntimeEnv, &pBInfo->resultRowInfo, &pInfo->curWindow, masterScan, + int32_t ret = setWindowOutputBufByKey(pRuntimeEnv, &pBInfo->resultRowInfo, pSDataBlock->info.tid, &pInfo->curWindow, masterScan, &pResult, item->groupIndex, pBInfo->pCtx, pOperator->numOfOutput, pBInfo->rowCellInfoOffset); if (ret != TSDB_CODE_SUCCESS) { // null data, too many state code @@ -1470,7 +1512,7 @@ static void doSessionWindowAggImpl(SOperatorInfo* pOperator, SSWindowOperatorInf SResultRow* pResult = NULL; pInfo->curWindow.ekey = pInfo->curWindow.skey; - int32_t ret = setWindowOutputBufByKey(pRuntimeEnv, &pBInfo->resultRowInfo, &pInfo->curWindow, masterScan, + int32_t ret = setWindowOutputBufByKey(pRuntimeEnv, &pBInfo->resultRowInfo, pSDataBlock->info.tid, &pInfo->curWindow, masterScan, &pResult, item->groupIndex, 
pBInfo->pCtx, pOperator->numOfOutput, pBInfo->rowCellInfoOffset); if (ret != TSDB_CODE_SUCCESS) { // null data, too many state code @@ -1513,7 +1555,8 @@ static int32_t setGroupResultOutputBuf(SQueryRuntimeEnv *pRuntimeEnv, SOptrBasic len = varDataLen(pData); } - SResultRow *pResultRow = doPrepareResultRowFromKey(pRuntimeEnv, pResultRowInfo, d, len, true, groupIndex); + int64_t tid = 0; + SResultRow *pResultRow = doPrepareResultRowFromKey(pRuntimeEnv, pResultRowInfo, tid, d, len, true, groupIndex); assert (pResultRow != NULL); setResultRowKey(pResultRow, pData, type); @@ -1777,6 +1820,7 @@ static int32_t setupQueryRuntimeEnv(SQueryRuntimeEnv *pRuntimeEnv, int32_t numOf pRuntimeEnv->pQueryAttr = pQueryAttr; pRuntimeEnv->pResultRowHashTable = taosHashInit(numOfTables, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, HASH_NO_LOCK); + pRuntimeEnv->pResultRowListSet = taosHashInit(numOfTables, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), false, HASH_NO_LOCK); pRuntimeEnv->keyBuf = malloc(pQueryAttr->maxTableColumnWidth + sizeof(int64_t)); pRuntimeEnv->pool = initResultRowPool(getResultRowSize(pRuntimeEnv)); @@ -2026,6 +2070,9 @@ static void teardownQueryRuntimeEnv(SQueryRuntimeEnv *pRuntimeEnv) { taosHashCleanup(pRuntimeEnv->pTableRetrieveTsMap); pRuntimeEnv->pTableRetrieveTsMap = NULL; + taosHashCleanup(pRuntimeEnv->pResultRowListSet); + pRuntimeEnv->pResultRowListSet = NULL; + destroyOperatorInfo(pRuntimeEnv->proot); pRuntimeEnv->pool = destroyResultRowPool(pRuntimeEnv->pool); @@ -2756,7 +2803,7 @@ int32_t loadDataBlockOnDemand(SQueryRuntimeEnv* pRuntimeEnv, STableScanInfo* pTa TSKEY k = ascQuery? pBlock->info.window.skey : pBlock->info.window.ekey; STimeWindow win = getActiveTimeWindow(pTableScanInfo->pResultRowInfo, k, pQueryAttr); - if (setWindowOutputBufByKey(pRuntimeEnv, pTableScanInfo->pResultRowInfo, &win, masterScan, &pResult, groupId, + if (setWindowOutputBufByKey(pRuntimeEnv, pTableScanInfo->pResultRowInfo, pBlock->info.tid, &win, masterScan, &pResult, groupId, pTableScanInfo->pCtx, pTableScanInfo->numOfOutput, pTableScanInfo->rowCellInfoOffset) != TSDB_CODE_SUCCESS) { longjmp(pRuntimeEnv->env, TSDB_CODE_QRY_OUT_OF_MEMORY); @@ -2802,7 +2849,7 @@ int32_t loadDataBlockOnDemand(SQueryRuntimeEnv* pRuntimeEnv, STableScanInfo* pTa TSKEY k = ascQuery? 
pBlock->info.window.skey : pBlock->info.window.ekey; STimeWindow win = getActiveTimeWindow(pTableScanInfo->pResultRowInfo, k, pQueryAttr); - if (setWindowOutputBufByKey(pRuntimeEnv, pTableScanInfo->pResultRowInfo, &win, masterScan, &pResult, groupId, + if (setWindowOutputBufByKey(pRuntimeEnv, pTableScanInfo->pResultRowInfo, pBlock->info.tid, &win, masterScan, &pResult, groupId, pTableScanInfo->pCtx, pTableScanInfo->numOfOutput, pTableScanInfo->rowCellInfoOffset) != TSDB_CODE_SUCCESS) { longjmp(pRuntimeEnv->env, TSDB_CODE_QRY_OUT_OF_MEMORY); @@ -3146,7 +3193,7 @@ void copyToSDataBlock(SQueryRuntimeEnv* pRuntimeEnv, int32_t threshold, SSDataBl } -static void updateTableQueryInfoForReverseScan(SQueryAttr *pQueryAttr, STableQueryInfo *pTableQueryInfo) { +static void updateTableQueryInfoForReverseScan(STableQueryInfo *pTableQueryInfo) { if (pTableQueryInfo == NULL) { return; } @@ -3158,7 +3205,12 @@ static void updateTableQueryInfoForReverseScan(SQueryAttr *pQueryAttr, STableQue pTableQueryInfo->cur.vgroupIndex = -1; // set the index to be the end slot of result rows array - pTableQueryInfo->resInfo.curIndex = pTableQueryInfo->resInfo.size - 1; + SResultRowInfo* pResRowInfo = &pTableQueryInfo->resInfo; + if (pResRowInfo->size > 0) { + pResRowInfo->current = pResRowInfo->pResult[pResRowInfo->size - 1]; + } else { + pResRowInfo->current = NULL; + } } static void setupQueryRangeForReverseScan(SQueryRuntimeEnv* pRuntimeEnv) { @@ -3172,7 +3224,7 @@ static void setupQueryRangeForReverseScan(SQueryRuntimeEnv* pRuntimeEnv) { size_t t = taosArrayGetSize(group); for (int32_t j = 0; j < t; ++j) { STableQueryInfo *pCheckInfo = taosArrayGetP(group, j); - updateTableQueryInfoForReverseScan(pQueryAttr, pCheckInfo); + updateTableQueryInfoForReverseScan(pCheckInfo); // update the last key in tableKeyInfo list, the tableKeyInfo is used to build the tsdbQueryHandle and decide // the start check timestamp of tsdbQueryHandle @@ -3211,8 +3263,8 @@ void setDefaultOutputBuf(SQueryRuntimeEnv *pRuntimeEnv, SOptrBasicInfo *pInfo, i int32_t* rowCellInfoOffset = pInfo->rowCellInfoOffset; SResultRowInfo* pResultRowInfo = &pInfo->resultRowInfo; - int32_t tid = 0; - SResultRow* pRow = doPrepareResultRowFromKey(pRuntimeEnv, pResultRowInfo, (char *)&tid, sizeof(tid), true, uid); + int64_t tid = 0; + SResultRow* pRow = doPrepareResultRowFromKey(pRuntimeEnv, pResultRowInfo, tid, (char *)&tid, sizeof(tid), true, uid); for (int32_t i = 0; i < pDataBlock->info.numOfCols; ++i) { SColumnInfoData* pData = taosArrayGet(pDataBlock->pDataBlock, i); @@ -3443,10 +3495,13 @@ void setResultRowOutputBufInitCtx(SQueryRuntimeEnv *pRuntimeEnv, SResultRow *pRe } void doSetTableGroupOutputBuf(SQueryRuntimeEnv* pRuntimeEnv, SResultRowInfo* pResultRowInfo, SQLFunctionCtx* pCtx, - int32_t* rowCellInfoOffset, int32_t numOfOutput, int32_t groupIndex) { + int32_t* rowCellInfoOffset, int32_t numOfOutput, int32_t tableGroupId) { + // for simple group by query without interval, all the tables belong to one group result. 
int64_t uid = 0; + int64_t tid = 0; + SResultRow* pResultRow = - doPrepareResultRowFromKey(pRuntimeEnv, pResultRowInfo, (char*)&groupIndex, sizeof(groupIndex), true, uid); + doPrepareResultRowFromKey(pRuntimeEnv, pResultRowInfo, tid, (char*)&tableGroupId, sizeof(tableGroupId), true, uid); assert (pResultRow != NULL); /* @@ -3454,7 +3509,7 @@ void doSetTableGroupOutputBuf(SQueryRuntimeEnv* pRuntimeEnv, SResultRowInfo* pRe * all group belong to one result set, and each group result has different group id so set the id to be one */ if (pResultRow->pageId == -1) { - int32_t ret = addNewWindowResultBuf(pResultRow, pRuntimeEnv->pResultBuf, groupIndex, pRuntimeEnv->pQueryAttr->resultRowSize); + int32_t ret = addNewWindowResultBuf(pResultRow, pRuntimeEnv->pResultBuf, tableGroupId, pRuntimeEnv->pQueryAttr->resultRowSize); if (ret != TSDB_CODE_SUCCESS) { return; } @@ -3463,20 +3518,20 @@ void doSetTableGroupOutputBuf(SQueryRuntimeEnv* pRuntimeEnv, SResultRowInfo* pRe setResultRowOutputBufInitCtx(pRuntimeEnv, pResultRow, pCtx, numOfOutput, rowCellInfoOffset); } -void setExecutionContext(SQueryRuntimeEnv* pRuntimeEnv, SOptrBasicInfo* pInfo, int32_t numOfOutput, int32_t groupIndex, +void setExecutionContext(SQueryRuntimeEnv* pRuntimeEnv, SOptrBasicInfo* pInfo, int32_t numOfOutput, int32_t tableGroupId, TSKEY nextKey) { STableQueryInfo *pTableQueryInfo = pRuntimeEnv->current; // lastKey needs to be updated pTableQueryInfo->lastKey = nextKey; - if (pRuntimeEnv->prevGroupId != INT32_MIN && pRuntimeEnv->prevGroupId == groupIndex) { + if (pRuntimeEnv->prevGroupId != INT32_MIN && pRuntimeEnv->prevGroupId == tableGroupId) { return; } - doSetTableGroupOutputBuf(pRuntimeEnv, &pInfo->resultRowInfo, pInfo->pCtx, pInfo->rowCellInfoOffset, numOfOutput, groupIndex); + doSetTableGroupOutputBuf(pRuntimeEnv, &pInfo->resultRowInfo, pInfo->pCtx, pInfo->rowCellInfoOffset, numOfOutput, tableGroupId); // record the current active group id - pRuntimeEnv->prevGroupId = groupIndex; + pRuntimeEnv->prevGroupId = tableGroupId; } void setResultOutputBuf(SQueryRuntimeEnv *pRuntimeEnv, SResultRow *pResult, SQLFunctionCtx* pCtx, @@ -4574,7 +4629,7 @@ static SSDataBlock* doTableScan(void* param, bool *newgroup) { } if (pResultRowInfo->size > 0) { - pResultRowInfo->curIndex = 0; + pResultRowInfo->current = pResultRowInfo->pResult[0]; pResultRowInfo->prevSKey = pResultRowInfo->pResult[0]->win.skey; } @@ -4600,8 +4655,8 @@ static SSDataBlock* doTableScan(void* param, bool *newgroup) { pTableScanInfo->order = cond.order; if (pResultRowInfo->size > 0) { - pResultRowInfo->curIndex = pResultRowInfo->size-1; - pResultRowInfo->prevSKey = pResultRowInfo->pResult[pResultRowInfo->size-1]->win.skey; + pResultRowInfo->current = pResultRowInfo->pResult[pResultRowInfo->size - 1]; + pResultRowInfo->prevSKey = pResultRowInfo->current->win.skey; } p = doTableScanImpl(pOperator, newgroup); @@ -5461,7 +5516,7 @@ static void doStateWindowAggImpl(SOperatorInfo* pOperator, SStateWindowOperatorI } else { SResultRow* pResult = NULL; pInfo->curWindow.ekey = pInfo->curWindow.skey; - int32_t ret = setWindowOutputBufByKey(pRuntimeEnv, &pBInfo->resultRowInfo, &pInfo->curWindow, masterScan, + int32_t ret = setWindowOutputBufByKey(pRuntimeEnv, &pBInfo->resultRowInfo, pSDataBlock->info.tid, &pInfo->curWindow, masterScan, &pResult, item->groupIndex, pBInfo->pCtx, pOperator->numOfOutput, pBInfo->rowCellInfoOffset); if (ret != TSDB_CODE_SUCCESS) { // null data, too many state code @@ -5481,7 +5536,7 @@ static void doStateWindowAggImpl(SOperatorInfo* pOperator, 
SStateWindowOperatorI SResultRow* pResult = NULL; pInfo->curWindow.ekey = pInfo->curWindow.skey; - int32_t ret = setWindowOutputBufByKey(pRuntimeEnv, &pBInfo->resultRowInfo, &pInfo->curWindow, masterScan, + int32_t ret = setWindowOutputBufByKey(pRuntimeEnv, &pBInfo->resultRowInfo, pSDataBlock->info.tid, &pInfo->curWindow, masterScan, &pResult, item->groupIndex, pBInfo->pCtx, pOperator->numOfOutput, pBInfo->rowCellInfoOffset); if (ret != TSDB_CODE_SUCCESS) { // null data, too many state code diff --git a/src/query/src/qSqlParser.c b/src/query/src/qSqlParser.c index fd957fdcce133cd636617ad0bf924ad3ea26d775..4d23a9685c36853250aa21e2378cb01584798876 100644 --- a/src/query/src/qSqlParser.c +++ b/src/query/src/qSqlParser.c @@ -938,7 +938,7 @@ void SqlInfoDestroy(SSqlInfo *pInfo) { taosArrayDestroy(pInfo->pMiscInfo->a); } - if (pInfo->pMiscInfo != NULL && pInfo->type == TSDB_SQL_CREATE_DB) { + if (pInfo->pMiscInfo != NULL && (pInfo->type == TSDB_SQL_CREATE_DB || pInfo->type == TSDB_SQL_ALTER_DB)) { taosArrayDestroyEx(pInfo->pMiscInfo->dbOpt.keep, freeVariant); } diff --git a/src/query/src/qUtil.c b/src/query/src/qUtil.c index 7b08450d3b9461c5ccfc316485f12c5d4ea84111..7c5a95312e0e5378a5ad47788bf6857143d1190e 100644 --- a/src/query/src/qUtil.c +++ b/src/query/src/qUtil.c @@ -45,7 +45,7 @@ int32_t initResultRowInfo(SResultRowInfo *pResultRowInfo, int32_t size, int16_t pResultRowInfo->type = type; pResultRowInfo->size = 0; pResultRowInfo->prevSKey = TSKEY_INITIAL_VAL; - pResultRowInfo->curIndex = -1; + pResultRowInfo->current = NULL; pResultRowInfo->capacity = size; pResultRowInfo->pResult = calloc(pResultRowInfo->capacity, POINTER_BYTES); @@ -90,9 +90,9 @@ void resetResultRowInfo(SQueryRuntimeEnv *pRuntimeEnv, SResultRowInfo *pResultRo SET_RES_WINDOW_KEY(pRuntimeEnv->keyBuf, &groupIndex, sizeof(groupIndex), uid); taosHashRemove(pRuntimeEnv->pResultRowHashTable, (const char *)pRuntimeEnv->keyBuf, GET_RES_WINDOW_KEY_LEN(sizeof(groupIndex))); } - - pResultRowInfo->curIndex = -1; - pResultRowInfo->size = 0; + + pResultRowInfo->size = 0; + pResultRowInfo->current = NULL; pResultRowInfo->prevSKey = TSKEY_INITIAL_VAL; } diff --git a/src/query/tests/CMakeLists.txt b/src/query/tests/CMakeLists.txt index 0cfe2ff1659027561799145e8ef8ed514988416c..cc4b607bb44f8d85ab61a5c0b035e5f1fc976161 100644 --- a/src/query/tests/CMakeLists.txt +++ b/src/query/tests/CMakeLists.txt @@ -1,10 +1,11 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) FIND_PATH(HEADER_GTEST_INCLUDE_DIR gtest.h /usr/include/gtest /usr/local/include/gtest) -FIND_LIBRARY(LIB_GTEST_STATIC_DIR libgtest.a /usr/lib/ /usr/local/lib) +FIND_LIBRARY(LIB_GTEST_STATIC_DIR libgtest.a /usr/lib/ /usr/local/lib /usr/lib64) +FIND_LIBRARY(LIB_GTEST_SHARED_DIR libgtest.so /usr/lib/ /usr/local/lib /usr/lib64) -IF (HEADER_GTEST_INCLUDE_DIR AND LIB_GTEST_STATIC_DIR) +IF (HEADER_GTEST_INCLUDE_DIR AND (LIB_GTEST_STATIC_DIR OR LIB_GTEST_SHARED_DIR)) MESSAGE(STATUS "gTest library found, build unit test") # GoogleTest requires at least C++11 diff --git a/src/rpc/CMakeLists.txt b/src/rpc/CMakeLists.txt index f94b4aeb6d21277b6b845587cd35a2c98e0bc0b0..14b77356baa4b87a201e6ff10e785db99cbd47a6 100644 --- a/src/rpc/CMakeLists.txt +++ b/src/rpc/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(inc) diff --git a/src/rpc/test/CMakeLists.txt b/src/rpc/test/CMakeLists.txt index 
c10cea6c9dd8c53ab8608c8a736795f2318059d8..a32ac9943d08fe00427ec58520809b4f04657315 100644 --- a/src/rpc/test/CMakeLists.txt +++ b/src/rpc/test/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/rpc/inc) diff --git a/src/sync/CMakeLists.txt b/src/sync/CMakeLists.txt index 521f51ceb71245f96855d71e649e697eb7591df4..2cd84c7c3fff63a702d99d8b2dc45303f17528ef 100644 --- a/src/sync/CMakeLists.txt +++ b/src/sync/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(inc) diff --git a/src/sync/test/CMakeLists.txt b/src/sync/test/CMakeLists.txt index f2b05ab2263c0d80bc870981f86933922de639e4..a5ab8191371ce97ecbaf9ef4dc8dbace6a6c4802 100644 --- a/src/sync/test/CMakeLists.txt +++ b/src/sync/test/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) IF (TD_LINUX) diff --git a/src/tfs/CMakeLists.txt b/src/tfs/CMakeLists.txt index b435c84366fb47bd137b1c13bc98eab625bbcc66..7f956f07a21ed52363fc2072b01ad0853621712b 100644 --- a/src/tfs/CMakeLists.txt +++ b/src/tfs/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(inc) diff --git a/src/tsdb/CMakeLists.txt b/src/tsdb/CMakeLists.txt index 8080a61a6c9b10b78d965a953765250e0a157fb6..c5b77df5a25f9f0b1e9294228520f171b9befddd 100644 --- a/src/tsdb/CMakeLists.txt +++ b/src/tsdb/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(inc) diff --git a/src/util/CMakeLists.txt b/src/util/CMakeLists.txt index e8a1d61ee52c6461e88f6cdc16069b2b6b523ab5..85b15c0a4fcae0885ebd6f9a4739924146568345 100644 --- a/src/util/CMakeLists.txt +++ b/src/util/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/rpc/inc) diff --git a/src/util/src/thashutil.c b/src/util/src/thashutil.c index ffe167977ba3fd08ac75f0b142c4fcb7db1d14a2..4a0208a3d0bf22f21b5f6a05513f435664e746af 100644 --- a/src/util/src/thashutil.c +++ b/src/util/src/thashutil.c @@ -126,15 +126,38 @@ _hash_fn_t taosGetDefaultHashFunction(int32_t type) { _hash_fn_t fn = NULL; switch(type) { case TSDB_DATA_TYPE_TIMESTAMP: - case TSDB_DATA_TYPE_BIGINT: fn = taosIntHash_64;break; - case TSDB_DATA_TYPE_BINARY: fn = MurmurHash3_32;break; - case TSDB_DATA_TYPE_NCHAR: fn = MurmurHash3_32;break; - case TSDB_DATA_TYPE_INT: fn = taosIntHash_32; break; - case TSDB_DATA_TYPE_SMALLINT: fn = taosIntHash_16; break; - case TSDB_DATA_TYPE_TINYINT: fn = taosIntHash_8; break; - case TSDB_DATA_TYPE_FLOAT: fn = taosFloatHash; break; - case TSDB_DATA_TYPE_DOUBLE: fn = taosDoubleHash; break; - default: fn = taosIntHash_32;break; + case TSDB_DATA_TYPE_UBIGINT: + case TSDB_DATA_TYPE_BIGINT: + fn = taosIntHash_64; + break; + case TSDB_DATA_TYPE_BINARY: + fn = MurmurHash3_32; + break; + case TSDB_DATA_TYPE_NCHAR: + fn = MurmurHash3_32; + break; + case TSDB_DATA_TYPE_UINT: + case TSDB_DATA_TYPE_INT: + fn = taosIntHash_32; + break; + case TSDB_DATA_TYPE_SMALLINT: + case TSDB_DATA_TYPE_USMALLINT: + fn = taosIntHash_16; + break; + case TSDB_DATA_TYPE_BOOL: + case TSDB_DATA_TYPE_UTINYINT: + case TSDB_DATA_TYPE_TINYINT: + fn = taosIntHash_8; + break; + case 
TSDB_DATA_TYPE_FLOAT: + fn = taosFloatHash; + break; + case TSDB_DATA_TYPE_DOUBLE: + fn = taosDoubleHash; + break; + default: + fn = taosIntHash_32; + break; } return fn; diff --git a/src/util/tests/CMakeLists.txt b/src/util/tests/CMakeLists.txt index ee99348cd9db86923f2ba06da9b3452d2dcc0347..a60c6cff2809dcc2a55f5cce3e593ef06045a975 100644 --- a/src/util/tests/CMakeLists.txt +++ b/src/util/tests/CMakeLists.txt @@ -1,15 +1,16 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) FIND_PATH(HEADER_GTEST_INCLUDE_DIR gtest.h /usr/include/gtest /usr/local/include/gtest) -FIND_LIBRARY(LIB_GTEST_STATIC_DIR libgtest.a /usr/lib/ /usr/local/lib) +FIND_LIBRARY(LIB_GTEST_STATIC_DIR libgtest.a /usr/lib/ /usr/local/lib /usr/lib64) +FIND_LIBRARY(LIB_GTEST_SHARED_DIR libgtest.so /usr/lib/ /usr/local/lib /usr/lib64) -IF (HEADER_GTEST_INCLUDE_DIR AND LIB_GTEST_STATIC_DIR) +IF (HEADER_GTEST_INCLUDE_DIR AND (LIB_GTEST_STATIC_DIR OR LIB_GTEST_SHARED_DIR)) MESSAGE(STATUS "gTest library found, build unit test") INCLUDE_DIRECTORIES(${HEADER_GTEST_INCLUDE_DIR}) AUX_SOURCE_DIRECTORY(${CMAKE_CURRENT_SOURCE_DIR} SOURCE_LIST) - + LIST(REMOVE_ITEM SOURCE_LIST ${CMAKE_CURRENT_SOURCE_DIR}/trefTest.c) ADD_EXECUTABLE(utilTest ${SOURCE_LIST}) TARGET_LINK_LIBRARIES(utilTest tutil common os gtest pthread gcov) diff --git a/src/vnode/CMakeLists.txt b/src/vnode/CMakeLists.txt index 3fefbea05ba763dfa856dd52c195d36ce70ccd91..6238f43d32ad2ed973f522aca3bb5dfca9101435 100644 --- a/src/vnode/CMakeLists.txt +++ b/src/vnode/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/deps/cJson/inc) diff --git a/src/vnode/src/vnodeSync.c b/src/vnode/src/vnodeSync.c index 4197428fec6b5d24e7791b2a5f8cb7df229cbca5..2bdfd2ead3a31d8c2cba94d93239de965d2e07dc 100644 --- a/src/vnode/src/vnodeSync.c +++ b/src/vnode/src/vnodeSync.c @@ -95,7 +95,7 @@ void vnodeCtrlFlow(int32_t vgId, int32_t level) { } void vnodeStartSyncFile(int32_t vgId) { - SVnodeObj *pVnode = vnodeAcquire(vgId); + SVnodeObj *pVnode = vnodeAcquireNotClose(vgId); if (pVnode == NULL) { vError("vgId:%d, vnode not found while start filesync", vgId); return; @@ -155,7 +155,7 @@ int32_t vnodeWriteToCache(int32_t vgId, void *wparam, int32_t qtype, void *rpara } int32_t vnodeGetVersion(int32_t vgId, uint64_t *fver, uint64_t *wver) { - SVnodeObj *pVnode = vnodeAcquire(vgId); + SVnodeObj *pVnode = vnodeAcquireNotClose(vgId); if (pVnode == NULL) { vError("vgId:%d, vnode not found while write to cache", vgId); return -1; diff --git a/src/wal/CMakeLists.txt b/src/wal/CMakeLists.txt index a89024dab5060b1f18174f769e0d70c00ad00faf..0d9be42bd5d54ddd1fdd372511e4f98fb7d6355b 100644 --- a/src/wal/CMakeLists.txt +++ b/src/wal/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) ADD_DEFINITIONS(-DWAL_CHECKSUM_WHOLE) diff --git a/src/wal/test/CMakeLists.txt b/src/wal/test/CMakeLists.txt index 071ff6fdba084b7bd9a4f6f01c43eac06c774b29..c5bc4198f10d48caf2ea133c475ea99c8e7a2fd2 100644 --- a/src/wal/test/CMakeLists.txt +++ b/src/wal/test/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) IF (TD_LINUX) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 4e7e9a87ea6810c362bd676cd9152f61bc08e29d..e21905af3b88cd6628c5b83471ff70013dc996fc 100644 --- a/tests/CMakeLists.txt +++ 
b/tests/CMakeLists.txt @@ -3,7 +3,7 @@ # generate release version: # mkdir release; cd release; cmake -DCMAKE_BUILD_TYPE=Release .. -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) SET(CMAKE_C_STANDARD 11) diff --git a/tests/comparisonTest/tdengine/CMakeLists.txt b/tests/comparisonTest/tdengine/CMakeLists.txt index 36ed3efe191c9d949d6234bd61ffbbe28c3a33d2..0f389c4c0cefd10fe829d86342bc391cffe37901 100644 --- a/tests/comparisonTest/tdengine/CMakeLists.txt +++ b/tests/comparisonTest/tdengine/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) IF (TD_LINUX) diff --git a/tests/pytest/fulltest.sh b/tests/pytest/fulltest.sh index 2cbc3747f6b15862891ea34c816654ac92e9f390..c9a91e16880aae378c5b4be4502c164e069fa397 100755 --- a/tests/pytest/fulltest.sh +++ b/tests/pytest/fulltest.sh @@ -345,12 +345,14 @@ python3 ./test.py -f tag_lite/unsignedTinyint.py python3 ./test.py -f functions/function_percentile2.py python3 ./test.py -f insert/boundary2.py +python3 ./test.py -f insert/insert_locking.py python3 ./test.py -f alter/alter_debugFlag.py python3 ./test.py -f query/queryBetweenAnd.py python3 ./test.py -f tag_lite/alter_tag.py python3 test.py -f tools/taosdemoAllTest/taosdemoTestInsertWithJson.py python3 test.py -f tools/taosdemoAllTest/taosdemoTestQueryWithJson.py +python3 test.py -f tools/taosdemoAllTest/TD-4985/query-limit-offset.py python3 ./test.py -f tag_lite/drop_auto_create.py python3 test.py -f insert/insert_before_use_db.py python3 test.py -f alter/alter_keep.py diff --git a/tests/pytest/insert/insert_locking.py b/tests/pytest/insert/insert_locking.py new file mode 100644 index 0000000000000000000000000000000000000000..0d780a7132fbc83b99e4f5b54fe17101ff4f35f9 --- /dev/null +++ b/tests/pytest/insert/insert_locking.py @@ -0,0 +1,178 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import taos +from util.log import tdLog +from util.cases import tdCases +from util.sql import tdSql +import random + + +class TDTestCase: + def init(self, conn, logSql): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor(), logSql) + + + + def run(self): + tdSql.prepare() + # test case for https://jira.taosdata.com:18080/browse/TD-5021 + + tdLog.info("\n\n----------step1 : drop db and create db----------\n") + tdSql.execute('''drop database if exists db ;''') + tdSql.execute('''create database db ;''') + sql = '''show databases;''' + tdSql.query(sql) + tdSql.checkRows(1) + + tdLog.info("\n\n----------step2 : create stable----------\n") + tdSql.execute('''create stable + db.stable_1 (ts timestamp, payload binary(256)) + tags(t1 binary(16),t2 int);''') + sql = '''show db.stables;''' + tdSql.query(sql) + tdSql.checkRows(1) + + tdLog.info("\n\n----------step3 : create table and insert----------\n") + sql = '''insert into db.table1 using db.stable_1 (t1 , t2) tags ("table_1" , 111) ( values (now, ;''' + tdLog.info(sql) + tdSql.error(sql) + try: + tdSql.execute(sql) + tdLog.exit(" unexpected token") + except Exception as e: + tdLog.info(repr(e)) + tdLog.info("DB error: syntax error near ', ;' (unexpected token)") + + sql = '''insert into db.table1(ts , payload) using db.stable_1 (t1 , t2) tags ("table_1" , 111) ( values (now, ;''' + tdLog.info(sql) + tdSql.error(sql) + try: + tdSql.execute(sql) + tdLog.exit(" bind columns again") + except Exception as e: + tdLog.info(repr(e)) + tdLog.info("DB error: syntax error near ', ;' (bind columns again)") + + sql = '''insert into db.table1 using db.stable_1 (t1 , t2) tags ("table_1",111) (ts , payload) ( values (now, ;''' + tdLog.info(sql) + tdSql.error(sql) + try: + tdSql.execute(sql) + tdLog.exit(" keyword VALUES or FILE required ") + except Exception as e: + tdLog.info(repr(e)) + tdLog.info("DB error: invalid SQL: (keyword VALUES or FILE required)") + + tdSql.execute('''insert into db.table1 using db.stable_1 (t1 , t2) + tags ("table_1" , 111) values ( now , 1) ''') + sql = '''select * from db.stable_1;''' + tdSql.query(sql) + tdSql.checkRows(1) + tdSql.checkData(0,1,1) + tdSql.checkData(0,2,'table_1') + + tdLog.info("\n\n----------step4 : create table and insert again----------\n") + sql = '''insert into db.table2 using db.stable_1 (t1) tags ("table_2") ( values (now, ;''' + tdLog.info(sql) + tdSql.error(sql) + try: + tdSql.execute(sql) + tdLog.exit(" unexpected token") + except Exception as e: + tdLog.info(repr(e)) + tdLog.info("DB error: syntax error near ', ;' (unexpected token)") + + tdSql.execute('''insert into db.table2 using db.stable_1 (t1) + tags ("table_2") values ( now , 2) ''') + sql = '''select * from db.stable_1;''' + tdSql.query(sql) + tdSql.checkRows(2) + tdSql.checkData(1,1,2) + tdSql.checkData(1,2,'table_2') + + tdLog.info("\n\n----------step5 : create table and insert without db----------\n") + tdSql.execute('''use db''') + sql = '''insert into table3 using stable_1 (t1) tags ("table_3") ( values (now, ;''' + tdLog.info(sql) + tdSql.error(sql) + try: + tdSql.execute(sql) + tdLog.exit(" unexpected token") + except Exception as e: + tdLog.info(repr(e)) + tdLog.info("DB error: syntax error near ', ;' (unexpected 
token)") + + tdSql.execute('''insert into table3 using stable_1 (t1 , t2) + tags ("table_3" , 333) values ( now , 3) ''') + sql = '''select * from stable_1;''' + tdSql.query(sql) + tdSql.checkRows(3) + tdSql.checkData(2,1,3) + tdSql.checkData(2,2,'table_3') + + tdLog.info("\n\n----------step6 : create tables in one sql ----------\n") + sql = '''insert into table4 using stable_1 (t1) tags ("table_4") values (now, 4) + table5 using stable_1 (t1) tags ("table_5") ( values (now, ;''' + tdLog.info(sql) + tdSql.error(sql) + try: + tdSql.execute(sql) + tdLog.exit(" unexpected token") + except Exception as e: + tdLog.info(repr(e)) + tdLog.info("DB error: syntax error near ', ;' (unexpected token)") + + tdSql.execute('''insert into table4 using stable_1 (t1) tags ("table_4") values (now, 4) + table5 using stable_1 (t1) tags ("table_5") values (now, 5) ''') + sql = '''select * from stable_1;''' + tdSql.query(sql) + tdSql.checkRows(5) + tdSql.checkData(3,1,4) + tdSql.checkData(3,2,'table_4') + tdSql.checkData(4,1,5) + tdSql.checkData(4,2,'table_5') + + + sql = '''insert into table6 using stable_1 (t1) tags ("table_6") ( values (now, + table7 using stable_1 (t1) tags ("table_7") values (now, 7);''' + tdLog.info(sql) + tdSql.error(sql) + try: + tdSql.execute(sql) + tdLog.exit(" invalid SQL") + except Exception as e: + tdLog.info(repr(e)) + tdLog.info("invalid SQL") + + tdSql.execute('''insert into table6 using stable_1 (t1 , t2) tags ("table_6" , 666) values (now, 6) + table7 using stable_1 (t1) tags ("table_7") values (now, 7) ''') + sql = '''select * from stable_1;''' + tdSql.query(sql) + tdSql.checkRows(7) + tdSql.checkData(5,1,6) + tdSql.checkData(5,2,'table_6') + tdSql.checkData(6,1,7) + tdSql.checkData(6,2,'table_7') + + + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) \ No newline at end of file diff --git a/tests/pytest/tools/taosdemoAllTest/TD-4985/query-limit-offset.csv b/tests/pytest/tools/taosdemoAllTest/TD-4985/query-limit-offset.csv new file mode 100644 index 0000000000000000000000000000000000000000..d4138798e350bb1d0aff25819c01f87604b763bc --- /dev/null +++ b/tests/pytest/tools/taosdemoAllTest/TD-4985/query-limit-offset.csv @@ -0,0 +1,10 @@ +0,0,'TAOSdata-0' +1,1,'TAOSdata-1' +2,22,'TAOSdata-2' +3,333,'TAOSdata-3' +4,4444,'TAOSdata-4' +5,55555,'TAOSdata-5' +6,666666,'TAOSdata-6' +7,7777777,'TAOSdata-7' +8,88888888,'TAOSdata-8' +9,999999999,'TAOSdata-9' \ No newline at end of file diff --git a/tests/pytest/tools/taosdemoAllTest/TD-4985/query-limit-offset.json b/tests/pytest/tools/taosdemoAllTest/TD-4985/query-limit-offset.json new file mode 100644 index 0000000000000000000000000000000000000000..265f42036bc5a4e13dc0766b66fccf32924d7185 --- /dev/null +++ b/tests/pytest/tools/taosdemoAllTest/TD-4985/query-limit-offset.json @@ -0,0 +1,62 @@ +{ + "filetype": "insert", + "cfgdir": "/etc/taos", + "host": "127.0.0.1", + "port": 6030, + "user": "root", + "password": "taosdata", + "thread_count": 10, + "thread_count_create_tbl": 10, + "result_file": "./insert_res.txt", + "confirm_parameter_prompt": "no", + "insert_interval": 0, + "interlace_rows": 10, + "num_of_records_per_req": 1, + "max_sql_len": 1024000, + "databases": [{ + "dbinfo": { + "name": "db", + "drop": "yes", + "replica": 1, + "days": 10, + "cache": 50, + "blocks": 8, + "precision": "ms", + "keep": 365, + "minRows": 100, + "maxRows": 4096, + "comp":2, + "walLevel":1, + "cachelast":0, + "quorum":1, + 
"fsync":3000, + "update": 0 + }, + "super_tables": [{ + "name": "stb0", + "child_table_exists":"no", + "childtable_count": 10000, + "childtable_prefix": "stb00_", + "auto_create_table": "no", + "batch_create_tbl_num": 1, + "data_source": "sample", + "insert_mode": "taosc", + "insert_rows": 10, + "childtable_limit": 0, + "childtable_offset":0, + "multi_thread_write_one_tbl": "no", + "interlace_rows": 0, + "insert_interval":0, + "max_sql_len": 1024000, + "disorder_ratio": 0, + "disorder_range": 1000, + "timestamp_step": 1, + "start_timestamp": "2020-10-01 00:00:00.000", + "sample_format": "csv", + "sample_file": "./tools/taosdemoAllTest/TD-4985/query-limit-offset.csv", + "tags_file": "./tools/taosdemoAllTest/TD-4985/query-limit-offset.csv", + "columns": [{"type": "INT","count":2}, {"type": "BINARY", "len": 16, "count":1}], + "tags": [{"type": "INT", "count":2}, {"type": "BINARY", "len": 16, "count":1}] + }] + }] +} diff --git a/tests/pytest/tools/taosdemoAllTest/TD-4985/query-limit-offset.py b/tests/pytest/tools/taosdemoAllTest/TD-4985/query-limit-offset.py new file mode 100644 index 0000000000000000000000000000000000000000..081057f1802bd18d8aab7e7639589e8759ed44ed --- /dev/null +++ b/tests/pytest/tools/taosdemoAllTest/TD-4985/query-limit-offset.py @@ -0,0 +1,191 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. +# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import os +from util.log import * +from util.cases import * +from util.sql import * +from util.dnodes import * + + +class TDTestCase: + def init(self, conn, logSql): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor(), logSql) + + def getBuildPath(self): + selfPath = os.path.dirname(os.path.realpath(__file__)) + + if ("community" in selfPath): + projPath = selfPath[:selfPath.find("community")] + else: + projPath = selfPath[:selfPath.find("tests")] + + for root, dirs, files in os.walk(projPath): + if ("taosd" in files): + rootRealPath = os.path.dirname(os.path.realpath(root)) + if ("packaging" not in rootRealPath): + buildPath = root[:len(root)-len("/build/bin")] + break + return buildPath + + def run(self): + buildPath = self.getBuildPath() + if (buildPath == ""): + tdLog.exit("taosd not found!") + else: + tdLog.info("taosd found in %s" % buildPath) + binPath = buildPath+ "/build/bin/" + + # insert: create one or mutiple tables per sql and insert multiple rows per sql + # test case for https://jira.taosdata.com:18080/browse/TD-4985 + os.system("%staosdemo -f tools/taosdemoAllTest/TD-4985/query-limit-offset.json -y " % binPath) + tdSql.execute("use db") + tdSql.query("select count (tbname) from stb0") + tdSql.checkData(0, 0, 10000) + + for i in range(1000): + tdSql.execute('''insert into stb00_9999 values(%d, %d, %d,'test99.%s')''' + % (1600000000000 + i, i, -10000+i, i)) + tdSql.execute('''insert into stb00_8888 values(%d, %d, %d,'test98.%s')''' + % (1600000000000 + i, i, -10000+i, i)) + tdSql.execute('''insert into stb00_7777 values(%d, %d, %d,'test97.%s')''' + % (1600000000000 + i, i, -10000+i, i)) + tdSql.execute('''insert into stb00_6666 values(%d, %d, %d,'test96.%s')''' + % (1600000000000 + i, 
i, -10000+i, i)) + tdSql.execute('''insert into stb00_5555 values(%d, %d, %d,'test95.%s')''' + % (1600000000000 + i, i, -10000+i, i)) + tdSql.execute('''insert into stb00_4444 values(%d, %d, %d,'test94.%s')''' + % (1600000000000 + i, i, -10000+i, i)) + tdSql.execute('''insert into stb00_3333 values(%d, %d, %d,'test93.%s')''' + % (1600000000000 + i, i, -10000+i, i)) + tdSql.execute('''insert into stb00_2222 values(%d, %d, %d,'test92.%s')''' + % (1600000000000 + i, i, -10000+i, i)) + tdSql.execute('''insert into stb00_1111 values(%d, %d, %d,'test91.%s')''' + % (1600000000000 + i, i, -10000+i, i)) + tdSql.execute('''insert into stb00_100 values(%d, %d, %d,'test90.%s')''' + % (1600000000000 + i, i, -10000+i, i)) + tdSql.query("select * from stb0 where col2 like 'test99%' ") + tdSql.checkRows(1000) + tdSql.query("select * from stb0 where tbname like 'stb00_9999' limit 10" ) + tdSql.checkData(0, 1, 0) + tdSql.checkData(1, 1, 1) + tdSql.checkData(2, 1, 2) + tdSql.query("select * from stb0 where tbname like 'stb00_9999' limit 10 offset 5" ) + tdSql.checkData(0, 1, 5) + tdSql.checkData(1, 1, 6) + tdSql.checkData(2, 1, 7) + tdSql.query("select * from stb0 where col2 like 'test98%' ") + tdSql.checkRows(1000) + tdSql.query("select * from stb0 where tbname like 'stb00_8888' limit 10" ) + tdSql.checkData(0, 1, 0) + tdSql.checkData(1, 1, 1) + tdSql.checkData(2, 1, 2) + tdSql.query("select * from stb0 where tbname like 'stb00_8888' limit 10 offset 5" ) + tdSql.checkData(0, 1, 5) + tdSql.checkData(1, 1, 6) + tdSql.checkData(2, 1, 7) + tdSql.query("select * from stb0 where col2 like 'test97%' ") + tdSql.checkRows(1000) + tdSql.query("select * from stb0 where tbname like 'stb00_7777' limit 10" ) + tdSql.checkData(0, 1, 0) + tdSql.checkData(1, 1, 1) + tdSql.checkData(2, 1, 2) + tdSql.query("select * from stb0 where tbname like 'stb00_7777' limit 10 offset 5" ) + tdSql.checkData(0, 1, 5) + tdSql.checkData(1, 1, 6) + tdSql.checkData(2, 1, 7) + tdSql.query("select * from stb0 where col2 like 'test96%' ") + tdSql.checkRows(1000) + tdSql.query("select * from stb0 where tbname like 'stb00_6666' limit 10" ) + tdSql.checkData(0, 1, 0) + tdSql.checkData(1, 1, 1) + tdSql.checkData(2, 1, 2) + tdSql.query("select * from stb0 where tbname like 'stb00_6666' limit 10 offset 5" ) + tdSql.checkData(0, 1, 5) + tdSql.checkData(1, 1, 6) + tdSql.checkData(2, 1, 7) + tdSql.query("select * from stb0 where col2 like 'test95%' ") + tdSql.checkRows(1000) + tdSql.query("select * from stb0 where tbname like 'stb00_5555' limit 10" ) + tdSql.checkData(0, 1, 0) + tdSql.checkData(1, 1, 1) + tdSql.checkData(2, 1, 2) + tdSql.query("select * from stb0 where tbname like 'stb00_5555' limit 10 offset 5" ) + tdSql.checkData(0, 1, 5) + tdSql.checkData(1, 1, 6) + tdSql.checkData(2, 1, 7) + tdSql.query("select * from stb0 where col2 like 'test94%' ") + tdSql.checkRows(1000) + tdSql.query("select * from stb0 where tbname like 'stb00_4444' limit 10" ) + tdSql.checkData(0, 1, 0) + tdSql.checkData(1, 1, 1) + tdSql.checkData(2, 1, 2) + tdSql.query("select * from stb0 where tbname like 'stb00_4444' limit 10 offset 5" ) + tdSql.checkData(0, 1, 5) + tdSql.checkData(1, 1, 6) + tdSql.checkData(2, 1, 7) + tdSql.query("select * from stb0 where col2 like 'test93%' ") + tdSql.checkRows(1000) + tdSql.query("select * from stb0 where tbname like 'stb00_3333' limit 100" ) + tdSql.checkData(0, 1, 0) + tdSql.checkData(1, 1, 1) + tdSql.checkData(2, 1, 2) + tdSql.query("select * from stb0 where tbname like 'stb00_3333' limit 100 offset 5" ) + tdSql.checkData(0, 1, 5) + 
tdSql.checkData(1, 1, 6) + tdSql.checkData(2, 1, 7) + tdSql.query("select * from stb0 where col2 like 'test92%' ") + tdSql.checkRows(1000) + tdSql.query("select * from stb0 where tbname like 'stb00_2222' limit 100" ) + tdSql.checkData(0, 1, 0) + tdSql.checkData(1, 1, 1) + tdSql.checkData(2, 1, 2) + tdSql.query("select * from stb0 where tbname like 'stb00_2222' limit 100 offset 5" ) + tdSql.checkData(0, 1, 5) + tdSql.checkData(1, 1, 6) + tdSql.checkData(2, 1, 7) + tdSql.query("select * from stb0 where col2 like 'test91%' ") + tdSql.checkRows(1000) + tdSql.query("select * from stb0 where tbname like 'stb00_1111' limit 100" ) + tdSql.checkData(0, 1, 0) + tdSql.checkData(1, 1, 1) + tdSql.checkData(2, 1, 2) + tdSql.query("select * from stb0 where tbname like 'stb00_1111' limit 100 offset 5" ) + tdSql.checkData(0, 1, 5) + tdSql.checkData(1, 1, 6) + tdSql.checkData(2, 1, 7) + tdSql.query("select * from stb0 where col2 like 'test90%' ") + tdSql.checkRows(1000) + tdSql.query("select * from stb0 where tbname like 'stb00_100' limit 100" ) + tdSql.checkData(0, 1, 0) + tdSql.checkData(1, 1, 1) + tdSql.checkData(2, 1, 2) + tdSql.query("select * from stb0 where tbname like 'stb00_100' limit 100 offset 5" ) + tdSql.checkData(0, 1, 5) + tdSql.checkData(1, 1, 6) + tdSql.checkData(2, 1, 7) + + + os.system("rm -rf tools/taosdemoAllTest/TD-4985/query-limit-offset.py.sql") + + + + def stop(self): + tdSql.close() + tdLog.success("%s successfully executed" % __file__) + + +tdCases.addWindows(__file__, TDTestCase()) +tdCases.addLinux(__file__, TDTestCase()) diff --git a/tests/pytest/tools/taosdemoPerformance.py b/tests/pytest/tools/taosdemoPerformance.py index c4b125969a7aae7735f29fe9429d029673588d21..90d2c52c15bfc9b5d8c72f3eaa5719f08da19367 100644 --- a/tests/pytest/tools/taosdemoPerformance.py +++ b/tests/pytest/tools/taosdemoPerformance.py @@ -16,6 +16,8 @@ import pandas as pd import argparse import os.path import json +from util.log import tdLog +from util.sql import tdSql class taosdemoPerformace: diff --git a/tests/pytest/tools/taosdumpTest.py b/tests/pytest/tools/taosdumpTest.py index beac22a2af93fea50df6580c9636fe6b9de8ed42..b2c9eb3ec148cd5f28311131e2f07e15de330521 100644 --- a/tests/pytest/tools/taosdumpTest.py +++ b/tests/pytest/tools/taosdumpTest.py @@ -63,7 +63,6 @@ class TDTestCase: tdSql.execute("create database db days 11 keep 3649 blocks 8 ") tdSql.execute("create database db1 days 12 keep 3640 blocks 7 ") tdSql.execute("use db") - tdSql.execute( "create table st(ts timestamp, c1 int, c2 nchar(10)) tags(t1 int, t2 binary(10))") tdSql.execute("create table t1 using st tags(1, 'beijing')") @@ -86,9 +85,10 @@ class TDTestCase: tdLog.info("taosdump found in %s" % buildPath) binPath = buildPath + "/build/bin/" - os.system("rm ./taosdumptest/tmp1/*.sql") os.system("%staosdump --databases db -o ./taosdumptest/tmp1" % binPath) - os.system("%staosdump --databases db1 -o ./taosdumptest/tmp2" % binPath) + os.system( + "%staosdump --databases db1 -o ./taosdumptest/tmp2" % + binPath) tdSql.execute("drop database db") tdSql.execute("drop database db1") diff --git a/tests/script/general/parser/subInfrom.sim b/tests/script/general/parser/subInfrom.sim deleted file mode 100644 index e47831ee8797e3a9a09ee933c7286740120623e6..0000000000000000000000000000000000000000 --- a/tests/script/general/parser/subInfrom.sim +++ /dev/null @@ -1,147 +0,0 @@ -system sh/stop_dnodes.sh - -system sh/deploy.sh -n dnode1 -i 1 -system sh/cfg.sh -n dnode1 -c walLevel -v 1 -system sh/exec.sh -n dnode1 -s start -sleep 100 -sql connect 
-sleep 100 - -print ========== sub_in_from.sim -$i = 0 - -$dbPrefix = subdb -$tbPrefix = sub_tb -$stbPrefix = sub_stb -$tbNum = 10 -$rowNum = 1000 -$totalNum = $tbNum * $rowNum -$loops = 200000 -$log = 10000 -$ts0 = 1537146000000 -$delta = 600000 -$i = 0 -$db = $dbPrefix . $i -$stb = $stbPrefix . $i - -sql drop database $db -x step1 -step1: -sql create database $db cache 16 maxrows 4096 keep 36500 -print ====== create tables -sql use $db -sql create table $stb (ts timestamp, c1 int, c2 bigint, c3 float, c4 double, c5 smallint, c6 tinyint, c7 bool, c8 binary(10), c9 nchar(10)) tags(t1 int) - -$i = 0 -$ts = $ts0 -$halfNum = $tbNum / 2 -while $i < $halfNum - $tbId = $i + $halfNum - $tb = $tbPrefix . $i - $tb1 = $tbPrefix . $tbId - sql create table $tb using $stb tags( $i ) - sql create table $tb1 using $stb tags( $tbId ) - - $x = 0 - while $x < $rowNum - $xs = $x * $delta - $ts = $ts0 + $xs - $c = $x / 10 - $c = $c * 10 - $c = $x - $c - $binary = 'binary . $c - $binary = $binary . ' - $nchar = 'nchar . $c - $nchar = $nchar . ' - sql insert into $tb values ( $ts , $c , $c , $c , $c , $c , $c , true, $binary , $nchar ) - sql insert into $tb1 values ( $ts , $c , NULL , $c , NULL , $c , $c , true, $binary , $nchar ) - $x = $x + 1 - endw - - $i = $i + 1 -endw -print ====== tables created - -sql_error select count(*) from (select count(*) from abc.sub_stb0) -sql_error select val + 20 from (select count(*) from sub_stb0 interval(10h)) -sql_error select abc+20 from (select count(*) from sub_stb0 interval(1s)) - -sql select count(*) from (select count(*) from sub_stb0 interval(10h)) -if $rows != 1 then - return -1 -endi - -if $data00 != 18 then - print expect 18, actual: $data00 - return -1 -endi - -sql select ts from (select count(*) from sub_stb0 interval(10h)) -if $rows != 18 then - return -1 -endi - -if $data00 != @18-09-17 04:00:00.000@ then - return -1 -endi - -if $data01 != @18-09-17 14:00:00.000@ then - return -1 -endi - -sql select val + 20, val from (select count(*) as val from sub_stb0 interval(10h)) -if $rows != 18 then - return -1 -endi - -if $data00 != 320.000000 then - return -1 -endi - -if $data01 != 300 then - return -1 -endi - -if $data10 != 620 then - return -1 -endi - -if $data11 != 600 then - return -1 -endi - -if $data20 != 620 then - return -1 -endi - -if $data21 != 600 then - return -1 -endi - -sql select max(val), min(val), max(val) - min(val) from (select count(*) val from sub_stb0 interval(10h)) -if $rows != 1 then - return -1 -endi - -if $data00 != 600 then - return -1 -endi - -if $data01 != 100 then - return -1 -endi - -if $data02 != 500.000000 then - return -1 -endi - -sql select first(ts,val),last(ts,val) from (select count(*) val from sub_stb0 interval(10h)) -sql select top(val, 5) from (select count(*) val from sub_stb0 interval(10h)) -sql select diff(val) from (select count(*) val from sub_stb0 interval(10h)) -sql select apercentile(val, 50) from (select count(*) val from sub_stb0 interval(10h)) - -# not support yet -sql select percentile(val, 50) from (select count(*) val from sub_stb0 interval(10h)) -sql select stddev(val) from (select count(*) val from sub_stb0 interval(10h)) - -print ====================>complex query - diff --git a/tests/test/c/CMakeLists.txt b/tests/test/c/CMakeLists.txt index 2eb8ee1614b286f3827705865cf073a7eded0c88..2702d192d3f47022f05888f90ca89c4ef533fe44 100644 --- a/tests/test/c/CMakeLists.txt +++ b/tests/test/c/CMakeLists.txt @@ -1,4 +1,4 @@ -CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20) PROJECT(TDengine) 
INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/inc)