Commit 0aaa4500 authored by Ganlin Zhao

Merge branch 'develop' into feature/TD-6452

...@@ -16,6 +16,9 @@
[submodule "deps/TSZ"]
path = deps/TSZ
url = https://github.com/taosdata/TSZ.git
[submodule "deps/avro"]
path = deps/avro
url = https://github.com/apache/avro
[submodule "src/plugins/taosadapter"]
path = src/plugins/taosadapter
url = https://github.com/taosdata/taosadapter
...@@ -15,6 +15,26 @@ ELSE ()
CMAKE_MINIMUM_REQUIRED(VERSION 3.0)
ENDIF ()
if(NOT WIN32)
string(ASCII 27 Esc)
set(ColourReset "${Esc}[m")
set(ColourBold "${Esc}[1m")
set(Red "${Esc}[31m")
set(Green "${Esc}[32m")
set(Yellow "${Esc}[33m")
set(Blue "${Esc}[34m")
set(Magenta "${Esc}[35m")
set(Cyan "${Esc}[36m")
set(White "${Esc}[37m")
set(BoldRed "${Esc}[1;31m")
set(BoldGreen "${Esc}[1;32m")
set(BoldYellow "${Esc}[1;33m")
set(BoldBlue "${Esc}[1;34m")
set(BoldMagenta "${Esc}[1;35m")
set(BoldCyan "${Esc}[1;36m")
set(BoldWhite "${Esc}[1;37m")
endif()
SET(TD_ACCOUNT FALSE)
SET(TD_ADMIN FALSE)
SET(TD_GRANT FALSE)
......
...@@ -107,7 +107,147 @@ def pre_test(){
make > /dev/null
make install > /dev/null
cd ${WKC}/tests
pip3 install ${WKC}/src/connector/python/
'''
return 1
}
def pre_test_noinstall(){
sh'hostname'
sh'''
cd ${WKC}
git reset --hard HEAD~10 >/dev/null
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WKC}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WKC}
git checkout 2.0
'''
}
else{
sh '''
cd ${WKC}
git checkout develop
'''
}
}
sh'''
cd ${WKC}
git pull >/dev/null
git fetch origin +refs/pull/${CHANGE_ID}/merge
git checkout -qf FETCH_HEAD
git clean -dfx
git submodule update --init --recursive
cd ${WK}
git reset --hard HEAD~10
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WK}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WK}
git checkout 2.0
'''
}
else{
sh '''
cd ${WK}
git checkout develop
'''
}
}
sh '''
cd ${WK}
git pull >/dev/null
export TZ=Asia/Harbin
date
git clean -dfx
mkdir debug
cd debug
cmake .. > /dev/null
make
'''
return 1
}
def pre_test_mac(){
sh'hostname'
sh'''
cd ${WKC}
git reset --hard HEAD~10 >/dev/null
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WKC}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WKC}
git checkout 2.0
'''
}
else{
sh '''
cd ${WKC}
git checkout develop
'''
}
}
sh'''
cd ${WKC}
git pull >/dev/null
git fetch origin +refs/pull/${CHANGE_ID}/merge
git checkout -qf FETCH_HEAD
git clean -dfx
git submodule update --init --recursive
cd ${WK}
git reset --hard HEAD~10
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WK}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WK}
git checkout 2.0
'''
}
else{
sh '''
cd ${WK}
git checkout develop
'''
}
}
sh '''
cd ${WK}
git pull >/dev/null
export TZ=Asia/Harbin
date
git clean -dfx
mkdir debug
cd debug
cmake .. > /dev/null
cmake --build .
'''
return 1
}
...@@ -460,31 +600,61 @@ pipeline {
stage('arm64centos7') {
agent{label " arm64centos7 "}
steps {
pre_test_noinstall()
}
}
stage('arm64centos8') {
agent{label " arm64centos8 "}
steps {
pre_test_noinstall()
}
}
stage('arm32bionic') {
agent{label " arm32bionic "}
steps {
pre_test_noinstall()
}
}
stage('arm64bionic') {
agent{label " arm64bionic "}
steps {
pre_test_noinstall()
}
}
stage('arm64focal') {
agent{label " arm64focal "}
steps {
pre_test_noinstall()
}
}
stage('centos7') {
agent{label " centos7 "}
steps {
pre_test_noinstall()
}
}
stage('ubuntu:trusty') {
agent{label " trusty "}
steps {
pre_test_noinstall()
}
}
stage('ubuntu:xenial') {
agent{label " xenial "}
steps {
pre_test_noinstall()
}
}
stage('ubuntu:bionic') {
agent{label " bionic "}
steps {
pre_test_noinstall()
}
}
stage('Mac_build') {
agent{label " catalina "}
steps {
pre_test_mac()
}
}
......
...@@ -129,7 +129,7 @@ mkdir debug && cd debug
cmake .. && cmake --build .
```
Note: TDengine 2.3.x.0 and later use a component named 'taosadapter' as the default HTTP daemon instead of the HTTP daemon embedded in earlier versions of TDengine. taosadapter is written in Go. If you update an existing codebase to the latest source, please run 'git submodule update --init --recursive' to pull the taosadapter source code. Go 1.14 or above is required to compile taosadapter. If you run into problems with 'go mod', especially inside China, you can configure a proxy to work around them.
```
go env -w GO111MODULE=on
go env -w GOPROXY=https://goproxy.cn,direct
......
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
IF (TD_ACCOUNT)
...@@ -121,14 +121,13 @@ IF (TD_MIPS_32)
SET(COMMON_FLAGS "-Wall -Werror -fPIC -D_FILE_OFFSET_BITS=64 -D_LARGE_FILE")
ENDIF ()
IF (TD_ALPINE)
SET(COMMON_FLAGS "${COMMON_FLAGS} -largp")
link_libraries(/usr/lib/libargp.a)
ADD_DEFINITIONS(-D_ALPINE)
MESSAGE(STATUS "alpine is defined")
ENDIF ()
IF ("${BUILD_HTTP}" STREQUAL "")
IF (TD_LINUX)
IF (TD_ARM_32)
...@@ -140,16 +139,27 @@ IF ("${BUILD_HTTP}" STREQUAL "")
SET(BUILD_HTTP "true")
ENDIF ()
ENDIF ()
IF (${BUILD_HTTP} MATCHES "true")
SET(TD_BUILD_HTTP TRUE)
ELSEIF (${BUILD_HTTP} MATCHES "false")
SET(TD_BUILD_HTTP FALSE)
ENDIF ()
IF (TD_BUILD_HTTP)
ADD_DEFINITIONS(-DHTTP_EMBEDDED)
ENDIF ()
IF ("${AVRO_SUPPORT}" MATCHES "true")
SET(TD_AVRO_SUPPORT TRUE)
ELSEIF ("${AVRO_SUPPORT}" MATCHES "false")
SET(TD_AVRO_SUPPORT FALSE)
ENDIF ()
IF (TD_AVRO_SUPPORT)
ADD_DEFINITIONS(-DAVRO_SUPPORT)
ENDIF ()
IF (TD_LINUX)
ADD_DEFINITIONS(-DLINUX)
ADD_DEFINITIONS(-D_LINUX)
...@@ -162,11 +172,14 @@ IF (TD_LINUX)
ENDIF ()
IF (TD_MEMORY_SANITIZER)
IF (TD_ARCHLINUX)
SET(DEBUG_FLAGS "-fsanitize=address -fsanitize=undefined -fno-sanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=null -fno-sanitize=alignment -O0 -g3 -DDEBUG")
ELSE ()
SET(DEBUG_FLAGS "-fsanitize=address -fsanitize=undefined -fno-sanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=null -fno-sanitize=alignment -static-libasan -O0 -g3 -DDEBUG")
ENDIF ()
MESSAGE(STATUS "${BoldRed}Will compile with memory sanitizer! ${ColourReset}")
ELSE ()
SET(DEBUG_FLAGS "-O0 -g3 -DDEBUG")
ENDIF ()
SET(RELEASE_FLAGS "-O3 -Wno-error")
......
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
SET(CMAKE_C_STANDARD 11)
......
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
IF (${ACCOUNT} MATCHES "true")
...@@ -92,6 +92,8 @@ ENDIF ()
SET(TD_BUILD_HTTP FALSE)
SET(TD_AVRO_SUPPORT FALSE)
SET(TD_MEMORY_SANITIZER FALSE)
IF (${MEMORY_SANITIZER} MATCHES "true")
SET(TD_MEMORY_SANITIZER TRUE)
......
...@@ -35,7 +35,7 @@ ELSEIF (TD_WINDOWS)
#INSTALL(TARGETS taos RUNTIME DESTINATION driver)
#INSTALL(TARGETS shell RUNTIME DESTINATION .)
IF (TD_MVN_INSTALLED)
INSTALL(FILES ${LIBRARY_OUTPUT_PATH}/taos-jdbcdriver-2.0.36-dist.jar DESTINATION connector/jdbc)
ENDIF ()
ELSEIF (TD_DARWIN)
SET(TD_MAKE_INSTALL_SH "${TD_COMMUNITY_DIR}/packaging/tools/make_install.sh")
......
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
#
...@@ -21,7 +21,7 @@ SET(TD_LINUX FALSE)
SET(TD_ARM_32 FALSE)
SET(TD_MIPS_64 FALSE)
SET(TD_MIPS_32 FALSE)
SET(TD_ALPINE FALSE)
SET(TD_NINGSI FALSE)
SET(TD_NINGSI_60 FALSE)
SET(TD_NINGSI_80 FALSE)
...@@ -36,7 +36,7 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
# Get OS information and store in variable TD_OS_INFO.
#
execute_process(COMMAND chmod 777 ${TD_COMMUNITY_DIR}/packaging/tools/get_os.sh)
execute_process(COMMAND sh ${TD_COMMUNITY_DIR}/packaging/tools/get_os.sh "" OUTPUT_VARIABLE TD_OS_INFO)
MESSAGE(STATUS "The current os is " ${TD_OS_INFO})
SET(TD_LINUX TRUE)
...@@ -52,8 +52,13 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
ENDIF ()
IF (${TD_OS_INFO} MATCHES "Alpine")
SET(TD_ALPINE TRUE)
MESSAGE(STATUS "The current OS is Alpine Linux, append extra flags")
ELSEIF (${TD_OS_INFO} MATCHES "Arch")
SET(TD_ARCHLINUX TRUE)
MESSAGE(STATUS "The current OS is Arch Linux")
ELSE ()
MESSAGE(STATUS "The distro is " ${TD_OS_INFO})
ENDIF()
ELSEIF (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
SET(TD_DARWIN TRUE)
...@@ -155,7 +160,7 @@ ELSEIF (${OSTYPE} MATCHES "Linux")
MESSAGE(STATUS "input osType: Linux")
ELSEIF (${OSTYPE} MATCHES "Alpine")
MESSAGE(STATUS "input osType: Alpine")
SET(TD_ALPINE TRUE)
ELSE ()
MESSAGE(STATUS "The user specified osType is unknown: " ${OSTYPE})
ENDIF ()
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
IF (DEFINED VERNUMBER)
SET(TD_VER_NUMBER ${VERNUMBER})
ELSE ()
SET(TD_VER_NUMBER "2.3.1.0")
ENDIF ()
IF (DEFINED VERCOMPATIBLE)
......
...@@ -25,10 +25,36 @@ IF (TD_DARWIN AND TD_MQTT)
ADD_SUBDIRECTORY(MQTT-C)
ENDIF ()
IF (TD_AVRO_SUPPORT)
MESSAGE("")
MESSAGE("${Green} ENABLE avro format support ${ColourReset}")
MESSAGE("")
include(ExternalProject)
ExternalProject_Add(
apache-avro
PREFIX "avro"
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c
BUILD_IN_SOURCE 1
PATCH_COMMAND
COMMAND git clean -f -d
COMMAND sed -i.bak -e "/TARGETS avroappend/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
COMMAND sed -i.bak -e "/TARGETS avrocat/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
COMMAND sed -i.bak -e "/TARGETS avromod/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
COMMAND sed -i.bak -e "/TARGETS avropipe/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
CONFIGURE_COMMAND cmake -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_BINARY_DIR}/build
)
ELSE ()
MESSAGE("")
MESSAGE("${Yellow} NO avro format support ${ColourReset}")
MESSAGE("")
ENDIF ()
IF (TD_LINUX_64 AND JEMALLOC_ENABLED)
MESSAGE("")
MESSAGE("${Green} ENABLE jemalloc ${ColourReset}")
MESSAGE("")
MESSAGE("setup deps/jemalloc, current source dir:" ${CMAKE_CURRENT_SOURCE_DIR})
MESSAGE("binary dir:" ${CMAKE_BINARY_DIR})
include(ExternalProject)
ExternalProject_Add(jemalloc
PREFIX "jemalloc"
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jemalloc
...@@ -39,5 +65,5 @@ IF (TD_LINUX_64 AND JEMALLOC_ENABLED)
ENDIF ()
IF (${TSZ_ENABLED} MATCHES "true")
ADD_SUBDIRECTORY(TSZ)
ENDIF()
\ No newline at end of file
Subproject commit 0ca5b15a8eac40327dd737be52c926fa5675712c
Subproject commit a1fce29d9675b4dd95dfee9db32cc505d0b2227c
...@@ -81,6 +81,7 @@ TDengine是一个高效的存储、查询、分析时序大数据的平台,专
* [Node.js Connector](/connector#nodejs):给node应用提供一个连接TDengine服务器的驱动
* [C# Connector](/connector#csharp):给C#应用提供一个连接TDengine服务器的驱动
* [Windows客户端](https://www.taosdata.com/blog/2019/07/26/514.html):自行编译windows客户端,Windows环境的各种连接器都需要它
* [Rust Connector](/connector/rust): Rust语言下通过libtaos客户端或RESTful接口,连接TDengine服务器。

## [与其他工具的连接](/connections)
......
...@@ -145,7 +145,7 @@ insert delay, avg: 8.31ms, max: 860.12ms, min: 2.00ms
```
$ taosdemo --help
-f, --file=FILE The meta file to the execution procedure. Currently, we support standard UTF-8 (without BOM) encoded files only.
-u, --user=USER The user name to use when connecting to the server.
-p, --password The password to use when connecting to the server.
-c, --config-dir=CONFIG_DIR Configuration directory.
...@@ -442,7 +442,7 @@ TDengine是涛思数据专为物联网、车联网、工业互联网、IT运维
taosdemo支持两种配置参数的模式,一种是命令行参数,一种是使用json格式的配置文件。
一、命令行参数
-f:指定taosdemo所需参数的meta文件。当使用该参数时,其他所有命令行参数都失效。可选项,缺省是NULL。目前仅支持不含 BOM 的标准 UTF-8 编码文件。
-u: 用户名。可选项,缺省是“root“。
......
...@@ -27,13 +27,18 @@ INSERT INTO d1001 VALUES (1538548685000, 10.3, 219, 0.31) (1538548695000, 12.6,
- 对同一张表,如果新插入记录的时间戳已经存在,默认情形下(UPDATE=0)新记录将被直接抛弃,也就是说,在一张表里,时间戳必须是唯一的。如果应用自动生成记录,很有可能生成的时间戳是一样的,这样,成功插入的记录条数会小于应用插入的记录条数。如果在创建数据库时使用了 UPDATE 1 选项,插入相同时间戳的新记录将覆盖原有记录。
- 写入的数据的时间戳必须大于当前时间减去配置参数keep的时间。如果keep配置为3650天,那么无法写入比3650天还早的数据。写入数据的时间戳也不能大于当前时间加配置参数days。如果days为2,那么无法写入比当前时间还晚2天的数据。

## <a class="anchor" id="schemaless"></a>无模式(Schemaless)写入
**前言**
<br/>在物联网应用中,常会采集比较多的数据项,用于实现智能控制、业务分析、设备监控等。由于应用逻辑的版本升级,或者设备自身的硬件调整等原因,数据采集项就有可能比较频繁地出现变动。为了在这种情况下方便地完成数据记录工作,TDengine 从 2.2.0.0 版本开始,提供调用 Schemaless 写入方式,可以免于预先创建超级表/子表的步骤,随着数据写入写入接口能够自动创建与数据对应的存储结构。并且在必要时,Schemaless 将自动增加必要的数据列,保证用户写入的数据可以被正确存储。
<br/>目前,TDengine 的 C/C++ Connector 提供支持 Schemaless 的操作接口,详情请参见 [Schemaless 方式写入接口](https://www.taosdata.com/cn/documentation/connector#schemaless)章节。这里对 Schemaless 的数据表达格式进行了描述。
<br/>无模式写入方式建立的超级表及其对应的子表与通过 SQL 直接建立的超级表和子表完全没有区别,您也可以通过 SQL 语句直接向其中写入数据。需要注意的是,通过无模式写入方式建立的表,其表名是基于标签值按照固定的映射规则生成,所以无法明确地进行表意,缺乏可读性。
**无模式写入行协议**
<br/>TDengine 的无模式写入的行协议兼容 InfluxDB 的 行协议(Line Protocol)、OpenTSDB 的 telnet 行协议、OpenTSDB 的 Json 格式协议。但是使用这三种协议的时候,需要在 API 中指定输入内容使用解析协议的标准。
对于InfluxDB、OpenTSDB的标准写入协议请参考各自的文档。下面首先以 InfluxDB 的行协议为基础,介绍 TDengine 扩展的协议内容,允许用户采用更加精细的方式控制(超级表)模式。
Schemaless 采用一个字符串来表达一个数据行(可以向写入 API 中一次传入多行字符串来实现多个数据行的批量写入),其格式约定如下:
```json
measurement,tag_set field_set timestamp
```
...@@ -44,51 +49,104 @@ measurement,tag_set field_set timestamp
* field_set 将作为普通列数据,其格式形如 `<field_key>=<field_value>,<field_key>=<field_value>`,同样是使用英文逗号来分隔多个普通列的数据。它与 timestamp 之间使用一个半角空格来分隔。
* timestamp 即本行数据对应的主键时间戳。
tag_set 中的所有的数据自动转化为 nchar 数据类型,并不需要使用双引号(")。
<br/>在无模式写入数据行协议中,field_set 中的每个数据项都需要对自身的数据类型进行描述。具体来说:
* 如果两边有英文双引号,表示 BINARY(32) 类型。例如 `"abc"`
* 如果两边有英文双引号而且带有 L 前缀,表示 NCHAR(32) 类型。例如 `L"报错信息"`
* 对空格、等号(=)、逗号(,)、双引号("),前面需要使用反斜杠(\)进行转义。(都指的是英文半角符号)
* 数值类型将通过后缀来区分数据类型:
| **序号** | **后缀** | **映射类型** | **大小(字节)** |
| -- | ------- | --------- | ------ |
| 1 | 无或f64 | double | 8 |
| 2 | f32 | float | 4 |
| 3 | i8 | TinyInt | 1 |
| 4 | i16 | SmallInt | 2 |
| 5 | i32 | Int | 4 |
| 6 | i64或i | Bigint | 8 |
* t, T, true, True, TRUE, f, F, false, False 将直接作为 BOOL 型来处理。
<br/>例如如下数据行表示:向名为 st 的超级表下的 t1 标签为 "3"(NCHAR)、t2 标签为 "4"(NCHAR)、t3 标签为 "t3"(NCHAR)的数据子表,写入 c1 列为 3(BIGINT)、c2 列为 false(BOOL)、c3 列为 "passit"(BINARY)、c4 列为 4(DOUBLE)、主键时间戳为 1626006833639000000 的一行数据。
```json
st,t1=3,t2=4,t3=t3 c1=3i64,c3="passit",c2=false,c4=4f64 1626006833639000000
```
需要注意的是,如果描述数据类型后缀时使用了错误的大小写,或者为数据指定的数据类型有误,均可能引发报错提示而导致数据写入失败。
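下面再给出一个示意性的数据行(标签取值为虚构,仅用于说明上面的转义规则):标签值中若包含空格、等号、逗号或双引号,需要在其前面加反斜杠转义,转义后整个标签值仍按 NCHAR 类型存储。

```json
st,t1=3,t2=4,t3=Los\ Angeles c1=3i64,c3="passit",c2=false 1626006833639000000
```

其中 t3 标签的取值为 "Los Angeles"(NCHAR),用于转义的反斜杠本身不会被写入。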
### 无模式写入的主要处理逻辑

无模式写入按照如下原则来处理行数据:
1. 当 tag_set 中有 ID 字段时,该字段的值将作为子表的表名。
2. 没有 ID 字段时,将使用如下规则来生成子表名:
首先将measurement 的名称和标签的 key 和 value 组合成为如下的字符串
```json
"measurement,tag_key1=tag_value1,tag_key2=tag_value2"
```
需要注意的是,这里的tag_key1, tag_key2并不是用户输入的标签的原始顺序,而是使用了标签名称按照字符串升序排列后的结果。所以,tag_key1 并不是在行协议中输入的第一个标签。
排列完成以后计算该字符串的 MD5 散列值 "md5_val"。然后将计算的结果与字符串组合生成表名:“t_md5_val”。其中的 “t_” 是固定的前缀,每个通过该映射关系自动生成的表都具有该前缀。(本列表之后给出了一个生成示例。)
<br/>3. 如果解析行协议获得的超级表不存在,则会创建这个超级表。
<br/>4. 如果解析行协议获得子表不存在,则 Schemaless 会按照步骤 1 或 2 确定的子表名来创建子表。
<br/>5. 如果数据行中指定的标签列或普通列不存在,则在超级表中增加对应的标签列或普通列(只增不减)。
<br/>6. 如果超级表中存在一些标签列或普通列未在一个数据行中被指定取值,那么这些列的值在这一行中会被置为 NULL。
<br/>7. 对 BINARY 或 NCHAR 列,如果数据行中所提供值的长度超出了列类型的限制,自动增加该列允许存储的字符长度上限(只增不减),以保证数据的完整保存。
<br/>8. 如果指定的数据子表已经存在,而且本次指定的标签列取值跟已保存的值不一样,那么最新的数据行中的值会覆盖旧的标签列取值。
<br/>9. 整个处理过程中遇到的错误会中断写入过程,并返回错误代码。
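以前文的数据行为例,说明第 2 条规则中子表名的生成方式(示意,此处不给出实际计算的 MD5 值):对于 `st,t1=3,t2=4,t3=t3 c1=3i64,c3="passit",c2=false,c4=4 1626006833639000000`,标签名按字符串升序排列(此处恰好与输入顺序相同)后组合出的字符串为:

```json
"st,t1=3,t2=4,t3=t3"
```

再对该字符串计算 MD5 散列值 md5_val,最终自动生成的子表名即为 t_md5_val。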
**备注:**
<br/>无模式所有的处理逻辑,仍会遵循 TDengine 对数据结构的底层限制,例如每行数据的总长度不能超过 16k 字节。这方面的具体限制约束请参见 [TAOS SQL 边界限制](https://www.taosdata.com/cn/documentation/taos-sql#limitation) 章节。
**时间分辨率识别**
<br/>无模式写入过程中支持三个指定的模式,具体如下
| **序号** | **值** | **说明** |
| ---- | ------------------- | ------------ |
| 1 | SML_LINE_PROTOCOL | InfluxDB行协议(Line Protocol) |
| 2 | SML_TELNET_PROTOCOL | OpenTSDB文本行协议 |
| 3 | SML_JSON_PROTOCOL | Json协议格式 |
<br/>在 SML_LINE_PROTOCOL 解析模式下,需要用户指定输入的时间戳的时间分辨率。可用的时间分辨率如下表所示:<br/>
| **序号** | **时间分辨率定义** | **含义** |
| ---- | ----------------------------- | --------- |
| 1 | TSDB_SML_TIMESTAMP_NOT_CONFIGURED | 未定义(无效) |
| 2 | TSDB_SML_TIMESTAMP_HOURS | 小时 |
| 3 | TSDB_SML_TIMESTAMP_MINUTES | 分钟 |
| 4 | TSDB_SML_TIMESTAMP_SECONDS | 秒 |
| 5 | TSDB_SML_TIMESTAMP_MILLI_SECONDS | 毫秒 |
| 6 | TSDB_SML_TIMESTAMP_MICRO_SECONDS | 微秒 |
| 7 | TSDB_SML_TIMESTAMP_NANO_SECONDS | 纳秒 |
在 SML_TELNET_PROTOCOL 和 SML_JSON_PROTOCOL 模式下,根据时间戳的长度来确定时间精度(与 OpenTSDB 标准操作方式相同),此时会忽略用户指定的时间分辨率。
**数据模式变更处理**
<br/>本节将说明不同行数据写入情况下,对于数据模式的影响。
在使用行协议写入一个明确的标识的字段类型的时候,后续更改该字段的类型定义,会出现明确的数据模式错误,即会触发写入 API 报告错误。如下所示,
```json
st,t1=3,t2=4,t3=t3 c1=3i64,c3="passit",c2=false,c4=4 1626006833639000000
st,t1=3,t2=4,t3=t3 c1=3i64,c3="passit",c2=false,c4=4i 1626006833640000000
```
第一行的数据类型映射将 c4 列定义为 Double, 但是第二行的数据又通过数值后缀方式声明该列为 BigInt, 由此会触发无模式写入的解析错误。
如果行协议中先将某个数据列声明为 binary 类型,而后续写入要求更长的 binary 长度,此时会触发超级表模式的变更。
```json
st,t1=3,t2=4,t3=t3 c1=3i64,c5="pass" 1626006833639000000
st,t1=3,t2=4,t3=t3 c1=3i64,c5="passit" 1626006833640000000
```
第一行的行协议解析会将 c5 列声明为 binary(4) 字段;第二行写入时提取到的 c5 仍然是 binary 列,但其宽度为 6,此时需要将 binary 列的宽度增加到能够容纳新字符串的宽度。
```json
st,t1=3,t2=4,t3=t3 c1=3i64 1626006833639000000
st,t1=3,t2=4,t3=t3 c1=3i64,c6="passit" 1626006833640000000
```
第二行数据相对于第一行来说增加了一个列 c6,类型为binary(6)。那么此时会自动增加一个列 c6, 类型为 binary(6)。
**写入完整性**
<br/>TDengine 提供数据写入的幂等性保证,即您可以反复调用 API 进行出错数据的写入操作。但是不提供多行数据写入的原子性保证。即在多行数据一批次写入过程中,会出现部分数据写入成功,部分数据写入失败的情况。
**错误码**
<br/>如果是无模式写入过程中的数据本身错误,应用会得到 TSDB_CODE_TSC_LINE_SYNTAX_ERROR 错误信息,该错误信息表明错误发生在写入文本中。其他的错误码与原系统一致,可以通过 taos_errstr 获取具体的错误原因。
**后续升级计划**
<br/>当前版本只提供了 C 版本的 API,后续将提供 其他高级语言的 API,例如 Java/Go/Python/C# 等。此外,在TDengine v2.3及后续版本中,您还可以通过 Taos Adapter 采用 REST 的方式直接写入无模式数据。
关于 Schemaless 的字符串编码处理、时区设置等,均会沿用 TAOSC 客户端的设置。
## <a class="anchor" id="prometheus"></a>Prometheus 直接写入
...@@ -183,10 +241,10 @@ use prometheus;
select * from apiserver_request_latencies_bucket;
```

## <a class="anchor" id="telegraf"></a> Telegraf 直接写入(通过 taosadapter)

安装 Telegraf 请参考[官方文档](https://portal.influxdata.com/downloads/)

TDengine 新版本(2.3.0.0+)包含一个 taosadapter 独立程序,负责接收包括 Telegraf 的多种应用的数据写入。

配置方法,在 /etc/telegraf/telegraf.conf 增加如下文字,其中 database name 请填写希望在 TDengine 保存 Telegraf 数据的数据库名,TDengine server/cluster host、username和 password 填写 TDengine 实际值:
```
...@@ -206,14 +264,14 @@ sudo systemctl start telegraf
```
即可在 TDengine 中查询 metrics 数据库中 Telegraf 写入的数据。

taosadapter 相关配置参数请参考 taosadapter --help 命令输出以及相关文档。

## <a class="anchor" id="collectd"></a> collectd 直接写入(通过 taosadapter)

安装 collectd,请参考[官方文档](https://collectd.org/download.shtml)

TDengine 新版本(2.3.0.0+)包含一个 taosadapter 独立程序,负责接收包括 collectd 的多种应用的数据写入。

在 /etc/collectd/collectd.conf 文件中增加如下内容,其中 host 和 port 请填写 TDengine 和 taosadapter 配置的实际值:
```
LoadPlugin network
<Plugin network>
...@@ -224,15 +282,15 @@ LoadPlugin network
```
sudo systemctl start collectd
```

taosadapter 相关配置参数请参考 taosadapter --help 命令输出以及相关文档。

## <a class="anchor" id="statsd"></a> StatsD 直接写入(通过 taosadapter)

安装 StatsD
请参考[官方文档](https://github.com/statsd/statsd)

TDengine 新版本(2.3.0.0+)包含一个 taosadapter 独立程序,负责接收包括 StatsD 的多种应用的数据写入。

在 config.js 文件中增加如下内容后启动 StatsD,其中 host 和 port 请填写 TDengine 和 taosadapter 配置的实际值:
```
backends 部分添加 "./backends/repeater"
repeater 部分添加 { host:'<TDengine server/cluster host>', port: <port for StatsD>}
...@@ -247,12 +305,12 @@ port: 8125
}
```

taosadapter 相关配置参数请参考 taosadapter --help 命令输出以及相关文档。

## <a class="anchor" id="taosadapter2-telegraf"></a> 使用 Bailongma 2.0 接入 Telegraf 数据写入

*注意:TDengine 新版本(2.3.0.0+)提供新版本 Bailongma ,命名为 taosadapter ,提供更简便的 Telegraf 数据写入以及其他更强大的功能,Bailongma v2 即之前版本将逐步不再维护。

[Telegraf](https://www.influxdata.com/time-series-platform/telegraf/)是一流行的IT运维数据采集开源工具,TDengine提供一个小工具[Bailongma](https://github.com/taosdata/Bailongma),只需在Telegraf做简单配置,无需任何代码,就可将Telegraf采集的数据直接写入TDengine,并按规则在TDengine自动创建库和相关表项。博文[用Docker容器快速搭建一个Devops监控Demo](https://www.taosdata.com/blog/2020/02/03/1189.html)即是采用bailongma将Prometheus和Telegraf的数据写入TDengine中的示例,可以参考。
......
# Rust 连接器
![Crates.io](https://img.shields.io/crates/v/libtaos) ![Crates.io](https://img.shields.io/crates/d/libtaos)
> Rust 连接器仍然在快速开发中,版本API变动在所难免,在1.0 之前无法保证其向后兼容,请使用时注意版本及对应的文档。
感谢 [@songtianyi](https://github.com/songtianyi)[libtdengine](https://github.com/songtianyi/tdengine-rust-bindings) 的贡献,使Rust社区能够使用Rust 连接[TDengine]. [libtaos-rs] 项目旨在为Rust开发者提供官方支持,使用taosc接口及HTTP接口构建兼容API以便于用户切换接口方式。
## 依赖
- [Rust](https://www.rust-lang.org/learn/get-started)
默认情况下,[libtaos-rs] 使用 C 接口连接数据库,所以您需要:
- [TDengine] [客户端](https://www.taosdata.com/cn/getting-started/#%E9%80%9A%E8%BF%87%E5%AE%89%E8%A3%85%E5%8C%85%E5%AE%89%E8%A3%85)
- `clang`: `bindgen` 使用 `libclangAST` 来生成对应的Rust绑定。
## 特性列表
- [x] C 接口的Rust绑定
- [x] 使用 `rest` feature 来启用 RESTful API.
- [x] [r2d2] 连接池支持(feature `r2d2`
- [ ] 迭代器接口
- [ ] 流式计算接口
- [ ] 订阅支持
## 构建和测试
```sh
cargo build
cargo test
```
测试使用默认用户名密码和本地连接。您可以根据具体情况设置环境变量:
- `TEST_TAOS_IP`
- `TEST_TAOS_PORT`
- `TEST_TAOS_USER`
- `TEST_TAOS_PASS`
- `TEST_TAOS_DB`
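例如(以下取值仅为示意;root/taosdata 是 TDengine 常见的默认用户名/密码,6030 为默认 serverPort,请按实际环境调整):

```sh
TEST_TAOS_IP=127.0.0.1 \
TEST_TAOS_PORT=6030 \
TEST_TAOS_USER=root \
TEST_TAOS_PASS=taosdata \
TEST_TAOS_DB=demo \
cargo test
```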
## 使用
使用默认的taosc 连接方式,可以在 `Cargo.toml` 中直接添加 `libtaos` 依赖:
```toml
[dependencies]
libtaos = "v0.3.8"
```
添加 feature `r2d2` 来启用连接池:
```toml
[dependencies]
libtaos = { version = "*", features = ["r2d2"] }
```
对于RESTful接口,可使用 `rest` 特性来替代taosc,免去安装TDengine客户端。
```toml
[dependencies]
libtaos = { version = "*", features = ["rest"] }
```
本项目中提供一个 [示例程序](https://github.com/taosdata/libtaos-rs/blob/main/examples/demo.rs)(examples/demo.rs),如下:
```rust
// ...
#[tokio::main]
async fn main() -> Result<(), Error> {
init();
let taos = taos_connect()?;
assert_eq!(
taos.query("drop database if exists demo").await.is_ok(),
true
);
assert_eq!(taos.query("create database demo").await.is_ok(), true);
assert_eq!(taos.query("use demo").await.is_ok(), true);
assert_eq!(
taos.query("create table m1 (ts timestamp, speed int)")
.await
.is_ok(),
true
);
for i in 0..10i32 {
assert_eq!(
taos.query(format!("insert into m1 values (now+{}s, {})", i, i).as_str())
.await
.is_ok(),
true
);
}
let rows = taos.query("select * from m1").await?;
println!("{}", rows.column_meta.into_iter().map(|col| col.name).join(","));
for row in rows.rows {
println!("{}", row.into_iter().join(","));
}
Ok(())
}
```
您可以在 [bailongma-rs] - 一个 Rust 编写的 Prometheus 远程存储 API 适配器 - 看到如何在具体应用中使用 Rust 连接器。
[libtaos-rs]: https://github.com/taosdata/libtaos-rs
[TDengine]: https://github.com/taosdata/TDengine
[bailongma-rs]: https://github.com/taosdata/bailongma-rs
[r2d2]: https://crates.io/crates/r2d2
\ No newline at end of file
...@@ -3,7 +3,7 @@
## <a class="anchor" id="grafana"></a>Grafana

TDengine 能够与开源数据可视化系统 [Grafana](https://www.grafana.com/)快速集成搭建数据监测报警系统,整个过程无需任何代码开发,TDengine 中数据表中内容可以在仪表盘(DashBoard)上进行可视化展现。关于TDengine插件的使用您可以在[GitHub](https://github.com/taosdata/grafanaplugin/blob/master/README.md)中了解更多。

### 安装Grafana
...@@ -11,19 +11,24 @@ TDengine 能够与开源数据可视化系统 [Grafana](https://www.grafana.com/
### 配置Grafana

TDengine 的 Grafana 插件请从 <https://github.com/taosdata/grafanaplugin/releases/latest> 下载。

```bash
GF_VERSION=3.1.1
wget https://github.com/taosdata/grafanaplugin/releases/download/v$GF_VERSION/tdengine-datasource-$GF_VERSION.zip
```

以 CentOS 7.2 操作系统为例,将插件包解压到 /var/lib/grafana/plugins 目录下,重新启动 grafana 即可。

```bash
sudo unzip tdengine-datasource-$GF_VERSION.zip -d /var/lib/grafana/plugins/
```

Grafana 7.3+ / 8.x 版本会对插件进行签名检查,因此还需要在 grafana.ini 文件中增加如下行,才能正确使用插件:

```ini
[plugins]
allow_loading_unsigned_plugins = tdengine-datasource
```
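注:修改 grafana.ini 后需要重启 Grafana 服务,插件才会生效(下面的命令与本文后面 DevOps 示例中使用的一致,请按实际安装方式调整):

```bash
sudo systemctl restart grafana-server.service
```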
### 使用 Grafana
...@@ -62,7 +67,6 @@ allow_loading_unsigned_plugins = taosdata-tdengine-datasource
* ALIAS BY:可设置当前查询别名。
* GENERATE SQL: 点击该按钮会自动替换相应变量,并生成最终执行的语句。

按照默认提示查询当前 TDengine 部署所在服务器指定间隔系统内存平均使用量如下:

![img](../images/connections/create_dashboard2.jpg)
...@@ -71,16 +75,15 @@ allow_loading_unsigned_plugins = taosdata-tdengine-datasource
#### 导入 Dashboard

我们提供一个 TDengine Dashboard 可以作为 TDengine 集群的监控可视化工具使用,见 [Grafana Dashboard 15146](https://grafana.com/grafana/dashboards/15146)。

点击左侧 `Import` 按钮,选择 **Grafana.com Dashboard**,将 id `15146` 填入并加载。

![img](../images/connections/import_dashboard1.jpg)

导入完成之后可看到如下效果:

![img](../images/connections/dashboard-15146.png)

## <a class="anchor" id="matlab"></a>MATLAB
......
...@@ -729,17 +729,17 @@ Query OK, 1 row(s) in set (0.001091s)
| **Operation** | **Note**                 | **Applicable Data Types**                 |
| ------------- | ------------------------ | ----------------------------------------- |
| >             | larger than              | all types except bool                     |
| <             | smaller than             | all types except bool                     |
| >=            | larger than or equal to  | all types except bool                     |
| <=            | smaller than or equal to | all types except bool                     |
| =             | equal to                 | all types                                 |
| <>            | not equal to             | all types                                 |
| is [not] null | is null or is not null   | all types                                 |
| between and   | within a certain range   | all types except bool                     |
| in            | match any value in a set | all types except first column `timestamp` |
| like          | match a wildcard string  | **`binary`** **`nchar`**                  |
| match/nmatch  | filter regex             | **`binary`** **`nchar`**                  |

1. <> 算子也可以写为 != ,请注意,这个算子不能用于数据表第一列的 timestamp 字段。
2. like 算子使用通配符字符串进行匹配检查。
...@@ -766,15 +766,10 @@ Query OK, 1 row(s) in set (0.001091s)
**使用限制**

只能针对表名(即 tbname 筛选)、binary/nchar类型标签值进行正则表达式过滤,不支持普通列的过滤。
只能在 WHERE 子句中作为过滤条件存在。
正则匹配字符串长度不能超过 128 字节。可以通过参数 *maxRegexStringLen* 设置和调整最大允许的正则匹配字符串,该参数是客户端配置参数,需要重启才能生效。
**嵌套查询支持**
可以在内层查询和外层查询中使用。<!-- REPLACE_OPEN_TO_ENTERPRISE__IN_OPERATOR_AND_UNSIGNED_INTEGER -->
<a class="anchor" id="join"></a>
### JOIN 子句
......
...@@ -185,23 +185,23 @@ TDengine 中时间戳的时区总是由客户端进行处理,而与服务端
| TCP | 6041 | 客户端与服务端之间的 RESTful 通讯。 | 随 serverPort 端口变化。 |
| TCP | 6042 | Arbitrator 的服务端口。 | 随 Arbitrator 启动参数设置变化。 |
| TCP | 6043 | TaosKeeper 监控服务端口。 | 随 TaosKeeper 启动参数设置变化。 |
| TCP | 6044 | 支持 StatsD 的数据接入端口。 | 随 taosadapter 启动参数设置变化(2.3.0.1+以上版本)。 |
| TCP | 6045 | 支持 collectd 数据接入端口。 | 随 taosadapter 启动参数设置变化(2.3.0.1+以上版本)。 |
| TCP | 6060 | 企业版内 Monitor 服务的网络端口。 | |
| UDP | 6030-6034 | 客户端与服务端之间通讯。 | 随 serverPort 端口变化。 |
| UDP | 6035-6039 | 多节点集群的节点间通讯。 | 随 serverPort 端口变化。 |

## 20. go 语言编写组件编译失败怎样解决?

新版本 TDengine 2.3.0.0 包含一个使用 go 语言开发的 taosadapter 组件,取代之前内置的 httpd ,提供包含原 httpd 功能以及支持多种其他软件(Prometheus、Telegraf、collectd、StatsD等)的数据接入功能。
使用最新 develop 分支代码编译需要先 `git submodule update --init --recursive` 下载 taosadapter 仓库代码后再编译。

目前编译方式默认自动编译 taosadapter。go 语言版本要求 1.14 以上,如果发生 go 编译错误,往往是国内访问 go mod 问题,可以通过设置 go 环境变量来解决:

```sh
go env -w GO111MODULE=on
go env -w GOPROXY=https://goproxy.cn,direct
```

如果希望继续使用之前的内置 httpd,可以关闭 taosadapter 编译,使用
`cmake .. -DBUILD_HTTP=true` 使用原来内置的 httpd。
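换句话说,可以在 cmake 阶段通过 BUILD_HTTP 选项在内置 httpd 和 taosadapter 之间切换。下面的命令仅为示意(基于本次提交中 CMakeLists 对 BUILD_HTTP 的处理逻辑,默认取值随平台而定):

```sh
# 继续使用内置 httpd,不依赖 taosadapter
cmake .. -DBUILD_HTTP=true

# 关闭内置 httpd,由 taosadapter 提供 RESTful 等数据接入能力
cmake .. -DBUILD_HTTP=false
```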
...@@ -30,12 +30,14 @@ IT 运维监测数据通常都是对时间特性比较敏感的数据,例如
## 数据链路设置
### 下载 TDengine 插件到 grafana 插件目录

```bash
1. wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.1/tdengine-datasource-3.1.1.zip
2. sudo unzip tdengine-datasource-3.1.1.zip -d /var/lib/grafana/plugins/
3. sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
4. echo -e "[plugins]\nallow_loading_unsigned_plugins = tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
5. sudo systemctl restart grafana-server.service
```

### 修改 /etc/telegraf/telegraf.conf
...@@ -61,7 +63,7 @@ sudo systemctl start telegraf
使用 Web 浏览器访问 IP:3000 登录 Grafana 界面,系统初始用户名密码为 admin/admin。
点击左侧齿轮图标并选择 Plugins,应该可以找到 TDengine data source 插件图标。
点击左侧加号图标并选择 Import,从 https://github.com/taosdata/grafanaplugin/blob/master/examples/telegraf/grafana/dashboards/telegraf-dashboard-v0.1.0.json 下载 dashboard JSON 文件后导入。之后可以看到如下界面的仪表盘:

![IT-DevOps-Solutions-telegraf-dashboard.png](../../images/IT-DevOps-Solutions-telegraf-dashboard.png)
......
...@@ -30,15 +30,17 @@ IT 运维监测数据通常都是对时间特性比较敏感的数据,例如
## 数据链路设置
### 复制 TDengine 插件到 grafana 插件目录

```bash
1. wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.1/tdengine-datasource-3.1.1.zip
2. sudo unzip tdengine-datasource-3.1.1.zip -d /var/lib/grafana/plugins/
3. sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
4. echo -e "[plugins]\nallow_loading_unsigned_plugins = tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
5. sudo systemctl restart grafana-server.service
```

### 配置 collectd
在 /etc/collectd/collectd.conf 文件中增加如下内容,其中 host 和 port 请填写 TDengine 和 Taos Adapter 配置的实际值:
```
LoadPlugin network
<Plugin network>
...@@ -49,7 +51,7 @@ sudo systemctl start collectd
```

### 配置 StatsD
在 config.js 文件中增加如下内容后启动 StatsD,其中 host 和 port 请填写 TDengine 和 Taos Adapter 配置的实际值:
```
backends 部分添加 "./backends/repeater"
repeater 部分添加 { host:'<TDengine server/cluster host>', port: <port for StatsD>}
...@@ -62,13 +64,13 @@ repeater 部分添加 { host:'<TDengine server/cluster host>', port: <port for S
#### 导入 collectd 仪表盘

从 https://github.com/taosdata/grafanaplugin/blob/master/examples/collectd/grafana/dashboards/collect-metrics-with-tdengine-v0.1.0.json 下载 dashboard json 文件,点击左侧加号图标并选择 Import,按照界面提示选择 JSON 文件导入。之后可以看到如下界面的仪表盘:

![IT-DevOps-Solutions-collectd-dashboard.png](../../images/IT-DevOps-Solutions-collectd-dashboard.png)

#### 导入 StatsD 仪表盘

从 https://github.com/taosdata/grafanaplugin/blob/master/examples/statsd/dashboards/statsd-with-tdengine-v0.1.0.json 下载 dashboard json 文件,点击左侧加号图标并选择 Import,按照界面提示导入JSON文件。之后可以看到如下界面的仪表盘:

![IT-DevOps-Solutions-statsd-dashboard.png](../../images/IT-DevOps-Solutions-statsd-dashboard.png)

## 总结
......
...@@ -77,6 +77,7 @@ TDengine is a highly efficient platform to store, query, and analyze time-series
- [Node.js Connector](/connector#nodejs): driver for connecting to TDengine server from Node.js applications
- [C# Connector](/connector#csharp): driver for connecting to TDengine server from C# applications
- [Windows Client](https://www.taosdata.com/blog/2019/07/26/514.html): compile your own Windows client, which is required by various connectors on the Windows environment
- [Rust Connector](/connector/rust): A taosc/RESTful API based TDengine client for Rust

## [Connections with Other Tools](/connections)
......
...@@ -154,7 +154,7 @@ The complete list of taosdemo command-line arguments can be displayed via taosde
```
$ taosdemo --help
-f, --file=FILE The meta file to the execution procedure. Currently, we support standard UTF-8 (without BOM) encoded files only.
-u, --user=USER The user name to use when connecting to the server.
-p, --password The password to use when connecting to the server.
-c, --config-dir=CONFIG_DIR Configuration directory.
......
# Rust Connector
![Crates.io](https://img.shields.io/crates/v/libtaos) ![Crates.io](https://img.shields.io/crates/d/libtaos)
> Note that the rust connector is under active development and the APIs will change a lot between versions. But we promise to ensure backward compatibility after version 1.0.

Thanks [@songtianyi](https://github.com/songtianyi) for [libtdengine](https://github.com/songtianyi/tdengine-rust-bindings) - a rust bindings project for [TDengine]. It's a new design for the [TDengine] rust client based on the C interface or the REST API. It will provide Rust-like APIs and all rust things (like async/stream/iterators and others).
## Dependencies
- [Rust](https://www.rust-lang.org/learn/get-started) of course.
If you use the default features, it'll depend on:

- [TDengine] Client library and headers.
- clang, because bindgen requires the clang AST library.

## Features

In-design features:

- [x] API for the C interface
- [x] REST API support by feature `rest`.
- [x] [r2d2] Pool support by feature `r2d2`
- [ ] Iterators for fields fetching
- [ ] Stream support
- [ ] Subscribe support
## Build and test
```sh
cargo build
cargo test
```
`test` will use default TDengine user and password on localhost (TDengine default).
Set variables if it's not default:
- `TEST_TAOS_IP`
- `TEST_TAOS_PORT`
- `TEST_TAOS_USER`
- `TEST_TAOS_PASS`
- `TEST_TAOS_DB`
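For example (illustrative values only; root/taosdata are the common TDengine defaults and 6030 is the default serverPort; adjust to your environment):

```sh
TEST_TAOS_IP=127.0.0.1 \
TEST_TAOS_PORT=6030 \
TEST_TAOS_USER=root \
TEST_TAOS_PASS=taosdata \
TEST_TAOS_DB=demo \
cargo test
```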
## Usage
For default C-based client API, set in Cargo.toml
```toml
[dependencies]
libtaos = "v0.3.8"
```
For r2d2 support:
```toml
[dependencies]
libtaos = { version = "*", features = ["r2d2"] }
```
For REST client:
```toml
[dependencies]
libtaos = { version = "*", features = ["rest"] }
```
There's a [demo app](https://github.com/taosdata/libtaos-rs/blob/main/examples/demo.rs) (examples/demo.rs) in the examples directory; it looks like this:
```rust
// ...
#[tokio::main]
async fn main() -> Result<(), Error> {
init();
let taos = taos_connect()?;
assert_eq!(
taos.query("drop database if exists demo").await.is_ok(),
true
);
assert_eq!(taos.query("create database demo").await.is_ok(), true);
assert_eq!(taos.query("use demo").await.is_ok(), true);
assert_eq!(
taos.query("create table m1 (ts timestamp, speed int)")
.await
.is_ok(),
true
);
for i in 0..10i32 {
assert_eq!(
taos.query(format!("insert into m1 values (now+{}s, {})", i, i).as_str())
.await
.is_ok(),
true
);
}
let rows = taos.query("select * from m1").await?;
println!("{}", rows.column_meta.into_iter().map(|col| col.name).join(","));
for row in rows.rows {
println!("{}", row.into_iter().join(","));
}
Ok(())
}
```
You can check out the experimental [bailongma-rs](https://github.com/taosdata/bailongma-rs) - a TDengine adapter for Prometheus written in Rust - as a more productive code example.
[libtaos-rs]: https://github.com/taosdata/libtaos-rs
[TDengine]: https://github.com/taosdata/TDengine
[bailongma-rs]: https://github.com/taosdata/bailongma-rs
[r2d2]: https://crates.io/crates/r2d2
\ No newline at end of file
...@@ -12,12 +12,17 @@ https://grafana.com/grafana/download.
### Configure Grafana

Download the Grafana plugin from <https://github.com/taosdata/grafanaplugin/releases/latest>.

```bash
GF_VERSION=3.1.1
wget https://github.com/taosdata/grafanaplugin/releases/download/v$GF_VERSION/tdengine-datasource-$GF_VERSION.zip
```

Taking CentOS 7.2 as an example, just unzip the downloaded package into the /var/lib/grafana/plugins directory and restart Grafana.

```bash
sudo unzip tdengine-datasource-$GF_VERSION.zip -d /var/lib/grafana/plugins/
```
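Note: Grafana 7.3+ / 8.x performs plugin signature checks and will not load the unsigned tdengine-datasource plugin by default. One way to allow it (the same commands used in the DevOps guides elsewhere in this document) is:

```bash
echo -e "[plugins]\nallow_loading_unsigned_plugins = tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
sudo systemctl restart grafana-server.service
```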
### Use Grafana
...@@ -64,15 +69,15 @@ According to the default prompt, query the average system memory usage at the sp
#### Import Dashboard

We provide an example dashboard [Grafana Dashboard 15146](https://grafana.com/grafana/dashboards/15146).

Click the `Import` button on the left panel and load the grafana id:

![img](../images/connections/import_dashboard1.jpg)

After the dashboard is imported, you can see the following:

![img](../images/connections/dashboard-15146.png)

## <a class="anchor" id="matlab"></a> MATLAB
......
...@@ -203,6 +203,9 @@ keepColumnName 1
# database name must be specified in restful interface if the following parameter is set, off by default
# httpDbNameMandatory 1

# http keep alive, default is 30 seconds
# httpKeepAlive 30000

# The following parameter is used to limit the maximum number of lines in log files.
# max number of lines per log filters
# numOfLogLines 10000000
......
...@@ -128,12 +128,12 @@ function check_link() {
function check_main_path() {
#check install main dir and all sub dir
main_dir=("" "cfg" "bin" "connector" "driver" "examples" "include" "init.d")
for i in "${main_dir[@]}";do
check_file ${install_main_dir} $i
done
if [ "$verMode" == "cluster" ]; then
nginx_main_dir=("admin" "conf" "html" "sbin" "logs")
for i in "${nginx_main_dir[@]}";do
check_file ${nginx_dir} $i
done
fi
...@@ -142,12 +142,12 @@ function check_main_path() {
function check_bin_path() {
# check install bin dir and all sub dir
bin_dir=("taos" "taosd" "taosadapter" "taosdemo" "taosdump" "remove.sh" "tarbitrator" "set_core.sh")
for i in "${bin_dir[@]}";do
check_file ${sbin_dir} $i
done
lbin_dir=("taos" "taosd" "taosadapter" "taosdemo" "taosdump" "rmtaos" "tarbitrator" "set_core")
for i in "${lbin_dir[@]}";do
check_link ${bin_link_dir}/$i
done
if [ "$verMode" == "cluster" ]; then
...@@ -171,16 +171,17 @@ function check_lib_path() {
function check_header_path() {
# check all header
header_dir=("taos.h" "taoserror.h")
for i in "${header_dir[@]}";do
check_link ${inc_link_dir}/$i
done
echo -e "Check bin path:\033[32mOK\033[0m!"
}
function check_taosadapter_config_dir() {
# check all config
check_file ${cfg_install_dir} taosadapter.toml
check_file ${cfg_install_dir} taosadapter.service
check_file ${install_main_dir}/cfg taosadapter.toml.org
echo -e "Check conf path:\033[32mOK\033[0m!"
}
...@@ -221,7 +222,7 @@ function test_TDengine() {
check_lib_path
check_header_path
check_config_dir
check_taosadapter_config_dir
check_log_path
check_data_path
result=`taos -s 'create database test ;create table test.tt(ts timestamp ,i int);insert into test.tt values(now,11);select * from test.tt' 2>&1 ||:`
......
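For context on the array-quoting change applied throughout these check functions: an unquoted ${arr[@]} lets the shell word-split each element, while "${arr[@]}" preserves elements verbatim. A minimal bash illustration (not part of the patch; the element names are made up):

    dirs=("cfg dir" "bin")
    for i in ${dirs[@]};   do echo "<$i>"; done   # prints <cfg> <dir> <bin>  -- element split
    for i in "${dirs[@]}"; do echo "<$i>"; done   # prints <cfg dir> <bin>    -- element intact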
...@@ -11,4 +11,3 @@ Maintainer: support@taosdata.com ...@@ -11,4 +11,3 @@ Maintainer: support@taosdata.com
Provides: taosdata Provides: taosdata
Homepage: http://taosdata.com Homepage: http://taosdata.com
Description: Big Data Platform Designed and Optimized for IoT. Description: Big Data Platform Designed and Optimized for IoT.
...@@ -28,8 +28,12 @@ if [ -f "${install_main_dir}/taos.cfg" ]; then ...@@ -28,8 +28,12 @@ if [ -f "${install_main_dir}/taos.cfg" ]; then
${csudo} rm -f ${install_main_dir}/cfg/taos.cfg || : ${csudo} rm -f ${install_main_dir}/cfg/taos.cfg || :
fi fi
if [ -f "${install_main_dir}/blm.toml" ]; then if [ -f "${install_main_dir}/taosadapter.toml" ]; then
${csudo} rm -f ${install_main_dir}/cfg/blm.toml || : ${csudo} rm -f ${install_main_dir}/cfg/taosadapter.toml || :
fi
if [ -f "${install_main_dir}/taosadapter.service" ]; then
${csudo} rm -f ${install_main_dir}/cfg/taosadapter.service || :
fi fi
# there must not be any libtaos.so* left over, otherwise ln -s will fail # there must not be any libtaos.so* left over, otherwise ln -s will fail
......
...@@ -25,7 +25,7 @@ else ...@@ -25,7 +25,7 @@ else
# Remove all links # Remove all links
${csudo} rm -f ${bin_link_dir}/taos || : ${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || : ${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || : ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || : ${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || : ${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${cfg_link_dir}/* || : ${csudo} rm -f ${cfg_link_dir}/* || :
......
...@@ -44,8 +44,11 @@ mkdir -p ${pkg_dir}${install_home_path}/init.d ...@@ -44,8 +44,11 @@ mkdir -p ${pkg_dir}${install_home_path}/init.d
mkdir -p ${pkg_dir}${install_home_path}/script mkdir -p ${pkg_dir}${install_home_path}/script
cp ${compile_dir}/../packaging/cfg/taos.cfg ${pkg_dir}${install_home_path}/cfg cp ${compile_dir}/../packaging/cfg/taos.cfg ${pkg_dir}${install_home_path}/cfg
if [ -f "${compile_dir}/test/cfg/blm.toml" ]; then if [ -f "${compile_dir}/test/cfg/taosadapter.toml" ]; then
cp ${compile_dir}/test/cfg/blm.toml ${pkg_dir}${install_home_path}/cfg cp ${compile_dir}/test/cfg/taosadapter.toml ${pkg_dir}${install_home_path}/cfg
fi
if [ -f "${compile_dir}/test/cfg/taosadapter.service" ]; then
cp ${compile_dir}/test/cfg/taosadapter.service ${pkg_dir}${install_home_path}/cfg ||:
fi fi
cp ${compile_dir}/../packaging/deb/taosd ${pkg_dir}${install_home_path}/init.d cp ${compile_dir}/../packaging/deb/taosd ${pkg_dir}${install_home_path}/init.d
...@@ -59,8 +62,8 @@ cp ${compile_dir}/build/bin/taosdemo ${pkg_dir}${install_home_pat ...@@ -59,8 +62,8 @@ cp ${compile_dir}/build/bin/taosdemo ${pkg_dir}${install_home_pat
cp ${compile_dir}/build/bin/taosdump ${pkg_dir}${install_home_path}/bin cp ${compile_dir}/build/bin/taosdump ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/bin/taosd ${pkg_dir}${install_home_path}/bin cp ${compile_dir}/build/bin/taosd ${pkg_dir}${install_home_path}/bin
if [ -f "${compile_dir}/build/bin/blm3" ]; then if [ -f "${compile_dir}/build/bin/taosadapter" ]; then
cp ${compile_dir}/build/bin/blm3 ${pkg_dir}${install_home_path}/bin ||: cp ${compile_dir}/build/bin/taosadapter ${pkg_dir}${install_home_path}/bin ||:
fi fi
cp ${compile_dir}/build/bin/taos ${pkg_dir}${install_home_path}/bin cp ${compile_dir}/build/bin/taos ${pkg_dir}${install_home_path}/bin
...@@ -68,19 +71,24 @@ cp ${compile_dir}/build/lib/${libfile} ${pkg_dir}${install_home_pat ...@@ -68,19 +71,24 @@ cp ${compile_dir}/build/lib/${libfile} ${pkg_dir}${install_home_pat
cp ${compile_dir}/../src/inc/taos.h ${pkg_dir}${install_home_path}/include cp ${compile_dir}/../src/inc/taos.h ${pkg_dir}${install_home_path}/include
cp ${compile_dir}/../src/inc/taoserror.h ${pkg_dir}${install_home_path}/include cp ${compile_dir}/../src/inc/taoserror.h ${pkg_dir}${install_home_path}/include
cp -r ${top_dir}/tests/examples/* ${pkg_dir}${install_home_path}/examples cp -r ${top_dir}/tests/examples/* ${pkg_dir}${install_home_path}/examples
if [ -d "${top_dir}/src/connector/grafanaplugin/dist" ]; then
cp -r ${top_dir}/src/connector/grafanaplugin/dist ${pkg_dir}${install_home_path}/connector/grafanaplugin
else
echo "grafanaplugin bundled directory not found!"
exit 1
fi
cp -r ${top_dir}/src/connector/python ${pkg_dir}${install_home_path}/connector cp -r ${top_dir}/src/connector/python ${pkg_dir}${install_home_path}/connector
cp -r ${top_dir}/src/connector/go ${pkg_dir}${install_home_path}/connector cp -r ${top_dir}/src/connector/go ${pkg_dir}${install_home_path}/connector
cp -r ${top_dir}/src/connector/nodejs ${pkg_dir}${install_home_path}/connector cp -r ${top_dir}/src/connector/nodejs ${pkg_dir}${install_home_path}/connector
cp ${compile_dir}/build/lib/taos-jdbcdriver*.* ${pkg_dir}${install_home_path}/connector ||: cp ${compile_dir}/build/lib/taos-jdbcdriver*.* ${pkg_dir}${install_home_path}/connector ||:
install_user_local_path="/usr/local"
if [ -f ${compile_dir}/build/lib/libavro.so.23.0.0 ]; then
mkdir -p ${pkg_dir}${install_user_local_path}/lib
cp ${compile_dir}/build/lib/libavro.so.23.0.0 ${pkg_dir}${install_user_local_path}/lib/
ln -sf libavro.so.23.0.0 ${pkg_dir}${install_user_local_path}/lib/libavro.so.23
ln -sf libavro.so.23 ${pkg_dir}${install_user_local_path}/lib/libavro.so
fi
if [ -f ${compile_dir}/build/lib/libavro.a ]; then
cp ${compile_dir}/build/lib/libavro.a ${pkg_dir}${install_user_local_path}/lib/
fi
if [ -f ${compile_dir}/build/bin/jemalloc-config ]; then if [ -f ${compile_dir}/build/bin/jemalloc-config ]; then
install_user_local_path="/usr/local"
mkdir -p ${pkg_dir}${install_user_local_path}/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3} mkdir -p ${pkg_dir}${install_user_local_path}/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3}
cp ${compile_dir}/build/bin/jemalloc-config ${pkg_dir}${install_user_local_path}/bin/ cp ${compile_dir}/build/bin/jemalloc-config ${pkg_dir}${install_user_local_path}/bin/
if [ -f ${compile_dir}/build/bin/jemalloc.sh ]; then if [ -f ${compile_dir}/build/bin/jemalloc.sh ]; then
...@@ -120,6 +128,10 @@ chmod 755 ${pkg_dir}/DEBIAN/* ...@@ -120,6 +128,10 @@ chmod 755 ${pkg_dir}/DEBIAN/*
debver="Version: "$tdengine_ver debver="Version: "$tdengine_ver
sed -i "2c$debver" ${pkg_dir}/DEBIAN/control sed -i "2c$debver" ${pkg_dir}/DEBIAN/control
if [ -f ${compile_dir}/build/lib/libavro.so.23.0.0 ]; then
sed -i.bak "s/#Depends: no/Depends: libjansson4, libsnappy1v5/g" ${pkg_dir}/DEBIAN/control
fi
#get taos version, then set deb name #get taos version, then set deb name
...@@ -151,4 +163,3 @@ cp ${pkg_dir}/*.deb ${output_dir} ...@@ -151,4 +163,3 @@ cp ${pkg_dir}/*.deb ${output_dir}
# clean temp dir # clean temp dir
rm -rf ${pkg_dir} rm -rf ${pkg_dir}
#!/bin/bash #!/bin/bash
# #
# Modified from original source: Elastic Search # Modified from original source: Elastic Search
# https://github.com/elasticsearch/elasticsearch # https://github.com/elasticsearch/elasticsearch
...@@ -25,7 +25,7 @@ GROUP="root" ...@@ -25,7 +25,7 @@ GROUP="root"
DAEMON="/usr/local/taos/bin/taosd" DAEMON="/usr/local/taos/bin/taosd"
DAEMON_OPTS="" DAEMON_OPTS=""
HTTPD_NAME="blm3" HTTPD_NAME="taosadapter"
DAEMON_HTTPD_NAME=$HTTPD_NAME DAEMON_HTTPD_NAME=$HTTPD_NAME
DAEMON_HTTPD="/usr/local/taos/bin/$HTTPD_NAME" DAEMON_HTTPD="/usr/local/taos/bin/$HTTPD_NAME"
......
...@@ -45,24 +45,32 @@ echo "version=${version}" ...@@ -45,24 +45,32 @@ echo "version=${version}"
#docker manifest rm tdengine/tdengine:${version} #docker manifest rm tdengine/tdengine:${version}
if [ "$verType" == "beta" ]; then if [ "$verType" == "beta" ]; then
docker manifest inspect tdengine/tdengine-beta:latest docker manifest inspect tdengine/tdengine-beta:latest
docker manifest inspect tdengine/tdengine-beta:${version}
docker manifest create -a tdengine/tdengine-beta:${version} tdengine/tdengine-amd64-beta:${version} tdengine/tdengine-aarch64-beta:${version} tdengine/tdengine-aarch32-beta:${version}
docker manifest create -a tdengine/tdengine-beta:latest tdengine/tdengine-amd64-beta:latest tdengine/tdengine-aarch64-beta:latest tdengine/tdengine-aarch32-beta:latest docker manifest create -a tdengine/tdengine-beta:latest tdengine/tdengine-amd64-beta:latest tdengine/tdengine-aarch64-beta:latest tdengine/tdengine-aarch32-beta:latest
sleep 30
docker manifest rm tdengine/tdengine-beta:${version}
docker manifest rm tdengine/tdengine-beta:latest docker manifest rm tdengine/tdengine-beta:latest
docker manifest create -a tdengine/tdengine-beta:${version} tdengine/tdengine-amd64-beta:${version} tdengine/tdengine-aarch64-beta:${version} tdengine/tdengine-aarch32-beta:${version} docker manifest create -a tdengine/tdengine-beta:${version} tdengine/tdengine-amd64-beta:${version} tdengine/tdengine-aarch64-beta:${version} tdengine/tdengine-aarch32-beta:${version}
docker manifest create -a tdengine/tdengine-beta:latest tdengine/tdengine-amd64-beta:latest tdengine/tdengine-aarch64-beta:latest tdengine/tdengine-aarch32-beta:latest docker manifest create -a tdengine/tdengine-beta:latest tdengine/tdengine-amd64-beta:latest tdengine/tdengine-aarch64-beta:latest tdengine/tdengine-aarch32-beta:latest
docker login -u tdengine -p ${passWord} #replace the docker registry username and password docker login -u tdengine -p ${passWord} #replace the docker registry username and password
docker manifest push tdengine/tdengine-beta:latest
docker manifest push tdengine/tdengine-beta:${version} docker manifest push tdengine/tdengine-beta:${version}
docker manifest push tdengine/tdengine-beta:latest
elif [ "$verType" == "stable" ]; then elif [ "$verType" == "stable" ]; then
docker manifest inspect tdengine/tdengine:latest docker manifest inspect tdengine/tdengine:latest
docker manifest inspect tdengine/tdengine:${version}
docker manifest create -a tdengine/tdengine:${version} tdengine/tdengine-amd64:${version} tdengine/tdengine-aarch64:${version} tdengine/tdengine-aarch32:${version}
docker manifest create -a tdengine/tdengine:latest tdengine/tdengine-amd64:latest tdengine/tdengine-aarch64:latest tdengine/tdengine-aarch32:latest docker manifest create -a tdengine/tdengine:latest tdengine/tdengine-amd64:latest tdengine/tdengine-aarch64:latest tdengine/tdengine-aarch32:latest
sleep 30
docker manifest rm tdengine/tdengine:latest docker manifest rm tdengine/tdengine:latest
docker manifest rm tdengine/tdengine:${version}
docker manifest inspect tdengine/tdengine:latest
docker manifest inspect tdengine/tdengine:${version}
docker manifest create -a tdengine/tdengine:${version} tdengine/tdengine-amd64:${version} tdengine/tdengine-aarch64:${version} tdengine/tdengine-aarch32:${version} docker manifest create -a tdengine/tdengine:${version} tdengine/tdengine-amd64:${version} tdengine/tdengine-aarch64:${version} tdengine/tdengine-aarch32:${version}
docker manifest create -a tdengine/tdengine:latest tdengine/tdengine-amd64:latest tdengine/tdengine-aarch64:latest tdengine/tdengine-aarch32:latest docker manifest create -a tdengine/tdengine:latest tdengine/tdengine-amd64:latest tdengine/tdengine-aarch64:latest tdengine/tdengine-aarch32:latest
docker login -u tdengine -p ${passWord} #replace the docker registry username and password docker login -u tdengine -p ${passWord} #replace the docker registry username and password
docker manifest push tdengine/tdengine:latest
docker manifest push tdengine/tdengine:${version} docker manifest push tdengine/tdengine:${version}
docker manifest push tdengine/tdengine:latest
else else
echo "unknow verType, nor stabel or beta" echo "unknow verType, nor stabel or beta"
exit 1 exit 1
......
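For reference, the essential per-tag flow the revised push script performs is: remove any stale local manifest, recreate it from the three per-arch images, then push ${version} before latest. A sketch for one tag (the || : guard is illustrative and not part of the patch):

    docker manifest rm tdengine/tdengine:${version} || :
    docker manifest create -a tdengine/tdengine:${version} \
        tdengine/tdengine-amd64:${version} \
        tdengine/tdengine-aarch64:${version} \
        tdengine/tdengine-aarch32:${version}
    docker manifest push tdengine/tdengine:${version}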
...@@ -151,7 +151,7 @@ function vercomp () { ...@@ -151,7 +151,7 @@ function vercomp () {
} }
# 1. check version information # 1. check version information
if (( ! is_valid_version $verNumber ) || ( ! is_valid_version $verNumberComp ) || [[ "$(vercomp $verNumber $verNumberComp)" == '2' ]]); then if ( ( ! is_valid_version $verNumber ) || ( ! is_valid_version $verNumberComp ) || [[ "$(vercomp $verNumber $verNumberComp)" == '2' ]] ); then
echo "please enter correct version" echo "please enter correct version"
exit 0 exit 0
fi fi
...@@ -213,7 +213,7 @@ else ...@@ -213,7 +213,7 @@ else
exit 1 exit 1
fi fi
make -j8 make -j8 && ${csudo} make install
cd ${curr_dir} cd ${curr_dir}
......
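For context, the guard above aborts when either version string is invalid or when verNumber is lower than verNumberComp; assuming the usual vercomp convention (prints 0 for equal, 1 for greater, 2 for less -- the function body is elided here), for example:

    vercomp 2.3.0.0 2.3.0.0   # 0 -> accepted
    vercomp 2.3.1.0 2.3.0.0   # 1 -> accepted
    vercomp 2.2.0.0 2.3.0.0   # 2 -> rejected with "please enter correct version"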
...@@ -32,20 +32,20 @@ if command -v sudo > /dev/null; then ...@@ -32,20 +32,20 @@ if command -v sudo > /dev/null; then
fi fi
function cp_rpm_package() { function cp_rpm_package() {
local cur_dir local cur_dir
cd $1 cd $1
cur_dir=$(pwd) cur_dir=$(pwd)
for dirlist in $(ls ${cur_dir}); do for dirlist in "$(ls ${cur_dir})"; do
if test -d ${dirlist}; then if test -d ${dirlist}; then
cd ${dirlist} cd ${dirlist}
cp_rpm_package ${cur_dir}/${dirlist} cp_rpm_package ${cur_dir}/${dirlist}
cd .. cd ..
fi fi
if test -e ${dirlist}; then if test -e ${dirlist}; then
cp ${cur_dir}/${dirlist} ${output_dir}/TDengine-${tdengine_ver}.rpm cp ${cur_dir}/${dirlist} ${output_dir}/TDengine-${tdengine_ver}.rpm
fi fi
done done
} }
if [ -d ${pkg_dir} ]; then if [ -d ${pkg_dir} ]; then
...@@ -56,6 +56,10 @@ cd ${pkg_dir} ...@@ -56,6 +56,10 @@ cd ${pkg_dir}
${csudo} mkdir -p BUILD BUILDROOT RPMS SOURCES SPECS SRPMS ${csudo} mkdir -p BUILD BUILDROOT RPMS SOURCES SPECS SRPMS
if [ -f ${compile_dir}/build/lib/libavro.so.23.0.0 ]; then
sed -i.bak 's/#Requires:/Requires: jansson snappy/g' ${spec_file}
fi
${csudo} rpmbuild --define="_version ${tdengine_ver}" --define="_topdir ${pkg_dir}" --define="_compiledir ${compile_dir}" -bb ${spec_file} ${csudo} rpmbuild --define="_version ${tdengine_ver}" --define="_topdir ${pkg_dir}" --define="_compiledir ${compile_dir}" -bb ${spec_file}
# copy rpm package to output_dir, and modify package name, then clean temp dir # copy rpm package to output_dir, and modify package name, then clean temp dir
......
...@@ -54,8 +54,11 @@ mkdir -p %{buildroot}%{homepath}/init.d ...@@ -54,8 +54,11 @@ mkdir -p %{buildroot}%{homepath}/init.d
mkdir -p %{buildroot}%{homepath}/script mkdir -p %{buildroot}%{homepath}/script
cp %{_compiledir}/../packaging/cfg/taos.cfg %{buildroot}%{homepath}/cfg cp %{_compiledir}/../packaging/cfg/taos.cfg %{buildroot}%{homepath}/cfg
if [ -f %{_compiledir}/test/cfg/blm.toml ]; then if [ -f %{_compiledir}/test/cfg/taosadapter.toml ]; then
cp %{_compiledir}/test/cfg/blm.toml %{buildroot}%{homepath}/cfg cp %{_compiledir}/test/cfg/taosadapter.toml %{buildroot}%{homepath}/cfg
fi
if [ -f %{_compiledir}/test/cfg/taosadapter.service ]; then
cp %{_compiledir}/test/cfg/taosadapter.service %{buildroot}%{homepath}/cfg
fi fi
cp %{_compiledir}/../packaging/rpm/taosd %{buildroot}%{homepath}/init.d cp %{_compiledir}/../packaging/rpm/taosd %{buildroot}%{homepath}/init.d
cp %{_compiledir}/../packaging/tools/post.sh %{buildroot}%{homepath}/script cp %{_compiledir}/../packaging/tools/post.sh %{buildroot}%{homepath}/script
...@@ -65,26 +68,28 @@ cp %{_compiledir}/../packaging/tools/set_core.sh %{buildroot}%{homepath}/bin ...@@ -65,26 +68,28 @@ cp %{_compiledir}/../packaging/tools/set_core.sh %{buildroot}%{homepath}/bin
cp %{_compiledir}/../packaging/tools/taosd-dump-cfg.gdb %{buildroot}%{homepath}/bin cp %{_compiledir}/../packaging/tools/taosd-dump-cfg.gdb %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taos %{buildroot}%{homepath}/bin cp %{_compiledir}/build/bin/taos %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taosd %{buildroot}%{homepath}/bin cp %{_compiledir}/build/bin/taosd %{buildroot}%{homepath}/bin
if [ -f %{_compiledir}/build/bin/blm3 ]; then if [ -f %{_compiledir}/build/bin/taosadapter ]; then
cp %{_compiledir}/build/bin/blm3 %{buildroot}%{homepath}/bin ||: cp %{_compiledir}/build/bin/taosadapter %{buildroot}%{homepath}/bin ||:
fi fi
cp %{_compiledir}/build/bin/taosdemo %{buildroot}%{homepath}/bin cp %{_compiledir}/build/bin/taosdemo %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taosdump %{buildroot}%{homepath}/bin cp %{_compiledir}/build/bin/taosdump %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/lib/${libfile} %{buildroot}%{homepath}/driver cp %{_compiledir}/build/lib/${libfile} %{buildroot}%{homepath}/driver
cp %{_compiledir}/../src/inc/taos.h %{buildroot}%{homepath}/include cp %{_compiledir}/../src/inc/taos.h %{buildroot}%{homepath}/include
cp %{_compiledir}/../src/inc/taoserror.h %{buildroot}%{homepath}/include cp %{_compiledir}/../src/inc/taoserror.h %{buildroot}%{homepath}/include
if [ -d %{_compiledir}/../src/connector/grafanaplugin/dist ]; then
cp -r %{_compiledir}/../src/connector/grafanaplugin/dist %{buildroot}%{homepath}/connector/grafanaplugin
else
echo grafanaplugin bundled directory not found!
exit 1
fi
cp -r %{_compiledir}/../src/connector/python %{buildroot}%{homepath}/connector cp -r %{_compiledir}/../src/connector/python %{buildroot}%{homepath}/connector
cp -r %{_compiledir}/../src/connector/go %{buildroot}%{homepath}/connector cp -r %{_compiledir}/../src/connector/go %{buildroot}%{homepath}/connector
cp -r %{_compiledir}/../src/connector/nodejs %{buildroot}%{homepath}/connector cp -r %{_compiledir}/../src/connector/nodejs %{buildroot}%{homepath}/connector
cp %{_compiledir}/build/lib/taos-jdbcdriver*.* %{buildroot}%{homepath}/connector ||: cp %{_compiledir}/build/lib/taos-jdbcdriver*.* %{buildroot}%{homepath}/connector ||:
cp -r %{_compiledir}/../tests/examples/* %{buildroot}%{homepath}/examples cp -r %{_compiledir}/../tests/examples/* %{buildroot}%{homepath}/examples
if [ -f %{_compiledir}/build/lib/libavro.so.23.0.0 ]; then
cp %{_compiledir}/build/lib/libavro.so.23.0.0 %{buildroot}%{homepath}/driver
ln -sf libavro.so.23.0.0 %{buildroot}%{homepath}/driver/libavro.so.23
ln -sf libavro.so.23 %{buildroot}%{homepath}/driver/libavro.so
fi
if [ -f %{_compiledir}/build/lib/libavro.a ]; then
cp %{_compiledir}/build/lib/libavro.a %{buildroot}%{homepath}/driver
fi
if [ -f %{_compiledir}/build/bin/jemalloc-config ]; then if [ -f %{_compiledir}/build/bin/jemalloc-config ]; then
mkdir -p %{buildroot}%{userlocalpath}/bin mkdir -p %{buildroot}%{userlocalpath}/bin
...@@ -151,14 +156,14 @@ if pidof taosd &> /dev/null; then ...@@ -151,14 +156,14 @@ if pidof taosd &> /dev/null; then
echo "Stop taosd service success!" echo "Stop taosd service success!"
sleep 1 sleep 1
fi fi
# if taos.cfg already softlink, remove it # if taos.cfg already exists, remove it
if [ -f %{cfg_install_dir}/taos.cfg ]; then if [ -f %{cfg_install_dir}/taos.cfg ]; then
${csudo} rm -f %{homepath}/cfg/taos.cfg || : ${csudo} rm -f %{cfg_install_dir}/cfg/taos.cfg || :
fi fi
# if blm.toml already softlink, remove it # if taosadapter.toml already exists, remove it
if [ -f %{cfg_install_dir}/blm.toml ]; then if [ -f %{cfg_install_dir}/taosadapter.toml ]; then
${csudo} rm -f %{homepath}/cfg/blm.toml || : ${csudo} rm -f %{cfg_install_dir}/cfg/taosadapter.toml || :
fi fi
# there must not be any libtaos.so* left over, otherwise ln -s will fail # there must not be any libtaos.so* left over, otherwise ln -s will fail
...@@ -199,7 +204,7 @@ if [ $1 -eq 0 ];then ...@@ -199,7 +204,7 @@ if [ $1 -eq 0 ];then
# Remove all links # Remove all links
${csudo} rm -f ${bin_link_dir}/taos || : ${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || : ${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || : ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || : ${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || : ${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${cfg_link_dir}/* || : ${csudo} rm -f ${cfg_link_dir}/* || :
......
# /bin/bash #!/bin/bash
# #
CSI=$(echo -e "\033[") CSI=$(echo -e "\033[")
CRED="${CSI}1;31m" CRED="${CSI}1;31m"
......
...@@ -185,7 +185,7 @@ function install_bin() { ...@@ -185,7 +185,7 @@ function install_bin() {
# Remove links # Remove links
${csudo} rm -f ${bin_link_dir}/taos || : ${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || : ${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || : ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || : ${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || : ${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/rmtaos || : ${csudo} rm -f ${bin_link_dir}/rmtaos || :
...@@ -197,7 +197,7 @@ function install_bin() { ...@@ -197,7 +197,7 @@ function install_bin() {
#Make link #Make link
[ -x ${install_main_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || : [ -x ${install_main_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || :
[ -x ${install_main_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || : [ -x ${install_main_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || :
[ -x ${install_main_dir}/bin/blm3 ] && ${csudo} ln -s ${install_main_dir}/bin/blm3 ${bin_link_dir}/blm3 || : [ -x ${install_main_dir}/bin/taosadapter ] && ${csudo} ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter || :
[ -x ${install_main_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || : [ -x ${install_main_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || :
[ -x ${install_main_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || : [ -x ${install_main_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || :
[ -x ${install_main_dir}/bin/remove.sh ] && ${csudo} ln -s ${install_main_dir}/bin/remove.sh ${bin_link_dir}/rmtaos || : [ -x ${install_main_dir}/bin/remove.sh ] && ${csudo} ln -s ${install_main_dir}/bin/remove.sh ${bin_link_dir}/rmtaos || :
...@@ -303,7 +303,7 @@ function add_newHostname_to_hosts() { ...@@ -303,7 +303,7 @@ function add_newHostname_to_hosts() {
iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}') iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
arr=($iphost) arr=($iphost)
IFS="$OLD_IFS" IFS="$OLD_IFS"
for s in ${arr[@]} for s in "${arr[@]}"
do do
if [[ "$s" == "$localIp" ]]; then if [[ "$s" == "$localIp" ]]; then
return return
...@@ -358,7 +358,7 @@ function is_correct_ipaddr() { ...@@ -358,7 +358,7 @@ function is_correct_ipaddr() {
IFS=" " IFS=" "
arr=($iplist) arr=($iplist)
IFS="$OLD_IFS" IFS="$OLD_IFS"
for s in ${arr[@]} for s in "${arr[@]}"
do do
if [[ "$s" == "$newIp" ]]; then if [[ "$s" == "$newIp" ]]; then
return 0 return 0
...@@ -447,18 +447,18 @@ function local_fqdn_check() { ...@@ -447,18 +447,18 @@ function local_fqdn_check() {
fi fi
} }
function install_blm3_config() { function install_taosadapter_config() {
if [ ! -f "${cfg_install_dir}/blm.toml" ]; then if [ ! -f "${cfg_install_dir}/taosadapter.toml" ]; then
${csudo} mkdir -p ${cfg_install_dir} ${csudo} mkdir -p ${cfg_install_dir}
[ -f ${script_dir}/cfg/blm.toml ] && ${csudo} cp ${script_dir}/cfg/blm.toml ${cfg_install_dir} [ -f ${script_dir}/cfg/taosadapter.toml ] && ${csudo} cp ${script_dir}/cfg/taosadapter.toml ${cfg_install_dir}
[ -f ${cfg_install_dir}/blm.toml ] && ${csudo} chmod 644 ${cfg_install_dir}/blm.toml [ -f ${cfg_install_dir}/taosadapter.toml ] && ${csudo} chmod 644 ${cfg_install_dir}/taosadapter.toml
fi fi
[ -f ${script_dir}/cfg/blm.toml ] && [ -f ${script_dir}/cfg/taosadapter.toml ] &&
${csudo} cp -f ${script_dir}/cfg/blm.toml ${install_main_dir}/cfg/blm.toml.org ${csudo} cp -f ${script_dir}/cfg/taosadapter.toml ${cfg_install_dir}/taosadapter.toml.new
[ -f ${cfg_install_dir}/blm.toml ] && [ -f ${cfg_install_dir}/taosadapter.toml ] &&
${csudo} ln -s ${cfg_install_dir}/blm.toml ${install_main_dir}/cfg/blm.toml ${csudo} ln -s ${cfg_install_dir}/taosadapter.toml ${install_main_dir}/cfg/taosadapter.toml
[ ! -z $1 ] && return 0 || : # only install client [ ! -z $1 ] && return 0 || : # only install client
...@@ -473,7 +473,7 @@ function install_config() { ...@@ -473,7 +473,7 @@ function install_config() {
${csudo} chmod 644 ${cfg_install_dir}/* ${csudo} chmod 644 ${cfg_install_dir}/*
fi fi
${csudo} cp -f ${script_dir}/cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org ${csudo} cp -f ${script_dir}/cfg/taos.cfg ${cfg_install_dir}/taos.cfg.new
${csudo} ln -s ${cfg_install_dir}/taos.cfg ${install_main_dir}/cfg ${csudo} ln -s ${cfg_install_dir}/taos.cfg ${install_main_dir}/cfg
[ ! -z $1 ] && return 0 || : # only install client [ ! -z $1 ] && return 0 || : # only install client
...@@ -679,8 +679,8 @@ function install_service_on_systemd() { ...@@ -679,8 +679,8 @@ function install_service_on_systemd() {
taosd_service_config="${service_config_dir}/taosd.service" taosd_service_config="${service_config_dir}/taosd.service"
${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}" ${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}" ${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'After=network-online.target' >> ${taosd_service_config}" ${csudo} bash -c "echo 'After=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Wants=network-online.target' >> ${taosd_service_config}" ${csudo} bash -c "echo 'Wants=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo >> ${taosd_service_config}" ${csudo} bash -c "echo >> ${taosd_service_config}"
${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}" ${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}" ${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}"
...@@ -756,6 +756,11 @@ function install_service_on_systemd() { ...@@ -756,6 +756,11 @@ function install_service_on_systemd() {
fi fi
} }
function install_taosadapter_service() {
[ -f ${script_dir}/cfg/taosadapter.service ] &&\
${csudo} cp ${script_dir}/cfg/taosadapter.service ${service_config_dir}/
}
function install_service() { function install_service() {
if ((${service_mod}==0)); then if ((${service_mod}==0)); then
install_service_on_systemd install_service_on_systemd
...@@ -878,8 +883,9 @@ function update_TDengine() { ...@@ -878,8 +883,9 @@ function update_TDengine() {
if [ -z $1 ]; then if [ -z $1 ]; then
install_bin install_bin
install_service install_service
install_taosadapter_service
install_config install_config
install_blm3_config install_taosadapter_config
openresty_work=false openresty_work=false
if [ "$verMode" == "cluster" ]; then if [ "$verMode" == "cluster" ]; then
...@@ -959,6 +965,7 @@ function install_TDengine() { ...@@ -959,6 +965,7 @@ function install_TDengine() {
# For installing new # For installing new
install_bin install_bin
install_service install_service
install_taosadapter_service
openresty_work=false openresty_work=false
if [ "$verMode" == "cluster" ]; then if [ "$verMode" == "cluster" ]; then
......
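Taken together, the echo lines above now generate a taosd unit that is ordered after taosadapter. A sketch of the resulting /etc/systemd/system/taosd.service header (the remaining [Service] and [Install] entries are unchanged and omitted here):

    [Unit]
    Description=TDengine server service
    After=network-online.target taosadapter.service
    Wants=network-online.target taosadapter.service

    [Service]
    Type=simple
    ...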
...@@ -287,7 +287,7 @@ function add_newHostname_to_hosts() { ...@@ -287,7 +287,7 @@ function add_newHostname_to_hosts() {
iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}') iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
arr=($iphost) arr=($iphost)
IFS="$OLD_IFS" IFS="$OLD_IFS"
for s in ${arr[@]} for s in "${arr[@]}"
do do
if [[ "$s" == "$localIp" ]]; then if [[ "$s" == "$localIp" ]]; then
return return
...@@ -342,7 +342,7 @@ function is_correct_ipaddr() { ...@@ -342,7 +342,7 @@ function is_correct_ipaddr() {
IFS=" " IFS=" "
arr=($iplist) arr=($iplist)
IFS="$OLD_IFS" IFS="$OLD_IFS"
for s in ${arr[@]} for s in "${arr[@]}"
do do
if [[ "$s" == "$newIp" ]]; then if [[ "$s" == "$newIp" ]]; then
return 0 return 0
......
...@@ -278,7 +278,7 @@ function add_newHostname_to_hosts() { ...@@ -278,7 +278,7 @@ function add_newHostname_to_hosts() {
iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}') iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
arr=($iphost) arr=($iphost)
IFS="$OLD_IFS" IFS="$OLD_IFS"
for s in ${arr[@]} for s in "${arr[@]}"
do do
if [[ "$s" == "$localIp" ]]; then if [[ "$s" == "$localIp" ]]; then
return return
...@@ -305,7 +305,7 @@ function set_hostname() { ...@@ -305,7 +305,7 @@ function set_hostname() {
echo "set hostname fail!" echo "set hostname fail!"
return return
fi fi
#ubuntu/centos /etc/hostname #ubuntu/centos /etc/hostname
if [[ -e /etc/hostname ]]; then if [[ -e /etc/hostname ]]; then
${csudo} echo $newHostname > /etc/hostname ||: ${csudo} echo $newHostname > /etc/hostname ||:
...@@ -330,7 +330,7 @@ function is_correct_ipaddr() { ...@@ -330,7 +330,7 @@ function is_correct_ipaddr() {
IFS=" " IFS=" "
arr=($iplist) arr=($iplist)
IFS="$OLD_IFS" IFS="$OLD_IFS"
for s in ${arr[@]} for s in "${arr[@]}"
do do
if [[ "$s" == "$newIp" ]]; then if [[ "$s" == "$newIp" ]]; then
return 0 return 0
......
...@@ -287,7 +287,7 @@ function add_newHostname_to_hosts() { ...@@ -287,7 +287,7 @@ function add_newHostname_to_hosts() {
iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}') iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
arr=($iphost) arr=($iphost)
IFS="$OLD_IFS" IFS="$OLD_IFS"
for s in ${arr[@]} for s in "${arr[@]}"
do do
if [[ "$s" == "$localIp" ]]; then if [[ "$s" == "$localIp" ]]; then
return return
...@@ -342,7 +342,7 @@ function is_correct_ipaddr() { ...@@ -342,7 +342,7 @@ function is_correct_ipaddr() {
IFS=" " IFS=" "
arr=($iplist) arr=($iplist)
IFS="$OLD_IFS" IFS="$OLD_IFS"
for s in ${arr[@]} for s in "${arr[@]}"
do do
if [[ "$s" == "$newIp" ]]; then if [[ "$s" == "$newIp" ]]; then
return 0 return 0
......
...@@ -114,8 +114,8 @@ if [ "$osType" != "Darwin" ]; then ...@@ -114,8 +114,8 @@ if [ "$osType" != "Darwin" ]; then
fi fi
fi fi
function kill_blm3() { function kill_taosadapter() {
pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}') pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then if [ -n "$pid" ]; then
${csudo} kill -9 $pid || : ${csudo} kill -9 $pid || :
fi fi
...@@ -156,7 +156,7 @@ function install_bin() { ...@@ -156,7 +156,7 @@ function install_bin() {
# Remove links # Remove links
${csudo} rm -f ${bin_link_dir}/taos || : ${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || : ${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || : ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || : ${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || : ${csudo} rm -f ${bin_link_dir}/taosdump || :
...@@ -176,7 +176,7 @@ function install_bin() { ...@@ -176,7 +176,7 @@ function install_bin() {
#Make link #Make link
[ -x ${install_main_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || : [ -x ${install_main_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || :
[ -x ${install_main_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || : [ -x ${install_main_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || :
[ -x ${install_main_dir}/bin/blm3 ] && ${csudo} ln -s ${install_main_dir}/bin/blm3 ${bin_link_dir}/blm3 || : [ -x ${install_main_dir}/bin/taosadapter ] && ${csudo} ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter || :
[ -x ${install_main_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || : [ -x ${install_main_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || :
[ -x ${install_main_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || : [ -x ${install_main_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || :
[ -x ${install_main_dir}/bin/perfMonitor ] && ${csudo} ln -s ${install_main_dir}/bin/perfMonitor ${bin_link_dir}/perfMonitor || : [ -x ${install_main_dir}/bin/perfMonitor ] && ${csudo} ln -s ${install_main_dir}/bin/perfMonitor ${bin_link_dir}/perfMonitor || :
...@@ -191,7 +191,7 @@ function install_bin() { ...@@ -191,7 +191,7 @@ function install_bin() {
#Make link #Make link
[ -x ${install_main_dir}/bin/taos ] || [ -x ${install_main_2_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || ${csudo} ln -s ${install_main_2_dir}/bin/taos || : [ -x ${install_main_dir}/bin/taos ] || [ -x ${install_main_2_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || ${csudo} ln -s ${install_main_2_dir}/bin/taos || :
[ -x ${install_main_dir}/bin/taosd ] || [ -x ${install_main_2_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || ${csudo} ln -s ${install_main_2_dir}/bin/taosd || : [ -x ${install_main_dir}/bin/taosd ] || [ -x ${install_main_2_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || ${csudo} ln -s ${install_main_2_dir}/bin/taosd || :
[ -x ${install_main_dir}/bin/blm3 ] || [ -x ${install_main_2_dir}/bin/blm3 ] && ${csudo} ln -s ${install_main_dir}/bin/blm3 ${bin_link_dir}/blm3 || ${csudo} ln -s ${install_main_2_dir}/bin/blm3 || : [ -x ${install_main_dir}/bin/taosadapter ] || [ -x ${install_main_2_dir}/bin/taosadapter ] && ${csudo} ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter || ${csudo} ln -s ${install_main_2_dir}/bin/taosadapter || :
[ -x ${install_main_dir}/bin/taosdump ] || [ -x ${install_main_2_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || ln -s ${install_main_2_dir}/bin/taosdump ${bin_link_dir}/taosdump || : [ -x ${install_main_dir}/bin/taosdump ] || [ -x ${install_main_2_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || ln -s ${install_main_2_dir}/bin/taosdump ${bin_link_dir}/taosdump || :
[ -x ${install_main_dir}/bin/taosdemo ] || [ -x ${install_main_2_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || ln -s ${install_main_2_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || : [ -x ${install_main_dir}/bin/taosdemo ] || [ -x ${install_main_2_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || ln -s ${install_main_2_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || :
fi fi
...@@ -212,7 +212,8 @@ function install_jemalloc() { ...@@ -212,7 +212,8 @@ function install_jemalloc() {
fi fi
if [ -f "${binary_dir}/build/include/jemalloc/jemalloc.h" ]; then if [ -f "${binary_dir}/build/include/jemalloc/jemalloc.h" ]; then
/usr/bin/install -c -d /usr/local/include/jemalloc /usr/bin/install -c -d /usr/local/include/jemalloc
/usr/bin/install -c -m 644 ${binary_dir}/build/include/jemalloc/jemalloc.h /usr/local/include/jemalloc /usr/bin/install -c -m 644 ${binary_dir}/build/include/jemalloc/jemalloc.h\
/usr/local/include/jemalloc
fi fi
if [ -f "${binary_dir}/build/lib/libjemalloc.so.2" ]; then if [ -f "${binary_dir}/build/lib/libjemalloc.so.2" ]; then
/usr/bin/install -c -d /usr/local/lib /usr/bin/install -c -d /usr/local/lib
...@@ -225,23 +226,47 @@ function install_jemalloc() { ...@@ -225,23 +226,47 @@ function install_jemalloc() {
/usr/bin/install -c -m 755 ${binary_dir}/build/lib/libjemalloc_pic.a /usr/local/lib /usr/bin/install -c -m 755 ${binary_dir}/build/lib/libjemalloc_pic.a /usr/local/lib
if [ -f "${binary_dir}/build/lib/pkgconfig/jemalloc.pc" ]; then if [ -f "${binary_dir}/build/lib/pkgconfig/jemalloc.pc" ]; then
/usr/bin/install -c -d /usr/local/lib/pkgconfig /usr/bin/install -c -d /usr/local/lib/pkgconfig
/usr/bin/install -c -m 644 ${binary_dir}/build/lib/pkgconfig/jemalloc.pc /usr/local/lib/pkgconfig /usr/bin/install -c -m 644 ${binary_dir}/build/lib/pkgconfig/jemalloc.pc\
/usr/local/lib/pkgconfig
fi
if [ -d /etc/ld.so.conf.d ]; then
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
fi fi
fi fi
if [ -f "${binary_dir}/build/share/doc/jemalloc/jemalloc.html" ]; then if [ -f "${binary_dir}/build/share/doc/jemalloc/jemalloc.html" ]; then
/usr/bin/install -c -d /usr/local/share/doc/jemalloc /usr/bin/install -c -d /usr/local/share/doc/jemalloc
/usr/bin/install -c -m 644 ${binary_dir}/build/share/doc/jemalloc/jemalloc.html /usr/local/share/doc/jemalloc /usr/bin/install -c -m 644 ${binary_dir}/build/share/doc/jemalloc/jemalloc.html\
/usr/local/share/doc/jemalloc
fi fi
if [ -f "${binary_dir}/build/share/man/man3/jemalloc.3" ]; then if [ -f "${binary_dir}/build/share/man/man3/jemalloc.3" ]; then
/usr/bin/install -c -d /usr/local/share/man/man3 /usr/bin/install -c -d /usr/local/share/man/man3
/usr/bin/install -c -m 644 ${binary_dir}/build/share/man/man3/jemalloc.3 /usr/local/share/man/man3 /usr/bin/install -c -m 644 ${binary_dir}/build/share/man/man3/jemalloc.3\
/usr/local/share/man/man3
fi fi
if [ -d /etc/ld.so.conf.d ]; then fi
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf }
${csudo} ldconfig
else function install_avro() {
echo "/etc/ld.so.conf.d not found!" if [ "$osType" != "Darwin" ]; then
if [ -f "${binary_dir}/build/$1/libavro.so.23.0.0" ]; then
/usr/bin/install -c -d /usr/local/$1
/usr/bin/install -c -m 755 ${binary_dir}/build/$1/libavro.so.23.0.0 /usr/local/$1
ln -sf libavro.so.23.0.0 /usr/local/$1/libavro.so.23
ln -sf libavro.so.23 /usr/local/$1/libavro.so
/usr/bin/install -c -d /usr/local/$1
[ -f ${binary_dir}/build/$1/libavro.a ] &&
/usr/bin/install -c -m 755 ${binary_dir}/build/$1/libavro.a /usr/local/$1
if [ -d /etc/ld.so.conf.d ]; then
echo "/usr/local/$1" | ${csudo} tee /etc/ld.so.conf.d/libavro.conf
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
fi
fi fi
fi fi
} }
...@@ -292,6 +317,8 @@ function install_lib() { ...@@ -292,6 +317,8 @@ function install_lib() {
fi fi
install_jemalloc install_jemalloc
install_avro lib
install_avro lib64
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
${csudo} ldconfig ${csudo} ldconfig
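For reference, install_avro is called for both lib and lib64 in install_lib above; on a host whose build produced the shared library under build/lib64, the result is roughly (a sketch, not literal script output):

    /usr/local/lib64/libavro.so.23.0.0            # copied from ${binary_dir}/build/lib64
    /usr/local/lib64/libavro.so.23 -> libavro.so.23.0.0
    /usr/local/lib64/libavro.so    -> libavro.so.23
    /etc/ld.so.conf.d/libavro.conf                # contains /usr/local/lib64, then ldconfig runs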
...@@ -324,39 +351,33 @@ function install_config() { ...@@ -324,39 +351,33 @@ function install_config() {
[ -f ${script_dir}/../cfg/taos.cfg ] && [ -f ${script_dir}/../cfg/taos.cfg ] &&
${csudo} cp ${script_dir}/../cfg/taos.cfg ${cfg_install_dir} ${csudo} cp ${script_dir}/../cfg/taos.cfg ${cfg_install_dir}
${csudo} chmod 644 ${cfg_install_dir}/taos.cfg ${csudo} chmod 644 ${cfg_install_dir}/taos.cfg
${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org ${csudo} cp -f ${script_dir}/../cfg/taos.cfg \
${csudo} ln -s ${cfg_install_dir}/taos.cfg ${install_main_dir}/cfg/taos.cfg ${cfg_install_dir}/taos.cfg.${verNumber}
${csudo} ln -s ${cfg_install_dir}/taos.cfg \
${install_main_dir}/cfg/taos.cfg
else else
if [ "$osType" != "Darwin" ]; then ${csudo} cp -f ${script_dir}/../cfg/taos.cfg \
${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org ${cfg_install_dir}/taos.cfg.${verNumber}
else
${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org\
|| ${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_2_dir}/cfg/taos.cfg.org
fi
fi fi
} }
function install_blm3_config() { function install_taosadapter_config() {
if [ ! -f "${cfg_install_dir}/blm.toml" ]; then if [ ! -f "${cfg_install_dir}/taosadapter.toml" ]; then
${csudo} mkdir -p ${cfg_install_dir} ${csudo} mkdir -p ${cfg_install_dir}
[ -f ${binary_dir}/test/cfg/blm.toml ] && [ -f ${binary_dir}/test/cfg/taosadapter.toml ] &&
${csudo} cp ${binary_dir}/test/cfg/blm.toml ${cfg_install_dir} ${csudo} cp ${binary_dir}/test/cfg/taosadapter.toml ${cfg_install_dir}
[ -f ${cfg_install_dir}/blm.toml ] && [ -f ${cfg_install_dir}/taosadapter.toml ] &&
${csudo} chmod 644 ${cfg_install_dir}/blm.toml ${csudo} chmod 644 ${cfg_install_dir}/taosadapter.toml
[ -f ${binary_dir}/test/cfg/blm.toml ] && [ -f ${binary_dir}/test/cfg/taosadapter.toml ] &&
${csudo} cp -f ${binary_dir}/test/cfg/blm.toml ${install_main_dir}/cfg/blm.toml.org ${csudo} cp -f ${binary_dir}/test/cfg/taosadapter.toml \
[ -f ${cfg_install_dir}/blm.toml ] && ${cfg_install_dir}/taosadapter.toml.${verNumber}
${csudo} ln -s ${cfg_install_dir}/blm.toml ${install_main_dir}/cfg/blm.toml [ -f ${cfg_install_dir}/taosadapter.toml ] && \
${csudo} ln -s ${cfg_install_dir}/taosadapter.toml \
${install_main_dir}/cfg/taosadapter.toml
else else
if [ -f "${binary_dir}/test/cfg/blm.toml" ]; then if [ -f "${binary_dir}/test/cfg/taosadapter.toml" ]; then
if [ "$osType" != "Darwin" ]; then ${csudo} cp -f ${binary_dir}/test/cfg/taosadapter.toml \
${csudo} cp -f ${binary_dir}/test/cfg/blm.toml \ ${cfg_install_dir}/taosadapter.toml.${verNumber}
${install_main_dir}/cfg/blm.toml.org
else
${csudo} cp -f ${binary_dir}/test/cfg/blm.toml ${install_main_dir}/cfg/blm.toml.org \
|| ${csudo} cp -f ${binary_dir}/test/cfg/blm.toml \
${install_main_2_dir}/cfg/blm.toml.org
fi
fi fi
fi fi
} }
...@@ -381,11 +402,6 @@ function install_data() { ...@@ -381,11 +402,6 @@ function install_data() {
} }
function install_connector() { function install_connector() {
if [ -d "${source_dir}/src/connector/grafanaplugin/dist" ]; then
${csudo} cp -rf ${source_dir}/src/connector/grafanaplugin/dist ${install_main_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bundled dir not found, please check if want to use it!"
fi
if find ${source_dir}/src/connector/go -mindepth 1 -maxdepth 1 | read; then if find ${source_dir}/src/connector/go -mindepth 1 -maxdepth 1 | read; then
${csudo} cp -r ${source_dir}/src/connector/go ${install_main_dir}/connector ${csudo} cp -r ${source_dir}/src/connector/go ${install_main_dir}/connector
else else
...@@ -481,8 +497,8 @@ function install_service_on_systemd() { ...@@ -481,8 +497,8 @@ function install_service_on_systemd() {
${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}" ${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}" ${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'After=network-online.target' >> ${taosd_service_config}" ${csudo} bash -c "echo 'After=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Wants=network-online.target' >> ${taosd_service_config}" ${csudo} bash -c "echo 'Wants=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo >> ${taosd_service_config}" ${csudo} bash -c "echo >> ${taosd_service_config}"
${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}" ${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}" ${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}"
...@@ -503,6 +519,16 @@ function install_service_on_systemd() { ...@@ -503,6 +519,16 @@ function install_service_on_systemd() {
${csudo} systemctl enable taosd ${csudo} systemctl enable taosd
} }
function install_taosadapter_service() {
if ((${service_mod}==0)); then
[ -f ${binary_dir}/test/cfg/taosadapter.service ] &&\
${csudo} cp ${binary_dir}/test/cfg/taosadapter.service\
${service_config_dir}/ || :
else
kill_taosadapter
fi
}
function install_service() { function install_service() {
if ((${service_mod}==0)); then if ((${service_mod}==0)); then
install_service_on_systemd install_service_on_systemd
...@@ -510,7 +536,6 @@ function install_service() { ...@@ -510,7 +536,6 @@ function install_service() {
install_service_on_sysvinit install_service_on_sysvinit
else else
# must manual stop taosd # must manual stop taosd
kill_blm3
kill_taosd kill_taosd
fi fi
} }
...@@ -526,7 +551,7 @@ function update_TDengine() { ...@@ -526,7 +551,7 @@ function update_TDengine() {
elif ((${service_mod}==1)); then elif ((${service_mod}==1)); then
${csudo} service taosd stop || : ${csudo} service taosd stop || :
else else
kill_blm3 kill_taosadapter
kill_taosd kill_taosd
fi fi
sleep 1 sleep 1
...@@ -544,10 +569,11 @@ function update_TDengine() { ...@@ -544,10 +569,11 @@ function update_TDengine() {
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
install_service install_service
install_taosadapter_service
fi fi
install_config install_config
install_blm3_config install_taosadapter_config
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
echo echo
...@@ -555,7 +581,7 @@ function update_TDengine() { ...@@ -555,7 +581,7 @@ function update_TDengine() {
echo echo
echo -e "${GREEN_DARK}To configure TDengine ${NC}: edit /etc/taos/taos.cfg" echo -e "${GREEN_DARK}To configure TDengine ${NC}: edit /etc/taos/taos.cfg"
echo -e "${GREEN_DARK}To configure blm3 (if has) ${NC}: edit /etc/taos/blm.toml" echo -e "${GREEN_DARK}To configure taosadapter (if has) ${NC}: edit /etc/taos/taosadapter.toml"
if ((${service_mod}==0)); then if ((${service_mod}==0)); then
echo -e "${GREEN_DARK}To start TDengine ${NC}: ${csudo} systemctl start taosd${NC}" echo -e "${GREEN_DARK}To start TDengine ${NC}: ${csudo} systemctl start taosd${NC}"
elif ((${service_mod}==1)); then elif ((${service_mod}==1)); then
...@@ -598,10 +624,11 @@ function install_TDengine() { ...@@ -598,10 +624,11 @@ function install_TDengine() {
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
install_service install_service
install_taosadapter_service
fi fi
install_config install_config
install_blm3_config install_taosadapter_config
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
# Ask if to start the service # Ask if to start the service
...@@ -609,7 +636,7 @@ function install_TDengine() { ...@@ -609,7 +636,7 @@ function install_TDengine() {
echo -e "\033[44;32;1mTDengine is installed successfully!${NC}" echo -e "\033[44;32;1mTDengine is installed successfully!${NC}"
echo echo
echo -e "${GREEN_DARK}To configure TDengine ${NC}: edit /etc/taos/taos.cfg" echo -e "${GREEN_DARK}To configure TDengine ${NC}: edit /etc/taos/taos.cfg"
echo -e "${GREEN_DARK}To configure blm (if has) ${NC}: edit /etc/taos/blm.toml" echo -e "${GREEN_DARK}To configure taosadapter (if has) ${NC}: edit /etc/taos/taosadapter.toml"
if ((${service_mod}==0)); then if ((${service_mod}==0)); then
echo -e "${GREEN_DARK}To start TDengine ${NC}: ${csudo} systemctl start taosd${NC}" echo -e "${GREEN_DARK}To start TDengine ${NC}: ${csudo} systemctl start taosd${NC}"
elif ((${service_mod}==1)); then elif ((${service_mod}==1)); then
......
...@@ -150,11 +150,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then ...@@ -150,11 +150,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||: cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
fi fi
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bundled dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector cp -r ${connector_dir}/go ${install_dir}/connector
else else
......
...@@ -210,11 +210,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then ...@@ -210,11 +210,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||: cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
fi fi
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bunlded dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector cp -r ${connector_dir}/go ${install_dir}/connector
else else
......
...@@ -172,11 +172,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then ...@@ -172,11 +172,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||: cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
fi fi
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bunlded dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector cp -r ${connector_dir}/go ${install_dir}/connector
else else
......
...@@ -177,11 +177,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then ...@@ -177,11 +177,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
if [ "$osType" != "Darwin" ]; then if [ "$osType" != "Darwin" ]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||: cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
fi fi
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bunlded dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector cp -r ${connector_dir}/go ${install_dir}/connector
else else
......
...@@ -35,12 +35,12 @@ fi ...@@ -35,12 +35,12 @@ fi
if [ "$pagMode" == "lite" ]; then if [ "$pagMode" == "lite" ]; then
strip ${build_dir}/bin/taosd strip ${build_dir}/bin/taosd
strip ${build_dir}/bin/taos strip ${build_dir}/bin/taos
# lite version doesn't include blm3, which will lead to no restful interface # lite version doesn't include taosadapter, so there is no RESTful interface
bin_files="${build_dir}/bin/taosd ${build_dir}/bin/taos ${script_dir}/remove.sh ${script_dir}/startPre.sh" bin_files="${build_dir}/bin/taosd ${build_dir}/bin/taos ${script_dir}/remove.sh ${script_dir}/startPre.sh"
else else
bin_files="${build_dir}/bin/taosd \ bin_files="${build_dir}/bin/taosd \
${build_dir}/bin/taos \ ${build_dir}/bin/taos \
${build_dir}/bin/blm3 \ ${build_dir}/bin/taosadapter \
${build_dir}/bin/taosdump \ ${build_dir}/bin/taosdump \
${build_dir}/bin/taosdemo \ ${build_dir}/bin/taosdemo \
${build_dir}/bin/tarbitrator\ ${build_dir}/bin/tarbitrator\
...@@ -78,7 +78,7 @@ mkdir -p ${install_dir} ...@@ -78,7 +78,7 @@ mkdir -p ${install_dir}
mkdir -p ${install_dir}/inc && cp ${header_files} ${install_dir}/inc mkdir -p ${install_dir}/inc && cp ${header_files} ${install_dir}/inc
mkdir -p ${install_dir}/cfg && cp ${cfg_dir}/taos.cfg ${install_dir}/cfg/taos.cfg mkdir -p ${install_dir}/cfg && cp ${cfg_dir}/taos.cfg ${install_dir}/cfg/taos.cfg
[ -f ${cfg_dir}/blm.toml ] && cp ${cfg_dir}/blm.toml ${install_dir}/cfg/blm.toml [ -f ${cfg_dir}/taosadapter.toml ] && cp ${cfg_dir}/taosadapter.toml ${install_dir}/cfg/taosadapter.toml
mkdir -p ${install_dir}/bin && cp ${bin_files} ${install_dir}/bin && chmod a+x ${install_dir}/bin/* || : mkdir -p ${install_dir}/bin && cp ${bin_files} ${install_dir}/bin && chmod a+x ${install_dir}/bin/* || :
mkdir -p ${install_dir}/init.d && cp ${init_file_deb} ${install_dir}/init.d/taosd.deb mkdir -p ${install_dir}/init.d && cp ${init_file_deb} ${install_dir}/init.d/taosd.deb
...@@ -195,11 +195,6 @@ connector_dir="${code_dir}/connector" ...@@ -195,11 +195,6 @@ connector_dir="${code_dir}/connector"
mkdir -p ${install_dir}/connector mkdir -p ${install_dir}/connector
if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||: cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bundled dir not found, please check if you want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector cp -r ${connector_dir}/go ${install_dir}/connector
else else
......
...@@ -81,7 +81,7 @@ else ...@@ -81,7 +81,7 @@ else
# bin_files="${build_dir}/bin/powerd ${build_dir}/bin/power ${build_dir}/bin/powerdemo ${build_dir}/bin/tarbitrator ${script_dir}/remove_power.sh ${script_dir}/set_core.sh" # bin_files="${build_dir}/bin/powerd ${build_dir}/bin/power ${build_dir}/bin/powerdemo ${build_dir}/bin/tarbitrator ${script_dir}/remove_power.sh ${script_dir}/set_core.sh"
cp ${build_dir}/bin/taos ${install_dir}/bin/power cp ${build_dir}/bin/taos ${install_dir}/bin/power
cp ${build_dir}/bin/taosd ${install_dir}/bin/powerd cp ${build_dir}/bin/taosd ${install_dir}/bin/powerd
cp ${build_dir}/bin/blm3 ${install_dir}/bin/blm3 ||: cp ${build_dir}/bin/taosadapter ${install_dir}/bin/taosadapter ||:
cp ${script_dir}/remove_power.sh ${install_dir}/bin cp ${script_dir}/remove_power.sh ${install_dir}/bin
cp ${build_dir}/bin/taosdemo ${install_dir}/bin/powerdemo cp ${build_dir}/bin/taosdemo ${install_dir}/bin/powerdemo
cp ${build_dir}/bin/taosdump ${install_dir}/bin/powerdump cp ${build_dir}/bin/taosdump ${install_dir}/bin/powerdump
...@@ -168,11 +168,6 @@ mkdir -p ${install_dir}/connector ...@@ -168,11 +168,6 @@ mkdir -p ${install_dir}/connector
if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||: cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bundled dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector cp -r ${connector_dir}/go ${install_dir}/connector
else else
......
...@@ -62,7 +62,7 @@ else ...@@ -62,7 +62,7 @@ else
fi fi
cp ${build_dir}/bin/taos ${install_dir}/bin/prodbc cp ${build_dir}/bin/taos ${install_dir}/bin/prodbc
cp ${build_dir}/bin/taosd ${install_dir}/bin/prodbs cp ${build_dir}/bin/taosd ${install_dir}/bin/prodbs
cp ${build_dir}/bin/blm3 ${install_dir}/bin/blm3 ||: cp ${build_dir}/bin/taosadapter ${install_dir}/bin/taosadapter ||:
cp ${script_dir}/remove_pro.sh ${install_dir}/bin cp ${script_dir}/remove_pro.sh ${install_dir}/bin
chmod a+x ${install_dir}/bin/* || : chmod a+x ${install_dir}/bin/* || :
...@@ -154,11 +154,6 @@ mkdir -p ${install_dir}/driver && cp ${lib_files} ${install_dir}/driver && echo ...@@ -154,11 +154,6 @@ mkdir -p ${install_dir}/driver && cp ${lib_files} ${install_dir}/driver && echo
#if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then #if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
# cp ${build_dir}/lib/*.jar ${install_dir}/connector ||: # cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
# if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
# cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
# else
# echo "WARNING: grafanaplugin bundled dir not found, please check if want to use it!"
# fi
# if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then # if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
# cp -r ${connector_dir}/go ${install_dir}/connector # cp -r ${connector_dir}/go ${install_dir}/connector
# else # else
......
...@@ -82,7 +82,7 @@ else ...@@ -82,7 +82,7 @@ else
cp ${build_dir}/bin/taos ${install_dir}/bin/tq cp ${build_dir}/bin/taos ${install_dir}/bin/tq
cp ${build_dir}/bin/taosd ${install_dir}/bin/tqd cp ${build_dir}/bin/taosd ${install_dir}/bin/tqd
cp ${script_dir}/remove_tq.sh ${install_dir}/bin cp ${script_dir}/remove_tq.sh ${install_dir}/bin
cp ${build_dir}/bin/blm3 ${install_dir}/bin/blm3 ||: cp ${build_dir}/bin/taosadapter ${install_dir}/bin/taosadapter ||:
cp ${build_dir}/bin/taosdemo ${install_dir}/bin/tqdemo cp ${build_dir}/bin/taosdemo ${install_dir}/bin/tqdemo
cp ${build_dir}/bin/taosdump ${install_dir}/bin/tqdump cp ${build_dir}/bin/taosdump ${install_dir}/bin/tqdump
cp ${build_dir}/bin/tarbitrator ${install_dir}/bin cp ${build_dir}/bin/tarbitrator ${install_dir}/bin
...@@ -168,11 +168,6 @@ mkdir -p ${install_dir}/connector ...@@ -168,11 +168,6 @@ mkdir -p ${install_dir}/connector
if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||: cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bundled dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector cp -r ${connector_dir}/go ${install_dir}/connector
else else
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
# #
# This file is used to install tdengine rpm package on centos systems. The operating system # This file is used to install tdengine rpm package on centos systems. The operating system
# is required to use systemd to manage services at boot # is required to use systemd to manage services at boot
#set -x # set -x
iplist="" iplist=""
serverFqdn="" serverFqdn=""
...@@ -64,9 +64,9 @@ else ...@@ -64,9 +64,9 @@ else
service_mod=2 service_mod=2
fi fi
function kill_blm3() { function kill_taosadapter() {
# ${csudo} pkill -f blm3 || : # ${csudo} pkill -f taosadapter || :
pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}') pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then if [ -n "$pid" ]; then
${csudo} kill -9 $pid || : ${csudo} kill -9 $pid || :
fi fi
...@@ -86,6 +86,24 @@ function install_include() { ...@@ -86,6 +86,24 @@ function install_include() {
${csudo} ln -s ${inc_dir}/taoserror.h ${inc_link_dir}/taoserror.h ${csudo} ln -s ${inc_dir}/taoserror.h ${inc_link_dir}/taoserror.h
} }
function install_avro_lib() {
${csudo} rm -f ${lib_link_dir}/libavro* || :
${csudo} rm -f ${lib64_link_dir}/libavro* || :
if [[ -f ${lib_dir}/libavro.so.23.0.0 ]]; then
${csudo} ln -s ${lib_dir}/libavro.so.23.0.0 ${lib_link_dir}/libavro.so.23.0.0
${csudo} ln -s ${lib_link_dir}/libavro.so.23.0.0 ${lib_link_dir}/libavro.so.23
${csudo} ln -s ${lib_link_dir}/libavro.so.23 ${lib_link_dir}/libavro.so
if [[ -d ${lib64_link_dir} && ! -e ${lib64_link_dir}/libavro.so ]]; then
${csudo} ln -s ${lib_dir}/libavro.so.23.0.0 ${lib64_link_dir}/libavro.so.23.0.0 || :
${csudo} ln -s ${lib64_link_dir}/libavro.so.23.0.0 ${lib64_link_dir}/libavro.so.23 || :
${csudo} ln -s ${lib64_link_dir}/libavro.so.23 ${lib64_link_dir}/libavro.so || :
fi
fi
${csudo} ldconfig
}
function install_lib() { function install_lib() {
${csudo} rm -f ${lib_link_dir}/libtaos* || : ${csudo} rm -f ${lib_link_dir}/libtaos* || :
${csudo} rm -f ${lib64_link_dir}/libtaos* || : ${csudo} rm -f ${lib64_link_dir}/libtaos* || :
...@@ -97,13 +115,15 @@ function install_lib() { ...@@ -97,13 +115,15 @@ function install_lib() {
${csudo} ln -s ${lib_dir}/libtaos.* ${lib64_link_dir}/libtaos.so.1 || : ${csudo} ln -s ${lib_dir}/libtaos.* ${lib64_link_dir}/libtaos.so.1 || :
${csudo} ln -s ${lib64_link_dir}/libtaos.so.1 ${lib64_link_dir}/libtaos.so || : ${csudo} ln -s ${lib64_link_dir}/libtaos.so.1 ${lib64_link_dir}/libtaos.so || :
fi fi
${csudo} ldconfig
} }
function install_bin() { function install_bin() {
# Remove links # Remove links
${csudo} rm -f ${bin_link_dir}/taos || : ${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || : ${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || : ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || : ${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || : ${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/rmtaos || : ${csudo} rm -f ${bin_link_dir}/rmtaos || :
...@@ -114,7 +134,7 @@ function install_bin() { ...@@ -114,7 +134,7 @@ function install_bin() {
#Make link #Make link
[ -x ${bin_dir}/taos ] && ${csudo} ln -s ${bin_dir}/taos ${bin_link_dir}/taos || : [ -x ${bin_dir}/taos ] && ${csudo} ln -s ${bin_dir}/taos ${bin_link_dir}/taos || :
[ -x ${bin_dir}/taosd ] && ${csudo} ln -s ${bin_dir}/taosd ${bin_link_dir}/taosd || : [ -x ${bin_dir}/taosd ] && ${csudo} ln -s ${bin_dir}/taosd ${bin_link_dir}/taosd || :
[ -x ${bin_dir}/blm3 ] && ${csudo} ln -s ${bin_dir}/blm3 ${bin_link_dir}/blm3 || : [ -x ${bin_dir}/taosadapter ] && ${csudo} ln -s ${bin_dir}/taosadapter ${bin_link_dir}/taosadapter || :
[ -x ${bin_dir}/taosdemo ] && ${csudo} ln -s ${bin_dir}/taosdemo ${bin_link_dir}/taosdemo || : [ -x ${bin_dir}/taosdemo ] && ${csudo} ln -s ${bin_dir}/taosdemo ${bin_link_dir}/taosdemo || :
[ -x ${bin_dir}/taosdump ] && ${csudo} ln -s ${bin_dir}/taosdump ${bin_link_dir}/taosdump || : [ -x ${bin_dir}/taosdump ] && ${csudo} ln -s ${bin_dir}/taosdump ${bin_link_dir}/taosdump || :
[ -x ${bin_dir}/set_core.sh ] && ${csudo} ln -s ${bin_dir}/set_core.sh ${bin_link_dir}/set_core || : [ -x ${bin_dir}/set_core.sh ] && ${csudo} ln -s ${bin_dir}/set_core.sh ${bin_link_dir}/set_core || :
...@@ -127,7 +147,7 @@ function add_newHostname_to_hosts() { ...@@ -127,7 +147,7 @@ function add_newHostname_to_hosts() {
iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}') iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
arr=($iphost) arr=($iphost)
IFS="$OLD_IFS" IFS="$OLD_IFS"
for s in ${arr[@]} for s in "${arr[@]}"
do do
if [[ "$s" == "$localIp" ]]; then if [[ "$s" == "$localIp" ]]; then
return return
...@@ -182,7 +202,7 @@ function is_correct_ipaddr() { ...@@ -182,7 +202,7 @@ function is_correct_ipaddr() {
IFS=" " IFS=" "
arr=($iplist) arr=($iplist)
IFS="$OLD_IFS" IFS="$OLD_IFS"
for s in ${arr[@]} for s in "${arr[@]}"
do do
if [[ "$s" == "$newIp" ]]; then if [[ "$s" == "$newIp" ]]; then
return 0 return 0
...@@ -271,20 +291,20 @@ function local_fqdn_check() { ...@@ -271,20 +291,20 @@ function local_fqdn_check() {
fi fi
} }
function install_blm3_config() { function install_taosadapter_config() {
if [ ! -f "${cfg_install_dir}/blm.toml" ]; then if [ ! -f "${cfg_install_dir}/taosadapter.toml" ]; then
[ ! -d %{cfg_install_dir} ] && [ ! -d %{cfg_install_dir} ] &&
${csudo} ${csudo} mkdir -p ${cfg_install_dir} ${csudo} ${csudo} mkdir -p ${cfg_install_dir}
[ -f ${cfg_dir}/blm.toml ] && ${csudo} cp ${cfg_dir}/blm.toml ${cfg_install_dir} [ -f ${cfg_dir}/taosadapter.toml ] && ${csudo} cp ${cfg_dir}/taosadapter.toml ${cfg_install_dir}
[ -f ${cfg_install_dir}/blm.toml ] && [ -f ${cfg_install_dir}/taosadapter.toml ] &&
${csudo} chmod 644 ${cfg_install_dir}/blm.toml ${csudo} chmod 644 ${cfg_install_dir}/taosadapter.toml
fi fi
[ -f ${cfg_dir}/blm.toml ] && [ -f ${cfg_dir}/taosadapter.toml ] &&
${csudo} mv ${cfg_dir}/blm.toml ${cfg_dir}/blm.toml.org ${csudo} mv ${cfg_dir}/taosadapter.toml ${cfg_dir}/taosadapter.toml.new
[ -f ${cfg_install_dir}/blm.toml ] && [ -f ${cfg_install_dir}/taosadapter.toml ] &&
${csudo} ln -s ${cfg_install_dir}/blm.toml ${cfg_dir} ${csudo} ln -s ${cfg_install_dir}/taosadapter.toml ${cfg_dir}
} }
function install_config() { function install_config() {
...@@ -302,7 +322,7 @@ function install_config() { ...@@ -302,7 +322,7 @@ function install_config() {
# restore the backup standard input, and turn off 6 # restore the backup standard input, and turn off 6
exec 0<&6 6<&- exec 0<&6 6<&-
${csudo} mv ${cfg_dir}/taos.cfg ${cfg_dir}/taos.cfg.org ${csudo} mv ${cfg_dir}/taos.cfg ${cfg_dir}/taos.cfg.new
${csudo} ln -s ${cfg_install_dir}/taos.cfg ${cfg_dir} ${csudo} ln -s ${cfg_install_dir}/taos.cfg ${cfg_dir}
#FQDN_FORMAT="(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)" #FQDN_FORMAT="(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)"
#FQDN_FORMAT="(:[1-6][0-9][0-9][0-9][0-9]$)" #FQDN_FORMAT="(:[1-6][0-9][0-9][0-9][0-9]$)"
...@@ -424,8 +444,8 @@ function install_service_on_systemd() { ...@@ -424,8 +444,8 @@ function install_service_on_systemd() {
${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}" ${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}" ${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'After=network-online.target' >> ${taosd_service_config}" ${csudo} bash -c "echo 'After=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Wants=network-online.target' >> ${taosd_service_config}" ${csudo} bash -c "echo 'Wants=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo >> ${taosd_service_config}" ${csudo} bash -c "echo >> ${taosd_service_config}"
${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}" ${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}" ${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}"
...@@ -446,6 +466,10 @@ function install_service_on_systemd() { ...@@ -446,6 +466,10 @@ function install_service_on_systemd() {
${csudo} systemctl enable taosd ${csudo} systemctl enable taosd
} }
function install_taosadapter_service() {
[ -f ${cfg_dir}/taosadapter.service ] && ${csudo} cp ${cfg_dir}/taosadapter.service ${service_config_dir}
}
function install_service() { function install_service() {
if ((${service_mod}==0)); then if ((${service_mod}==0)); then
install_service_on_systemd install_service_on_systemd
...@@ -453,7 +477,7 @@ function install_service() { ...@@ -453,7 +477,7 @@ function install_service() {
install_service_on_sysvinit install_service_on_sysvinit
else else
# manual start taosd # manual start taosd
kill_blm3 kill_taosadapter
kill_taosd kill_taosd
fi fi
} }
...@@ -474,10 +498,12 @@ function install_TDengine() { ...@@ -474,10 +498,12 @@ function install_TDengine() {
# Install include, lib, binary and service # Install include, lib, binary and service
install_include install_include
install_lib install_lib
install_avro_lib
install_bin install_bin
install_service
install_config install_config
install_blm3_config install_taosadapter_config
install_taosadapter_service
install_service
# Ask if to start the service # Ask if to start the service
#echo #echo
......
...@@ -43,8 +43,8 @@ else ...@@ -43,8 +43,8 @@ else
service_mod=2 service_mod=2
fi fi
function kill_blm3() { function kill_taosadapter() {
pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}') pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then if [ -n "$pid" ]; then
${csudo} kill -9 $pid || : ${csudo} kill -9 $pid || :
fi fi
...@@ -58,6 +58,12 @@ function kill_taosd() { ...@@ -58,6 +58,12 @@ function kill_taosd() {
} }
function clean_service_on_systemd() { function clean_service_on_systemd() {
taosadapter_service_config="${service_config_dir}/taosadapter.service"
if systemctl is-active --quiet taosadapter; then
echo "taosadapter is running, stopping it..."
${csudo} systemctl stop taosadapter &> /dev/null || echo &> /dev/null
fi
taosd_service_config="${service_config_dir}/${taos_service_name}.service" taosd_service_config="${service_config_dir}/${taos_service_name}.service"
if systemctl is-active --quiet ${taos_service_name}; then if systemctl is-active --quiet ${taos_service_name}; then
...@@ -67,6 +73,9 @@ function clean_service_on_systemd() { ...@@ -67,6 +73,9 @@ function clean_service_on_systemd() {
${csudo} systemctl disable ${taos_service_name} &> /dev/null || echo &> /dev/null ${csudo} systemctl disable ${taos_service_name} &> /dev/null || echo &> /dev/null
${csudo} rm -f ${taosd_service_config} ${csudo} rm -f ${taosd_service_config}
[ -f ${taosadapter_service_config} ] && ${csudo} rm -f ${taosadapter_service_config}
} }
function clean_service_on_sysvinit() { function clean_service_on_sysvinit() {
...@@ -100,7 +109,7 @@ function clean_service() { ...@@ -100,7 +109,7 @@ function clean_service() {
clean_service_on_sysvinit clean_service_on_sysvinit
else else
# must manual stop taosd # must manual stop taosd
kill_blm3 kill_taosadapter
kill_taosd kill_taosd
fi fi
} }
...@@ -111,11 +120,11 @@ clean_service ...@@ -111,11 +120,11 @@ clean_service
# Remove all links # Remove all links
${csudo} rm -f ${bin_link_dir}/taos || : ${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || : ${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || : ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || : ${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || : ${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/set_core || : ${csudo} rm -f ${bin_link_dir}/set_core || :
${csudo} rm -f ${cfg_link_dir}/* || : ${csudo} rm -f ${cfg_link_dir}/*.new || :
${csudo} rm -f ${inc_link_dir}/taos.h || : ${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || : ${csudo} rm -f ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${lib_link_dir}/libtaos.* || : ${csudo} rm -f ${lib_link_dir}/libtaos.* || :
...@@ -125,7 +134,7 @@ ${csudo} rm -f ${log_link_dir} || : ...@@ -125,7 +134,7 @@ ${csudo} rm -f ${log_link_dir} || :
${csudo} rm -f ${data_link_dir} || : ${csudo} rm -f ${data_link_dir} || :
if ((${service_mod}==2)); then if ((${service_mod}==2)); then
kill_blm3 kill_taosadapter
kill_taosd kill_taosd
fi fi
......
...@@ -54,8 +54,8 @@ else ...@@ -54,8 +54,8 @@ else
service_mod=2 service_mod=2
fi fi
function kill_blm3() { function kill_taosadapter() {
pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}') pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then if [ -n "$pid" ]; then
${csudo} kill -9 $pid || : ${csudo} kill -9 $pid || :
fi fi
...@@ -78,7 +78,7 @@ function clean_bin() { ...@@ -78,7 +78,7 @@ function clean_bin() {
# Remove link # Remove link
${csudo} rm -f ${bin_link_dir}/taos || : ${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || : ${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || : ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || : ${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || : ${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/rmtaos || : ${csudo} rm -f ${bin_link_dir}/rmtaos || :
...@@ -111,12 +111,14 @@ function clean_log() { ...@@ -111,12 +111,14 @@ function clean_log() {
function clean_service_on_systemd() { function clean_service_on_systemd() {
taosd_service_config="${service_config_dir}/${taos_service_name}.service" taosd_service_config="${service_config_dir}/${taos_service_name}.service"
taosadapter_service_config="${service_config_dir}/taosadapter.service"
if systemctl is-active --quiet ${taos_service_name}; then if systemctl is-active --quiet ${taos_service_name}; then
echo "TDengine taosd is running, stopping it..." echo "TDengine taosd is running, stopping it..."
${csudo} systemctl stop ${taos_service_name} &> /dev/null || echo &> /dev/null ${csudo} systemctl stop ${taos_service_name} &> /dev/null || echo &> /dev/null
fi fi
${csudo} systemctl disable ${taos_service_name} &> /dev/null || echo &> /dev/null ${csudo} systemctl disable ${taos_service_name} &> /dev/null || echo &> /dev/null
${csudo} rm -f ${taosd_service_config} ${csudo} rm -f ${taosd_service_config}
  [ -f ${taosadapter_service_config} ] && ${csudo} rm -f ${taosadapter_service_config}
tarbitratord_service_config="${service_config_dir}/${tarbitrator_service_name}.service" tarbitratord_service_config="${service_config_dir}/${tarbitrator_service_name}.service"
if systemctl is-active --quiet ${tarbitrator_service_name}; then if systemctl is-active --quiet ${tarbitrator_service_name}; then
...@@ -191,7 +193,7 @@ function clean_service() { ...@@ -191,7 +193,7 @@ function clean_service() {
clean_service_on_sysvinit clean_service_on_sysvinit
else else
# must manual stop taosd # must manual stop taosd
kill_blm3 kill_taosadapter
kill_taosd kill_taosd
kill_tarbitrator kill_tarbitrator
fi fi
......
...@@ -9,8 +9,8 @@ line=`grep StartLimitBurst ${taosd}` ...@@ -9,8 +9,8 @@ line=`grep StartLimitBurst ${taosd}`
num=${line##*=} num=${line##*=}
#echo "burst num: ${num}" #echo "burst num: ${num}"
startSeqFile=/usr/local/taos/.startSeq startSeqFile=/var/log/taos/.startSeq
recordFile=/usr/local/taos/.startRecord recordFile=/var/log/taos/.startRecord
startSeq=0 startSeq=0
...@@ -48,4 +48,3 @@ if [ ${coreFlag} = "unlimited" ];then ...@@ -48,4 +48,3 @@ if [ ${coreFlag} = "unlimited" ];then
fi fi
fi fi
/usr/bin/blm3 &
name: tdengine name: tdengine
base: core20 base: core20
version: '2.3.0.0' version: '2.3.1.0'
icon: snap/gui/t-dengine.svg icon: snap/gui/t-dengine.svg
summary: an open-source big data platform designed and optimized for IoT. summary: an open-source big data platform designed and optimized for IoT.
description: | description: |
......
...@@ -66,8 +66,7 @@ typedef struct { ...@@ -66,8 +66,7 @@ typedef struct {
int32_t affectedRows; int32_t affectedRows;
} SSmlLinesInfo; } SSmlLinesInfo;
char* addEscapeCharToString(char *str, int32_t len);
void addEscapeCharToString(char *str, int32_t len);
int tscSmlInsert(TAOS* taos, TAOS_SML_DATA_POINT* points, int numPoint, SSmlLinesInfo* info); int tscSmlInsert(TAOS* taos, TAOS_SML_DATA_POINT* points, int numPoint, SSmlLinesInfo* info);
bool checkDuplicateKey(char *key, SHashObj *pHash, SSmlLinesInfo* info); bool checkDuplicateKey(char *key, SHashObj *pHash, SSmlLinesInfo* info);
bool isValidInteger(char *str); bool isValidInteger(char *str);
......
...@@ -358,9 +358,13 @@ static int32_t tscGetTableTagValue(SCreateBuilder *builder, char *result) { ...@@ -358,9 +358,13 @@ static int32_t tscGetTableTagValue(SCreateBuilder *builder, char *result) {
int num_fields = taos_num_fields(pSql); int num_fields = taos_num_fields(pSql);
TAOS_FIELD *fields = taos_fetch_fields(pSql); TAOS_FIELD *fields = taos_fetch_fields(pSql);
char buf[TSDB_COL_NAME_LEN + 16];
for (int i = 0; i < num_fields; i++) { for (int i = 0; i < num_fields; i++) {
memset(buf, 0, sizeof(buf)); char *buf = calloc(1, lengths[i] + 1);
if (buf == NULL) {
return TSDB_CODE_TSC_OUT_OF_MEMORY;
}
memset(buf, 0, lengths[i] + 1);
int32_t ret = tscGetNthFieldResult(row, fields, lengths, i, buf); int32_t ret = tscGetNthFieldResult(row, fields, lengths, i, buf);
if (i == 0) { if (i == 0) {
...@@ -373,10 +377,13 @@ static int32_t tscGetTableTagValue(SCreateBuilder *builder, char *result) { ...@@ -373,10 +377,13 @@ static int32_t tscGetTableTagValue(SCreateBuilder *builder, char *result) {
} else { } else {
snprintf(result + strlen(result), TSDB_MAX_BINARY_LEN - strlen(result), "%s,", buf); snprintf(result + strlen(result), TSDB_MAX_BINARY_LEN - strlen(result), "%s,", buf);
} }
free(buf);
if (i == num_fields - 1) { if (i == num_fields - 1) {
sprintf(result + strlen(result) - 1, "%s", ")"); sprintf(result + strlen(result) - 1, "%s", ")");
} }
} }
if (0 == strlen(result)) { if (0 == strlen(result)) {
return TSDB_CODE_TSC_INVALID_TABLE_NAME; return TSDB_CODE_TSC_INVALID_TABLE_NAME;
......
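Editor's note: the hunk above replaces a fixed char buf[TSDB_COL_NAME_LEN + 16] scratch buffer with one allocated per field and sized from lengths[i] + 1, so long tag values can no longer overflow it. A minimal C sketch of the same pattern, with illustrative names rather than the TDengine sources:

    /* Sketch only: size the scratch buffer from the reported field length. */
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    static int copy_field(const char *src, int len, char **out) {
        char *buf = calloc(1, (size_t)len + 1);   /* +1 for the terminating NUL */
        if (buf == NULL) {
            return -1;                            /* mirrors the TSDB_CODE_TSC_OUT_OF_MEMORY path */
        }
        memcpy(buf, src, (size_t)len);            /* a value may exceed any fixed column-name bound */
        *out = buf;                               /* caller frees it, as the hunk frees buf per loop pass */
        return 0;
    }

    int main(void) {
        const char *tag = "a tag value longer than a short fixed buffer would allow";
        char *copy = NULL;
        if (copy_field(tag, (int)strlen(tag), &copy) == 0) {
            printf("%s\n", copy);
            free(copy);
        }
        return 0;
    }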
...@@ -631,11 +631,11 @@ static int32_t modifyDBSchemas(TAOS* taos, SArray* stableSchemas, SSmlLinesInfo* ...@@ -631,11 +631,11 @@ static int32_t modifyDBSchemas(TAOS* taos, SArray* stableSchemas, SSmlLinesInfo*
if (code != 0) { if (code != 0) {
tscError("SML:0x%"PRIx64" reconcile point schema failed. can not create %s", info->id, pointSchema->sTableName); tscError("SML:0x%"PRIx64" reconcile point schema failed. can not create %s", info->id, pointSchema->sTableName);
return code; return code;
} else {
pointSchema->precision = dbSchema.precision;
destroySmlSTableSchema(&dbSchema);
} }
} else if (code == TSDB_CODE_SUCCESS) { }
if (code == TSDB_CODE_SUCCESS) {
pointSchema->precision = dbSchema.precision;
size_t pointTagSize = taosArrayGetSize(pointSchema->tags); size_t pointTagSize = taosArrayGetSize(pointSchema->tags);
size_t pointFieldSize = taosArrayGetSize(pointSchema->fields); size_t pointFieldSize = taosArrayGetSize(pointSchema->fields);
...@@ -1177,13 +1177,14 @@ static void escapeSpecialCharacter(uint8_t field, const char **pos) { ...@@ -1177,13 +1177,14 @@ static void escapeSpecialCharacter(uint8_t field, const char **pos) {
*pos = cur; *pos = cur;
} }
void addEscapeCharToString(char *str, int32_t len) { char* addEscapeCharToString(char *str, int32_t len) {
if (str == NULL) { if (str == NULL) {
return; return NULL;
} }
memmove(str + 1, str, len); memmove(str + 1, str, len);
str[0] = str[len + 1] = TS_ESCAPE_CHAR; str[0] = str[len + 1] = TS_ESCAPE_CHAR;
str[len + 2] = '\0'; str[len + 2] = '\0';
return str;
} }
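Editor's note: addEscapeCharToString now returns its argument instead of void, so the escaped string can be used directly in the calling expression. A self-contained sketch of that pattern, assuming TS_ESCAPE_CHAR is the backquote used for identifier quoting; the helper name is illustrative:

    #include <stdio.h>
    #include <string.h>

    #define ESCAPE_CHAR '`'                       /* assumption: stands in for TS_ESCAPE_CHAR */

    static char *wrap_in_escape(char *str, int len) {
        if (str == NULL) return NULL;             /* caller must provide len + 3 bytes of room */
        memmove(str + 1, str, (size_t)len);       /* shift right to make room for the leading char */
        str[0] = str[len + 1] = ESCAPE_CHAR;
        str[len + 2] = '\0';
        return str;                               /* enables inline use of the escaped result */
    }

    int main(void) {
        char buf[16] = "id";
        printf("%s\n", wrap_in_escape(buf, (int)strlen(buf)));   /* prints `id` */
        return 0;
    }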
bool isValidInteger(char *str) { bool isValidInteger(char *str) {
...@@ -1511,9 +1512,9 @@ static bool convertStrToNumber(TAOS_SML_KV *pVal, char *str, SSmlLinesInfo* info ...@@ -1511,9 +1512,9 @@ static bool convertStrToNumber(TAOS_SML_KV *pVal, char *str, SSmlLinesInfo* info
errno = 0; errno = 0;
uint8_t type = pVal->type; uint8_t type = pVal->type;
int16_t length = pVal->length; int16_t length = pVal->length;
int64_t val_s; int64_t val_s = 0;
uint64_t val_u; uint64_t val_u = 0;
double val_d; double val_d = 0.0;
strntolower_s(str, str, (int32_t)strlen(str)); strntolower_s(str, str, (int32_t)strlen(str));
if (IS_FLOAT_TYPE(type)) { if (IS_FLOAT_TYPE(type)) {
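Editor's note: val_s, val_u and val_d are now initialized before parsing, so a failed conversion cannot leave them holding indeterminate values. A minimal sketch of the same defensive pattern with errno-checked integer parsing; names are illustrative:

    #include <errno.h>
    #include <stdio.h>
    #include <stdlib.h>

    static int parse_i64(const char *str, long long *out) {
        long long val = 0;          /* initialized up front, as the hunk does for val_s/val_u/val_d */
        char *end = NULL;
        errno = 0;
        val = strtoll(str, &end, 10);
        if (errno == ERANGE || end == str || *end != '\0') {
            return -1;              /* *out keeps whatever the caller initialized it to */
        }
        *out = val;
        return 0;
    }

    int main(void) {
        long long v = 0;
        printf("%d %lld\n", parse_i64("123", &v), v);      /* 0 123 */
        printf("%d %lld\n", parse_i64("12x", &v), v);      /* -1 123 (unchanged) */
        return 0;
    }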
...@@ -1813,7 +1814,7 @@ static int32_t getTimeStampValue(char *value, uint16_t len, ...@@ -1813,7 +1814,7 @@ static int32_t getTimeStampValue(char *value, uint16_t len,
int32_t convertSmlTimeStamp(TAOS_SML_KV *pVal, char *value, int32_t convertSmlTimeStamp(TAOS_SML_KV *pVal, char *value,
uint16_t len, SSmlLinesInfo* info) { uint16_t len, SSmlLinesInfo* info) {
int32_t ret; int32_t ret;
SMLTimeStampType type; SMLTimeStampType type = SML_TIME_STAMP_NOW;
int64_t tsVal; int64_t tsVal;
ret = isTimeStamp(value, len, &type, info); ret = isTimeStamp(value, len, &type, info);
...@@ -1907,8 +1908,6 @@ static int32_t parseSmlKey(TAOS_SML_KV *pKV, const char **index, SHashObj *pHash ...@@ -1907,8 +1908,6 @@ static int32_t parseSmlKey(TAOS_SML_KV *pKV, const char **index, SHashObj *pHash
} }
//Escape special character //Escape special character
if (*cur == '\\') { if (*cur == '\\') {
//TODO: escape will work after column & tag
//support spcial characters
escapeSpecialCharacter(2, &cur); escapeSpecialCharacter(2, &cur);
} }
key[len] = *cur; key[len] = *cur;
...@@ -1985,6 +1984,7 @@ static int32_t parseSmlValue(TAOS_SML_KV *pKV, const char **index, ...@@ -1985,6 +1984,7 @@ static int32_t parseSmlValue(TAOS_SML_KV *pKV, const char **index,
//Escape special character //Escape special character
if (*cur == '\\') { if (*cur == '\\') {
escapeSpecialCharacter(isTag ? 2 : 3, &cur); escapeSpecialCharacter(isTag ? 2 : 3, &cur);
len++;
} }
cur++; cur++;
len++; len++;
...@@ -2107,6 +2107,13 @@ static int32_t parseSmlKvPairs(TAOS_SML_KV **pKVs, int *num_kvs, ...@@ -2107,6 +2107,13 @@ static int32_t parseSmlKvPairs(TAOS_SML_KV **pKVs, int *num_kvs,
pkv = *pKVs; pkv = *pKVs;
} }
size_t childTableNameLen = strlen(tsSmlChildTableName);
char childTableName[TSDB_TABLE_NAME_LEN + TS_ESCAPE_CHAR_SIZE] = {0};
if (childTableNameLen != 0) {
memcpy(childTableName, tsSmlChildTableName, childTableNameLen);
addEscapeCharToString(childTableName, (int32_t)(childTableNameLen));
}
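Editor's note: the hard-coded "`ID`" tag key is replaced here by the configurable tsSmlChildTableName, escaped once up front and matched case-insensitively against each parsed key. A minimal sketch of that gate, with an illustrative key name standing in for the configuration variable:

    #include <stdio.h>
    #include <string.h>
    #include <strings.h>

    static const char *g_child_table_key = "tname";   /* stands in for tsSmlChildTableName */

    static int is_child_table_key(const char *escaped_key) {
        size_t n = strlen(g_child_table_key);
        if (n == 0) return 0;                         /* empty setting: never override */
        char wanted[64] = {0};
        snprintf(wanted, sizeof(wanted), "`%s`", g_child_table_key);  /* same escaping as tag keys */
        return strcasecmp(escaped_key, wanted) == 0;  /* case-insensitive, as in the hunk */
    }

    int main(void) {
        printf("%d %d\n", is_child_table_key("`TNAME`"), is_child_table_key("`host`"));  /* 1 0 */
        return 0;
    }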
while (*cur != '\0') { while (*cur != '\0') {
ret = parseSmlKey(pkv, &cur, pHash, info); ret = parseSmlKey(pkv, &cur, pHash, info);
if (ret) { if (ret) {
...@@ -2118,7 +2125,8 @@ static int32_t parseSmlKvPairs(TAOS_SML_KV **pKVs, int *num_kvs, ...@@ -2118,7 +2125,8 @@ static int32_t parseSmlKvPairs(TAOS_SML_KV **pKVs, int *num_kvs,
tscError("SML:0x%"PRIx64" Unable to parse value", info->id); tscError("SML:0x%"PRIx64" Unable to parse value", info->id);
goto error; goto error;
} }
if (!isField && (strcasecmp(pkv->key, "`ID`") == 0)) {
if (!isField && childTableNameLen != 0 && strcasecmp(pkv->key, childTableName) == 0) {
smlData->childTableName = malloc(pkv->length + TS_ESCAPE_CHAR_SIZE + 1); smlData->childTableName = malloc(pkv->length + TS_ESCAPE_CHAR_SIZE + 1);
memcpy(smlData->childTableName, pkv->value, pkv->length); memcpy(smlData->childTableName, pkv->value, pkv->length);
strntolower_s(smlData->childTableName, smlData->childTableName, (int32_t)pkv->length); strntolower_s(smlData->childTableName, smlData->childTableName, (int32_t)pkv->length);
...@@ -2405,7 +2413,7 @@ static SSqlObj* createSmlQueryObj(TAOS* taos, int32_t affected_rows, int32_t cod ...@@ -2405,7 +2413,7 @@ static SSqlObj* createSmlQueryObj(TAOS* taos, int32_t affected_rows, int32_t cod
TAOS_RES* taos_schemaless_insert(TAOS* taos, char* lines[], int numLines, int protocol, int precision) { TAOS_RES* taos_schemaless_insert(TAOS* taos, char* lines[], int numLines, int protocol, int precision) {
int code = TSDB_CODE_SUCCESS; int code = TSDB_CODE_SUCCESS;
int affected_rows = 0; int affected_rows = 0;
SMLTimeStampType tsType; SMLTimeStampType tsType = SML_TIME_STAMP_NOW;
if (protocol == TSDB_SML_LINE_PROTOCOL) { if (protocol == TSDB_SML_LINE_PROTOCOL) {
code = convertPrecisionType(precision, &tsType); code = convertPrecisionType(precision, &tsType);
......
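Editor's note: a hedged usage sketch for the taos_schemaless_insert() entry point whose signature appears in the hunk above. The connection parameters, database name, sample line and precision argument are assumptions for illustration; consult taos.h of the build for the exact precision enum:

    #include <stdio.h>
    #include "taos.h"

    int main(void) {
        TAOS *taos = taos_connect("localhost", "root", "taosdata", "test", 0);   /* db assumed to exist */
        if (taos == NULL) return 1;

        char *lines[] = {
            "st,host=h1,region=sh value=12.3 1626006833639000000",
        };
        /* protocol constant is shown in the hunk; 0 is a placeholder precision selector */
        TAOS_RES *res = taos_schemaless_insert(taos, lines, 1, TSDB_SML_LINE_PROTOCOL, 0);
        if (taos_errno(res) != 0) {
            fprintf(stderr, "insert failed: %s\n", taos_errstr(res));
        }
        taos_free_result(res);
        taos_close(taos);
        return 0;
    }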
...@@ -305,6 +305,12 @@ static int32_t parseTelnetTagKvs(TAOS_SML_KV **pKVs, int *num_kvs, ...@@ -305,6 +305,12 @@ static int32_t parseTelnetTagKvs(TAOS_SML_KV **pKVs, int *num_kvs,
*pKVs = tcalloc(capacity, sizeof(TAOS_SML_KV)); *pKVs = tcalloc(capacity, sizeof(TAOS_SML_KV));
pkv = *pKVs; pkv = *pKVs;
size_t childTableNameLen = strlen(tsSmlChildTableName);
char childTbName[TSDB_TABLE_NAME_LEN + TS_ESCAPE_CHAR_SIZE] = {0};
if (childTableNameLen != 0) {
memcpy(childTbName, tsSmlChildTableName, childTableNameLen);
addEscapeCharToString(childTbName, (int32_t)(childTableNameLen));
}
while (*cur != '\0') { while (*cur != '\0') {
ret = parseTelnetTagKey(pkv, &cur, pHash, info); ret = parseTelnetTagKey(pkv, &cur, pHash, info);
if (ret) { if (ret) {
...@@ -316,7 +322,7 @@ static int32_t parseTelnetTagKvs(TAOS_SML_KV **pKVs, int *num_kvs, ...@@ -316,7 +322,7 @@ static int32_t parseTelnetTagKvs(TAOS_SML_KV **pKVs, int *num_kvs,
tscError("OTD:0x%"PRIx64" Unable to parse value", info->id); tscError("OTD:0x%"PRIx64" Unable to parse value", info->id);
return ret; return ret;
} }
if ((strcasecmp(pkv->key, "`ID`") == 0)) { if (childTableNameLen != 0 && strcasecmp(pkv->key, childTbName) == 0) {
*childTableName = tcalloc(pkv->length + TS_ESCAPE_CHAR_SIZE + 1, 1); *childTableName = tcalloc(pkv->length + TS_ESCAPE_CHAR_SIZE + 1, 1);
memcpy(*childTableName, pkv->value, pkv->length); memcpy(*childTableName, pkv->value, pkv->length);
(*childTableName)[pkv->length] = '\0'; (*childTableName)[pkv->length] = '\0';
...@@ -892,26 +898,33 @@ static int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, ...@@ -892,26 +898,33 @@ static int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs,
if (tags == NULL || tags->type != cJSON_Object) { if (tags == NULL || tags->type != cJSON_Object) {
return TSDB_CODE_TSC_INVALID_JSON; return TSDB_CODE_TSC_INVALID_JSON;
} }
//only pick up the first ID value as child table name
cJSON *id = cJSON_GetObjectItem(tags, "ID"); //handle child table name
if (id != NULL) { size_t childTableNameLen = strlen(tsSmlChildTableName);
if (!cJSON_IsString(id)) { char childTbName[TSDB_TABLE_NAME_LEN] = {0};
tscError("OTD:0x%"PRIx64" ID must be JSON string", info->id); if (childTableNameLen != 0) {
return TSDB_CODE_TSC_INVALID_JSON; memcpy(childTbName, tsSmlChildTableName, childTableNameLen);
} cJSON *id = cJSON_GetObjectItem(tags, childTbName);
size_t idLen = strlen(id->valuestring);
*childTableName = tcalloc(idLen + TS_ESCAPE_CHAR_SIZE + 1, sizeof(char));
memcpy(*childTableName, id->valuestring, idLen);
strntolower_s(*childTableName, *childTableName, (int32_t)idLen);
addEscapeCharToString(*childTableName, (int32_t)idLen);
//check duplicate IDs
cJSON_DeleteItemFromObject(tags, "ID");
id = cJSON_GetObjectItem(tags, "ID");
if (id != NULL) { if (id != NULL) {
return TSDB_CODE_TSC_DUP_TAG_NAMES; if (!cJSON_IsString(id)) {
tscError("OTD:0x%"PRIx64" ID must be JSON string", info->id);
return TSDB_CODE_TSC_INVALID_JSON;
}
size_t idLen = strlen(id->valuestring);
*childTableName = tcalloc(idLen + TS_ESCAPE_CHAR_SIZE + 1, sizeof(char));
memcpy(*childTableName, id->valuestring, idLen);
strntolower_s(*childTableName, *childTableName, (int32_t)idLen);
addEscapeCharToString(*childTableName, (int32_t)idLen);
//check duplicate IDs
cJSON_DeleteItemFromObject(tags, childTbName);
id = cJSON_GetObjectItem(tags, childTbName);
if (id != NULL) {
return TSDB_CODE_TSC_DUP_TAG_NAMES;
}
} }
} }
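Editor's note: the JSON path now looks up the configured child-table key in the tags object instead of the fixed "ID", and deletes it afterwards to detect duplicate keys. A minimal cJSON sketch of that lookup-and-dedup pattern; the key name and JSON text are illustrative:

    #include <stdio.h>
    #include "cJSON.h"

    int main(void) {
        const char *json = "{\"tname\":\"d1001\",\"host\":\"h1\"}";
        cJSON *tags = cJSON_Parse(json);
        if (tags == NULL) return 1;

        cJSON *id = cJSON_GetObjectItem(tags, "tname");   /* configured child-table key */
        if (id != NULL && cJSON_IsString(id)) {
            printf("child table: %s\n", id->valuestring);
            cJSON_DeleteItemFromObject(tags, "tname");    /* remove it, then re-check ... */
            if (cJSON_GetObjectItem(tags, "tname") != NULL) {
                printf("duplicate child-table keys\n");   /* ... mirrors TSDB_CODE_TSC_DUP_TAG_NAMES */
            }
        }
        printf("remaining tags: %d\n", cJSON_GetArraySize(tags));
        cJSON_Delete(tags);
        return 0;
    }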
int32_t tagNum = cJSON_GetArraySize(tags); int32_t tagNum = cJSON_GetArraySize(tags);
//at least one tag pair required //at least one tag pair required
if (tagNum <= 0) { if (tagNum <= 0) {
......
...@@ -2454,6 +2454,7 @@ int32_t addExprAndResultField(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, int32_t col ...@@ -2454,6 +2454,7 @@ int32_t addExprAndResultField(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, int32_t col
const char* msg12 = "parameter is out of range [1, 100]"; const char* msg12 = "parameter is out of range [1, 100]";
const char* msg13 = "parameter list required"; const char* msg13 = "parameter list required";
const char* msg14 = "third parameter algorithm must be 'default' or 't-digest'"; const char* msg14 = "third parameter algorithm must be 'default' or 't-digest'";
const char* msg15 = "parameter is out of range [1, 1000]";
switch (functionId) { switch (functionId) {
case TSDB_FUNC_COUNT: { case TSDB_FUNC_COUNT: {
...@@ -2901,11 +2902,15 @@ int32_t addExprAndResultField(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, int32_t col ...@@ -2901,11 +2902,15 @@ int32_t addExprAndResultField(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, int32_t col
} }
} }
} else if (functionId == TSDB_FUNC_MAVG || functionId == TSDB_FUNC_SAMPLE) { } else if (functionId == TSDB_FUNC_MAVG || functionId == TSDB_FUNC_SAMPLE) {
if (pVariant->nType != TSDB_DATA_TYPE_BIGINT) {
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg2);
}
tVariantDump(pVariant, val, TSDB_DATA_TYPE_BIGINT, true); tVariantDump(pVariant, val, TSDB_DATA_TYPE_BIGINT, true);
int64_t numRowsSelected = GET_INT32_VAL(val); int64_t numRowsSelected = GET_INT64_VAL(val);
if (numRowsSelected <= 0 || numRowsSelected > 1000) { if (numRowsSelected <= 0 || numRowsSelected > 1000) {
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg12); return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg15);
} }
// todo REFACTOR // todo REFACTOR
...@@ -5748,6 +5753,7 @@ int32_t validateOrderbyNode(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SSqlNode* pSq ...@@ -5748,6 +5753,7 @@ int32_t validateOrderbyNode(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SSqlNode* pSq
const char* msg9 = "orderby column must projected in subquery"; const char* msg9 = "orderby column must projected in subquery";
const char* msg10 = "not support distinct mixed with order by"; const char* msg10 = "not support distinct mixed with order by";
const char* msg11 = "not support order with udf"; const char* msg11 = "not support order with udf";
const char* msg12 = "order by tags not supported with diff/derivative/csum/mavg";
setDefaultOrderInfo(pQueryInfo); setDefaultOrderInfo(pQueryInfo);
STableMetaInfo* pTableMetaInfo = tscGetMetaInfo(pQueryInfo, 0); STableMetaInfo* pTableMetaInfo = tscGetMetaInfo(pQueryInfo, 0);
...@@ -5846,6 +5852,9 @@ int32_t validateOrderbyNode(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SSqlNode* pSq ...@@ -5846,6 +5852,9 @@ int32_t validateOrderbyNode(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SSqlNode* pSq
size_t s = taosArrayGetSize(pSortOrder); size_t s = taosArrayGetSize(pSortOrder);
if (s == 1) { if (s == 1) {
if (orderByTags) { if (orderByTags) {
if (tscIsDiffDerivLikeQuery(pQueryInfo)) {
return invalidOperationMsg(pMsgBuf, msg12);
}
pQueryInfo->groupbyExpr.orderIndex = index.columnIndex - tscGetNumOfColumns(pTableMetaInfo->pTableMeta); pQueryInfo->groupbyExpr.orderIndex = index.columnIndex - tscGetNumOfColumns(pTableMetaInfo->pTableMeta);
tVariantListItem* p1 = taosArrayGet(pSqlNode->pSortOrder, 0); tVariantListItem* p1 = taosArrayGet(pSqlNode->pSortOrder, 0);
......
...@@ -2955,7 +2955,8 @@ int32_t tscGetTableMetaImpl(SSqlObj* pSql, STableMetaInfo *pTableMetaInfo, bool ...@@ -2955,7 +2955,8 @@ int32_t tscGetTableMetaImpl(SSqlObj* pSql, STableMetaInfo *pTableMetaInfo, bool
// in case of child table, here only get the // in case of child table, here only get the
if (pMeta->tableType == TSDB_CHILD_TABLE) { if (pMeta->tableType == TSDB_CHILD_TABLE) {
int32_t code = tscCreateTableMetaFromSTableMeta(pSql, &pTableMetaInfo->pTableMeta, name, &pTableMetaInfo->tableMetaCapacity, (STableMeta **)(&pSTMeta)); int32_t code = tscCreateTableMetaFromSTableMeta(pSql, &pTableMetaInfo->pTableMeta, name, &pTableMetaInfo->tableMetaCapacity, (STableMeta **)(&pSTMeta));
pSql->pBuf = (void *)(pSTMeta); pSql->pBuf = (void *)(pSTMeta);
pMeta = pTableMetaInfo->pTableMeta;
if (code != TSDB_CODE_SUCCESS) { if (code != TSDB_CODE_SUCCESS) {
return getTableMetaFromMnode(pSql, pTableMetaInfo, autocreate); return getTableMetaFromMnode(pSql, pTableMetaInfo, autocreate);
} }
......
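Editor's note: the one-line fix above re-reads pMeta after tscCreateTableMetaFromSTableMeta(), which may reallocate pTableMetaInfo->pTableMeta and leave the previously cached pointer stale. A generic C sketch of the stale-alias-after-reallocation pattern; the structures are illustrative:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    typedef struct { char *buf; size_t cap; } holder_t;

    static int grow(holder_t *h, size_t need) {
        if (need <= h->cap) return 0;
        char *p = realloc(h->buf, need);          /* may move the allocation */
        if (p == NULL) return -1;
        h->buf = p;
        h->cap = need;
        return 0;
    }

    int main(void) {
        holder_t h = { calloc(1, 8), 8 };
        char *view = h.buf;                       /* local alias, like the cached pMeta */
        if (grow(&h, 4096) == 0) {
            view = h.buf;                         /* refresh the alias after the call, as the fix does */
            strcpy(view, "ok");
            printf("%s\n", view);
        }
        free(h.buf);
        return 0;
    }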
...@@ -781,6 +781,16 @@ bool taos_is_null(TAOS_RES *res, int32_t row, int32_t col) { ...@@ -781,6 +781,16 @@ bool taos_is_null(TAOS_RES *res, int32_t row, int32_t col) {
return isNull(((char*) pSql->res.urow[col]) + row * pInfo->field.bytes, pInfo->field.type); return isNull(((char*) pSql->res.urow[col]) + row * pInfo->field.bytes, pInfo->field.type);
} }
bool taos_is_update_query(TAOS_RES *res) {
SSqlObj *pSql = (SSqlObj *)res;
if (pSql == NULL || pSql->signature != pSql) {
return false;
}
SSqlCmd* pCmd = &pSql->cmd;
return ((pCmd->command >= TSDB_SQL_INSERT && pCmd->command <= TSDB_SQL_DROP_DNODE) || TSDB_SQL_RESET_CACHE == pCmd->command || TSDB_SQL_USE_DB == pCmd->command);
}
int taos_print_row(char *str, TAOS_ROW row, TAOS_FIELD *fields, int num_fields) { int taos_print_row(char *str, TAOS_ROW row, TAOS_FIELD *fields, int num_fields) {
int len = 0; int len = 0;
...@@ -909,7 +919,6 @@ int taos_validate_sql(TAOS *taos, const char *sql) { ...@@ -909,7 +919,6 @@ int taos_validate_sql(TAOS *taos, const char *sql) {
strtolower(pSql->sqlstr, sql); strtolower(pSql->sqlstr, sql);
// pCmd->curSql = NULL;
if (NULL != pCmd->insertParam.pTableBlockHashList) { if (NULL != pCmd->insertParam.pTableBlockHashList) {
taosHashCleanup(pCmd->insertParam.pTableBlockHashList); taosHashCleanup(pCmd->insertParam.pTableBlockHashList);
pCmd->insertParam.pTableBlockHashList = NULL; pCmd->insertParam.pTableBlockHashList = NULL;
...@@ -934,6 +943,17 @@ int taos_validate_sql(TAOS *taos, const char *sql) { ...@@ -934,6 +943,17 @@ int taos_validate_sql(TAOS *taos, const char *sql) {
return code; return code;
} }
void taos_reset_current_db(TAOS *taos) {
STscObj* pObj = (STscObj*) taos;
if (pObj == NULL || pObj->signature != pObj) {
return;
}
pthread_mutex_lock(&pObj->mutex);
memset(pObj->db, 0, tListLen(pObj->db));
pthread_mutex_unlock(&pObj->mutex);
}
void loadMultiTableMetaCallback(void *param, TAOS_RES *res, int code) { void loadMultiTableMetaCallback(void *param, TAOS_RES *res, int code) {
SSqlObj* pSql = (SSqlObj*)taosAcquireRef(tscObjRef, (int64_t)param); SSqlObj* pSql = (SSqlObj*)taosAcquireRef(tscObjRef, (int64_t)param);
if (pSql == NULL) { if (pSql == NULL) {
......
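Editor's note: a hedged usage sketch for the two client APIs added in this hunk, taos_is_update_query() and taos_reset_current_db(). Connection parameters and SQL text are illustrative assumptions, not from the TDengine tests:

    #include <stdio.h>
    #include "taos.h"

    int main(void) {
        TAOS *taos = taos_connect("localhost", "root", "taosdata", NULL, 0);
        if (taos == NULL) return 1;

        TAOS_RES *res = taos_query(taos, "create database if not exists demo");
        if (taos_errno(res) == 0 && taos_is_update_query(res)) {
            /* update-style statement: report affected rows instead of fetching */
            printf("affected rows: %d\n", taos_affected_rows(res));
        }
        taos_free_result(res);

        res = taos_query(taos, "use demo");
        taos_free_result(res);

        taos_reset_current_db(taos);                 /* connection has no current db again */
        res = taos_query(taos, "select database()"); /* should now report no selected database */
        taos_free_result(res);

        taos_close(taos);
        return 0;
    }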
...@@ -46,7 +46,7 @@ extern int64_t tsDnodeStartTime; ...@@ -46,7 +46,7 @@ extern int64_t tsDnodeStartTime;
// common // common
extern int tsRpcTimer; extern int tsRpcTimer;
extern int tsRpcMaxTime; extern int tsRpcMaxTime;
extern int tsRpcForceTcp; // all commands go to tcp protocol if this is enabled extern int tsRpcForceTcp; // all commands go to tcp protocol if this is enabled
extern int32_t tsMaxConnections; extern int32_t tsMaxConnections;
extern int32_t tsMaxShellConns; extern int32_t tsMaxShellConns;
extern int32_t tsShellActivityTimer; extern int32_t tsShellActivityTimer;
...@@ -57,19 +57,20 @@ extern float tsRatioOfQueryCores; ...@@ -57,19 +57,20 @@ extern float tsRatioOfQueryCores;
extern int8_t tsDaylight; extern int8_t tsDaylight;
extern char tsTimezone[]; extern char tsTimezone[];
extern char tsLocale[]; extern char tsLocale[];
extern char tsCharset[]; // default encode string extern char tsCharset[]; // default encode string
extern int8_t tsEnableCoreFile; extern int8_t tsEnableCoreFile;
extern int32_t tsCompressMsgSize; extern int32_t tsCompressMsgSize;
extern int32_t tsCompressColData; extern int32_t tsCompressColData;
extern int32_t tsMaxNumOfDistinctResults; extern int32_t tsMaxNumOfDistinctResults;
extern char tsTempDir[]; extern char tsTempDir[];
//query buffer management // query buffer management
extern int32_t tsQueryBufferSize; // maximum allowed usage buffer size in MB for each data node during query processing extern int32_t tsQueryBufferSize; // maximum allowed usage buffer size in MB for each data node during query processing
extern int64_t tsQueryBufferSizeBytes; // maximum allowed usage buffer size in byte for each data node during query processing extern int64_t
extern int32_t tsRetrieveBlockingModel;// retrieve threads will be blocked tsQueryBufferSizeBytes; // maximum allowed usage buffer size in byte for each data node during query processing
extern int32_t tsRetrieveBlockingModel; // retrieve threads will be blocked
extern int8_t tsKeepOriginalColumnName; extern int8_t tsKeepOriginalColumnName;
// client // client
extern int32_t tsMaxSQLStringLen; extern int32_t tsMaxSQLStringLen;
...@@ -108,7 +109,7 @@ extern int32_t tsQuorum; ...@@ -108,7 +109,7 @@ extern int32_t tsQuorum;
extern int8_t tsUpdate; extern int8_t tsUpdate;
extern int8_t tsCacheLastRow; extern int8_t tsCacheLastRow;
//tsdb // tsdb
extern bool tsdbForceKeepFile; extern bool tsdbForceKeepFile;
extern bool tsdbForceCompactFile; extern bool tsdbForceCompactFile;
extern int32_t tsdbWalFlushSize; extern int32_t tsdbWalFlushSize;
...@@ -134,6 +135,7 @@ extern int8_t tsHttpEnableCompress; ...@@ -134,6 +135,7 @@ extern int8_t tsHttpEnableCompress;
extern int8_t tsHttpEnableRecordSql; extern int8_t tsHttpEnableRecordSql;
extern int8_t tsTelegrafUseFieldNum; extern int8_t tsTelegrafUseFieldNum;
extern int8_t tsHttpDbNameMandatory; extern int8_t tsHttpDbNameMandatory;
extern int32_t tsHttpKeepAlive;
// mqtt // mqtt
extern int8_t tsEnableMqttModule; extern int8_t tsEnableMqttModule;
...@@ -170,22 +172,22 @@ extern int64_t tsTickPerDay[3]; ...@@ -170,22 +172,22 @@ extern int64_t tsTickPerDay[3];
extern int32_t tsTopicBianryLen; extern int32_t tsTopicBianryLen;
// system info // system info
extern char tsOsName[]; extern char tsOsName[];
extern int64_t tsPageSize; extern int64_t tsPageSize;
extern int64_t tsOpenMax; extern int64_t tsOpenMax;
extern int64_t tsStreamMax; extern int64_t tsStreamMax;
extern int32_t tsNumOfCores; extern int32_t tsNumOfCores;
extern float tsTotalLogDirGB; extern float tsTotalLogDirGB;
extern float tsTotalTmpDirGB; extern float tsTotalTmpDirGB;
extern float tsTotalDataDirGB; extern float tsTotalDataDirGB;
extern float tsAvailLogDirGB; extern float tsAvailLogDirGB;
extern float tsAvailTmpDirectorySpace; extern float tsAvailTmpDirectorySpace;
extern float tsAvailDataDirGB; extern float tsAvailDataDirGB;
extern float tsUsedDataDirGB; extern float tsUsedDataDirGB;
extern float tsMinimalLogDirGB; extern float tsMinimalLogDirGB;
extern float tsReservedTmpDirectorySpace; extern float tsReservedTmpDirectorySpace;
extern float tsMinimalDataDirGB; extern float tsMinimalDataDirGB;
extern int32_t tsTotalMemoryMB; extern int32_t tsTotalMemoryMB;
extern uint32_t tsVersion; extern uint32_t tsVersion;
// build info // build info
...@@ -196,43 +198,44 @@ extern char gitinfoOfInternal[]; ...@@ -196,43 +198,44 @@ extern char gitinfoOfInternal[];
extern char buildinfo[]; extern char buildinfo[];
// log // log
extern int8_t tsAsyncLog; extern int8_t tsAsyncLog;
extern int32_t tsNumOfLogLines; extern int32_t tsNumOfLogLines;
extern int32_t tsLogKeepDays; extern int32_t tsLogKeepDays;
extern int32_t dDebugFlag; extern int32_t dDebugFlag;
extern int32_t vDebugFlag; extern int32_t vDebugFlag;
extern int32_t mDebugFlag; extern int32_t mDebugFlag;
extern uint32_t cDebugFlag; extern uint32_t cDebugFlag;
extern int32_t jniDebugFlag; extern int32_t jniDebugFlag;
extern int32_t tmrDebugFlag; extern int32_t tmrDebugFlag;
extern int32_t sdbDebugFlag; extern int32_t sdbDebugFlag;
extern int32_t httpDebugFlag; extern int32_t httpDebugFlag;
extern int32_t mqttDebugFlag; extern int32_t mqttDebugFlag;
extern int32_t monDebugFlag; extern int32_t monDebugFlag;
extern int32_t uDebugFlag; extern int32_t uDebugFlag;
extern int32_t rpcDebugFlag; extern int32_t rpcDebugFlag;
extern int32_t odbcDebugFlag; extern int32_t odbcDebugFlag;
extern uint32_t qDebugFlag; extern uint32_t qDebugFlag;
extern int32_t wDebugFlag; extern int32_t wDebugFlag;
extern int32_t cqDebugFlag; extern int32_t cqDebugFlag;
extern int32_t debugFlag; extern int32_t debugFlag;
extern int8_t tsClientMerge; extern int8_t tsClientMerge;
#ifdef TD_TSZ #ifdef TD_TSZ
// lossy // lossy
extern char lossyColumns[]; extern char lossyColumns[];
extern double fPrecision; extern double fPrecision;
extern double dPrecision; extern double dPrecision;
extern uint32_t maxRange; extern uint32_t maxRange;
extern uint32_t curRange; extern uint32_t curRange;
extern char Compressor[]; extern char Compressor[];
#endif #endif
// long query // long query
extern int8_t tsDeadLockKillQuery; extern int8_t tsDeadLockKillQuery;
// schemaless // schemaless
extern char tsDefaultJSONStrType[]; extern char tsDefaultJSONStrType[];
extern char tsSmlChildTableName[];
typedef struct { typedef struct {
......
...@@ -14,18 +14,18 @@ ...@@ -14,18 +14,18 @@
*/ */
#define _DEFAULT_SOURCE #define _DEFAULT_SOURCE
#include "tglobal.h"
#include "monitor.h"
#include "os.h" #include "os.h"
#include "taosdef.h" #include "taosdef.h"
#include "taoserror.h" #include "taoserror.h"
#include "tulog.h" #include "tcompare.h"
#include "tconfig.h" #include "tconfig.h"
#include "tglobal.h"
#include "monitor.h"
#include "tsocket.h"
#include "tutil.h"
#include "tlocale.h" #include "tlocale.h"
#include "tsocket.h"
#include "ttimezone.h" #include "ttimezone.h"
#include "tcompare.h" #include "tulog.h"
#include "tutil.h"
// cluster // cluster
char tsFirst[TSDB_EP_LEN] = {0}; char tsFirst[TSDB_EP_LEN] = {0};
...@@ -49,16 +49,16 @@ int32_t tsDnodeId = 0; ...@@ -49,16 +49,16 @@ int32_t tsDnodeId = 0;
int64_t tsDnodeStartTime = 0; int64_t tsDnodeStartTime = 0;
// common // common
int32_t tsRpcTimer = 300; int32_t tsRpcTimer = 300;
int32_t tsRpcMaxTime = 600; // seconds; int32_t tsRpcMaxTime = 600; // seconds;
int32_t tsRpcForceTcp = 0; //disable this, means query, show command use udp protocol as default int32_t tsRpcForceTcp = 0; // disable this, means query, show command use udp protocol as default
int32_t tsMaxShellConns = 50000; int32_t tsMaxShellConns = 50000;
int32_t tsMaxConnections = 5000; int32_t tsMaxConnections = 5000;
int32_t tsShellActivityTimer = 3; // second int32_t tsShellActivityTimer = 3; // second
float tsNumOfThreadsPerCore = 1.0f; float tsNumOfThreadsPerCore = 1.0f;
int32_t tsNumOfCommitThreads = 4; int32_t tsNumOfCommitThreads = 4;
float tsRatioOfQueryCores = 1.0f; float tsRatioOfQueryCores = 1.0f;
int8_t tsDaylight = 0; int8_t tsDaylight = 0;
char tsTimezone[TSDB_TIMEZONE_LEN] = {0}; char tsTimezone[TSDB_TIMEZONE_LEN] = {0};
char tsLocale[TSDB_LOCALE_LEN] = {0}; char tsLocale[TSDB_LOCALE_LEN] = {0};
char tsCharset[TSDB_LOCALE_LEN] = {0}; // default encode string char tsCharset[TSDB_LOCALE_LEN] = {0}; // default encode string
...@@ -87,7 +87,7 @@ int32_t tsMaxSQLStringLen = TSDB_MAX_ALLOWED_SQL_LEN; ...@@ -87,7 +87,7 @@ int32_t tsMaxSQLStringLen = TSDB_MAX_ALLOWED_SQL_LEN;
int32_t tsMaxWildCardsLen = TSDB_PATTERN_STRING_DEFAULT_LEN; int32_t tsMaxWildCardsLen = TSDB_PATTERN_STRING_DEFAULT_LEN;
int32_t tsMaxRegexStringLen = TSDB_REGEX_STRING_DEFAULT_LEN; int32_t tsMaxRegexStringLen = TSDB_REGEX_STRING_DEFAULT_LEN;
int8_t tsTscEnableRecordSql = 0; int8_t tsTscEnableRecordSql = 0;
// the maximum number of results for projection query on super table that are returned from // the maximum number of results for projection query on super table that are returned from
// one virtual node, to order according to timestamp // one virtual node, to order according to timestamp
...@@ -97,7 +97,7 @@ int32_t tsMaxNumOfOrderedResults = 1000000; ...@@ -97,7 +97,7 @@ int32_t tsMaxNumOfOrderedResults = 1000000;
int32_t tsMinSlidingTime = 10; int32_t tsMinSlidingTime = 10;
// the maxinum number of distict query result // the maxinum number of distict query result
int32_t tsMaxNumOfDistinctResults = 1000 * 10000; int32_t tsMaxNumOfDistinctResults = 1000 * 10000;
// 1 us for interval time range, changed accordingly // 1 us for interval time range, changed accordingly
int32_t tsMinIntervalTime = 1; int32_t tsMinIntervalTime = 1;
...@@ -109,7 +109,7 @@ int32_t tsMaxStreamComputDelay = 20000; ...@@ -109,7 +109,7 @@ int32_t tsMaxStreamComputDelay = 20000;
int32_t tsStreamCompStartDelay = 10000; int32_t tsStreamCompStartDelay = 10000;
// the stream computing delay time after executing failed, change accordingly // the stream computing delay time after executing failed, change accordingly
int32_t tsRetryStreamCompDelay = 10*1000; int32_t tsRetryStreamCompDelay = 10 * 1000;
// The delayed computing ration. 10% of the whole computing time window by default. // The delayed computing ration. 10% of the whole computing time window by default.
float tsStreamComputDelayRatio = 0.1f; float tsStreamComputDelayRatio = 0.1f;
...@@ -128,41 +128,41 @@ int64_t tsQueryBufferSizeBytes = -1; ...@@ -128,41 +128,41 @@ int64_t tsQueryBufferSizeBytes = -1;
int32_t tsRetrieveBlockingModel = 0; int32_t tsRetrieveBlockingModel = 0;
// last_row(*), first(*), last_row(ts, col1, col2) query, the result fields will be the original column name // last_row(*), first(*), last_row(ts, col1, col2) query, the result fields will be the original column name
int8_t tsKeepOriginalColumnName = 0; int8_t tsKeepOriginalColumnName = 0;
// db parameters // db parameters
int32_t tsCacheBlockSize = TSDB_DEFAULT_CACHE_BLOCK_SIZE; int32_t tsCacheBlockSize = TSDB_DEFAULT_CACHE_BLOCK_SIZE;
int32_t tsBlocksPerVnode = TSDB_DEFAULT_TOTAL_BLOCKS; int32_t tsBlocksPerVnode = TSDB_DEFAULT_TOTAL_BLOCKS;
int16_t tsDaysPerFile = TSDB_DEFAULT_DAYS_PER_FILE; int16_t tsDaysPerFile = TSDB_DEFAULT_DAYS_PER_FILE;
int32_t tsDaysToKeep = TSDB_DEFAULT_KEEP; int32_t tsDaysToKeep = TSDB_DEFAULT_KEEP;
int32_t tsMinRowsInFileBlock = TSDB_DEFAULT_MIN_ROW_FBLOCK; int32_t tsMinRowsInFileBlock = TSDB_DEFAULT_MIN_ROW_FBLOCK;
int32_t tsMaxRowsInFileBlock = TSDB_DEFAULT_MAX_ROW_FBLOCK; int32_t tsMaxRowsInFileBlock = TSDB_DEFAULT_MAX_ROW_FBLOCK;
int16_t tsCommitTime = TSDB_DEFAULT_COMMIT_TIME; // seconds int16_t tsCommitTime = TSDB_DEFAULT_COMMIT_TIME; // seconds
int32_t tsTimePrecision = TSDB_DEFAULT_PRECISION; int32_t tsTimePrecision = TSDB_DEFAULT_PRECISION;
int8_t tsCompression = TSDB_DEFAULT_COMP_LEVEL; int8_t tsCompression = TSDB_DEFAULT_COMP_LEVEL;
int8_t tsWAL = TSDB_DEFAULT_WAL_LEVEL; int8_t tsWAL = TSDB_DEFAULT_WAL_LEVEL;
int32_t tsFsyncPeriod = TSDB_DEFAULT_FSYNC_PERIOD; int32_t tsFsyncPeriod = TSDB_DEFAULT_FSYNC_PERIOD;
int32_t tsReplications = TSDB_DEFAULT_DB_REPLICA_OPTION; int32_t tsReplications = TSDB_DEFAULT_DB_REPLICA_OPTION;
int32_t tsQuorum = TSDB_DEFAULT_DB_QUORUM_OPTION; int32_t tsQuorum = TSDB_DEFAULT_DB_QUORUM_OPTION;
int16_t tsPartitons = TSDB_DEFAULT_DB_PARTITON_OPTION; int16_t tsPartitons = TSDB_DEFAULT_DB_PARTITON_OPTION;
int8_t tsUpdate = TSDB_DEFAULT_DB_UPDATE_OPTION; int8_t tsUpdate = TSDB_DEFAULT_DB_UPDATE_OPTION;
int8_t tsCacheLastRow = TSDB_DEFAULT_CACHE_LAST_ROW; int8_t tsCacheLastRow = TSDB_DEFAULT_CACHE_LAST_ROW;
int32_t tsMaxVgroupsPerDb = 0; int32_t tsMaxVgroupsPerDb = 0;
int32_t tsMinTablePerVnode = TSDB_TABLES_STEP; int32_t tsMinTablePerVnode = TSDB_TABLES_STEP;
int32_t tsMaxTablePerVnode = TSDB_DEFAULT_TABLES; int32_t tsMaxTablePerVnode = TSDB_DEFAULT_TABLES;
int32_t tsTableIncStepPerVnode = TSDB_TABLES_STEP; int32_t tsTableIncStepPerVnode = TSDB_TABLES_STEP;
int32_t tsTsdbMetaCompactRatio = TSDB_META_COMPACT_RATIO; int32_t tsTsdbMetaCompactRatio = TSDB_META_COMPACT_RATIO;
// tsdb config // tsdb config
// For backward compatibility // For backward compatibility
bool tsdbForceKeepFile = false; bool tsdbForceKeepFile = false;
bool tsdbForceCompactFile = false; // compact TSDB fileset forcibly bool tsdbForceCompactFile = false; // compact TSDB fileset forcibly
int32_t tsdbWalFlushSize = TSDB_DEFAULT_WAL_FLUSH_SIZE; // MB int32_t tsdbWalFlushSize = TSDB_DEFAULT_WAL_FLUSH_SIZE; // MB
// balance // balance
int8_t tsEnableBalance = 1; int8_t tsEnableBalance = 1;
int8_t tsAlternativeRole = 0; int8_t tsAlternativeRole = 0;
int32_t tsBalanceInterval = 300; // seconds int32_t tsBalanceInterval = 300; // seconds
int32_t tsOfflineThreshold = 86400 * 10; // seconds of 10 days int32_t tsOfflineThreshold = 86400 * 10; // seconds of 10 days
int32_t tsMnodeEqualVnodeNum = 4; int32_t tsMnodeEqualVnodeNum = 4;
int8_t tsEnableFlowCtrl = 1; int8_t tsEnableFlowCtrl = 1;
...@@ -180,15 +180,16 @@ int8_t tsHttpEnableCompress = 1; ...@@ -180,15 +180,16 @@ int8_t tsHttpEnableCompress = 1;
int8_t tsHttpEnableRecordSql = 0; int8_t tsHttpEnableRecordSql = 0;
int8_t tsTelegrafUseFieldNum = 0; int8_t tsTelegrafUseFieldNum = 0;
int8_t tsHttpDbNameMandatory = 0; int8_t tsHttpDbNameMandatory = 0;
int32_t tsHttpKeepAlive = 30000;
// mqtt // mqtt
int8_t tsEnableMqttModule = 0; // not finished yet, not started it by default int8_t tsEnableMqttModule = 0; // not finished yet, not started it by default
char tsMqttHostName[TSDB_MQTT_HOSTNAME_LEN] = "test.mosquitto.org"; char tsMqttHostName[TSDB_MQTT_HOSTNAME_LEN] = "test.mosquitto.org";
char tsMqttPort[TSDB_MQTT_PORT_LEN] = "1883"; char tsMqttPort[TSDB_MQTT_PORT_LEN] = "1883";
char tsMqttUser[TSDB_MQTT_USER_LEN] = {0}; char tsMqttUser[TSDB_MQTT_USER_LEN] = {0};
char tsMqttPass[TSDB_MQTT_PASS_LEN] = {0}; char tsMqttPass[TSDB_MQTT_PASS_LEN] = {0};
char tsMqttClientId[TSDB_MQTT_CLIENT_ID_LEN] = "TDengineMqttSubscriber"; char tsMqttClientId[TSDB_MQTT_CLIENT_ID_LEN] = "TDengineMqttSubscriber";
char tsMqttTopic[TSDB_MQTT_TOPIC_LEN] = "/test"; // # char tsMqttTopic[TSDB_MQTT_TOPIC_LEN] = "/test"; // #
// monitor // monitor
int8_t tsEnableMonitorModule = 1; int8_t tsEnableMonitorModule = 1;
...@@ -197,7 +198,7 @@ char tsInternalPass[] = "secretkey"; ...@@ -197,7 +198,7 @@ char tsInternalPass[] = "secretkey";
int32_t tsMonitorInterval = 30; // seconds int32_t tsMonitorInterval = 30; // seconds
// stream // stream
int8_t tsEnableStream = 1; int8_t tsEnableStream = 1;
// internal // internal
int8_t tsCompactMnodeWal = 0; int8_t tsCompactMnodeWal = 0;
...@@ -213,7 +214,7 @@ char tsDataDir[PATH_MAX] = {0}; ...@@ -213,7 +214,7 @@ char tsDataDir[PATH_MAX] = {0};
char tsScriptDir[PATH_MAX] = {0}; char tsScriptDir[PATH_MAX] = {0};
char tsTempDir[PATH_MAX] = "/tmp/"; char tsTempDir[PATH_MAX] = "/tmp/";
int32_t tsDiskCfgNum = 0; int32_t tsDiskCfgNum = 0;
int32_t tsTopicBianryLen = 16000; int32_t tsTopicBianryLen = 16000;
#ifndef _STORAGE #ifndef _STORAGE
...@@ -231,42 +232,42 @@ SDiskCfg tsDiskCfg[TSDB_MAX_DISKS]; ...@@ -231,42 +232,42 @@ SDiskCfg tsDiskCfg[TSDB_MAX_DISKS];
int64_t tsTickPerDay[] = {86400000L, 86400000000L, 86400000000000L}; int64_t tsTickPerDay[] = {86400000L, 86400000000L, 86400000000000L};
// system info // system info
char tsOsName[10] = "Linux"; char tsOsName[10] = "Linux";
int64_t tsPageSize; int64_t tsPageSize;
int64_t tsOpenMax; int64_t tsOpenMax;
int64_t tsStreamMax; int64_t tsStreamMax;
int32_t tsNumOfCores = 1; int32_t tsNumOfCores = 1;
float tsTotalTmpDirGB = 0; float tsTotalTmpDirGB = 0;
float tsTotalDataDirGB = 0; float tsTotalDataDirGB = 0;
float tsAvailTmpDirectorySpace = 0; float tsAvailTmpDirectorySpace = 0;
float tsAvailDataDirGB = 0; float tsAvailDataDirGB = 0;
float tsUsedDataDirGB = 0; float tsUsedDataDirGB = 0;
float tsReservedTmpDirectorySpace = 1.0f; float tsReservedTmpDirectorySpace = 1.0f;
float tsMinimalDataDirGB = 2.0f; float tsMinimalDataDirGB = 2.0f;
int32_t tsTotalMemoryMB = 0; int32_t tsTotalMemoryMB = 0;
uint32_t tsVersion = 0; uint32_t tsVersion = 0;
// log // log
int32_t tsNumOfLogLines = 10000000; int32_t tsNumOfLogLines = 10000000;
int32_t mDebugFlag = 131; int32_t mDebugFlag = 131;
int32_t sdbDebugFlag = 131; int32_t sdbDebugFlag = 131;
int32_t dDebugFlag = 135; int32_t dDebugFlag = 135;
int32_t vDebugFlag = 135; int32_t vDebugFlag = 135;
uint32_t cDebugFlag = 131; uint32_t cDebugFlag = 131;
int32_t jniDebugFlag = 131; int32_t jniDebugFlag = 131;
int32_t odbcDebugFlag = 131; int32_t odbcDebugFlag = 131;
int32_t httpDebugFlag = 131; int32_t httpDebugFlag = 131;
int32_t mqttDebugFlag = 131; int32_t mqttDebugFlag = 131;
int32_t monDebugFlag = 131; int32_t monDebugFlag = 131;
uint32_t qDebugFlag = 131; uint32_t qDebugFlag = 131;
int32_t rpcDebugFlag = 131; int32_t rpcDebugFlag = 131;
int32_t uDebugFlag = 131; int32_t uDebugFlag = 131;
int32_t debugFlag = 0; int32_t debugFlag = 0;
int32_t sDebugFlag = 135; int32_t sDebugFlag = 135;
int32_t wDebugFlag = 135; int32_t wDebugFlag = 135;
int32_t tsdbDebugFlag = 131; int32_t tsdbDebugFlag = 131;
int32_t cqDebugFlag = 131; int32_t cqDebugFlag = 131;
int32_t fsDebugFlag = 135; int32_t fsDebugFlag = 135;
int8_t tsClientMerge = 0; int8_t tsClientMerge = 0;
...@@ -274,13 +275,14 @@ int8_t tsClientMerge = 0; ...@@ -274,13 +275,14 @@ int8_t tsClientMerge = 0;
// //
// lossy compress 6 // lossy compress 6
// //
char lossyColumns[32] = ""; // "float|double" means all float and double columns can be lossy compressed. set empty can close lossy compress. char lossyColumns[32] = ""; // "float|double" means all float and double columns can be lossy compressed. set empty
// below option can take effect when tsLossyColumns not empty // can close lossy compress.
double fPrecision = 1E-8; // float column precision // below option can take effect when tsLossyColumns not empty
double dPrecision = 1E-16; // double column precision double fPrecision = 1E-8; // float column precision
uint32_t maxRange = 500; // max range double dPrecision = 1E-16; // double column precision
uint32_t curRange = 100; // range uint32_t maxRange = 500; // max range
char Compressor[32] = "ZSTD_COMPRESSOR"; // ZSTD_COMPRESSOR or GZIP_COMPRESSOR uint32_t curRange = 100; // range
char Compressor[32] = "ZSTD_COMPRESSOR"; // ZSTD_COMPRESSOR or GZIP_COMPRESSOR
#endif #endif
// long query death-lock // long query death-lock
...@@ -288,6 +290,7 @@ int8_t tsDeadLockKillQuery = 0; ...@@ -288,6 +290,7 @@ int8_t tsDeadLockKillQuery = 0;
// default JSON string type // default JSON string type
char tsDefaultJSONStrType[7] = "binary"; char tsDefaultJSONStrType[7] = "binary";
char tsSmlChildTableName[TSDB_TABLE_NAME_LEN] = "";  // user-defined child table name can be specified in the tag value; if left empty, the system generates the table name from an MD5 hash.
int32_t (*monStartSystemFp)() = NULL; int32_t (*monStartSystemFp)() = NULL;
void (*monStopSystemFp)() = NULL; void (*monStopSystemFp)() = NULL;
...@@ -298,7 +301,7 @@ char *qtypeStr[] = {"rpc", "fwd", "wal", "cq", "query"}; ...@@ -298,7 +301,7 @@ char *qtypeStr[] = {"rpc", "fwd", "wal", "cq", "query"};
static pthread_once_t tsInitGlobalCfgOnce = PTHREAD_ONCE_INIT; static pthread_once_t tsInitGlobalCfgOnce = PTHREAD_ONCE_INIT;
void taosSetAllDebugFlag() { void taosSetAllDebugFlag() {
if (debugFlag != 0) { if (debugFlag != 0) {
mDebugFlag = debugFlag; mDebugFlag = debugFlag;
sdbDebugFlag = debugFlag; sdbDebugFlag = debugFlag;
dDebugFlag = debugFlag; dDebugFlag = debugFlag;
...@@ -309,7 +312,7 @@ void taosSetAllDebugFlag() { ...@@ -309,7 +312,7 @@ void taosSetAllDebugFlag() {
httpDebugFlag = debugFlag; httpDebugFlag = debugFlag;
mqttDebugFlag = debugFlag; mqttDebugFlag = debugFlag;
monDebugFlag = debugFlag; monDebugFlag = debugFlag;
qDebugFlag = debugFlag; qDebugFlag = debugFlag;
rpcDebugFlag = debugFlag; rpcDebugFlag = debugFlag;
uDebugFlag = debugFlag; uDebugFlag = debugFlag;
sDebugFlag = debugFlag; sDebugFlag = debugFlag;
...@@ -321,12 +324,13 @@ void taosSetAllDebugFlag() { ...@@ -321,12 +324,13 @@ void taosSetAllDebugFlag() {
} }
bool taosCfgDynamicOptions(char *msg) { bool taosCfgDynamicOptions(char *msg) {
char *option, *value; char *option, *value;
int32_t olen, vlen; int32_t olen, vlen;
int32_t vint = 0; int32_t vint = 0;
paGetToken(msg, &option, &olen); paGetToken(msg, &option, &olen);
if (olen == 0) return false;; if (olen == 0) return false;
;
paGetToken(option + olen + 1, &value, &vlen); paGetToken(option + olen + 1, &value, &vlen);
if (vlen == 0) if (vlen == 0)
...@@ -339,9 +343,9 @@ bool taosCfgDynamicOptions(char *msg) { ...@@ -339,9 +343,9 @@ bool taosCfgDynamicOptions(char *msg) {
for (int32_t i = 0; i < tsGlobalConfigNum; ++i) { for (int32_t i = 0; i < tsGlobalConfigNum; ++i) {
SGlobalCfg *cfg = tsGlobalConfig + i; SGlobalCfg *cfg = tsGlobalConfig + i;
//if (!(cfg->cfgType & TSDB_CFG_CTYPE_B_LOG)) continue; // if (!(cfg->cfgType & TSDB_CFG_CTYPE_B_LOG)) continue;
if (cfg->valType != TAOS_CFG_VTYPE_INT32 && cfg->valType != TAOS_CFG_VTYPE_INT8) continue; if (cfg->valType != TAOS_CFG_VTYPE_INT32 && cfg->valType != TAOS_CFG_VTYPE_INT8) continue;
int32_t cfgLen = (int32_t)strlen(cfg->option); int32_t cfgLen = (int32_t)strlen(cfg->option);
if (cfgLen != olen) continue; if (cfgLen != olen) continue;
if (strncasecmp(option, cfg->option, olen) != 0) continue; if (strncasecmp(option, cfg->option, olen) != 0) continue;
...@@ -370,7 +374,7 @@ bool taosCfgDynamicOptions(char *msg) { ...@@ -370,7 +374,7 @@ bool taosCfgDynamicOptions(char *msg) {
return true; return true;
} }
if (strncasecmp(cfg->option, "debugFlag", olen) == 0) { if (strncasecmp(cfg->option, "debugFlag", olen) == 0) {
taosSetAllDebugFlag(); taosSetAllDebugFlag();
} }
return true; return true;
} }
...@@ -427,7 +431,7 @@ static void taosCheckDataDirCfg() { ...@@ -427,7 +431,7 @@ static void taosCheckDataDirCfg() {
} }
static int32_t taosCheckTmpDir(void) { static int32_t taosCheckTmpDir(void) {
if (strlen(tsTempDir) <= 0){ if (strlen(tsTempDir) <= 0) {
uError("tempDir is not set"); uError("tempDir is not set");
return -1; return -1;
} }
...@@ -448,7 +452,7 @@ static void doInitGlobalConfig(void) { ...@@ -448,7 +452,7 @@ static void doInitGlobalConfig(void) {
srand(taosSafeRand()); srand(taosSafeRand());
SGlobalCfg cfg = {0}; SGlobalCfg cfg = {0};
// ip address // ip address
cfg.option = "firstEp"; cfg.option = "firstEp";
cfg.ptr = tsFirst; cfg.ptr = tsFirst;
...@@ -577,12 +581,12 @@ static void doInitGlobalConfig(void) { ...@@ -577,12 +581,12 @@ static void doInitGlobalConfig(void) {
cfg.ptr = &tsMaxNumOfDistinctResults; cfg.ptr = &tsMaxNumOfDistinctResults;
cfg.valType = TAOS_CFG_VTYPE_INT32; cfg.valType = TAOS_CFG_VTYPE_INT32;
cfg.cfgType = TSDB_CFG_CTYPE_B_CONFIG | TSDB_CFG_CTYPE_B_SHOW | TSDB_CFG_CTYPE_B_CLIENT; cfg.cfgType = TSDB_CFG_CTYPE_B_CONFIG | TSDB_CFG_CTYPE_B_SHOW | TSDB_CFG_CTYPE_B_CLIENT;
cfg.minValue = 10*10000; cfg.minValue = 10 * 10000;
cfg.maxValue = 10000*10000; cfg.maxValue = 10000 * 10000;
cfg.ptrLength = 0; cfg.ptrLength = 0;
cfg.unitType = TAOS_CFG_UTYPE_NONE; cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg); taosInitConfigOption(cfg);
cfg.option = "numOfMnodes"; cfg.option = "numOfMnodes";
cfg.ptr = &tsNumOfMnodes; cfg.ptr = &tsNumOfMnodes;
cfg.valType = TAOS_CFG_VTYPE_INT32; cfg.valType = TAOS_CFG_VTYPE_INT32;
...@@ -1189,7 +1193,7 @@ static void doInitGlobalConfig(void) { ...@@ -1189,7 +1193,7 @@ static void doInitGlobalConfig(void) {
cfg.unitType = TAOS_CFG_UTYPE_NONE; cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg); taosInitConfigOption(cfg);
// module configs // module configs
cfg.option = "flowctrl"; cfg.option = "flowctrl";
cfg.ptr = &tsEnableFlowCtrl; cfg.ptr = &tsEnableFlowCtrl;
cfg.valType = TAOS_CFG_VTYPE_INT8; cfg.valType = TAOS_CFG_VTYPE_INT8;
...@@ -1320,6 +1324,17 @@ static void doInitGlobalConfig(void) { ...@@ -1320,6 +1324,17 @@ static void doInitGlobalConfig(void) {
cfg.unitType = TAOS_CFG_UTYPE_NONE; cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg); taosInitConfigOption(cfg);
// pContext in cache
cfg.option = "httpKeepAlive";
cfg.ptr = &tsHttpKeepAlive;
cfg.valType = TAOS_CFG_VTYPE_INT32;
cfg.cfgType = TSDB_CFG_CTYPE_B_CONFIG;
cfg.minValue = 3000;
cfg.maxValue = 3600000;
cfg.ptrLength = 0;
cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg);
// debug flag // debug flag
cfg.option = "numOfLogLines"; cfg.option = "numOfLogLines";
cfg.ptr = &tsNumOfLogLines; cfg.ptr = &tsNumOfLogLines;
...@@ -1401,7 +1416,6 @@ static void doInitGlobalConfig(void) { ...@@ -1401,7 +1416,6 @@ static void doInitGlobalConfig(void) {
cfg.unitType = TAOS_CFG_UTYPE_NONE; cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg); taosInitConfigOption(cfg);
cfg.option = "sdbDebugFlag"; cfg.option = "sdbDebugFlag";
cfg.ptr = &sdbDebugFlag; cfg.ptr = &sdbDebugFlag;
cfg.valType = TAOS_CFG_VTYPE_INT32; cfg.valType = TAOS_CFG_VTYPE_INT32;
...@@ -1633,7 +1647,7 @@ static void doInitGlobalConfig(void) { ...@@ -1633,7 +1647,7 @@ static void doInitGlobalConfig(void) {
cfg.unitType = TAOS_CFG_UTYPE_NONE; cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg); taosInitConfigOption(cfg);
// enable kill long query // enable kill long query
cfg.option = "deadLockKillQuery"; cfg.option = "deadLockKillQuery";
cfg.ptr = &tsDeadLockKillQuery; cfg.ptr = &tsDeadLockKillQuery;
cfg.valType = TAOS_CFG_VTYPE_INT8; cfg.valType = TAOS_CFG_VTYPE_INT8;
...@@ -1665,6 +1679,17 @@ static void doInitGlobalConfig(void) { ...@@ -1665,6 +1679,17 @@ static void doInitGlobalConfig(void) {
cfg.unitType = TAOS_CFG_UTYPE_NONE; cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg); taosInitConfigOption(cfg);
// child table name specified in schemaless tag value
cfg.option = "smlChildTableName";
cfg.ptr = tsSmlChildTableName;
cfg.valType = TAOS_CFG_VTYPE_STRING;
cfg.cfgType = TSDB_CFG_CTYPE_B_CONFIG | TSDB_CFG_CTYPE_B_SHOW | TSDB_CFG_CTYPE_B_CLIENT;
cfg.minValue = 0;
cfg.maxValue = 0;
cfg.ptrLength = tListLen(tsSmlChildTableName);
cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg);
// flush vnode wal file if walSize > walFlushSize and walSize > cache*0.5*blocks // flush vnode wal file if walSize > walFlushSize and walSize > cache*0.5*blocks
cfg.option = "walFlushSize"; cfg.option = "walFlushSize";
cfg.ptr = &tsdbWalFlushSize; cfg.ptr = &tsdbWalFlushSize;
...@@ -1731,21 +1756,18 @@ static void doInitGlobalConfig(void) { ...@@ -1731,21 +1756,18 @@ static void doInitGlobalConfig(void) {
#else #else
assert(tsGlobalConfigNum < TSDB_CFG_MAX_NUM); assert(tsGlobalConfigNum < TSDB_CFG_MAX_NUM);
#endif #endif
} }
void taosInitGlobalCfg() { void taosInitGlobalCfg() { pthread_once(&tsInitGlobalCfgOnce, doInitGlobalConfig); }
pthread_once(&tsInitGlobalCfgOnce, doInitGlobalConfig);
}
int32_t taosCheckGlobalCfg() { int32_t taosCheckGlobalCfg() {
char fqdn[TSDB_FQDN_LEN]; char fqdn[TSDB_FQDN_LEN];
uint16_t port; uint16_t port;
if (debugFlag & DEBUG_TRACE || debugFlag & DEBUG_DEBUG || debugFlag & DEBUG_DUMP) { if (debugFlag & DEBUG_TRACE || debugFlag & DEBUG_DEBUG || debugFlag & DEBUG_DUMP) {
taosSetAllDebugFlag(); taosSetAllDebugFlag();
} }
if (tsLocalFqdn[0] == 0) { if (tsLocalFqdn[0] == 0) {
taosGetFqdn(tsLocalFqdn); taosGetFqdn(tsLocalFqdn);
} }
...@@ -1772,7 +1794,7 @@ int32_t taosCheckGlobalCfg() { ...@@ -1772,7 +1794,7 @@ int32_t taosCheckGlobalCfg() {
if (taosCheckTmpDir()) { if (taosCheckTmpDir()) {
return -1; return -1;
} }
taosGetSystemInfo(); taosGetSystemInfo();
tsSetLocale(); tsSetLocale();
...@@ -1794,8 +1816,8 @@ int32_t taosCheckGlobalCfg() { ...@@ -1794,8 +1816,8 @@ int32_t taosCheckGlobalCfg() {
} }
if (tsMaxTablePerVnode < tsMinTablePerVnode) { if (tsMaxTablePerVnode < tsMinTablePerVnode) {
uError("maxTablesPerVnode(%d) < minTablesPerVnode(%d), reset to minTablesPerVnode(%d)", uError("maxTablesPerVnode(%d) < minTablesPerVnode(%d), reset to minTablesPerVnode(%d)", tsMaxTablePerVnode,
tsMaxTablePerVnode, tsMinTablePerVnode, tsMinTablePerVnode); tsMinTablePerVnode, tsMinTablePerVnode);
tsMaxTablePerVnode = tsMinTablePerVnode; tsMaxTablePerVnode = tsMinTablePerVnode;
} }
...@@ -1817,7 +1839,7 @@ int32_t taosCheckGlobalCfg() { ...@@ -1817,7 +1839,7 @@ int32_t taosCheckGlobalCfg() {
} }
tsDnodeShellPort = tsServerPort + TSDB_PORT_DNODESHELL; // udp[6035-6039] tcp[6035] tsDnodeShellPort = tsServerPort + TSDB_PORT_DNODESHELL; // udp[6035-6039] tcp[6035]
tsDnodeDnodePort = tsServerPort + TSDB_PORT_DNODEDNODE; // udp/tcp tsDnodeDnodePort = tsServerPort + TSDB_PORT_DNODEDNODE; // udp/tcp
tsSyncPort = tsServerPort + TSDB_PORT_SYNC; tsSyncPort = tsServerPort + TSDB_PORT_SYNC;
tsHttpPort = tsServerPort + TSDB_PORT_HTTP; tsHttpPort = tsServerPort + TSDB_PORT_HTTP;
...@@ -1837,17 +1859,17 @@ int taosGetFqdnPortFromEp(const char *ep, char *fqdn, uint16_t *port) { ...@@ -1837,17 +1859,17 @@ int taosGetFqdnPortFromEp(const char *ep, char *fqdn, uint16_t *port) {
strcpy(fqdn, ep); strcpy(fqdn, ep);
char *temp = strchr(fqdn, ':'); char *temp = strchr(fqdn, ':');
if (temp) { if (temp) {
*temp = 0; *temp = 0;
*port = atoi(temp+1); *port = atoi(temp + 1);
} }
if (*port == 0) { if (*port == 0) {
*port = tsServerPort; *port = tsServerPort;
return -1; return -1;
} }
return 0; return 0;
} }
/* /*
......
Subproject commit 9ae793ad2d567eb11d10627b65698f612542e988 Subproject commit 792ef7c3036f15068796e09883d3f4d47a038fe2
...@@ -8,7 +8,7 @@ IF (TD_MVN_INSTALLED) ...@@ -8,7 +8,7 @@ IF (TD_MVN_INSTALLED)
ADD_CUSTOM_COMMAND(OUTPUT ${JDBC_CMD_NAME} ADD_CUSTOM_COMMAND(OUTPUT ${JDBC_CMD_NAME}
POST_BUILD POST_BUILD
COMMAND mvn -Dmaven.test.skip=true install -f ${CMAKE_CURRENT_SOURCE_DIR}/pom.xml COMMAND mvn -Dmaven.test.skip=true install -f ${CMAKE_CURRENT_SOURCE_DIR}/pom.xml
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/target/taos-jdbcdriver-2.0.35-dist.jar ${LIBRARY_OUTPUT_PATH} COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/target/taos-jdbcdriver-2.0.36-dist.jar ${LIBRARY_OUTPUT_PATH}
COMMAND mvn -Dmaven.test.skip=true clean -f ${CMAKE_CURRENT_SOURCE_DIR}/pom.xml COMMAND mvn -Dmaven.test.skip=true clean -f ${CMAKE_CURRENT_SOURCE_DIR}/pom.xml
COMMENT "build jdbc driver") COMMENT "build jdbc driver")
ADD_CUSTOM_TARGET(${JDBC_TARGET_NAME} ALL WORKING_DIRECTORY ${EXECUTABLE_OUTPUT_PATH} DEPENDS ${JDBC_CMD_NAME}) ADD_CUSTOM_TARGET(${JDBC_TARGET_NAME} ALL WORKING_DIRECTORY ${EXECUTABLE_OUTPUT_PATH} DEPENDS ${JDBC_CMD_NAME})
......
...@@ -5,7 +5,7 @@ ...@@ -5,7 +5,7 @@
<groupId>com.taosdata.jdbc</groupId> <groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId> <artifactId>taos-jdbcdriver</artifactId>
<version>2.0.35</version> <version>2.0.36</version>
<packaging>jar</packaging> <packaging>jar</packaging>
<name>JDBCDriver</name> <name>JDBCDriver</name>
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<groupId>com.taosdata.jdbc</groupId> <groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId> <artifactId>taos-jdbcdriver</artifactId>
<version>2.0.35</version> <version>2.0.36</version>
<packaging>jar</packaging> <packaging>jar</packaging>
<name>JDBCDriver</name> <name>JDBCDriver</name>
<url>https://github.com/taosdata/TDengine/tree/master/src/connector/jdbc</url> <url>https://github.com/taosdata/TDengine/tree/master/src/connector/jdbc</url>
...@@ -58,6 +58,13 @@ ...@@ -58,6 +58,13 @@
<version>4.13.1</version> <version>4.13.1</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.2</version>
<scope>test</scope>
</dependency>
</dependencies> </dependencies>
<build> <build>
...@@ -70,6 +77,18 @@ ...@@ -70,6 +77,18 @@
</resource> </resource>
</resources> </resources>
<plugins> <plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId> <artifactId>maven-assembly-plugin</artifactId>
......
...@@ -8,16 +8,35 @@ import java.sql.Connection; ...@@ -8,16 +8,35 @@ import java.sql.Connection;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.Statement; import java.sql.Statement;
/**
* @author huolibo@qq.com
* @version v1.0.0
* @JDK: 1.8
* @description: this class is an extension of {@link Statement}. Usage:
* Statement statement = conn.createStatement();
* SchemalessStatement schemalessStatement = new SchemalessStatement(statement);
* schemalessStatement.execute(sql);
* schemalessStatement.executeSchemaless(lines, SchemalessProtocolType, SchemalessTimestampType);
* @since 2021-11-03 17:10
*/
public class SchemalessStatement extends AbstractStatementWrapper { public class SchemalessStatement extends AbstractStatementWrapper {
public SchemalessStatement(Statement statement) { public SchemalessStatement(Statement statement) {
super(statement); super(statement);
} }
public void executeSchemaless(String[] strings, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException { /**
* batch insert schemaless lines
*
* @param lines schemaless data
* @param protocolType schemaless type {@link SchemalessProtocolType}
* @param timestampType Time precision {@link SchemalessTimestampType}
* @throws SQLException execute insert exception
*/
public void executeSchemaless(String[] lines, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException {
Connection connection = this.getConnection(); Connection connection = this.getConnection();
if (connection instanceof TSDBConnection) { if (connection instanceof TSDBConnection) {
TSDBConnection tsdbConnection = (TSDBConnection) connection; TSDBConnection tsdbConnection = (TSDBConnection) connection;
tsdbConnection.getConnector().insertLines(strings, protocolType, timestampType); tsdbConnection.getConnector().insertLines(lines, protocolType, timestampType);
} else if (connection instanceof RestfulConnection) { } else if (connection instanceof RestfulConnection) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD, "restful connection is not supported currently"); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD, "restful connection is not supported currently");
} else { } else {
...@@ -25,7 +44,15 @@ public class SchemalessStatement extends AbstractStatementWrapper { ...@@ -25,7 +44,15 @@ public class SchemalessStatement extends AbstractStatementWrapper {
} }
} }
public void executeSchemaless(String sql, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException { /**
executeSchemaless(new String[]{sql}, protocolType, timestampType); * insert a single schemaless line
*
* @param line schemaless line
* @param protocolType schemaless type {@link SchemalessProtocolType}
* @param timestampType Time precision {@link SchemalessTimestampType}
* @throws SQLException execute insert exception
*/
public void executeSchemaless(String line, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException {
executeSchemaless(new String[]{line}, protocolType, timestampType);
} }
} }
...@@ -135,7 +135,6 @@ public class TSDBDriver extends AbstractDriver { ...@@ -135,7 +135,6 @@ public class TSDBDriver extends AbstractDriver {
TSDBJNIConnector.init(props); TSDBJNIConnector.init(props);
return new TSDBConnection(props, this.dbMetaData); return new TSDBConnection(props, this.dbMetaData);
} catch (SQLWarning sqlWarning) { } catch (SQLWarning sqlWarning) {
sqlWarning.printStackTrace();
return new TSDBConnection(props, this.dbMetaData); return new TSDBConnection(props, this.dbMetaData);
} catch (SQLException sqlEx) { } catch (SQLException sqlEx) {
throw sqlEx; throw sqlEx;
......
...@@ -36,15 +36,15 @@ import java.util.regex.Pattern; ...@@ -36,15 +36,15 @@ import java.util.regex.Pattern;
* compatibility needs. * compatibility needs.
*/ */
public class TSDBPreparedStatement extends TSDBStatement implements PreparedStatement { public class TSDBPreparedStatement extends TSDBStatement implements PreparedStatement {
// for jdbc preparedStatement interface
private String rawSql; private String rawSql;
private Object[] parameters; private Object[] parameters;
// for parameter binding
private ArrayList<ColumnInfo> colData; private long nativeStmtHandle = 0;
private String tableName;
private ArrayList<TableTagInfo> tableTags; private ArrayList<TableTagInfo> tableTags;
private int tagValueLength; private int tagValueLength;
private ArrayList<ColumnInfo> colData;
private String tableName;
private long nativeStmtHandle = 0;
TSDBPreparedStatement(TSDBConnection connection, String sql) { TSDBPreparedStatement(TSDBConnection connection, String sql) {
super(connection); super(connection);
...@@ -72,10 +72,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat ...@@ -72,10 +72,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
preprocessSql(); preprocessSql();
} }
/*
*
*/
/** /**
* Some of the SQLs sent by other popular frameworks or tools like Spark, contains syntax that cannot be parsed by * Some of the SQLs sent by other popular frameworks or tools like Spark, contains syntax that cannot be parsed by
* the TDengine client. Thus, some simple parsers/filters are intentionally added in this JDBC implementation in * the TDengine client. Thus, some simple parsers/filters are intentionally added in this JDBC implementation in
...@@ -250,13 +246,10 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat ...@@ -250,13 +246,10 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
@Override @Override
public void setObject(int parameterIndex, Object x) throws SQLException { public void setObject(int parameterIndex, Object x) throws SQLException {
if (isClosed()) { if (isClosed())
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
} if (parameterIndex < 1 || parameterIndex > parameters.length)
if (parameterIndex < 1 && parameterIndex >= parameters.length) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_PARAMETER_INDEX_OUT_RANGE); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_PARAMETER_INDEX_OUT_RANGE);
}
parameters[parameterIndex - 1] = x; parameters[parameterIndex - 1] = x;
} }
...@@ -335,7 +328,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat ...@@ -335,7 +328,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException {
if (isClosed()) if (isClosed())
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
// TODO:
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD);
} }
...@@ -419,7 +411,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat ...@@ -419,7 +411,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException {
if (isClosed()) if (isClosed())
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
//TODO:
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD);
} }
...@@ -477,7 +468,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat ...@@ -477,7 +468,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
if (isClosed()) if (isClosed())
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD);
} }
@Override @Override
...@@ -496,7 +486,7 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat ...@@ -496,7 +486,7 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
/////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////
// NOTE: the following APIs are not JDBC compatible // NOTE: the following APIs are not JDBC compatible
// set the bind table name // parameter binding
private static class ColumnInfo { private static class ColumnInfo {
@SuppressWarnings("rawtypes") @SuppressWarnings("rawtypes")
private ArrayList data; private ArrayList data;
...@@ -539,7 +529,11 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat ...@@ -539,7 +529,11 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
} }
} }
public void setTableName(String name) { public void setTableName(String name) throws SQLException {
if (this.tableName != null) {
this.columnDataExecuteBatch();
this.columnDataClearBatchInternal();
}
this.tableName = name; this.tableName = name;
} }
...@@ -960,17 +954,22 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat ...@@ -960,17 +954,22 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
public void columnDataExecuteBatch() throws SQLException { public void columnDataExecuteBatch() throws SQLException {
TSDBJNIConnector connector = ((TSDBConnection) this.getConnection()).getConnector(); TSDBJNIConnector connector = ((TSDBConnection) this.getConnection()).getConnector();
connector.executeBatch(this.nativeStmtHandle); connector.executeBatch(this.nativeStmtHandle);
this.columnDataClearBatch(); this.columnDataClearBatchInternal();
} }
@Deprecated
public void columnDataClearBatch() { public void columnDataClearBatch() {
columnDataClearBatchInternal();
}
private void columnDataClearBatchInternal() {
int size = this.colData.size(); int size = this.colData.size();
this.colData.clear(); this.colData.clear();
this.colData.addAll(Collections.nCopies(size, null)); this.colData.addAll(Collections.nCopies(size, null));
this.tableName = null; // clear the table name this.tableName = null; // clear the table name
} }
public void columnDataCloseBatch() throws SQLException { public void columnDataCloseBatch() throws SQLException {
TSDBJNIConnector connector = ((TSDBConnection) this.getConnection()).getConnector(); TSDBJNIConnector connector = ((TSDBConnection) this.getConnection()).getConnector();
connector.closeBatch(this.nativeStmtHandle); connector.closeBatch(this.nativeStmtHandle);
...@@ -978,4 +977,11 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat ...@@ -978,4 +977,11 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
this.nativeStmtHandle = 0L; this.nativeStmtHandle = 0L;
this.tableName = null; this.tableName = null;
} }
@Override
public void close() throws SQLException {
this.columnDataClearBatchInternal();
this.columnDataCloseBatch();
super.close();
}
} }
package com.taosdata.jdbc.enums; package com.taosdata.jdbc.enums;
import java.util.Arrays;
public enum SchemalessProtocolType { public enum SchemalessProtocolType {
UNKNOWN, UNKNOWN,
LINE, LINE,
...@@ -7,4 +9,10 @@ public enum SchemalessProtocolType { ...@@ -7,4 +9,10 @@ public enum SchemalessProtocolType {
JSON, JSON,
; ;
public static SchemalessProtocolType parse(String type) {
return Arrays.stream(SchemalessProtocolType.values())
.filter(protocol -> type.equalsIgnoreCase(protocol.name()))
.findFirst().orElse(UNKNOWN);
}
} }
package com.taosdata.jdbc.enums; package com.taosdata.jdbc.enums;
public enum SchemalessTimestampType { public enum SchemalessTimestampType {
// Let the database decide
NOT_CONFIGURED, NOT_CONFIGURED,
HOURS, HOURS,
MINUTES, MINUTES,
......
...@@ -50,9 +50,13 @@ public class RestfulDriver extends AbstractDriver { ...@@ -50,9 +50,13 @@ public class RestfulDriver extends AbstractDriver {
String password = URLEncoder.encode(props.getProperty(TSDBDriver.PROPERTY_KEY_PASSWORD), StandardCharsets.UTF_8.displayName()); String password = URLEncoder.encode(props.getProperty(TSDBDriver.PROPERTY_KEY_PASSWORD), StandardCharsets.UTF_8.displayName());
loginUrl = "http://" + props.getProperty(TSDBDriver.PROPERTY_KEY_HOST) + ":" + props.getProperty(TSDBDriver.PROPERTY_KEY_PORT) + "/rest/login/" + user + "/" + password + ""; loginUrl = "http://" + props.getProperty(TSDBDriver.PROPERTY_KEY_HOST) + ":" + props.getProperty(TSDBDriver.PROPERTY_KEY_PORT) + "/rest/login/" + user + "/" + password + "";
} catch (UnsupportedEncodingException e) { } catch (UnsupportedEncodingException e) {
e.printStackTrace(); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_INVALID_VARIABLE, "unsupported UTF-8 encoding, user: " + props.getProperty(TSDBDriver.PROPERTY_KEY_USER) + ", password: " + props.getProperty(TSDBDriver.PROPERTY_KEY_PASSWORD));
} }
int poolSize = Integer.valueOf(props.getProperty("httpPoolSize", HttpClientPoolUtil.DEFAULT_MAX_PER_ROUTE));
boolean keepAlive = Boolean.valueOf(props.getProperty("httpKeepAlive", HttpClientPoolUtil.DEFAULT_HTTP_KEEP_ALIVE));
HttpClientPoolUtil.init(poolSize, keepAlive);
String result = HttpClientPoolUtil.execute(loginUrl); String result = HttpClientPoolUtil.execute(loginUrl);
JSONObject jsonResult = JSON.parseObject(result); JSONObject jsonResult = JSON.parseObject(result);
String status = jsonResult.getString("status"); String status = jsonResult.getString("status");
......
...@@ -5,12 +5,11 @@ import com.taosdata.jdbc.TSDBErrorNumbers; ...@@ -5,12 +5,11 @@ import com.taosdata.jdbc.TSDBErrorNumbers;
import org.apache.http.HeaderElement; import org.apache.http.HeaderElement;
import org.apache.http.HeaderElementIterator; import org.apache.http.HeaderElementIterator;
import org.apache.http.HttpEntity; import org.apache.http.HttpEntity;
import org.apache.http.NoHttpResponseException;
import org.apache.http.client.ClientProtocolException; import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.config.RequestConfig; import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.*; import org.apache.http.client.methods.*;
import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.ConnectionKeepAliveStrategy; import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.entity.StringEntity; import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.CloseableHttpClient;
...@@ -21,21 +20,20 @@ import org.apache.http.protocol.HTTP; ...@@ -21,21 +20,20 @@ import org.apache.http.protocol.HTTP;
import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpContext;
import org.apache.http.util.EntityUtils; import org.apache.http.util.EntityUtils;
import javax.net.ssl.SSLException;
import java.io.IOException; import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.concurrent.TimeUnit;
public class HttpClientPoolUtil { public class HttpClientPoolUtil {
private static final String DEFAULT_CONTENT_TYPE = "application/json"; private static final String DEFAULT_CONTENT_TYPE = "application/json";
private static final int DEFAULT_MAX_RETRY_COUNT = 5; private static final int DEFAULT_MAX_RETRY_COUNT = 5;
private static final int DEFAULT_MAX_TOTAL = 50; public static final String DEFAULT_HTTP_KEEP_ALIVE = "true";
private static final int DEFAULT_MAX_PER_ROUTE = 5; public static final String DEFAULT_MAX_PER_ROUTE = "20";
private static final int DEFAULT_HTTP_KEEP_TIME = -1; private static final int DEFAULT_HTTP_KEEP_TIME = -1;
private static String isKeepAlive;
private static final ConnectionKeepAliveStrategy DEFAULT_KEEP_ALIVE_STRATEGY = (response, context) -> { private static final ConnectionKeepAliveStrategy DEFAULT_KEEP_ALIVE_STRATEGY = (response, context) -> {
HeaderElementIterator it = new BasicHeaderElementIterator(response.headerIterator(HTTP.CONN_KEEP_ALIVE)); HeaderElementIterator it = new BasicHeaderElementIterator(response.headerIterator(HTTP.CONN_KEEP_ALIVE));
...@@ -55,36 +53,39 @@ public class HttpClientPoolUtil { ...@@ -55,36 +53,39 @@ public class HttpClientPoolUtil {
private static CloseableHttpClient httpClient; private static CloseableHttpClient httpClient;
static { public static void init(Integer connPoolSize, boolean keepAlive) {
if (httpClient == null) {
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(); synchronized (HttpClientPoolUtil.class) {
connectionManager.setMaxTotal(DEFAULT_MAX_TOTAL); if (httpClient == null) {
connectionManager.setDefaultMaxPerRoute(DEFAULT_MAX_PER_ROUTE); isKeepAlive = keepAlive ? HTTP.CONN_KEEP_ALIVE : HTTP.CONN_CLOSE;
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager();
httpClient = HttpClients.custom() connectionManager.setMaxTotal(connPoolSize * 10);
.setKeepAliveStrategy(DEFAULT_KEEP_ALIVE_STRATEGY) connectionManager.setDefaultMaxPerRoute(connPoolSize);
.setConnectionManager(connectionManager) httpClient = HttpClients.custom()
.setRetryHandler((exception, executionCount, httpContext) -> executionCount < DEFAULT_MAX_RETRY_COUNT) .setKeepAliveStrategy(DEFAULT_KEEP_ALIVE_STRATEGY)
.build(); .setConnectionManager(connectionManager)
.setRetryHandler((exception, executionCount, httpContext) -> executionCount < DEFAULT_MAX_RETRY_COUNT)
.build();
}
}
}
} }
/*** execute GET request ***/ /*** execute GET request ***/
public static String execute(String uri) throws SQLException { public static String execute(String uri) throws SQLException {
HttpEntity httpEntity = null; HttpEntity httpEntity = null;
String responseBody = ""; String responseBody = "";
try { HttpRequestBase method = getRequest(uri, HttpGet.METHOD_NAME);
HttpRequestBase method = getRequest(uri, HttpGet.METHOD_NAME); HttpContext context = HttpClientContext.create();
HttpContext context = HttpClientContext.create();
CloseableHttpResponse httpResponse = httpClient.execute(method, context); try (CloseableHttpResponse httpResponse = httpClient.execute(method, context)) {
httpEntity = httpResponse.getEntity(); httpEntity = httpResponse.getEntity();
if (httpEntity != null) { if (httpEntity != null) {
responseBody = EntityUtils.toString(httpEntity, StandardCharsets.UTF_8); responseBody = EntityUtils.toString(httpEntity, StandardCharsets.UTF_8);
} }
} catch (ClientProtocolException e) { } catch (ClientProtocolException e) {
e.printStackTrace();
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_Protocol_Exception, e.getMessage()); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_Protocol_Exception, e.getMessage());
} catch (IOException exception) { } catch (IOException exception) {
exception.printStackTrace();
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_IOException, exception.getMessage()); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_IOException, exception.getMessage());
} finally { } finally {
if (httpEntity != null) { if (httpEntity != null) {
...@@ -94,30 +95,27 @@ public class HttpClientPoolUtil { ...@@ -94,30 +95,27 @@ public class HttpClientPoolUtil {
return responseBody; return responseBody;
} }
/*** execute POST request ***/ /*** execute POST request ***/
public static String execute(String uri, String data, String token) throws SQLException { public static String execute(String uri, String data, String token) throws SQLException {
HttpEntityEnclosingRequestBase method = (HttpEntityEnclosingRequestBase) getRequest(uri, HttpPost.METHOD_NAME);
method.setHeader(HTTP.CONTENT_TYPE, "text/plain");
method.setHeader(HTTP.CONN_DIRECTIVE, isKeepAlive);
method.setHeader("Authorization", "Taosd " + token);
method.setEntity(new StringEntity(data, StandardCharsets.UTF_8));
HttpContext context = HttpClientContext.create();
HttpEntity httpEntity = null; HttpEntity httpEntity = null;
String responseBody = ""; String responseBody = "";
try { try (CloseableHttpResponse httpResponse = httpClient.execute(method, context)) {
HttpEntityEnclosingRequestBase method = (HttpEntityEnclosingRequestBase) getRequest(uri, HttpPost.METHOD_NAME);
method.setHeader(HTTP.CONTENT_TYPE, "text/plain");
method.setHeader(HTTP.CONN_DIRECTIVE, HTTP.CONN_KEEP_ALIVE);
method.setHeader("Authorization", "Taosd " + token);
method.setEntity(new StringEntity(data, StandardCharsets.UTF_8));
HttpContext context = HttpClientContext.create();
CloseableHttpResponse httpResponse = httpClient.execute(method, context);
httpEntity = httpResponse.getEntity(); httpEntity = httpResponse.getEntity();
if (httpEntity == null) { if (httpEntity == null) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_HTTP_ENTITY_IS_NULL, "httpEntity is null, sql: " + data); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_HTTP_ENTITY_IS_NULL, "httpEntity is null, sql: " + data);
} }
responseBody = EntityUtils.toString(httpEntity, StandardCharsets.UTF_8); responseBody = EntityUtils.toString(httpEntity, StandardCharsets.UTF_8);
} catch (ClientProtocolException e) { } catch (ClientProtocolException e) {
e.printStackTrace();
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_Protocol_Exception, e.getMessage()); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_Protocol_Exception, e.getMessage());
} catch (IOException exception) { } catch (IOException exception) {
exception.printStackTrace();
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_IOException, exception.getMessage()); throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_IOException, exception.getMessage());
} finally { } finally {
if (httpEntity != null) { if (httpEntity != null) {
...@@ -148,4 +146,12 @@ public class HttpClientPoolUtil { ...@@ -148,4 +146,12 @@ public class HttpClientPoolUtil {
return method; return method;
} }
public static void reset() {
synchronized (HttpClientPoolUtil.class) {
ClientConnectionManager cm = httpClient.getConnectionManager();
cm.closeExpiredConnections();
cm.closeIdleConnections(100, TimeUnit.MILLISECONDS);
}
}
} }
\ No newline at end of file
...@@ -16,7 +16,6 @@ public class TaosInfo implements TaosInfoMBean { ...@@ -16,7 +16,6 @@ public class TaosInfo implements TaosInfoMBean {
MBeanServer server = ManagementFactory.getPlatformMBeanServer(); MBeanServer server = ManagementFactory.getPlatformMBeanServer();
ObjectName name = new ObjectName("TaosInfoMBean:name=TaosInfo"); ObjectName name = new ObjectName("TaosInfoMBean:name=TaosInfo");
server.registerMBean(TaosInfo.getInstance(), name); server.registerMBean(TaosInfo.getInstance(), name);
} catch (MalformedObjectNameException | InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e) { } catch (MalformedObjectNameException | InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e) {
e.printStackTrace(); e.printStackTrace();
} }
......
...@@ -49,14 +49,9 @@ public class Utils { ...@@ -49,14 +49,9 @@ public class Utils {
try { try {
return parseMicroSecTimestamp(timeStampStr); return parseMicroSecTimestamp(timeStampStr);
} catch (DateTimeParseException ee) { } catch (DateTimeParseException ee) {
try { return parseNanoSecTimestamp(timeStampStr);
return parseNanoSecTimestamp(timeStampStr);
} catch (DateTimeParseException eee) {
eee.printStackTrace();
}
} }
} }
return null;
} }
private static LocalDateTime parseMilliSecTimestamp(String timeStampStr) throws DateTimeParseException { private static LocalDateTime parseMilliSecTimestamp(String timeStampStr) throws DateTimeParseException {
......
package com.taosdata.jdbc;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.sql.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
public class ParameterBindTest {
private static final String host = "127.0.0.1";
private static final String stable = "weather";
private Connection conn;
private final Random random = new Random(System.currentTimeMillis());
@Test
public void test() {
// given
String[] tbnames = {"t1", "t2", "t3"};
int rows = 10;
// when
insertIntoTables(tbnames, 10);
// then
assertRows(stable, tbnames.length * rows);
for (String t : tbnames) {
assertRows(t, rows);
}
}
@Test
public void testMultiThreads() {
// given
String[][] tables = {{"t1", "t2", "t3"}, {"t4", "t5", "t6"}, {"t7", "t8", "t9"}, {"t10"}};
int rows = 10;
// when
List<Thread> threads = Arrays.stream(tables).map(tbnames -> new Thread(() -> insertIntoTables(tbnames, rows))).collect(Collectors.toList());
threads.forEach(Thread::start);
for (Thread thread : threads) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
// then
for (String[] table : tables) {
for (String t : table) {
assertRows(t, rows);
}
}
}
private void assertRows(String tbname, int rows) {
try (Statement stmt = conn.createStatement()) {
ResultSet rs = stmt.executeQuery("select count(*) from " + tbname);
while (rs.next()) {
int count = rs.getInt(1);
Assert.assertEquals(rows, count);
}
} catch (SQLException e) {
e.printStackTrace();
}
}
private void insertIntoTables(String[] tbnames, int rowsEachTable) {
long current = System.currentTimeMillis();
String sql = "insert into ? using " + stable + " tags(?, ?) values(?, ?, ?)";
try (TSDBPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSDBPreparedStatement.class)) {
for (int i = 0; i < tbnames.length; i++) {
pstmt.setTableName(tbnames[i]);
pstmt.setTagInt(0, random.nextInt(100));
pstmt.setTagInt(1, random.nextInt(100));
ArrayList<Long> timestampList = new ArrayList<>();
for (int j = 0; j < rowsEachTable; j++) {
timestampList.add(current + i * 1000 + j);
}
pstmt.setTimestamp(0, timestampList);
ArrayList<Integer> f1List = new ArrayList<>();
for (int j = 0; j < rowsEachTable; j++) {
f1List.add(random.nextInt(100));
}
pstmt.setInt(1, f1List);
ArrayList<Integer> f2List = new ArrayList<>();
for (int j = 0; j < rowsEachTable; j++) {
f2List.add(random.nextInt(100));
}
pstmt.setInt(2, f2List);
pstmt.columnDataAddBatch();
}
pstmt.columnDataExecuteBatch();
} catch (SQLException e) {
e.printStackTrace();
}
}
@Before
public void before() {
String url = "jdbc:TAOS://" + host + ":6030/?user=root&password=taosdata";
try {
conn = DriverManager.getConnection(url);
Statement stmt = conn.createStatement();
stmt.execute("drop database if exists test_pd");
stmt.execute("create database if not exists test_pd");
stmt.execute("use test_pd");
stmt.execute("create table " + stable + "(ts timestamp, f1 int, f2 int) tags(t1 int, t2 int)");
} catch (SQLException e) {
e.printStackTrace();
}
}
@After
public void after() {
try {
// Statement stmt = conn.createStatement();
// stmt.execute("drop database if exists test_pd");
if (conn != null)
conn.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
package com.taosdata.jdbc; package com.taosdata.jdbc;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.taosdata.jdbc.enums.SchemalessProtocolType; import com.taosdata.jdbc.enums.SchemalessProtocolType;
import com.taosdata.jdbc.enums.SchemalessTimestampType; import com.taosdata.jdbc.enums.SchemalessTimestampType;
import org.junit.After; import org.junit.After;
...@@ -10,10 +12,14 @@ import org.junit.Test; ...@@ -10,10 +12,14 @@ import org.junit.Test;
import java.sql.*; import java.sql.*;
public class SchemalessInsertTest { public class SchemalessInsertTest {
private String host = "127.0.0.1"; private final String dbname = "test_schemaless_insert";
private String dbname = "test_schemaless_insert";
private Connection conn; private Connection conn;
/**
* schemaless insert compatible with influxdb
*
* @throws SQLException execute error
*/
@Test @Test
public void schemalessInsert() throws SQLException { public void schemalessInsert() throws SQLException {
// given // given
...@@ -41,6 +47,11 @@ public class SchemalessInsertTest { ...@@ -41,6 +47,11 @@ public class SchemalessInsertTest {
statement.close(); statement.close();
} }
/**
* telnet insert compatible with opentsdb
*
* @throws SQLException execute error
*/
@Test @Test
public void telnetInsert() throws SQLException { public void telnetInsert() throws SQLException {
// given // given
...@@ -71,6 +82,11 @@ public class SchemalessInsertTest { ...@@ -71,6 +82,11 @@ public class SchemalessInsertTest {
statement.close(); statement.close();
} }
/**
* json insert compatible with opentsdb json format
*
* @throws SQLException execute error
*/
@Test @Test
public void jsonInsert() throws SQLException { public void jsonInsert() throws SQLException {
// given // given
...@@ -113,13 +129,15 @@ public class SchemalessInsertTest { ...@@ -113,13 +129,15 @@ public class SchemalessInsertTest {
while (rs.next()) { while (rs.next()) {
rowCnt++; rowCnt++;
} }
// Assert.assertEquals(json.length, rowCnt);
Assert.assertEquals(((JSONArray) JSONObject.parse(json)).size(), rowCnt);
rs.close(); rs.close();
statement.close(); statement.close();
} }
@Before @Before
public void before() { public void before() {
String host = "127.0.0.1";
final String url = "jdbc:TAOS://" + host + ":6030/?user=root&password=taosdata"; final String url = "jdbc:TAOS://" + host + ":6030/?user=root&password=taosdata";
try { try {
conn = DriverManager.getConnection(url); conn = DriverManager.getConnection(url);
......
package com.taosdata.jdbc.rs;
import org.junit.Assert;
import org.junit.Test;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class HttpKeepAliveTest {
private static final String host = "127.0.0.1";
@Test
public void test() throws SQLException {
//given
int multi = 4000;
AtomicInteger exceptionCount = new AtomicInteger();
//when
Properties props = new Properties();
props.setProperty("httpKeepAlive", "false");
props.setProperty("httpPoolSize", "20");
Connection connection = DriverManager.getConnection("jdbc:TAOS-RS://" + host + ":6041/?user=root&password=taosdata", props);
List<Thread> threads = IntStream.range(0, multi).mapToObj(i -> new Thread(
() -> {
try (Statement stmt = connection.createStatement()) {
stmt.execute("insert into log.tb_not_exists values(now, 1)");
stmt.execute("select last(*) from log.dn");
} catch (SQLException throwables) {
exceptionCount.getAndIncrement();
}
}
)).collect(Collectors.toList());
threads.forEach(Thread::start);
for (Thread thread : threads) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
//then
Assert.assertEquals(multi, exceptionCount.get());
}
}
...@@ -6,8 +6,7 @@ import java.sql.*; ...@@ -6,8 +6,7 @@ import java.sql.*;
public class WasNullTest { public class WasNullTest {
// private static final String host = "127.0.0.1"; private static final String host = "127.0.0.1";
private static final String host = "master";
private Connection conn; private Connection conn;
......
...@@ -2,8 +2,6 @@ package com.taosdata.jdbc.utils; ...@@ -2,8 +2,6 @@ package com.taosdata.jdbc.utils;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import com.taosdata.jdbc.TSDBDriver;
import com.taosdata.jdbc.TSDBError;
import org.junit.Test; import org.junit.Test;
import java.io.UnsupportedEncodingException; import java.io.UnsupportedEncodingException;
...@@ -11,7 +9,6 @@ import java.net.URLEncoder; ...@@ -11,7 +9,6 @@ import java.net.URLEncoder;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.IntStream; import java.util.stream.IntStream;
...@@ -20,18 +17,21 @@ public class HttpClientPoolUtilTest { ...@@ -20,18 +17,21 @@ public class HttpClientPoolUtilTest {
String user = "root"; String user = "root";
String password = "taosdata"; String password = "taosdata";
String host = "127.0.0.1"; String host = "127.0.0.1";
String dbname = "log"; // String host = "master";
@Test @Test
public void test() { public void useLog() {
// given // given
List<Thread> threads = IntStream.range(0, 4000).mapToObj(i -> new Thread(() -> { int multi = 10;
useDB();
// try { // when
// TimeUnit.SECONDS.sleep(10); List<Thread> threads = IntStream.range(0, multi).mapToObj(i -> new Thread(() -> {
// } catch (InterruptedException e) { try {
// e.printStackTrace(); String token = login(multi);
// } executeOneSql("use log", token);
} catch (SQLException | UnsupportedEncodingException e) {
e.printStackTrace();
}
})).collect(Collectors.toList()); })).collect(Collectors.toList());
threads.forEach(Thread::start); threads.forEach(Thread::start);
...@@ -43,34 +43,62 @@ public class HttpClientPoolUtilTest { ...@@ -43,34 +43,62 @@ public class HttpClientPoolUtilTest {
e.printStackTrace(); e.printStackTrace();
} }
} }
} }
private void useDB() { @Test
try { public void tableNotExist() {
user = URLEncoder.encode(user, StandardCharsets.UTF_8.displayName()); // given
password = URLEncoder.encode(password, StandardCharsets.UTF_8.displayName()); int multi = 20;
String loginUrl = "http://" + host + ":" + 6041 + "/rest/login/" + user + "/" + password + "";
String result = HttpClientPoolUtil.execute(loginUrl); // when
JSONObject jsonResult = JSON.parseObject(result); List<Thread> threads = IntStream.range(0, multi * 25).mapToObj(i -> new Thread(() -> {
String status = jsonResult.getString("status"); try {
String token = jsonResult.getString("desc"); // String token = "/KfeAzX/f9na8qdtNZmtONryp201ma04bEl8LcvLUd7a8qdtNZmtONryp201ma04";
if (!status.equals("succ")) { String token = login(multi);
throw new SQLException(jsonResult.getString("desc")); executeOneSql("insert into log.tb_not_exist values(now, 1)", token);
executeOneSql("select last(*) from log.dn", token);
} catch (SQLException | UnsupportedEncodingException e) {
e.printStackTrace();
} }
})).collect(Collectors.toList());
String url = "http://" + host + ":6041/rest/sql"; threads.forEach(Thread::start);
String sql = "use " + dbname;
result = HttpClientPoolUtil.execute(url, sql, token);
JSONObject resultJson = JSON.parseObject(result); for (Thread thread : threads) {
if (resultJson.getString("status").equals("error")) { try {
throw TSDBError.createSQLException(resultJson.getInteger("code"), resultJson.getString("desc")); thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} }
} catch (UnsupportedEncodingException | SQLException e) {
e.printStackTrace();
} }
} }
private String login(int connPoolSize) throws SQLException, UnsupportedEncodingException {
user = URLEncoder.encode(user, StandardCharsets.UTF_8.displayName());
password = URLEncoder.encode(password, StandardCharsets.UTF_8.displayName());
String loginUrl = "http://" + host + ":" + 6041 + "/rest/login/" + user + "/" + password + "";
HttpClientPoolUtil.init(connPoolSize, false);
String result = HttpClientPoolUtil.execute(loginUrl);
JSONObject jsonResult = JSON.parseObject(result);
String status = jsonResult.getString("status");
String token = jsonResult.getString("desc");
if (!status.equals("succ")) {
throw new SQLException(jsonResult.getString("desc"));
}
return token;
}
private boolean executeOneSql(String sql, String token) throws SQLException {
String url = "http://" + host + ":6041/rest/sql";
String result = HttpClientPoolUtil.execute(url, sql, token);
JSONObject resultJson = JSON.parseObject(result);
if (resultJson.getString("status").equals("error")) {
// HttpClientPoolUtil.reset();
// throw TSDBError.createSQLException(resultJson.getInteger("code"), resultJson.getString("desc"));
return false;
}
return true;
}
} }
\ No newline at end of file
#org.apache.commons.logging.Log=org.apache.commons.logging.impl.SimpleLog
org.apache.commons.logging.simplelog.defaultlog=TRACE
org.apache.commons.logging.simplelog.showlogname=true
org.apache.commons.logging.simplelog.showShortLogname=restful
org.apache.commons.logging.simplelog.showdatetime=true
org.apache.commons.logging.simplelog.dateTimeFormat=yyyy-mm-dd hh:MM:ss.SSS
\ No newline at end of file
...@@ -12,6 +12,7 @@ const FieldTypes = require('./constants'); ...@@ -12,6 +12,7 @@ const FieldTypes = require('./constants');
const errors = require('./error'); const errors = require('./error');
const TaosObjects = require('./taosobjects'); const TaosObjects = require('./taosobjects');
const { NULL_POINTER } = require('ref-napi'); const { NULL_POINTER } = require('ref-napi');
const { Console } = require('console');
module.exports = CTaosInterface; module.exports = CTaosInterface;
...@@ -53,6 +54,18 @@ function convertTinyint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0 ...@@ -53,6 +54,18 @@ function convertTinyint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0
} }
return res; return res;
} }
function convertTinyintUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
let currOffset = 0;
while (currOffset < data.length) {
let d = data.readUIntLE(currOffset, 1);
res.push(d == FieldTypes.C_TINYINT_UNSIGNED_NULL ? null : d);
currOffset += nbytes;
}
return res;
}
function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = []; let res = [];
...@@ -64,6 +77,18 @@ function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, precision = ...@@ -64,6 +77,18 @@ function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, precision =
} }
return res; return res;
} }
function convertSmallintUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
let currOffset = 0;
while (currOffset < data.length) {
let d = data.readUIntLE(currOffset, 2);
res.push(d == FieldTypes.C_SMALLINT_UNSIGNED_NULL ? null : d);
currOffset += nbytes;
}
return res;
}
function convertInt(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { function convertInt(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = []; let res = [];
...@@ -75,6 +100,19 @@ function convertInt(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { ...@@ -75,6 +100,19 @@ function convertInt(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
} }
return res; return res;
} }
function convertIntUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
let currOffset = 0;
while (currOffset < data.length) {
let d = data.readUInt32LE(currOffset);
res.push(d == FieldTypes.C_INT_UNSIGNED_NULL ? null : d);
currOffset += nbytes;
}
return res;
}
function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = []; let res = [];
...@@ -86,6 +124,19 @@ function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) ...@@ -86,6 +124,19 @@ function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0)
} }
return res; return res;
} }
function convertBigintUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
let currOffset = 0;
while (currOffset < data.length) {
let d = data.readUInt64LE(currOffset);
res.push(d == FieldTypes.C_BIGINT_UNSIGNED_NULL ? null : BigInt(d));
currOffset += nbytes;
}
return res;
}
function convertFloat(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) { function convertFloat(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset); data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = []; let res = [];
...@@ -156,7 +207,11 @@ let convertFunctions = { ...@@ -156,7 +207,11 @@ let convertFunctions = {
[FieldTypes.C_DOUBLE]: convertDouble, [FieldTypes.C_DOUBLE]: convertDouble,
[FieldTypes.C_BINARY]: convertBinary, [FieldTypes.C_BINARY]: convertBinary,
[FieldTypes.C_TIMESTAMP]: convertTimestamp, [FieldTypes.C_TIMESTAMP]: convertTimestamp,
[FieldTypes.C_NCHAR]: convertNchar [FieldTypes.C_NCHAR]: convertNchar,
[FieldTypes.C_TINYINT_UNSIGNED]: convertTinyintUnsigned,
[FieldTypes.C_SMALLINT_UNSIGNED]: convertSmallintUnsigned,
[FieldTypes.C_INT_UNSIGNED]: convertIntUnsigned,
[FieldTypes.C_BIGINT_UNSIGNED]: convertBigintUnsigned
} }
// Define TaosField structure // Define TaosField structure
...@@ -321,6 +376,7 @@ CTaosInterface.prototype.close = function close(connection) { ...@@ -321,6 +376,7 @@ CTaosInterface.prototype.close = function close(connection) {
CTaosInterface.prototype.query = function query(connection, sql) { CTaosInterface.prototype.query = function query(connection, sql) {
return this.libtaos.taos_query(connection, ref.allocCString(sql)); return this.libtaos.taos_query(connection, ref.allocCString(sql));
} }
CTaosInterface.prototype.affectedRows = function affectedRows(result) { CTaosInterface.prototype.affectedRows = function affectedRows(result) {
return this.libtaos.taos_affected_rows(result); return this.libtaos.taos_affected_rows(result);
} }
...@@ -413,6 +469,7 @@ CTaosInterface.prototype.query_a = function query_a(connection, sql, callback, p ...@@ -413,6 +469,7 @@ CTaosInterface.prototype.query_a = function query_a(connection, sql, callback, p
this.libtaos.taos_query_a(connection, ref.allocCString(sql), callback, param); this.libtaos.taos_query_a(connection, ref.allocCString(sql), callback, param);
return param; return param;
} }
/** Asynchronously fetches the next block of rows. Wraps callback and transfers a 4th argument to the cursor, the row data as blocks in javascript form /** Asynchronously fetches the next block of rows. Wraps callback and transfers a 4th argument to the cursor, the row data as blocks in javascript form
* Note: This isn't a recursive function, in order to fetch all data either use the TDengine cursor object, TaosQuery object, or implement a recursive * Note: This isn't a recursive function, in order to fetch all data either use the TDengine cursor object, TaosQuery object, or implement a recursive
* function yourself using the libtaos.taos_fetch_rows_a function * function yourself using the libtaos.taos_fetch_rows_a function
......
...@@ -36,13 +36,21 @@ module.exports = { ...@@ -36,13 +36,21 @@ module.exports = {
C_BINARY : 8, C_BINARY : 8,
C_TIMESTAMP : 9, C_TIMESTAMP : 9,
C_NCHAR : 10, C_NCHAR : 10,
C_TINYINT_UNSIGNED : 11,
C_SMALLINT_UNSIGNED : 12,
C_INT_UNSIGNED : 13,
C_BIGINT_UNSIGNED : 14,
// NULL value definition // NULL value definition
// NOTE: These values should change according to C definition in tsdb.h // NOTE: These values should change according to C definition in tsdb.h
C_BOOL_NULL : 2, C_BOOL_NULL : 2,
C_TINYINT_NULL : -128, C_TINYINT_NULL : -128,
C_TINYINT_UNSIGNED_NULL : 255,
C_SMALLINT_NULL : -32768, C_SMALLINT_NULL : -32768,
C_SMALLINT_UNSIGNED_NULL : 65535,
C_INT_NULL : -2147483648, C_INT_NULL : -2147483648,
C_BIGINT_NULL : -9223372036854775808, C_INT_UNSIGNED_NULL : 4294967295,
C_BIGINT_NULL : -9223372036854775808n,
C_BIGINT_UNSIGNED_NULL : 18446744073709551615n,
C_FLOAT_NULL : 2146435072, C_FLOAT_NULL : 2146435072,
C_DOUBLE_NULL : -9223370937343148032, C_DOUBLE_NULL : -9223370937343148032,
C_NCHAR_NULL : 4294967295, C_NCHAR_NULL : 4294967295,
...@@ -64,6 +72,10 @@ const typeCodesToName = { ...@@ -64,6 +72,10 @@ const typeCodesToName = {
8 : 'Binary', 8 : 'Binary',
9 : 'Timestamp', 9 : 'Timestamp',
10 : 'Nchar', 10 : 'Nchar',
11 : 'TINYINT_UNSIGNED',
12 : 'SMALLINT_UNSIGNED',
13 : 'INT_UNSIGNED',
14 : 'BIGINT_UNSIGNED',
} }
/** /**
......
...@@ -7,7 +7,7 @@ ...@@ -7,7 +7,7 @@
"test": "test" "test": "test"
}, },
"scripts": { "scripts": {
"test": "node test/test.js && node test/testMicroseconds.js && node test/testNanoseconds.js" "test": "node test/test.js && node test/testMicroseconds.js && node test/testNanoseconds.js && node test/testUnsignedType.js "
}, },
"repository": { "repository": {
"type": "git", "type": "git",
......
...@@ -90,7 +90,7 @@ c1.execute("create table if not exists td_connector_test.weather(ts timestamp, t ...@@ -90,7 +90,7 @@ c1.execute("create table if not exists td_connector_test.weather(ts timestamp, t
c1.execute("insert into t1 using weather tags('北京') values(now, 11.11, 11)"); c1.execute("insert into t1 using weather tags('北京') values(now, 11.11, 11)");
c1.execute("insert into t1(ts, temperature) values(now, 22.22)"); c1.execute("insert into t1(ts, temperature) values(now, 22.22)");
c1.execute("insert into t1(ts, humidity) values(now, 33)"); c1.execute("insert into t1(ts, humidity) values(now, 33)");
c1.query('select * from test.t1', true).then(function (result) { c1.query('select * from td_connector_test.t1', true).then(function (result) {
result.pretty(); result.pretty();
}); });
......
const taos = require('../tdengine');
var conn = taos.connect({ host: "127.0.0.1", user: "root", password: "taosdata", config: "/etc/taos", port: 10 });
var c1 = conn.cursor();
executeUpdate("create database nodedb;");
executeUpdate("use nodedb;");
executeUpdate("create table unsigntest(ts timestamp,ut tinyint unsigned,us smallint unsigned,ui int unsigned,ub bigint unsigned,bi bigint);");
executeUpdate("insert into unsigntest values (now, 254,65534,4294967294,18446744073709551614,9223372036854775807);");
executeUpdate("insert into unsigntest values (now, 0,0,0,0,-9223372036854775807);");
executeQuery("select * from unsigntest;");
executeUpdate("drop database nodedb;");
function executeUpdate(sql) {
console.log(sql);
c1.execute(sql);
}
function executeQuery(sql) {
c1.execute(sql)
var data = c1.fetchall();
// Latest query's Field metadata is stored in cursor.fields
console.log(c1.fields);
// Latest query's result data is stored in cursor.data, also returned by fetchall.
console.log(c1.data);
}
setTimeout(()=>conn.close(),2000);
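For comparison, a minimal Python-side sketch of the same boundary-value checks (assumes `taospy` and a locally running server; the database and table names are illustrative and not part of this test suite):

```python
import taos  # taospy

conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
cursor = conn.cursor()
cursor.execute("create database if not exists nodedb")
cursor.execute("use nodedb")
cursor.execute(
    "create table if not exists unsigntest(ts timestamp, ut tinyint unsigned,"
    " us smallint unsigned, ui int unsigned, ub bigint unsigned, bi bigint)"
)
# Maximum and minimum legal values for each unsigned column.
cursor.execute(
    "insert into unsigntest values"
    " (now, 254, 65534, 4294967294, 18446744073709551614, 9223372036854775807)"
)
cursor.execute("insert into unsigntest values (now, 0, 0, 0, 0, -9223372036854775807)")
cursor.execute("select * from unsigntest")
for row in cursor.fetchall():
    print(row)
cursor.execute("drop database nodedb")
conn.close()
```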
...@@ -5,14 +5,27 @@ ...@@ -5,14 +5,27 @@
## Install ## Install
```sh You can use `pip` to install the connector from PyPI:
git clone --depth 1 https://github.com/taosdata/TDengine.git
pip install ./TDengine/src/connector/python ```bash
pip install taospy
```
Or install it directly from the git URL:
```bash
pip install git+https://github.com/taosdata/taos-connector-python.git
```
If you have installed the TDengine server or client from prebuilt packages, you can also install the connector from the bundled local path:
```bash
pip install /usr/local/taos/connector/python
``` ```
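After installation, a minimal connectivity check looks like the following (host, user, and password are the stock defaults; adjust them for your deployment):

```python
import taos

conn = taos.connect(host="localhost", user="root", password="taosdata")
cursor = conn.cursor()
cursor.execute("select server_version()")
print(cursor.fetchall())  # e.g. [('2.x.x.x',)]
cursor.close()
conn.close()
```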
## Source Code ## Source Code
[TDengine](https://github.com/taosdata/TDengine) connector for Python source code is hosted on [GitHub](https://github.com/taosdata/TDengine/tree/develop/src/connector/python). [TDengine](https://github.com/taosdata/TDengine) connector for Python source code is hosted on [GitHub](https://github.com/taosdata/taos-connector-python).
## Examples ## Examples
......
[tool.poetry] [tool.poetry]
name = "taos" name = "taospy"
version = "2.1.1" version = "2.1.2"
description = "TDengine connector for python" description = "TDengine connector for python"
authors = ["Taosdata Inc. <support@taosdata.com>"] authors = ["Taosdata Inc. <support@taosdata.com>"]
license = "AGPL-3.0" license = "AGPL-3.0"
readme = "README.md" readme = "README.md"
packages = [
{include = "taos"}
]
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = "^2.7 || ^3.4" python = "^2.7 || ^3.4"
...@@ -12,12 +15,12 @@ typing = "*" ...@@ -12,12 +15,12 @@ typing = "*"
[tool.poetry.dev-dependencies] [tool.poetry.dev-dependencies]
pytest = [ pytest = [
{ version = "^4.6", python = "^2.7" }, { version = "^4.6", python = ">=2.7,<3.0" },
{ version = "^6.2", python = "^3.7" } { version = "^6.2", python = ">=3.7,<4.0" }
] ]
pdoc = { version = "^7.1.1", python = "^3.7" } pdoc = { version = "^7.1.1", python = "^3.7" }
mypy = { version = "^0.910", python = "^3.6" } mypy = { version = "^0.910", python = "^3.6" }
black = { version = "^21.7b0", python = "^3.6" } black = [{ version = "^21.*", python = ">=3.6.2,<4.0" }]
[build-system] [build-system]
requires = ["poetry-core>=1.0.0"] requires = ["poetry-core>=1.0.0"]
......
...@@ -442,18 +442,14 @@ from .statement import * ...@@ -442,18 +442,14 @@ from .statement import *
from .subscription import * from .subscription import *
from .schemaless import * from .schemaless import *
try: from taos._version import __version__
import importlib.metadata
__version__ = importlib.metadata.version("taos")
except:
None
# Globals # Globals
threadsafety = 0 threadsafety = 0
paramstyle = "pyformat" paramstyle = "pyformat"
__all__ = [ __all__ = [
"__version__",
# functions # functions
"connect", "connect",
"new_bind_param", "new_bind_param",
......
...@@ -2,8 +2,9 @@ ...@@ -2,8 +2,9 @@
import ctypes import ctypes
import platform import platform
import sys import inspect
from ctypes import * from ctypes import *
try: try:
from typing import Any from typing import Any
except: except:
...@@ -14,6 +15,7 @@ from .bind import * ...@@ -14,6 +15,7 @@ from .bind import *
from .field import * from .field import *
from .schemaless import * from .schemaless import *
_UNSUPPORTED = {}
# stream callback # stream callback
stream_callback_type = CFUNCTYPE(None, c_void_p, c_void_p, c_void_p) stream_callback_type = CFUNCTYPE(None, c_void_p, c_void_p, c_void_p)
...@@ -47,10 +49,13 @@ def _load_taos(): ...@@ -47,10 +49,13 @@ def _load_taos():
"Darwin": _load_taos_darwin, "Darwin": _load_taos_darwin,
"Windows": _load_taos_windows, "Windows": _load_taos_windows,
} }
pf = platform.system()
if pf not in load_func:
raise InterfaceError("unsupported platform: %s" % pf)
try: try:
return load_func[platform.system()]() return load_func[pf]()
except: except Exception as err:
raise InterfaceError('unsupported platform or failed to load taos client library') raise InterfaceError("unable to load taos C library: %s" % err)
_libtaos = _load_taos() _libtaos = _load_taos()
...@@ -65,7 +70,6 @@ _libtaos.taos_consume.restype = ctypes.c_void_p ...@@ -65,7 +70,6 @@ _libtaos.taos_consume.restype = ctypes.c_void_p
_libtaos.taos_fetch_lengths.restype = ctypes.POINTER(ctypes.c_int) _libtaos.taos_fetch_lengths.restype = ctypes.POINTER(ctypes.c_int)
_libtaos.taos_free_result.restype = None _libtaos.taos_free_result.restype = None
_libtaos.taos_query.restype = ctypes.POINTER(ctypes.c_void_p) _libtaos.taos_query.restype = ctypes.POINTER(ctypes.c_void_p)
_libtaos.taos_schemaless_insert.restype = ctypes.c_void_p
try: try:
_libtaos.taos_stmt_errstr.restype = c_char_p _libtaos.taos_stmt_errstr.restype = c_char_p
...@@ -181,6 +185,7 @@ def taos_connect(host=None, user="root", password="taosdata", db=None, port=0): ...@@ -181,6 +185,7 @@ def taos_connect(host=None, user="root", password="taosdata", db=None, port=0):
raise ConnectionError("connect to TDengine failed") raise ConnectionError("connect to TDengine failed")
return connection return connection
_libtaos.taos_connect_auth.restype = c_void_p _libtaos.taos_connect_auth.restype = c_void_p
_libtaos.taos_connect_auth.argtypes = c_char_p, c_char_p, c_char_p, c_char_p, c_uint16 _libtaos.taos_connect_auth.argtypes = c_char_p, c_char_p, c_char_p, c_char_p, c_uint16
...@@ -236,6 +241,7 @@ def taos_connect_auth(host=None, user="root", auth="", db=None, port=0): ...@@ -236,6 +241,7 @@ def taos_connect_auth(host=None, user="root", auth="", db=None, port=0):
raise ConnectionError("connect to TDengine failed") raise ConnectionError("connect to TDengine failed")
return connection return connection
_libtaos.taos_query.restype = c_void_p _libtaos.taos_query.restype = c_void_p
_libtaos.taos_query.argtypes = c_void_p, c_char_p _libtaos.taos_query.argtypes = c_void_p, c_char_p
...@@ -287,6 +293,7 @@ def taos_affected_rows(result): ...@@ -287,6 +293,7 @@ def taos_affected_rows(result):
"""The affected rows after runing query""" """The affected rows after runing query"""
return _libtaos.taos_affected_rows(result) return _libtaos.taos_affected_rows(result)
subscribe_callback_type = CFUNCTYPE(None, c_void_p, c_void_p, c_void_p, c_int) subscribe_callback_type = CFUNCTYPE(None, c_void_p, c_void_p, c_void_p, c_int)
_libtaos.taos_subscribe.restype = c_void_p _libtaos.taos_subscribe.restype = c_void_p
# _libtaos.taos_subscribe.argtypes = c_void_p, c_int, c_char_p, c_char_p, subscribe_callback_type, c_void_p, c_int # _libtaos.taos_subscribe.argtypes = c_void_p, c_int, c_char_p, c_char_p, subscribe_callback_type, c_void_p, c_int
...@@ -317,7 +324,7 @@ def taos_subscribe(connection, restart, topic, sql, interval, callback=None, par ...@@ -317,7 +324,7 @@ def taos_subscribe(connection, restart, topic, sql, interval, callback=None, par
_libtaos.taos_consume.restype = c_void_p _libtaos.taos_consume.restype = c_void_p
_libtaos.taos_consume.argstype = c_void_p, _libtaos.taos_consume.argstype = (c_void_p,)
def taos_consume(sub): def taos_consume(sub):
...@@ -503,13 +510,17 @@ def taos_stop_query(result): ...@@ -503,13 +510,17 @@ def taos_stop_query(result):
return _libtaos.taos_stop_query(result) return _libtaos.taos_stop_query(result)
_libtaos.taos_load_table_info.restype = c_int try:
_libtaos.taos_load_table_info.argstype = (c_void_p, c_char_p) _libtaos.taos_load_table_info.restype = c_int
_libtaos.taos_load_table_info.argstype = (c_void_p, c_char_p)
except Exception as err:
_UNSUPPORTED["taos_open_stream"] = err
def taos_load_table_info(connection, tables): def taos_load_table_info(connection, tables):
# type: (ctypes.c_void_p, str) -> None # type: (ctypes.c_void_p, str) -> None
"""Stop current query""" """Stop current query"""
_check_if_supported()
errno = _libtaos.taos_load_table_info(connection, c_char_p(tables.encode("utf-8"))) errno = _libtaos.taos_load_table_info(connection, c_char_p(tables.encode("utf-8")))
if errno != 0: if errno != 0:
msg = taos_errstr() msg = taos_errstr()
...@@ -562,12 +573,13 @@ def taos_select_db(connection, db): ...@@ -562,12 +573,13 @@ def taos_select_db(connection, db):
try: try:
_libtaos.taos_open_stream.restype = c_void_p _libtaos.taos_open_stream.restype = c_void_p
_libtaos.taos_open_stream.argstype = c_void_p, c_char_p, stream_callback_type, c_int64, c_void_p, Any _libtaos.taos_open_stream.argstype = c_void_p, c_char_p, stream_callback_type, c_int64, c_void_p, Any
except: except Exception as err:
pass _UNSUPPORTED["taos_open_stream"] = err
def taos_open_stream(connection, sql, callback, stime=0, param=None, callback2=None): def taos_open_stream(connection, sql, callback, stime=0, param=None, callback2=None):
# type: (ctypes.c_void_p, str, stream_callback_type, c_int64, c_void_p, c_void_p) -> ctypes.pointer # type: (ctypes.c_void_p, str, stream_callback_type, c_int64, c_void_p, c_void_p) -> ctypes.pointer
_check_if_supported()
if callback2 is not None: if callback2 is not None:
callback2 = stream_callback2_type(callback2) callback2 = stream_callback2_type(callback2)
"""Open an stream""" """Open an stream"""
...@@ -600,6 +612,7 @@ def taos_stmt_init(connection): ...@@ -600,6 +612,7 @@ def taos_stmt_init(connection):
""" """
return c_void_p(_libtaos.taos_stmt_init(connection)) return c_void_p(_libtaos.taos_stmt_init(connection))
_libtaos.taos_stmt_prepare.restype = c_int _libtaos.taos_stmt_prepare.restype = c_int
_libtaos.taos_stmt_prepare.argstype = (c_void_p, c_char_p, c_int) _libtaos.taos_stmt_prepare.argstype = (c_void_p, c_char_p, c_int)
...@@ -618,6 +631,7 @@ def taos_stmt_prepare(stmt, sql): ...@@ -618,6 +631,7 @@ def taos_stmt_prepare(stmt, sql):
_libtaos.taos_stmt_close.restype = c_int _libtaos.taos_stmt_close.restype = c_int
_libtaos.taos_stmt_close.argstype = (c_void_p,) _libtaos.taos_stmt_close.argstype = (c_void_p,)
def taos_stmt_close(stmt): def taos_stmt_close(stmt):
# type: (ctypes.c_void_p) -> None # type: (ctypes.c_void_p) -> None
"""Close a statement query """Close a statement query
...@@ -627,17 +641,12 @@ def taos_stmt_close(stmt): ...@@ -627,17 +641,12 @@ def taos_stmt_close(stmt):
if res != 0: if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res) raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
try:
_libtaos.taos_stmt_errstr.restype = c_char_p
_libtaos.taos_stmt_errstr.argstype = (c_void_p,)
except AttributeError:
print("WARNING: libtaos(%s) does not support taos_stmt_errstr" % taos_get_client_info())
try: try:
_libtaos.taos_stmt_errstr.restype = c_char_p _libtaos.taos_stmt_errstr.restype = c_char_p
_libtaos.taos_stmt_errstr.argstype = (c_void_p,) _libtaos.taos_stmt_errstr.argstype = (c_void_p,)
except AttributeError: except Exception as err:
print("WARNING: libtaos(%s) does not support taos_stmt_errstr" % taos_get_client_info()) _UNSUPPORTED["taos_stmt_set_tbname"] = err
def taos_stmt_errstr(stmt): def taos_stmt_errstr(stmt):
...@@ -645,16 +654,17 @@ def taos_stmt_errstr(stmt): ...@@ -645,16 +654,17 @@ def taos_stmt_errstr(stmt):
"""Get error message from stetement query """Get error message from stetement query
@stmt: c_void_p TAOS_STMT* @stmt: c_void_p TAOS_STMT*
""" """
_check_if_supported()
err = c_char_p(_libtaos.taos_stmt_errstr(stmt)) err = c_char_p(_libtaos.taos_stmt_errstr(stmt))
if err: if err:
return err.value.decode("utf-8") return err.value.decode("utf-8")
try: try:
_libtaos.taos_stmt_set_tbname.restype = c_int _libtaos.taos_stmt_set_tbname.restype = c_int
_libtaos.taos_stmt_set_tbname.argstype = (c_void_p, c_char_p) _libtaos.taos_stmt_set_tbname.argstype = (c_void_p, c_char_p)
except AttributeError: except Exception as err:
print("WARNING: libtaos(%s) does not support taos_stmt_set_tbname" % taos_get_client_info()) _UNSUPPORTED["taos_stmt_set_tbname"] = err
def taos_stmt_set_tbname(stmt, name): def taos_stmt_set_tbname(stmt, name):
...@@ -662,15 +672,17 @@ def taos_stmt_set_tbname(stmt, name): ...@@ -662,15 +672,17 @@ def taos_stmt_set_tbname(stmt, name):
"""Set table name of a statement query if exists. """Set table name of a statement query if exists.
@stmt: c_void_p TAOS_STMT* @stmt: c_void_p TAOS_STMT*
""" """
_check_if_supported()
res = _libtaos.taos_stmt_set_tbname(stmt, c_char_p(name.encode("utf-8"))) res = _libtaos.taos_stmt_set_tbname(stmt, c_char_p(name.encode("utf-8")))
if res != 0: if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res) raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
try: try:
_libtaos.taos_stmt_set_tbname_tags.restype = c_int _libtaos.taos_stmt_set_tbname_tags.restype = c_int
_libtaos.taos_stmt_set_tbname_tags.argstype = (c_void_p, c_char_p, c_void_p) _libtaos.taos_stmt_set_tbname_tags.argstype = (c_void_p, c_char_p, c_void_p)
except AttributeError: except Exception as err:
print("WARNING: libtaos(%s) does not support taos_stmt_set_tbname_tags" % taos_get_client_info()) _UNSUPPORTED["taos_stmt_set_tbname_tags"] = err
def taos_stmt_set_tbname_tags(stmt, name, tags): def taos_stmt_set_tbname_tags(stmt, name, tags):
...@@ -678,11 +690,13 @@ def taos_stmt_set_tbname_tags(stmt, name, tags): ...@@ -678,11 +690,13 @@ def taos_stmt_set_tbname_tags(stmt, name, tags):
"""Set table name with tags bind params. """Set table name with tags bind params.
@stmt: c_void_p TAOS_STMT* @stmt: c_void_p TAOS_STMT*
""" """
_check_if_supported()
res = _libtaos.taos_stmt_set_tbname_tags(stmt, ctypes.c_char_p(name.encode("utf-8")), tags) res = _libtaos.taos_stmt_set_tbname_tags(stmt, ctypes.c_char_p(name.encode("utf-8")), tags)
if res != 0: if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res) raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
_libtaos.taos_stmt_is_insert.restype = c_int _libtaos.taos_stmt_is_insert.restype = c_int
_libtaos.taos_stmt_is_insert.argstype = (c_void_p, POINTER(c_int)) _libtaos.taos_stmt_is_insert.argstype = (c_void_p, POINTER(c_int))
...@@ -702,6 +716,7 @@ def taos_stmt_is_insert(stmt): ...@@ -702,6 +716,7 @@ def taos_stmt_is_insert(stmt):
_libtaos.taos_stmt_num_params.restype = c_int _libtaos.taos_stmt_num_params.restype = c_int
_libtaos.taos_stmt_num_params.argstype = (c_void_p, POINTER(c_int)) _libtaos.taos_stmt_num_params.argstype = (c_void_p, POINTER(c_int))
def taos_stmt_num_params(stmt): def taos_stmt_num_params(stmt):
# type: (ctypes.c_void_p) -> int # type: (ctypes.c_void_p) -> int
"""Params number of the current statement query. """Params number of the current statement query.
...@@ -713,6 +728,7 @@ def taos_stmt_num_params(stmt): ...@@ -713,6 +728,7 @@ def taos_stmt_num_params(stmt):
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res) raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
return num_params.value return num_params.value
_libtaos.taos_stmt_bind_param.restype = c_int _libtaos.taos_stmt_bind_param.restype = c_int
_libtaos.taos_stmt_bind_param.argstype = (c_void_p, c_void_p) _libtaos.taos_stmt_bind_param.argstype = (c_void_p, c_void_p)
...@@ -729,12 +745,12 @@ def taos_stmt_bind_param(stmt, bind): ...@@ -729,12 +745,12 @@ def taos_stmt_bind_param(stmt, bind):
if res != 0: if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res) raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
try: try:
_libtaos.taos_stmt_bind_param_batch.restype = c_int _libtaos.taos_stmt_bind_param_batch.restype = c_int
_libtaos.taos_stmt_bind_param_batch.argstype = (c_void_p, c_void_p) _libtaos.taos_stmt_bind_param_batch.argstype = (c_void_p, c_void_p)
except AttributeError: except Exception as err:
print("WARNING: libtaos(%s) does not support taos_stmt_bind_param_batch" % taos_get_client_info()) _UNSUPPORTED["taos_stmt_bind_param_batch"] = err
def taos_stmt_bind_param_batch(stmt, bind): def taos_stmt_bind_param_batch(stmt, bind):
...@@ -745,15 +761,17 @@ def taos_stmt_bind_param_batch(stmt, bind): ...@@ -745,15 +761,17 @@ def taos_stmt_bind_param_batch(stmt, bind):
""" """
# ptr = ctypes.cast(bind, POINTER(TaosMultiBind)) # ptr = ctypes.cast(bind, POINTER(TaosMultiBind))
# ptr = pointer(bind) # ptr = pointer(bind)
_check_if_supported()
res = _libtaos.taos_stmt_bind_param_batch(stmt, bind) res = _libtaos.taos_stmt_bind_param_batch(stmt, bind)
if res != 0: if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res) raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
try: try:
_libtaos.taos_stmt_bind_single_param_batch.restype = c_int _libtaos.taos_stmt_bind_single_param_batch.restype = c_int
_libtaos.taos_stmt_bind_single_param_batch.argstype = (c_void_p, c_void_p, c_int) _libtaos.taos_stmt_bind_single_param_batch.argstype = (c_void_p, c_void_p, c_int)
except AttributeError: except Exception as err:
print("WARNING: libtaos(%s) does not support taos_stmt_bind_single_param_batch" % taos_get_client_info()) _UNSUPPORTED["taos_stmt_bind_single_param_batch"] = err
def taos_stmt_bind_single_param_batch(stmt, bind, col): def taos_stmt_bind_single_param_batch(stmt, bind, col):
...@@ -763,6 +781,7 @@ def taos_stmt_bind_single_param_batch(stmt, bind, col): ...@@ -763,6 +781,7 @@ def taos_stmt_bind_single_param_batch(stmt, bind, col):
@bind: TAOS_MULTI_BIND* @bind: TAOS_MULTI_BIND*
@col: column index @col: column index
""" """
_check_if_supported()
res = _libtaos.taos_stmt_bind_single_param_batch(stmt, bind, col) res = _libtaos.taos_stmt_bind_single_param_batch(stmt, bind, col)
if res != 0: if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res) raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
...@@ -810,14 +829,17 @@ def taos_stmt_use_result(stmt): ...@@ -810,14 +829,17 @@ def taos_stmt_use_result(stmt):
raise StatementError(taos_stmt_errstr(stmt)) raise StatementError(taos_stmt_errstr(stmt))
return result return result
try: try:
_libtaos.taos_schemaless_insert.restype = c_void_p _libtaos.taos_schemaless_insert.restype = c_void_p
_libtaos.taos_schemaless_insert.argstype = c_void_p, c_void_p, c_int, c_int, c_int _libtaos.taos_schemaless_insert.argstype = c_void_p, c_void_p, c_int, c_int, c_int
except AttributeError: except Exception as err:
print("WARNING: libtaos(%s) does not support taos_schemaless_insert" % taos_get_client_info()) _UNSUPPORTED["taos_schemaless_insert"] = err
def taos_schemaless_insert(connection, lines, protocol, precision): def taos_schemaless_insert(connection, lines, protocol, precision):
# type: (c_void_p, list[str] | tuple(str), SmlProtocol, SmlPrecision) -> int # type: (c_void_p, list[str] | tuple(str), SmlProtocol, SmlPrecision) -> int
_check_if_supported()
num_of_lines = len(lines) num_of_lines = len(lines)
lines = (c_char_p(line.encode("utf-8")) for line in lines) lines = (c_char_p(line.encode("utf-8")) for line in lines)
lines_type = ctypes.c_char_p * num_of_lines lines_type = ctypes.c_char_p * num_of_lines
...@@ -833,6 +855,18 @@ def taos_schemaless_insert(connection, lines, protocol, precision): ...@@ -833,6 +855,18 @@ def taos_schemaless_insert(connection, lines, protocol, precision):
taos_free_result(res) taos_free_result(res)
return affected_rows return affected_rows
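A usage sketch of the guarded binding above, going through the low-level `cinterface` module directly; the `SmlProtocol`/`SmlPrecision` member names and the sample line-protocol record are assumptions for illustration, not taken from this diff:

```python
from taos.cinterface import (
    taos_connect, taos_query, taos_free_result, taos_schemaless_insert, taos_close,
)
from taos.schemaless import SmlProtocol, SmlPrecision

conn = taos_connect(host="localhost", user="root", password="taosdata")
taos_free_result(taos_query(conn, "create database if not exists sml_test precision 'ms'"))
taos_free_result(taos_query(conn, "use sml_test"))

# One record of InfluxDB line protocol with a millisecond timestamp.
lines = ["meters,location=beijing,groupid=2 current=11.8,voltage=221,phase=0.28 1648432611249"]
affected = taos_schemaless_insert(
    conn, lines, SmlProtocol.LINE_PROTOCOL, SmlPrecision.MILLI_SECONDS  # assumed member names
)
print("rows inserted:", affected)
taos_close(conn)
```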
def _check_if_supported():
func = inspect.stack()[1][3]
if func in _UNSUPPORTED:
raise InterfaceError("C function %s is not supported in v%s: %s" % (func, taos_get_client_info(), _UNSUPPORTED[func]))
def unsupported_methods():
for m, e in _UNSUPPORTED.items():
print("unsupported %s: %s" % (m, e))
class CTaosInterface(object): class CTaosInterface(object):
def __init__(self, config=None): def __init__(self, config=None):
""" """
......
...@@ -175,11 +175,13 @@ DLL_EXPORT int taos_select_db(TAOS *taos, const char *db); ...@@ -175,11 +175,13 @@ DLL_EXPORT int taos_select_db(TAOS *taos, const char *db);
DLL_EXPORT int taos_print_row(char *str, TAOS_ROW row, TAOS_FIELD *fields, int num_fields); DLL_EXPORT int taos_print_row(char *str, TAOS_ROW row, TAOS_FIELD *fields, int num_fields);
DLL_EXPORT void taos_stop_query(TAOS_RES *res); DLL_EXPORT void taos_stop_query(TAOS_RES *res);
DLL_EXPORT bool taos_is_null(TAOS_RES *res, int32_t row, int32_t col); DLL_EXPORT bool taos_is_null(TAOS_RES *res, int32_t row, int32_t col);
DLL_EXPORT bool taos_is_update_query(TAOS_RES *res);
DLL_EXPORT int taos_fetch_block(TAOS_RES *res, TAOS_ROW *rows); DLL_EXPORT int taos_fetch_block(TAOS_RES *res, TAOS_ROW *rows);
DLL_EXPORT int taos_validate_sql(TAOS *taos, const char *sql);
DLL_EXPORT int* taos_fetch_lengths(TAOS_RES *res); DLL_EXPORT int* taos_fetch_lengths(TAOS_RES *res);
DLL_EXPORT int taos_validate_sql(TAOS *taos, const char *sql);
DLL_EXPORT void taos_reset_current_db(TAOS *taos);
// TAOS_RES *taos_list_tables(TAOS *mysql, const char *wild); // TAOS_RES *taos_list_tables(TAOS *mysql, const char *wild);
// TAOS_RES *taos_list_dbs(TAOS *mysql, const char *wild); // TAOS_RES *taos_list_dbs(TAOS *mysql, const char *wild);
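The two new exports, `taos_is_update_query` and `taos_reset_current_db`, can be bound from Python with plain `ctypes`, mirroring how the connector binds other symbols (a sketch; the library file name and path resolution are simplified):

```python
import ctypes

lib = ctypes.CDLL("libtaos.so")  # libtaos.dylib / taos.dll on other platforms

lib.taos_is_update_query.restype = ctypes.c_bool       # bool taos_is_update_query(TAOS_RES *res)
lib.taos_is_update_query.argtypes = (ctypes.c_void_p,)

lib.taos_reset_current_db.restype = None                # void taos_reset_current_db(TAOS *taos)
lib.taos_reset_current_db.argtypes = (ctypes.c_void_p,)
```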
...@@ -192,7 +194,6 @@ DLL_EXPORT int taos_errno(TAOS_RES *tres); ...@@ -192,7 +194,6 @@ DLL_EXPORT int taos_errno(TAOS_RES *tres);
DLL_EXPORT void taos_query_a(TAOS *taos, const char *sql, void (*fp)(void *param, TAOS_RES *, int code), void *param); DLL_EXPORT void taos_query_a(TAOS *taos, const char *sql, void (*fp)(void *param, TAOS_RES *, int code), void *param);
DLL_EXPORT void taos_fetch_rows_a(TAOS_RES *res, void (*fp)(void *param, TAOS_RES *, int numOfRows), void *param); DLL_EXPORT void taos_fetch_rows_a(TAOS_RES *res, void (*fp)(void *param, TAOS_RES *, int numOfRows), void *param);
//DLL_EXPORT void taos_fetch_row_a(TAOS_RES *res, void (*fp)(void *param, TAOS_RES *, TAOS_ROW row), void *param);
typedef void (*TAOS_SUBSCRIBE_CALLBACK)(TAOS_SUB* tsub, TAOS_RES *res, void* param, int code); typedef void (*TAOS_SUBSCRIBE_CALLBACK)(TAOS_SUB* tsub, TAOS_RES *res, void* param, int code);
DLL_EXPORT TAOS_SUB *taos_subscribe(TAOS* taos, int restart, const char* topic, const char *sql, TAOS_SUBSCRIBE_CALLBACK fp, void *param, int interval); DLL_EXPORT TAOS_SUB *taos_subscribe(TAOS* taos, int restart, const char* topic, const char *sql, TAOS_SUBSCRIBE_CALLBACK fp, void *param, int interval);
......
...@@ -400,7 +400,7 @@ typedef struct SColIndex { ...@@ -400,7 +400,7 @@ typedef struct SColIndex {
int16_t colId; // column id int16_t colId; // column id
int16_t colIndex; // column index in colList if it is a normal column or index in tagColList if a tag int16_t colIndex; // column index in colList if it is a normal column or index in tagColList if a tag
uint16_t flag; // denote if it is a tag or a normal column uint16_t flag; // denote if it is a tag or a normal column
char name[TSDB_COL_NAME_LEN + TSDB_DB_NAME_LEN + 1]; char name[TSDB_COL_NAME_LEN + TSDB_TABLE_NAME_LEN + 1];
} SColIndex; } SColIndex;
typedef struct SColumnFilterInfo { typedef struct SColumnFilterInfo {
......
...@@ -25,7 +25,7 @@ ...@@ -25,7 +25,7 @@
static char **shellSQLFiles = NULL; static char **shellSQLFiles = NULL;
static int32_t shellSQLFileNum = 0; static int32_t shellSQLFileNum = 0;
static char shellTablesSQLFile[TSDB_FILENAME_LEN] = {0}; static char shellTablesSQLFile[4096] = {0};
typedef struct { typedef struct {
pthread_t threadID; pthread_t threadID;
......