Commit 0aaa4500 by Ganlin Zhao

Merge branch 'develop' into feature/TD-6452

......@@ -16,6 +16,9 @@
[submodule "deps/TSZ"]
path = deps/TSZ
url = https://github.com/taosdata/TSZ.git
[submodule "src/plugins/blm3"]
path = src/plugins/blm3
url = https://github.com/taosdata/blm3
[submodule "deps/avro"]
path = deps/avro
url = https://github.com/apache/avro
[submodule "src/plugins/taosadapter"]
path = src/plugins/taosadapter
url = https://github.com/taosdata/taosadapter
......@@ -15,6 +15,26 @@ ELSE ()
CMAKE_MINIMUM_REQUIRED(VERSION 3.0)
ENDIF ()
if(NOT WIN32)
string(ASCII 27 Esc)
set(ColourReset "${Esc}[m")
set(ColourBold "${Esc}[1m")
set(Red "${Esc}[31m")
set(Green "${Esc}[32m")
set(Yellow "${Esc}[33m")
set(Blue "${Esc}[34m")
set(Magenta "${Esc}[35m")
set(Cyan "${Esc}[36m")
set(White "${Esc}[37m")
set(BoldRed "${Esc}[1;31m")
set(BoldGreen "${Esc}[1;32m")
set(BoldYellow "${Esc}[1;33m")
set(BoldBlue "${Esc}[1;34m")
set(BoldMagenta "${Esc}[1;35m")
set(BoldCyan "${Esc}[1;36m")
set(BoldWhite "${Esc}[1;37m")
endif()
SET(TD_ACCOUNT FALSE)
SET(TD_ADMIN FALSE)
SET(TD_GRANT FALSE)
......
......@@ -107,7 +107,147 @@ def pre_test(){
make > /dev/null
make install > /dev/null
cd ${WKC}/tests
pip3 install ${WKC}/src/connector/python/ || echo "not install"
pip3 install ${WKC}/src/connector/python/
'''
return 1
}
def pre_test_noinstall(){
sh'hostname'
sh'''
cd ${WKC}
git reset --hard HEAD~10 >/dev/null
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WKC}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WKC}
git checkout 2.0
'''
}
else{
sh '''
cd ${WKC}
git checkout develop
'''
}
}
sh'''
cd ${WKC}
git pull >/dev/null
git fetch origin +refs/pull/${CHANGE_ID}/merge
git checkout -qf FETCH_HEAD
git clean -dfx
git submodule update --init --recursive
cd ${WK}
git reset --hard HEAD~10
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WK}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WK}
git checkout 2.0
'''
}
else{
sh '''
cd ${WK}
git checkout develop
'''
}
}
sh '''
cd ${WK}
git pull >/dev/null
export TZ=Asia/Harbin
date
git clean -dfx
mkdir debug
cd debug
cmake .. > /dev/null
make
'''
return 1
}
def pre_test_mac(){
sh'hostname'
sh'''
cd ${WKC}
git reset --hard HEAD~10 >/dev/null
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WKC}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WKC}
git checkout 2.0
'''
}
else{
sh '''
cd ${WKC}
git checkout develop
'''
}
}
sh'''
cd ${WKC}
git pull >/dev/null
git fetch origin +refs/pull/${CHANGE_ID}/merge
git checkout -qf FETCH_HEAD
git clean -dfx
git submodule update --init --recursive
cd ${WK}
git reset --hard HEAD~10
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WK}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WK}
git checkout 2.0
'''
}
else{
sh '''
cd ${WK}
git checkout develop
'''
}
}
sh '''
cd ${WK}
git pull >/dev/null
export TZ=Asia/Harbin
date
git clean -dfx
mkdir debug
cd debug
cmake .. > /dev/null
cmake --build .
'''
return 1
}
......@@ -460,31 +600,61 @@ pipeline {
stage('arm64centos7') {
agent{label " arm64centos7 "}
steps {
pre_test()
pre_test_noinstall()
}
}
stage('arm64centos8') {
agent{label " arm64centos8 "}
steps {
pre_test()
pre_test_noinstall()
}
}
stage('arm32bionic') {
agent{label " arm32bionic "}
steps {
pre_test()
pre_test_noinstall()
}
}
stage('arm64bionic') {
agent{label " arm64bionic "}
steps {
pre_test()
pre_test_noinstall()
}
}
stage('arm64focal') {
agent{label " arm64focal "}
steps {
pre_test()
pre_test_noinstall()
}
}
stage('centos7') {
agent{label " centos7 "}
steps {
pre_test_noinstall()
}
}
stage('ubuntu:trusty') {
agent{label " trusty "}
steps {
pre_test_noinstall()
}
}
stage('ubuntu:xenial') {
agent{label " xenial "}
steps {
pre_test_noinstall()
}
}
stage('ubuntu:bionic') {
agent{label " bionic "}
steps {
pre_test_noinstall()
}
}
stage('Mac_build') {
agent{label " catalina "}
steps {
pre_test_mac()
}
}
......
......@@ -129,7 +129,7 @@ mkdir debug && cd debug
cmake .. && cmake --build .
```
Note TDengine 2.3.0.0 and later use a component named 'blm3' to play http daemon role by default instead of the http daemon embedded in the early version of TDengine. The blm3 is programmed by go language. If you pull TDengine source code to the latest from an existing codebase, please execute 'git submodule update --init --recursive' to pull blm3 source code. Please install go language 1.14 or above for compiling blm3. If you meet difficulties regarding 'go mod', especially you are from China, you can use a proxy to solve the problem.
Note TDengine 2.3.x.0 and later use a component named 'taosadapter' to serve the http daemon role by default, instead of the http daemon embedded in earlier versions of TDengine. taosadapter is written in Go. If you pull the latest TDengine source code into an existing codebase, please execute 'git submodule update --init --recursive' to pull the taosadapter source code. Go 1.14 or above is required to compile taosadapter. If you meet difficulties regarding 'go mod', especially if you are in China, you can use a proxy to solve the problem.
```
go env -w GO111MODULE=on
go env -w GOPROXY=https://goproxy.cn,direct
......
CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
IF (TD_ACCOUNT)
......@@ -121,14 +121,13 @@ IF (TD_MIPS_32)
SET(COMMON_FLAGS "-Wall -Werror -fPIC -D_FILE_OFFSET_BITS=64 -D_LARGE_FILE")
ENDIF ()
IF (TD_APLHINE)
IF (TD_ALPINE)
SET(COMMON_FLAGS "${COMMON_FLAGS} -largp")
link_libraries(/usr/lib/libargp.a)
ADD_DEFINITIONS(-D_ALPINE)
MESSAGE(STATUS "aplhine is defined")
ENDIF ()
MESSAGE("before BUILD_HTTP: " ${BUILD_HTTP})
IF ("${BUILD_HTTP}" STREQUAL "")
IF (TD_LINUX)
IF (TD_ARM_32)
......@@ -140,16 +139,27 @@ IF ("${BUILD_HTTP}" STREQUAL "")
SET(BUILD_HTTP "true")
ENDIF ()
ENDIF ()
MESSAGE("after BUILD_HTTP: " ${BUILD_HTTP})
IF (${BUILD_HTTP} MATCHES "true")
SET(TD_BUILD_HTTP TRUE)
ELSEIF (${BUILD_HTTP} MATCHES "false")
SET(TD_BUILD_HTTP FALSE)
ENDIF ()
IF (TD_BUILD_HTTP)
ADD_DEFINITIONS(-DHTTP_EMBEDDED)
ENDIF ()
IF ("${AVRO_SUPPORT}" MATCHES "true")
SET(TD_AVRO_SUPPORT TRUE)
ELSEIF ("${AVRO_SUPPORT}" MATCHES "false")
SET(TD_AVRO_SUPPORT FALSE)
ENDIF ()
IF (TD_AVRO_SUPPORT)
ADD_DEFINITIONS(-DAVRO_SUPPORT)
ENDIF ()
IF (TD_LINUX)
ADD_DEFINITIONS(-DLINUX)
ADD_DEFINITIONS(-D_LINUX)
......@@ -162,11 +172,14 @@ IF (TD_LINUX)
ENDIF ()
IF (TD_MEMORY_SANITIZER)
IF (TD_ARCHLINUX)
SET(DEBUG_FLAGS "-fsanitize=address -fsanitize=undefined -fno-sanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=null -fno-sanitize=alignment -O0 -g3 -DDEBUG")
ELSE ()
SET(DEBUG_FLAGS "-fsanitize=address -fsanitize=undefined -fno-sanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=null -fno-sanitize=alignment -static-libasan -O0 -g3 -DDEBUG")
MESSAGE(STATUS "memory sanitizer detected as true")
ENDIF ()
MESSAGE(STATUS "${BoldRed}Will compile with memory sanitizer! ${ColourReset}")
ELSE ()
SET(DEBUG_FLAGS "-O0 -g3 -DDEBUG")
MESSAGE(STATUS "memory sanitizer detected as false")
ENDIF ()
SET(RELEASE_FLAGS "-O3 -Wno-error")
......
CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
SET(CMAKE_C_STANDARD 11)
......
CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
IF (${ACCOUNT} MATCHES "true")
......@@ -92,6 +92,8 @@ ENDIF ()
SET(TD_BUILD_HTTP FALSE)
SET(TD_AVRO_SUPPORT FALSE)
SET(TD_MEMORY_SANITIZER FALSE)
IF (${MEMORY_SANITIZER} MATCHES "true")
SET(TD_MEMORY_SANITIZER TRUE)
......
......@@ -35,7 +35,7 @@ ELSEIF (TD_WINDOWS)
#INSTALL(TARGETS taos RUNTIME DESTINATION driver)
#INSTALL(TARGETS shell RUNTIME DESTINATION .)
IF (TD_MVN_INSTALLED)
INSTALL(FILES ${LIBRARY_OUTPUT_PATH}/taos-jdbcdriver-2.0.35-dist.jar DESTINATION connector/jdbc)
INSTALL(FILES ${LIBRARY_OUTPUT_PATH}/taos-jdbcdriver-2.0.36-dist.jar DESTINATION connector/jdbc)
ENDIF ()
ELSEIF (TD_DARWIN)
SET(TD_MAKE_INSTALL_SH "${TD_COMMUNITY_DIR}/packaging/tools/make_install.sh")
......
CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
#
......@@ -21,7 +21,7 @@ SET(TD_LINUX FALSE)
SET(TD_ARM_32 FALSE)
SET(TD_MIPS_64 FALSE)
SET(TD_MIPS_32 FALSE)
SET(TD_APLHINE FALSE)
SET(TD_ALPINE FALSE)
SET(TD_NINGSI FALSE)
SET(TD_NINGSI_60 FALSE)
SET(TD_NINGSI_80 FALSE)
......@@ -36,7 +36,7 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
# Get OS information and store in variable TD_OS_INFO.
#
execute_process(COMMAND chmod 777 ${TD_COMMUNITY_DIR}/packaging/tools/get_os.sh)
execute_process(COMMAND ${TD_COMMUNITY_DIR}/packaging/tools/get_os.sh "" OUTPUT_VARIABLE TD_OS_INFO)
execute_process(COMMAND sh ${TD_COMMUNITY_DIR}/packaging/tools/get_os.sh "" OUTPUT_VARIABLE TD_OS_INFO)
MESSAGE(STATUS "The current os is " ${TD_OS_INFO})
SET(TD_LINUX TRUE)
......@@ -52,8 +52,13 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
ENDIF ()
IF (${TD_OS_INFO} MATCHES "Alpine")
SET(TD_APLHINE TRUE)
MESSAGE(STATUS "The current OS is Alpine, append extra flags")
SET(TD_ALPINE TRUE)
MESSAGE(STATUS "The current OS is Alpine Linux, append extra flags")
ELSEIF (${TD_OS_INFO} MATCHES "Arch")
SET(TD_ARCHLINUX TRUE)
MESSAGE(STATUS "The current OS is Arch Linux")
ELSE ()
MESSAGE(STATUS "Ths distro is " ${TD_OS_INFO})
ENDIF()
ELSEIF (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
SET(TD_DARWIN TRUE)
......@@ -155,7 +160,7 @@ ELSEIF (${OSTYPE} MATCHES "Linux")
MESSAGE(STATUS "input osType: Linux")
ELSEIF (${OSTYPE} MATCHES "Alpine")
MESSAGE(STATUS "input osType: Alpine")
SET(TD_APLHINE TRUE)
SET(TD_ALPINE TRUE)
ELSE ()
MESSAGE(STATUS "The user specified osType is unknown: " ${OSTYPE})
ENDIF ()
CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
IF (DEFINED VERNUMBER)
SET(TD_VER_NUMBER ${VERNUMBER})
ELSE ()
SET(TD_VER_NUMBER "2.3.0.0")
SET(TD_VER_NUMBER "2.3.1.0")
ENDIF ()
IF (DEFINED VERCOMPATIBLE)
......
......@@ -25,10 +25,36 @@ IF (TD_DARWIN AND TD_MQTT)
ADD_SUBDIRECTORY(MQTT-C)
ENDIF ()
IF (TD_AVRO_SUPPORT)
MESSAGE("")
MESSAGE("${Green} ENABLE avro format support ${ColourReset}")
MESSAGE("")
include(ExternalProject)
ExternalProject_Add(
apache-avro
PREFIX "avro"
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c
BUILD_IN_SOURCE 1
PATCH_COMMAND
COMMAND git clean -f -d
COMMAND sed -i.bak -e "/TARGETS avroappend/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
COMMAND sed -i.bak -e "/TARGETS avrocat/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
COMMAND sed -i.bak -e "/TARGETS avromod/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
COMMAND sed -i.bak -e "/TARGETS avropipe/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
CONFIGURE_COMMAND cmake -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_BINARY_DIR}/build
)
ELSE ()
MESSAGE("")
MESSAGE("${Yellow} NO avro format support ${ColourReset}")
MESSAGE("")
ENDIF ()
IF (TD_LINUX_64 AND JEMALLOC_ENABLED)
MESSAGE("")
MESSAGE("${Green} ENABLE jemalloc ${ColourReset}")
MESSAGE("")
MESSAGE("setup deps/jemalloc, current source dir:" ${CMAKE_CURRENT_SOURCE_DIR})
MESSAGE("binary dir:" ${CMAKE_BINARY_DIR})
include(ExternalProject)
ExternalProject_Add(jemalloc
PREFIX "jemalloc"
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jemalloc
......
Subproject commit 0ca5b15a8eac40327dd737be52c926fa5675712c
Subproject commit a1fce29d9675b4dd95dfee9db32cc505d0b2227c
......@@ -81,6 +81,7 @@ TDengine是一个高效的存储、查询、分析时序大数据的平台,专
* [Node.js Connector](/connector#nodejs):给node应用提供一个连接TDengine服务器的驱动
* [C# Connector](/connector#csharp):给C#应用提供一个连接TDengine服务器的驱动
* [Windows客户端](https://www.taosdata.com/blog/2019/07/26/514.html):自行编译windows客户端,Windows环境的各种连接器都需要它
* [Rust Connector](/connector/rust): Rust语言下通过libtaos客户端或RESTful接口,连接TDengine服务器。
## [与其他工具的连接](/connections)
......
......@@ -145,7 +145,7 @@ insert delay, avg: 8.31ms, max: 860.12ms, min: 2.00ms
```
$ taosdemo --help
-f, --file=FILE The meta file to the execution procedure.
-f, --file=FILE The meta file to the execution procedure. Currently, we support standard UTF-8 (without BOM) encoded files only.
-u, --user=USER The user name to use when connecting to the server.
-p, --password The password to use when connecting to the server.
-c, --config-dir=CONFIG_DIR Configuration directory.
......@@ -442,7 +442,7 @@ TDengine是涛思数据专为物联网、车联网、工业互联网、IT运维
taosdemo支持两种配置参数的模式,一种是命令行参数,一种是使用json格式的配置文件。
一、命令行参数
-f:指定taosdemo所需参数的meta文件。当使用该参数时,其他所有命令行参数都失效。可选项,缺省是NULL。
-f:指定taosdemo所需参数的meta文件。当使用该参数时,其他所有命令行参数都失效。可选项,缺省是NULL。目前仅支持不含 BOM(byte-order mark)的标准 UTF-8 编码文件。
-u: 用户名。可选项,缺省是“root“。
......
......@@ -27,13 +27,18 @@ INSERT INTO d1001 VALUES (1538548685000, 10.3, 219, 0.31) (1538548695000, 12.6,
- 对同一张表,如果新插入记录的时间戳已经存在,默认情形下(UPDATE=0)新记录将被直接抛弃,也就是说,在一张表里,时间戳必须是唯一的。如果应用自动生成记录,很有可能生成的时间戳是一样的,这样,成功插入的记录条数会小于应用插入的记录条数。如果在创建数据库时使用了 UPDATE 1 选项,插入相同时间戳的新记录将覆盖原有记录。
- 写入的数据的时间戳必须大于当前时间减去配置参数keep的时间。如果keep配置为3650天,那么无法写入比3650天还早的数据。写入数据的时间戳也不能大于当前时间加配置参数days。如果days为2,那么无法写入比当前时间还晚2天的数据。
## <a class="anchor" id="schemaless"></a>Schemaless 写入
## <a class="anchor" id="schemaless"></a>无模式(Schemaless)写入
**前言**
<br/>在物联网应用中,常会采集比较多的数据项,用于实现智能控制、业务分析、设备监控等。由于应用逻辑的版本升级,或者设备自身的硬件调整等原因,数据采集项就有可能比较频繁地出现变动。为了在这种情况下方便地完成数据记录工作,TDengine 从 2.2.0.0 版本开始,提供调用 Schemaless 写入方式,可以免于预先创建超级表/子表的步骤,随着数据写入写入接口能够自动创建与数据对应的存储结构。并且在必要时,Schemaless 将自动增加必要的数据列,保证用户写入的数据可以被正确存储。
<br/>目前,TDengine 的 C/C++ Connector 提供支持 Schemaless 的操作接口,详情请参见 [Schemaless 方式写入接口](https://www.taosdata.com/cn/documentation/connector#schemaless)章节。这里对 Schemaless 的数据表达格式进行了描述。
<br/>无模式写入方式建立的超级表及其对应的子表与通过 SQL 直接建立的超级表和子表完全没有区别,您也可以通过 SQL 语句直接向其中写入数据。需要注意的是,通过无模式写入方式建立的表,其表名是基于标签值按照固定的映射规则生成,所以无法明确地进行表意,缺乏可读性。
在物联网应用中,常会采集比较多的数据项,用于实现智能控制、业务分析、设备监控等。由于应用逻辑的版本升级,或者设备自身的硬件调整等原因,数据采集项就有可能比较频繁地出现变动。为了在这种情况下方便地完成数据记录工作,TDengine 从 2.2.0.0 版本开始,提供 Schemaless 写入方式,可以免于预先创建超级表/数据子表,而是随着数据写入,自动创建与数据对应的存储结构。并且在必要时,Schemaless 将自动增加必要的数据列,保证用户写入的数据可以被正确存储。目前,TDengine 的 C/C++ Connector 提供支持 Schemaless 的操作接口,详情请参见 [Schemaless 方式写入接口](https://www.taosdata.com/cn/documentation/connector#schemaless) 章节。这里对 Schemaless 的数据表达格式进行描述。
**无模式写入行协议**
<br/>TDengine 的无模式写入行协议兼容 InfluxDB 的行协议(Line Protocol)、OpenTSDB 的 telnet 行协议、OpenTSDB 的 JSON 格式协议。但是使用这三种协议的时候,需要在 API 中指定输入内容所采用的协议标准。
### Schemaless 数据行协议
对于InfluxDB、OpenTSDB的标准写入协议请参考各自的文档。下面首先以 InfluxDB 的行协议为基础,介绍 TDengine 扩展的协议内容,允许用户采用更加精细的方式控制(超级表)模式。
Schemaless 采用一个字符串来表达最终存储的一个数据行(可以向 Schemaless 写入 API 中一次传入多个字符串来实现多个数据行的批量写入),其格式约定如下:
Schemaless 采用一个字符串来表达一个数据行(可以向写入 API 中一次传入多行字符串来实现多个数据行的批量写入),其格式约定如下:
```json
measurement,tag_set field_set timestamp
```
......@@ -44,51 +49,104 @@ measurement,tag_set field_set timestamp
* field_set 将作为普通列数据,其格式形如 `<field_key>=<field_value>,<field_key>=<field_value>`,同样是使用英文逗号来分隔多个普通列的数据。它与 timestamp 之间使用一个半角空格来分隔。
* timestamp 即本行数据对应的主键时间戳。
在 Schemaless 的数据行协议中,tag_set、field_set 中的每个数据项都需要对自身的数据类型进行描述。具体来说:
tag_set 中的所有的数据自动转化为 nchar 数据类型,并不需要使用双引号(")。
<br/>在无模式写入数据行协议中,field_set 中的每个数据项都需要对自身的数据类型进行描述。具体来说:
* 如果两边有英文双引号,表示 BINARY(32) 类型。例如 `"abc"`
* 如果两边有英文双引号而且带有 L 前缀,表示 NCHAR(32) 类型。例如 `L"报错信息"`
* 空格、等号(=)、逗号(,)、双引号(")等字符前面需要使用反斜杠(\)进行转义(均指英文半角符号)。
* 数值类型将通过后缀来区分数据类型:
- 没有后缀,为 FLOAT 类型;
- 后缀为 f32,为 FLOAT 类型;
- 后缀为 f64,为 DOUBLE 类型;
- 后缀为 i8,表示为 TINYINT (INT8) 类型;
- 后缀为 i16,表示为 SMALLINT (INT16) 类型;
- 后缀为 i32,表示为 INT (INT32) 类型;
- 后缀为 i64,表示为 BIGINT (INT64) 类型;
| **序号** | **后缀** | **映射类型** | **大小(字节)** |
| -- | ------- | ---------| ------ |
| 1 | 无或f64 | double | 8 |
| 2 | f32 | float | 4 |
| 3 | i8 | TinyInt | 1 |
| 4 | i16 | SmallInt | 2 |
| 5 | i32 | Int | 4 |
| 6 | i64或i | Bigint | 8 |
* t, T, true, True, TRUE, f, F, false, False 将直接作为 BOOL 型来处理。
<br/>例如如下数据行表示:向名为 st 的超级表下的 t1 标签为 "3"(NCHAR)、t2 标签为 "4"(NCHAR)、t3 标签为 "t3"(NCHAR)的数据子表,写入 c1 列为 3(BIGINT)、c2 列为 false(BOOL)、c3 列为 "passit"(BINARY)、c4 列为 4(DOUBLE)、主键时间戳为 1626006833639000000 的一行数据。
```json
st,t1=3,t2=4,t3=t3 c1=3i64,c3="passit",c2=false,c4=4f64 1626006833639000000
```
需要注意的是,如果描述数据类型后缀时使用了错误的大小写,或者为数据指定的数据类型有误,均可能引发报错提示而导致数据写入失败。
timestamp 位置的时间戳通过后缀来声明时间精度,具体如下:
* 不带任何后缀的长整数会被当作微秒来处理;
* 当后缀为 s 时,表示秒时间戳;
* 当后缀为 ms 时,表示毫秒时间戳;
* 当后缀为 us 时,表示微秒时间戳;
* 当后缀为 ns 时,表示纳秒时间戳;
* 当时间戳为 0 时,表示采用客户端的当前时间(因此,同一批提交的数据中,时间戳 0 会被解释为同一个时间点,于是就有可能导致时间戳重复)。
### 无模式写入的主要处理逻辑
例如,如下 Schemaless 数据行表示:向名为 st 的超级表下的 t1 标签为 3(BIGINT 类型)、t2 标签为 4(DOUBLE 类型)、t3 标签为 "t3"(BINARY 类型)的数据子表,写入 c1 列为 3(BIGINT 类型)、c2 列为 false(BOOL 类型)、c3 列为 "passit"(NCHAR 类型)、c4 列为 4(DOUBLE 类型)、主键时间戳为 1626006833639000000(纳秒精度)的一行数据。
无模式写入按照如下原则来处理行数据:
1. 当 tag_set 中有 ID 字段时,该字段的值将作为子表的表名。
2. 没有 ID 字段时,将使用如下规则来生成子表名:
首先将 measurement 的名称和标签的 key 和 value 组合成为如下的字符串:
```json
"measurement,tag_key1=tag_value1,tag_key2=tag_value2"
```
需要注意的是,这里的 tag_key1、tag_key2 并不是用户输入标签的原始顺序,而是标签名称按照字符串升序排列后的结果,所以 tag_key1 并不一定是行协议中输入的第一个标签。
排列完成以后,计算该字符串的 MD5 散列值 "md5_val",然后将其与固定前缀组合生成表名:“t_md5_val”。其中的 “t_” 是固定的前缀,每个通过该映射关系自动生成的表都具有该前缀(生成过程可参考本节原则列表之后的示意代码)。
<br/>3. 如果解析行协议获得的超级表不存在,则会创建这个超级表。
<br/>4. 如果解析行协议获得子表不存在,则 Schemaless 会按照步骤 1 或 2 确定的子表名来创建子表。
<br/>5. 如果数据行中指定的标签列或普通列不存在,则在超级表中增加对应的标签列或普通列(只增不减)。
<br/>6. 如果超级表中存在一些标签列或普通列未在一个数据行中被指定取值,那么这些列的值在这一行中会被置为 NULL。
<br/>7. 对 BINARY 或 NCHAR 列,如果数据行中所提供值的长度超出了列类型的限制,自动增加该列允许存储的字符长度上限(只增不减),以保证数据的完整保存。
<br/>8. 如果指定的数据子表已经存在,而且本次指定的标签列取值跟已保存的值不一样,那么最新的数据行中的值会覆盖旧的标签列取值。
<br/>9. 整个处理过程中遇到的错误会中断写入过程,并返回错误代码。
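下面给出一段示意性的 C 代码(并非 TDengine 官方实现,仅用于说明上述映射规则;其中使用 OpenSSL 的 MD5 接口,输入字符串也只是示例假设),演示按照“标签按名称升序排列后拼接、计算 MD5、再加固定前缀 t_”的约定推算自动生成的子表名:

```c
#include <stdio.h>
#include <string.h>
#include <openssl/md5.h>   /* 编译时需链接 OpenSSL:gcc demo.c -lcrypto */

/* 按文档约定推算无模式写入自动生成的子表名(仅为示意,并非官方实现)。
 * 传入的字符串中,标签需已按标签名升序排列。 */
static void schemaless_child_table_name(const char *sorted_key, char *out, size_t out_len) {
    unsigned char digest[MD5_DIGEST_LENGTH];
    MD5((const unsigned char *)sorted_key, strlen(sorted_key), digest);

    size_t pos = snprintf(out, out_len, "t_");
    for (int i = 0; i < MD5_DIGEST_LENGTH && pos + 3 <= out_len; i++) {
        pos += snprintf(out + pos, out_len - pos, "%02x", digest[i]);
    }
}

int main(void) {
    /* 假设输入行为 st,t1=3,t2=4,t3=t3 ...,标签已按名称升序排列 */
    char name[64] = {0};
    schemaless_child_table_name("st,t1=3,t2=4,t3=t3", name, sizeof(name));
    printf("generated child table name: %s\n", name); /* 形如 t_ 后跟 32 位十六进制字符 */
    return 0;
}
```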
**备注:**
<br/>无模式所有的处理逻辑,仍会遵循 TDengine 对数据结构的底层限制,例如每行数据的总长度不能超过 16k 字节。这方面的具体限制约束请参见 [TAOS SQL 边界限制](https://www.taosdata.com/cn/documentation/taos-sql#limitation) 章节。
**时间分辨率识别**
<br/>无模式写入过程中支持以下三种协议模式,具体如下:
| **序号** | **值** | **说明** |
| ---- | ------------------- | ------------ |
| 1 | SML_LINE_PROTOCOL | InfluxDB行协议(Line Protocol) |
| 2 | SML_TELNET_PROTOCOL | OpenTSDB文本行协议 |
| 3 | SML_JSON_PROTOCOL | Json协议格式 |
<br/>在 SML_LINE_PROTOCOL 解析模式下,需要用户指定输入的时间戳的时间分辨率。可用的时间分辨率如下表所示:<br/>
| **序号** | **时间分辨率定义** | **含义** |
| ---- | ----------------------------- | --------- |
| 1 | TSDB_SML_TIMESTAMP_NOT_CONFIGURED | 未定义(无效) |
| 2 | TSDB_SML_TIMESTAMP_HOURS | 小时 |
| 3 | TSDB_SML_TIMESTAMP_MINUTES | 分钟 |
| 4 | TSDB_SML_TIMESTAMP_SECONDS | 秒 |
| 5 | TSDB_SML_TIMESTAMP_MILLI_SECONDS | 毫秒 |
| 6 | TSDB_SML_TIMESTAMP_MICRO_SECONDS | 微秒 |
| 7 | TSDB_SML_TIMESTAMP_NANO_SECONDS | 纳秒 |
在 SML_TELNET_PROTOCOL 和 SML_JSON_PROTOCOL 模式下,根据时间戳的长度来确定时间精度(与 OpenTSDB 标准操作方式相同),此时会忽略用户指定的时间分辨率。
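结合上面的协议类型与时间分辨率参数,下面给出一段示意性的 C 代码,演示以 InfluxDB 行协议、纳秒时间精度写入一行无模式数据。其中 taos_schemaless_insert 的具体函数签名,以及 TSDB_SML_LINE_PROTOCOL、TSDB_SML_TIMESTAMP_NANO_SECONDS 等枚举名称均为示例假设,请以所使用版本的 taos.h 与连接器文档为准:

```c
#include <stdio.h>
#include <taos.h>

int main(void) {
    /* 假设本机已启动 taosd,且数据库 demo 已存在 */
    TAOS *taos = taos_connect("localhost", "root", "taosdata", "demo", 0);
    if (taos == NULL) {
        fprintf(stderr, "failed to connect to TDengine\n");
        return 1;
    }

    /* 一行 InfluxDB 行协议格式的无模式数据,时间戳为纳秒精度 */
    char *lines[] = {
        "st,t1=3,t2=4,t3=t3 c1=3i64,c3=\"passit\",c2=false,c4=4f64 1626006833639000000"
    };

    /* 协议与时间精度的枚举名以实际头文件为准 */
    TAOS_RES *res = taos_schemaless_insert(taos, lines, 1,
                                           TSDB_SML_LINE_PROTOCOL,
                                           TSDB_SML_TIMESTAMP_NANO_SECONDS);
    if (taos_errno(res) != 0) {
        /* 写入文本本身的错误会返回 TSDB_CODE_TSC_LINE_SYNTAX_ERROR,详见下文“错误码”说明 */
        fprintf(stderr, "schemaless insert failed: %s\n", taos_errstr(res));
    }
    taos_free_result(res);
    taos_close(taos);
    return 0;
}
```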
**数据模式变更处理**
<br/>本节将说明不同行数据写入情况下,对于数据模式的影响。
在使用行协议写入时,如果某个字段的数据类型已被明确标识,后续又以不同的类型定义写入该字段,会产生明确的数据模式错误,即会触发写入 API 报告错误。如下所示:
```json
st,t1=3i64,t2=4f64,t3="t3" c1=3i64,c3=L"passit",c2=false,c4=4f64 1626006833639000000ns
st,t1=3,t2=4,t3=t3 c1=3i64,c3="passit",c2=false,c4=4 1626006833639000000
st,t1=3,t2=4,t3=t3 c1=3i64,c3="passit",c2=false,c4=4i 1626006833640000000
```
第一行的数据类型映射将 c4 列定义为 Double, 但是第二行的数据又通过数值后缀方式声明该列为 BigInt, 由此会触发无模式写入的解析错误。
需要注意的是,如果描述数据类型后缀时使用了错误的大小写,或者为数据指定的数据类型有误,均可能引发报错提示而导致数据写入失败。
如果前面的行协议将某个数据列声明为 binary,而后续写入需要更长的 binary 长度,此时会触发超级表模式的变更。
```json
st,t1=3,t2=4,t3=t3 c1=3i64,c5="pass" 1626006833639000000
st,t1=3,t2=4,t3=t3 c1=3i64,c5="passit" 1626006833640000000
```
第一行的行协议解析会声明 c5 列是一个 binary(4) 的字段;第二行数据写入时,解析得到的 c5 仍然是 binary 列,但其宽度为 6,此时需要将 binary 列的宽度增加到能够容纳新字符串的宽度。
```json
st,t1=3,t2=4,t3=t3 c1=3i64 1626006833639000000
st,t1=3,t2=4,t3=t3 c1=3i64,c6="passit" 1626006833640000000
```
第二行数据相对于第一行增加了一个列 c6,类型为 binary(6),此时超级表会自动增加一个类型为 binary(6) 的列 c6。
### Schemaless 的处理逻辑
**写入完整性**
<br/>TDengine 提供数据写入的幂等性保证,即您可以反复调用 API 进行出错数据的写入操作。但是不提供多行数据写入的原子性保证。即在多行数据一批次写入过程中,会出现部分数据写入成功,部分数据写入失败的情况。
Schemaless 按照如下原则来处理行数据:
1. 当 tag_set 中有 ID 字段时,该字段的值将作为数据子表的表名。
2. 没有 ID 字段时,将使用 `measurement + tag_value1 + tag_value2 + ...` 的 md5 值来作为子表名。
3. 如果指定的超级表名不存在,则 Schemaless 会创建这个超级表。
4. 如果指定的数据子表不存在,则 Schemaless 会按照步骤 1 或 2 确定的子表名来创建子表。
5. 如果数据行中指定的标签列或普通列不存在,则 Schemaless 会在超级表中增加对应的标签列或普通列(只增不减)。
6. 如果超级表中存在一些标签列或普通列未在一个数据行中被指定取值,那么这些列的值在这一行中会被置为 NULL。
7. 对 BINARY 或 NCHAR 列,如果数据行中所提供值的长度超出了列类型的限制,那么 Schemaless 会增加该列允许存储的字符长度上限(只增不减),以保证数据的完整保存。
8. 如果指定的数据子表已经存在,而且本次指定的标签列取值跟已保存的值不一样,那么最新的数据行中的值会覆盖旧的标签列取值。
9. 整个处理过程中遇到的错误会中断写入过程,并返回错误代码。
**错误码**
<br/>如果是无模式写入过程中的数据本身错误,应用会得到 TSDB_CODE_TSC_LINE_SYNTAX_ERROR 错误信息,该错误信息表明错误发生在写入文本中。其他的错误码与原系统一致,可以通过 taos_errstr 获取具体的错误原因。
**注意:**Schemaless 所有的处理逻辑,仍会遵循 TDengine 对数据结构的底层限制,例如每行数据的总长度不能超过 16k 字节。这方面的具体限制约束请参见 [TAOS SQL 边界限制](https://www.taosdata.com/cn/documentation/taos-sql#limitation) 章节。
**后续升级计划**
<br/>当前版本只提供了 C 版本的 API,后续将提供其他高级语言的 API,例如 Java/Go/Python/C# 等。此外,在 TDengine v2.3 及后续版本中,您还可以通过 Taos Adapter 采用 REST 的方式直接写入无模式数据。
关于 Schemaless 的字符串编码处理、时区设置等,均会沿用 TAOSC 客户端的设置。
## <a class="anchor" id="prometheus"></a>Prometheus 直接写入
......@@ -183,10 +241,10 @@ use prometheus;
select * from apiserver_request_latencies_bucket;
```
## <a class="anchor" id="telegraf"></a> Telegraf 直接写入(通过 BLM v3)
## <a class="anchor" id="telegraf"></a> Telegraf 直接写入(通过 taosadapter)
安装 Telegraf 请参考[官方文档](https://portal.influxdata.com/downloads/)
TDengine 新版本(2.3.0.0+)包含一个 BLM3 独立程序,负责接收包括 Telegraf 的多种应用的数据写入。
TDengine 新版本(2.3.0.0+)包含一个 taosadapter 独立程序,负责接收包括 Telegraf 的多种应用的数据写入。
配置方法,在 /etc/telegraf/telegraf.conf 增加如下文字,其中 database name 请填写希望在 TDengine 保存 Telegraf 数据的数据库名,TDengine server/cluster host、username和 password 填写 TDengine 实际值:
```
......@@ -206,14 +264,14 @@ sudo systemctl start telegraf
```
即可在 TDengine 中查询 metrics 数据库中 Telegraf 写入的数据。
BLM v3 相关配置参数请参考 blm3 --help 命令输出以及相关文档。
taosadapter 相关配置参数请参考 taosadapter --help 命令输出以及相关文档。
## <a class="anchor" id="collectd"></a> collectd 直接写入(通过 BLM v3)
## <a class="anchor" id="collectd"></a> collectd 直接写入(通过 taosadapter)
安装 collectd,请参考[官方文档](https://collectd.org/download.shtml)
TDengine 新版本(2.3.0.0+)包含一个 BLM3 独立程序,负责接收包括 collectd 的多种应用的数据写入。
TDengine 新版本(2.3.0.0+)包含一个 taosadapter 独立程序,负责接收包括 collectd 的多种应用的数据写入。
在 /etc/collectd/collectd.conf 文件中增加如下内容,其中 host 和 port 请填写 TDengine 和 BLM3 配置的实际值:
在 /etc/collectd/collectd.conf 文件中增加如下内容,其中 host 和 port 请填写 TDengine 和 taosadapter 配置的实际值:
```
LoadPlugin network
<Plugin network>
......@@ -224,15 +282,15 @@ LoadPlugin network
```
sudo systemctl start collectd
```
BLM v3 相关配置参数请参考 blm3 --help 命令输出以及相关文档。
taosadapter 相关配置参数请参考 taosadapter --help 命令输出以及相关文档。
## <a class="anchor" id="statsd"></a> StatsD 直接写入(通过 BLM v3)
## <a class="anchor" id="statsd"></a> StatsD 直接写入(通过 taosadapter)
安装 StatsD
请参考[官方文档](https://github.com/statsd/statsd)
TDengine 新版本(2.3.0.0+)包含一个 BLM3 独立程序,负责接收包括 StatsD 的多种应用的数据写入。
TDengine 新版本(2.3.0.0+)包含一个 taosadapter 独立程序,负责接收包括 StatsD 的多种应用的数据写入。
在 config.js 文件中增加如下内容后启动 StatsD,其中 host 和 port 请填写 TDengine 和 BLM3 配置的实际值:
在 config.js 文件中增加如下内容后启动 StatsD,其中 host 和 port 请填写 TDengine 和 taosadapter 配置的实际值:
```
backends 部分添加 "./backends/repeater"
repeater 部分添加 { host:'<TDengine server/cluster host>', port: <port for StatsD>}
......@@ -247,12 +305,12 @@ port: 8125
}
```
BLM v3 相关配置参数请参考 blm3 --help 命令输出以及相关文档。
taosadapter 相关配置参数请参考 taosadapter --help 命令输出以及相关文档。
## <a class="anchor" id="blm2-telegraf"></a> 使用 Bailongma 2.0 接入 Telegraf 数据写入
## <a class="anchor" id="taosadapter2-telegraf"></a> 使用 Bailongma 2.0 接入 Telegraf 数据写入
*注意:TDengine 新版本(2.3.0.0+)提供新版本 Bailongma ,命名为 BLM v3,提供更简便的 Telegraf 数据写入以及其他更强大的功能,Bailongma v2 即之前版本将逐步不再维护。
*注意:TDengine 新版本(2.3.0.0+)提供新版本 Bailongma ,命名为 taosadapter ,提供更简便的 Telegraf 数据写入以及其他更强大的功能,Bailongma v2 即之前版本将逐步不再维护。
[Telegraf](https://www.influxdata.com/time-series-platform/telegraf/)是一款流行的IT运维数据采集开源工具,TDengine提供一个小工具[Bailongma](https://github.com/taosdata/Bailongma),只需在Telegraf做简单配置,无需任何代码,就可将Telegraf采集的数据直接写入TDengine,并按规则在TDengine自动创建库和相关表项。博文[用Docker容器快速搭建一个Devops监控Demo](https://www.taosdata.com/blog/2020/02/03/1189.html)即是采用bailongma将Prometheus和Telegraf的数据写入TDengine中的示例,可以参考。
......
# Rust 连接器
![Crates.io](https://img.shields.io/crates/v/libtaos) ![Crates.io](https://img.shields.io/crates/d/libtaos)
> Rust 连接器仍然在快速开发中,版本API变动在所难免,在1.0 之前无法保证其向后兼容,请使用时注意版本及对应的文档。
感谢 [@songtianyi](https://github.com/songtianyi) 提供的 [libtdengine](https://github.com/songtianyi/tdengine-rust-bindings),使 Rust 社区能够使用 Rust 连接 [TDengine]。[libtaos-rs] 项目旨在为 Rust 开发者提供官方支持,使用 taosc 接口及 HTTP 接口构建兼容 API,以便于用户切换接口方式。
## 依赖
- [Rust](https://www.rust-lang.org/learn/get-started)
默认情况下,[libtaos-rs] 使用 C 接口连接数据库,所以您需要:
- [TDengine] [客户端](https://www.taosdata.com/cn/getting-started/#%E9%80%9A%E8%BF%87%E5%AE%89%E8%A3%85%E5%8C%85%E5%AE%89%E8%A3%85)
- `clang`: `bindgen` 使用 `libclangAST` 来生成对应的Rust绑定。
## 特性列表
- [x] C 接口的Rust绑定
- [x] 使用 `rest` feature 来启用 RESTful API.
- [x] [r2d2] 连接池支持(feature `r2d2`)
- [ ] 迭代器接口
- [ ] 流式计算接口
- [ ] 订阅支持
## 构建和测试
```sh
cargo build
cargo test
```
测试使用默认用户名密码和本地连接。您可以根据具体情况设置环境变量:
- `TEST_TAOS_IP`
- `TEST_TAOS_PORT`
- `TEST_TAOS_USER`
- `TEST_TAOS_PASS`
- `TEST_TAOS_DB`
## 使用
使用默认的taosc 连接方式,可以在 `Cargo.toml` 中直接添加 `libtaos` 依赖:
```toml
[dependencies]
libtaos = "v0.3.8"
```
添加 feature `r2d2` 来启用连接池:
```toml
[dependencies]
libtaos = { version = "*", features = ["r2d2"] }
```
对于RESTful接口,可使用 `rest` 特性来替代taosc,免去安装TDengine客户端。
```toml
[dependencies]
libtaos = { version = "*", features = ["rest"] }
```
本项目提供一个[示例程序](https://github.com/taosdata/libtaos-rs/blob/main/examples/demo.rs)(examples/demo.rs),如下:
```rust
// ...
#[tokio::main]
async fn main() -> Result<(), Error> {
init();
let taos = taos_connect()?;
assert_eq!(
taos.query("drop database if exists demo").await.is_ok(),
true
);
assert_eq!(taos.query("create database demo").await.is_ok(), true);
assert_eq!(taos.query("use demo").await.is_ok(), true);
assert_eq!(
taos.query("create table m1 (ts timestamp, speed int)")
.await
.is_ok(),
true
);
for i in 0..10i32 {
assert_eq!(
taos.query(format!("insert into m1 values (now+{}s, {})", i, i).as_str())
.await
.is_ok(),
true
);
}
let rows = taos.query("select * from m1").await?;
println!("{}", rows.column_meta.into_iter().map(|col| col.name).join(","));
for row in rows.rows {
println!("{}", row.into_iter().join(","));
}
Ok(())
}
```
您可以在 [bailongma-rs] - 一个 Rust 编写的 Prometheus 远程存储 API 适配器 - 看到如何在具体应用中使用 Rust 连接器。
[libtaos-rs]: https://github.com/taosdata/libtaos-rs
[TDengine]: https://github.com/taosdata/TDengine
[bailongma-rs]: https://github.com/taosdata/bailongma-rs
[r2d2]: https://crates.io/crates/r2d2
\ No newline at end of file
......@@ -3,7 +3,7 @@
## <a class="anchor" id="grafana"></a>Grafana
TDengine 能够与开源数据可视化系统 [Grafana](https://www.grafana.com/)快速集成搭建数据监测报警系统,整个过程无需任何代码开发,TDengine 中数据表中内容可以在仪表盘(DashBoard)上进行可视化展现。
TDengine 能够与开源数据可视化系统 [Grafana](https://www.grafana.com/)快速集成搭建数据监测报警系统,整个过程无需任何代码开发,TDengine 中数据表中内容可以在仪表盘(DashBoard)上进行可视化展现。关于TDengine插件的使用您可以在[GitHub](https://github.com/taosdata/grafanaplugin/blob/master/README.md)中了解更多。
### 安装Grafana
......@@ -11,19 +11,24 @@ TDengine 能够与开源数据可视化系统 [Grafana](https://www.grafana.com/
### 配置Grafana
TDengine 的 Grafana 插件在安装包的 /usr/local/taos/connector/grafanaplugin 目录下。
以 CentOS 7.2 操作系统为例,将 grafanaplugin 目录拷贝到 /var/lib/grafana/plugins 目录下,重新启动 grafana 即可。
TDengine 的 Grafana 插件请从 <https://github.com/taosdata/grafanaplugin/releases/latest> 下载。
```bash
sudo cp -rf /usr/local/taos/connector/grafanaplugin /var/lib/grafana/plugins/tdengine
GF_VERSION=3.1.1
wget https://github.com/taosdata/grafanaplugin/releases/download/v$GF_VERSION/tdengine-datasource-$GF_VERSION.zip
```
Grafana 8.x 版本会对插件进行签名检查,因此还需要在 grafana.ini 文件中增加如下行,才能正确使用插件:
以 CentOS 7.2 操作系统为例,将插件包解压到 /var/lib/grafana/plugins 目录下,重新启动 grafana 即可。
```bash
sudo unzip tdengine-datasource-$GF_VERSION.zip -d /var/lib/grafana/plugins/
```
Grafana 7.3+ / 8.x 版本会对插件进行签名检查,因此还需要在 grafana.ini 文件中增加如下行,才能正确使用插件:
```ini
[plugins]
enable_alpha = true
allow_loading_unsigned_plugins = taosdata-tdengine-datasource
allow_loading_unsigned_plugins = tdengine-datasource
```
### 使用 Grafana
......@@ -62,7 +67,6 @@ allow_loading_unsigned_plugins = taosdata-tdengine-datasource
* ALIAS BY:可设置当前查询别名。
* GENERATE SQL: 点击该按钮会自动替换相应变量,并生成最终执行的语句。
按照默认提示查询当前 TDengine 部署所在服务器指定间隔系统内存平均使用量如下:
![img](../images/connections/create_dashboard2.jpg)
......@@ -71,16 +75,15 @@ allow_loading_unsigned_plugins = taosdata-tdengine-datasource
#### 导入 Dashboard
在 Grafana 插件目录 /usr/local/taos/connector/grafanaplugin/dashboard 下提供了一个 `tdengine-grafana.json` 可导入的 dashboard
我们提供一个 TDengine Dashboard 可以作为 TDengine 集群的监控可视化工具使用,见 [Grafana Dashboard 15146](https://grafana.com/grafana/dashboards/15146)
点击左侧 `Import` 按钮,并上传 `tdengine-grafana.json` 文件
点击左侧 `Import` 按钮,选择 **Grafana.com Dashboard**,将 id `15146` 填入并加载。
![img](../images/connections/import_dashboard1.jpg)
导入完成之后可看到如下效果:
![img](../images/connections/import_dashboard2.jpg)
![img](../images/connections/dashboard-15146.png)
## <a class="anchor" id="matlab"></a>MATLAB
......
......@@ -729,17 +729,17 @@ Query OK, 1 row(s) in set (0.001091s)
| **Operation** | **Note** | **Applicable Data Types** |
| ------------- | ------------------------ | ----------------------------------------- |
| > | larger than | **`timestamp`** and all numeric types |
| < | smaller than | **`timestamp`** and all numeric types |
| >= | larger than or equal to | **`timestamp`** and all numeric types |
| <= | smaller than or equal to | **`timestamp`** and all numeric types |
| > | larger than | all types except bool |
| < | smaller than | all types except bool |
| >= | larger than or equal to | all types except bool |
| <= | smaller than or equal to | all types except bool |
| = | equal to | all types |
| <> | not equal to | all types |
| is [not] null | is null or is not null | all types |
| between and | within a certain range | **`timestamp`** and all numeric types |
| between and | within a certain range | all types except bool |
| in | match any value in a set | all types except first column `timestamp` |
| like | match a wildcard string | **`binary`** **`nchar`** |
| match/nmatch | filter regex | **regex** |
| match/nmatch | filter regex | **`binary`** **`nchar`** |
1. <> 算子也可以写为 != ,请注意,这个算子不能用于数据表第一列的 timestamp 字段。
2. like 算子使用通配符字符串进行匹配检查。
......@@ -766,15 +766,10 @@ Query OK, 1 row(s) in set (0.001091s)
**使用限制**
只能针对表名(即 tbname 筛选)和标签的名称和binary类型标签值 进行正则表达式过滤,不支持针对普通列使用正则表达式过滤。
只能在 WHERE 子句中作为过滤条件存在。
只能针对表名(即 tbname 筛选)、binary/nchar类型标签值进行正则表达式过滤,不支持普通列的过滤。
正则匹配字符串长度不能超过 128 字节。可以通过参数 *maxRegexStringLen* 设置和调整最大允许的正则匹配字符串长度,该参数是客户端配置参数,需要重启客户端才能生效。
**嵌套查询支持**
可以在内层查询和外层查询中使用。<!-- REPLACE_OPEN_TO_ENTERPRISE__IN_OPERATOR_AND_UNSIGNED_INTEGER -->
<a class="anchor" id="join"></a>
### JOIN 子句
......
......@@ -185,23 +185,23 @@ TDengine 中时间戳的时区总是由客户端进行处理,而与服务端
| TCP | 6041 | 客户端与服务端之间的 RESTful 通讯。 | 随 serverPort 端口变化。 |
| TCP | 6042 | Arbitrator 的服务端口。 | 随 Arbitrator 启动参数设置变化。 |
| TCP | 6043 | TaosKeeper 监控服务端口。 | 随 TaosKeeper 启动参数设置变化。 |
| TCP | 6044 | 支持 StatsD 的数据接入端口。 | 随 BLM3 启动参数设置变化(2.3.0.1+以上版本)。 |
| TCP | 6045 | 支持 collectd 数据接入端口。 | 随 BLM3 启动参数设置变化(2.3.0.1+以上版本)。 |
| TCP | 6044 | 支持 StatsD 的数据接入端口。 | 随 taosadapter 启动参数设置变化(2.3.0.1+以上版本)。 |
| TCP | 6045 | 支持 collectd 数据接入端口。 | 随 taosadapter 启动参数设置变化(2.3.0.1+以上版本)。 |
| TCP | 6060 | 企业版内 Monitor 服务的网络端口。 | |
| UDP | 6030-6034 | 客户端与服务端之间通讯。 | 随 serverPort 端口变化。 |
| UDP | 6035-6039 | 多节点集群的节点间通讯。 | 随 serverPort 端口变化。 |
## 20. go 语言编写组件编译失败怎样解决?
新版本 TDengine 2.3.0.0 包含一个使用 go 语言开发的 BLM3 组件,取代之前内置的 httpd ,提供包含原 httpd 功能以及支持多种其他软件(Prometheus、Telegraf、collectd、StatsD等)的数据接入功能。
使用最新 develop 分支代码编译需要先 `git submodule update --init --recursive` 下载 blm3 仓库代码后再编译。
新版本 TDengine 2.3.0.0 包含一个使用 go 语言开发的 taosadapter 组件,取代之前内置的 httpd ,提供包含原 httpd 功能以及支持多种其他软件(Prometheus、Telegraf、collectd、StatsD等)的数据接入功能。
使用最新 develop 分支代码编译需要先 `git submodule update --init --recursive` 下载 taosadapter 仓库代码后再编译。
目前编译方式默认自动编译 blm3。go 语言版本要求 1.14 以上,如果发生 go 编译错误,往往是国内访问 go mod 问题,可以通过设置 go 环境变量来解决:
目前编译方式默认自动编译 taosadapter。go 语言版本要求 1.14 以上,如果发生 go 编译错误,往往是国内访问 go mod 问题,可以通过设置 go 环境变量来解决:
```sh
go env -w GO111MODULE=on
go env -w GOPROXY=https://goproxy.cn,direct
```
如果希望继续使用之前的内置 httpd,可以关闭 blm3 编译,使用
如果希望继续使用之前的内置 httpd,可以关闭 taosadapter 编译,使用
`cmake .. -DBUILD_HTTP=true` 使用原来内置的 httpd。
......@@ -30,12 +30,14 @@ IT 运维监测数据通常都是对时间特性比较敏感的数据,例如
## 数据链路设置
### 复制 TDengine 插件到 grafana 插件目录
```
1. sudo cp -r /usr/local/taos/connector/grafanaplugin /var/lib/grafana/plugins/tdengine
2. sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
3. echo -e "[plugins]\nallow_loading_unsigned_plugins = taosdata-tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
4. sudo systemctl restart grafana-server.service
### 下载 TDengine 插件到 grafana 插件目录
```bash
1. wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.1/tdengine-datasource-3.1.1.zip
2. sudo unzip tdengine-datasource-3.1.1.zip -d /var/lib/grafana/plugins/
3. sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
4. echo -e "[plugins]\nallow_loading_unsigned_plugins = tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
5. sudo systemctl restart grafana-server.service
```
### 修改 /etc/telegraf/telegraf.conf
......@@ -61,7 +63,7 @@ sudo systemctl start telegraf
使用 Web 浏览器访问 IP:3000 登录 Grafana 界面,系统初始用户名密码为 admin/admin。
点击左侧齿轮图标并选择 Plugins,应该可以找到 TDengine data source 插件图标。
点击左侧加号图标并选择 Import,按照界面提示选择 /usr/local/taos/connector/grafanaplugin/examples/telegraf/grafana/dashboards/telegraf-dashboard-v0.1.0.json 文件。如果按照 Grafana 的机器上没有安装 TDengine,可以从 https://github.com/taosdata/grafanaplugin/blob/master/examples/telegraf/grafana/dashboards/telegraf-dashboard-v0.1.0.json 下载 dashboard JSON 文件再导入。之后可以看到如下界面的仪表盘:
点击左侧加号图标并选择 Import,从 https://github.com/taosdata/grafanaplugin/blob/master/examples/telegraf/grafana/dashboards/telegraf-dashboard-v0.1.0.json 下载 dashboard JSON 文件后导入。之后可以看到如下界面的仪表盘:
![IT-DevOps-Solutions-telegraf-dashboard.png](../../images/IT-DevOps-Solutions-telegraf-dashboard.png)
......
......@@ -30,15 +30,17 @@ IT 运维监测数据通常都是对时间特性比较敏感的数据,例如
## 数据链路设置
### 复制 TDengine 插件到 grafana 插件目录
```
1. sudo cp -r /usr/local/taos/connector/grafanaplugin /var/lib/grafana/plugins/tdengine
2. sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
3. echo -e "[plugins]\nallow_loading_unsigned_plugins = taosdata-tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
4. sudo systemctl restart grafana-server.service
```bash
1. wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.1/tdengine-datasource-3.1.1.zip
2. sudo unzip tdengine-datasource-3.1.1.zip -d /var/lib/grafana/plugins/
3. sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
4. echo -e "[plugins]\nallow_loading_unsigned_plugins = tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
5. sudo systemctl restart grafana-server.service
```
### 配置 collectd
在 /etc/collectd/collectd.conf 文件中增加如下内容,其中 host 和 port 请填写 TDengine 和 BLM3 配置的实际值:
在 /etc/collectd/collectd.conf 文件中增加如下内容,其中 host 和 port 请填写 TDengine 和 Taos Adapter 配置的实际值:
```
LoadPlugin network
<Plugin network>
......@@ -49,7 +51,7 @@ sudo systemctl start collectd
```
### 配置 StatsD
在 config.js 文件中增加如下内容后启动 StatsD,其中 host 和 port 请填写 TDengine 和 BLM3 配置的实际值:
在 config.js 文件中增加如下内容后启动 StatsD,其中 host 和 port 请填写 TDengine 和 Taos Adapter 配置的实际值:
```
backends 部分添加 "./backends/repeater"
repeater 部分添加 { host:'<TDengine server/cluster host>', port: <port for StatsD>}
......@@ -62,13 +64,13 @@ repeater 部分添加 { host:'<TDengine server/cluster host>', port: <port for S
#### 导入 collectd 仪表盘
点击左侧加号图标并选择 Import,按照界面提示选择 /usr/local/taos/connector/grafanaplugin/examples/collectd/grafana/dashboards/collect-metrics-with-tdengine-v0.1.0.json 文件。如果按照 Grafana 的机器上没有安装 TDengine,可以从 https://github.com/taosdata/grafanaplugin/blob/master/examples/collectd/grafana/dashboards/collect-metrics-with-tdengine-v0.1.0.json 下载 dashboard json 文件再导入。之后可以看到如下界面的仪表盘:
从 https://github.com/taosdata/grafanaplugin/blob/master/examples/collectd/grafana/dashboards/collect-metrics-with-tdengine-v0.1.0.json 下载 dashboard json 文件,点击左侧加号图标并选择 Import,按照界面提示选择 JSON 文件导入。之后可以看到如下界面的仪表盘:
![IT-DevOps-Solutions-collectd-dashboard.png](../../images/IT-DevOps-Solutions-collectd-dashboard.png)
#### 导入 StatsD 仪表盘
点击左侧加号图标并选择 Import,按照界面提示选择 /usr/local/taos/connector/grafanaplugin/examples/statsd/dashboards/statsd-with-tdengine-v0.1.0.json 文件。如果安装 Grafana 的机器上没有安装 TDengine,可以从 https://github.com/taosdata/grafanaplugin/blob/master/examples/statsd/dashboards/statsd-with-tdengine-v0.1.0.json 下载 dashboard json 文件再导入。之后可以看到如下界面的仪表盘:
从 https://github.com/taosdata/grafanaplugin/blob/master/examples/statsd/dashboards/statsd-with-tdengine-v0.1.0.json 下载 dashboard json 文件,点击左侧加号图标并选择 Import,按照界面提示导入JSON文件。之后可以看到如下界面的仪表盘:
![IT-DevOps-Solutions-statsd-dashboard.png](../../images/IT-DevOps-Solutions-statsd-dashboard.png)
## 总结
......
......@@ -77,6 +77,7 @@ TDengine is a highly efficient platform to store, query, and analyze time-series
- [Node.js Connector](/connector#nodejs): driver for connecting to TDengine server from Node.js applications
- [C# Connector](/connector#csharp): driver for connecting to TDengine server from C# applications
- [Windows Client](https://www.taosdata.com/blog/2019/07/26/514.html): compile your own Windows client, which is required by various connectors on the Windows environment
- [Rust Connector](/connector/rust): A taosc/RESTful API based TDengine client for Rust
## [Connections with Other Tools](/connections)
......
......@@ -154,7 +154,7 @@ The complete list of taosdemo command-line arguments can be displayed via taosde
```
$ taosdemo --help
-f, --file=FILE The meta file to the execution procedure.
-f, --file=FILE The meta file to the execution procedure. Currently, we support standard UTF-8 (without BOM) encoded files only.
-u, --user=USER The user name to use when connecting to the server.
-p, --password The password to use when connecting to the server.
-c, --config-dir=CONFIG_DIR Configuration directory.
......
# Rust Connector
![Crates.io](https://img.shields.io/crates/v/libtaos) ![Crates.io](https://img.shields.io/crates/d/libtaos)
> Note that the Rust connector is under active development and the APIs will change a lot between versions. But we promise to ensure backward compatibility after version 1.0.
Thanks [@songtianyi](https://github.com/songtianyi) for [libtdengine](https://github.com/songtianyi/tdengine-rust-bindings) - a Rust bindings project for [TDengine]. It's a new design for the [TDengine] Rust client based on the C interface or the REST API. It will provide Rust-like APIs and all the Rust things (like async/stream/iterators and others).
## Dependencies
- [Rust](https://www.rust-lang.org/learn/get-started) of course.
If you use the default features, it will also depend on:
- [TDengine] Client library and headers.
- clang, because bindgen requires the clang AST library.
## Features
In-design features:
- [x] Rust bindings for the C interface
- [x] REST API support by feature `rest`.
- [x] [r2d2] Pool support by feature `r2d2`
- [ ] Iterators for fields fetching
- [ ] Stream support
- [ ] Subscribe support
## Build and test
```sh
cargo build
cargo test
```
`test` will use default TDengine user and password on localhost (TDengine default).
Set these variables if your setup is not the default:
- `TEST_TAOS_IP`
- `TEST_TAOS_PORT`
- `TEST_TAOS_USER`
- `TEST_TAOS_PASS`
- `TEST_TAOS_DB`
## Usage
For default C-based client API, set in Cargo.toml
```toml
[dependencies]
libtaos = "v0.3.8"
```
For r2d2 support:
```toml
[dependencies]
libtaos = { version = "*", features = ["r2d2"] }
```
For REST client:
```toml
[dependencies]
libtaos = { version = "*", features = ["rest"] }
```
There's a [demo app](https://github.com/taosdata/libtaos-rs/blob/main/examples/demo.rs) (examples/demo.rs) in the examples directory, which looks like this:
```rust
// ...
#[tokio::main]
async fn main() -> Result<(), Error> {
init();
let taos = taos_connect()?;
assert_eq!(
taos.query("drop database if exists demo").await.is_ok(),
true
);
assert_eq!(taos.query("create database demo").await.is_ok(), true);
assert_eq!(taos.query("use demo").await.is_ok(), true);
assert_eq!(
taos.query("create table m1 (ts timestamp, speed int)")
.await
.is_ok(),
true
);
for i in 0..10i32 {
assert_eq!(
taos.query(format!("insert into m1 values (now+{}s, {})", i, i).as_str())
.await
.is_ok(),
true
);
}
let rows = taos.query("select * from m1").await?;
println!("{}", rows.column_meta.into_iter().map(|col| col.name).join(","));
for row in rows.rows {
println!("{}", row.into_iter().join(","));
}
Ok(())
}
```
You can check out the experimental [bailongma-rs](https://github.com/taosdata/bailongma-rs) - a TDengine adapter for Prometheus written in Rust - as a more productive code example.
[libtaos-rs]: https://github.com/taosdata/libtaos-rs
[TDengine]: https://github.com/taosdata/TDengine
[bailongma-rs]: https://github.com/taosdata/bailongma-rs
[r2d2]: https://crates.io/crates/r2d2
\ No newline at end of file
......@@ -12,12 +12,17 @@ https://grafana.com/grafana/download.
### Configure Grafana
TDengine Grafana plugin is in the /usr/local/taos/connector/grafanaplugin directory.
Download grafana plugin from <https://github.com/taosdata/grafanaplugin/releases/latest> .
```bash
GF_VERSION=3.1.1
wget https://github.com/taosdata/grafanaplugin/releases/download/v$GF_VERSION/tdengine-datasource-$GF_VERSION.zip
```
Taking CentOS 7.2 as an example, just unzip the downloaded package into the /var/lib/grafana/plugins directory and restart Grafana.
```bash
sudo cp -rf /usr/local/taos/connector/grafanaplugin /var/lib/grafana/plugins/tdengine
sudo unzip tdengine-datasource-$GF_VERSION.zip -d /var/lib/grafana/plugins/
```
### Use Grafana
......@@ -64,15 +69,15 @@ According to the default prompt, query the average system memory usage at the sp
#### Import Dashboard
A `tdengine-grafana.json` importable dashboard is provided under the Grafana plug-in directory `/usr/local/taos/connector/grafanaplugin/dashboard`.
We provide an example dashboard [Grafana Dashboard 15146](https://grafana.com/grafana/dashboards/15146)
Click the `Import` button on the left panel and upload the `tdengine-grafana.json` file:
Click the `Import` button on the left panel and load the Grafana dashboard with id `15146`:
![img](../images/connections/import_dashboard1.jpg)
You can see as follows after Dashboard imported.
![img](../images/connections/import_dashboard2.jpg)
![img](../images/connections/dashboard-15146.png)
## <a class="anchor" id="matlab"></a> MATLAB
......
......@@ -203,6 +203,9 @@ keepColumnName 1
# database name must be specified in restful interface if the following parameter is set, off by default
# httpDbNameMandatory 1
# http keep alive, default is 30 seconds
# httpKeepAlive 30000
# The following parameter is used to limit the maximum number of lines in log files.
# max number of lines per log filters
# numOfLogLines 10000000
......
......@@ -128,12 +128,12 @@ function check_link() {
function check_main_path() {
#check install main dir and all sub dir
main_dir=("" "cfg" "bin" "connector" "driver" "examples" "include" "init.d")
for i in ${main_dir[@]};do
for i in "${main_dir[@]}";do
check_file ${install_main_dir} $i
done
if [ "$verMode" == "cluster" ]; then
nginx_main_dir=("admin" "conf" "html" "sbin" "logs")
for i in ${nginx_main_dir[@]};do
for i in "${nginx_main_dir[@]}";do
check_file ${nginx_dir} $i
done
fi
......@@ -142,12 +142,12 @@ function check_main_path() {
function check_bin_path() {
# check install bin dir and all sub dir
bin_dir=("taos" "taosd" "blm3" "taosdemo" "taosdump" "remove.sh" "tarbitrator" "set_core.sh")
for i in ${bin_dir[@]};do
bin_dir=("taos" "taosd" "taosadapter" "taosdemo" "taosdump" "remove.sh" "tarbitrator" "set_core.sh")
for i in "${bin_dir[@]}";do
check_file ${sbin_dir} $i
done
lbin_dir=("taos" "taosd" "blm3" "taosdemo" "taosdump" "rmtaos" "tarbitrator" "set_core")
for i in ${lbin_dir[@]};do
lbin_dir=("taos" "taosd" "taosadapter" "taosdemo" "taosdump" "rmtaos" "tarbitrator" "set_core")
for i in "${lbin_dir[@]}";do
check_link ${bin_link_dir}/$i
done
if [ "$verMode" == "cluster" ]; then
......@@ -171,16 +171,17 @@ function check_lib_path() {
function check_header_path() {
# check all header
header_dir=("taos.h" "taoserror.h")
for i in ${header_dir[@]};do
for i in "${header_dir[@]}";do
check_link ${inc_link_dir}/$i
done
echo -e "Check bin path:\033[32mOK\033[0m!"
}
function check_blm3_config_dir() {
function check_taosadapter_config_dir() {
# check all config
check_file ${cfg_install_dir} blm3.toml
check_file ${install_main_dir}/cfg blm.toml.org
check_file ${cfg_install_dir} taosadapter.toml
check_file ${cfg_install_dir} taosadapter.service
check_file ${install_main_dir}/cfg taosadapter.toml.org
echo -e "Check conf path:\033[32mOK\033[0m!"
}
......@@ -221,7 +222,7 @@ function test_TDengine() {
check_lib_path
check_header_path
check_config_dir
check_blm3_config_dir
check_taosadapter_config_dir
check_log_path
check_data_path
result=`taos -s 'create database test ;create table test.tt(ts timestamp ,i int);insert into test.tt values(now,11);select * from test.tt' 2>&1 ||:`
......
......@@ -11,4 +11,3 @@ Maintainer: support@taosdata.com
Provides: taosdata
Homepage: http://taosdata.com
Description: Big Data Platform Designed and Optimized for IoT.
......@@ -28,8 +28,12 @@ if [ -f "${install_main_dir}/taos.cfg" ]; then
${csudo} rm -f ${install_main_dir}/cfg/taos.cfg || :
fi
if [ -f "${install_main_dir}/blm.toml" ]; then
${csudo} rm -f ${install_main_dir}/cfg/blm.toml || :
if [ -f "${install_main_dir}/taosadapter.toml" ]; then
${csudo} rm -f ${install_main_dir}/cfg/taosadapter.toml || :
fi
if [ -f "${install_main_dir}/taosadapter.service" ]; then
${csudo} rm -f ${install_main_dir}/cfg/taosadapter.service || :
fi
# there can not libtaos.so*, otherwise ln -s error
......
......@@ -25,7 +25,7 @@ else
# Remove all links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${cfg_link_dir}/* || :
......
......@@ -44,8 +44,11 @@ mkdir -p ${pkg_dir}${install_home_path}/init.d
mkdir -p ${pkg_dir}${install_home_path}/script
cp ${compile_dir}/../packaging/cfg/taos.cfg ${pkg_dir}${install_home_path}/cfg
if [ -f "${compile_dir}/test/cfg/blm.toml" ]; then
cp ${compile_dir}/test/cfg/blm.toml ${pkg_dir}${install_home_path}/cfg
if [ -f "${compile_dir}/test/cfg/taosadapter.toml" ]; then
cp ${compile_dir}/test/cfg/taosadapter.toml ${pkg_dir}${install_home_path}/cfg
fi
if [ -f "${compile_dir}/test/cfg/taosadapter.service" ]; then
cp ${compile_dir}/test/cfg/taosadapter.service ${pkg_dir}${install_home_path}/cfg ||:
fi
cp ${compile_dir}/../packaging/deb/taosd ${pkg_dir}${install_home_path}/init.d
......@@ -59,8 +62,8 @@ cp ${compile_dir}/build/bin/taosdemo ${pkg_dir}${install_home_pat
cp ${compile_dir}/build/bin/taosdump ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/bin/taosd ${pkg_dir}${install_home_path}/bin
if [ -f "${compile_dir}/build/bin/blm3" ]; then
cp ${compile_dir}/build/bin/blm3 ${pkg_dir}${install_home_path}/bin ||:
if [ -f "${compile_dir}/build/bin/taosadapter" ]; then
cp ${compile_dir}/build/bin/taosadapter ${pkg_dir}${install_home_path}/bin ||:
fi
cp ${compile_dir}/build/bin/taos ${pkg_dir}${install_home_path}/bin
......@@ -68,19 +71,24 @@ cp ${compile_dir}/build/lib/${libfile} ${pkg_dir}${install_home_pat
cp ${compile_dir}/../src/inc/taos.h ${pkg_dir}${install_home_path}/include
cp ${compile_dir}/../src/inc/taoserror.h ${pkg_dir}${install_home_path}/include
cp -r ${top_dir}/tests/examples/* ${pkg_dir}${install_home_path}/examples
if [ -d "${top_dir}/src/connector/grafanaplugin/dist" ]; then
cp -r ${top_dir}/src/connector/grafanaplugin/dist ${pkg_dir}${install_home_path}/connector/grafanaplugin
else
echo "grafanaplugin bundled directory not found!"
exit 1
fi
cp -r ${top_dir}/src/connector/python ${pkg_dir}${install_home_path}/connector
cp -r ${top_dir}/src/connector/go ${pkg_dir}${install_home_path}/connector
cp -r ${top_dir}/src/connector/nodejs ${pkg_dir}${install_home_path}/connector
cp ${compile_dir}/build/lib/taos-jdbcdriver*.* ${pkg_dir}${install_home_path}/connector ||:
install_user_local_path="/usr/local"
if [ -f ${compile_dir}/build/lib/libavro.so.23.0.0 ]; then
mkdir -p ${pkg_dir}${install_user_local_path}/lib
cp ${compile_dir}/build/lib/libavro.so.23.0.0 ${pkg_dir}${install_user_local_path}/lib/
ln -sf libavro.so.23.0.0 ${pkg_dir}${install_user_local_path}/lib/libavro.so.23
ln -sf libavro.so.23 ${pkg_dir}${install_user_local_path}/lib/libavro.so
fi
if [ -f ${compile_dir}/build/lib/libavro.a ]; then
cp ${compile_dir}/build/lib/libavro.a ${pkg_dir}${install_user_local_path}/lib/
fi
if [ -f ${compile_dir}/build/bin/jemalloc-config ]; then
install_user_local_path="/usr/local"
mkdir -p ${pkg_dir}${install_user_local_path}/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3}
cp ${compile_dir}/build/bin/jemalloc-config ${pkg_dir}${install_user_local_path}/bin/
if [ -f ${compile_dir}/build/bin/jemalloc.sh ]; then
......@@ -120,6 +128,10 @@ chmod 755 ${pkg_dir}/DEBIAN/*
debver="Version: "$tdengine_ver
sed -i "2c$debver" ${pkg_dir}/DEBIAN/control
if [ -f ${compile_dir}/build/lib/libavro.so.23.0.0 ]; then
sed -i.bak "s/#Depends: no/Depends: libjansson4, libsnappy1v5/g" ${pkg_dir}/DEBIAN/control
fi
#get taos version, then set deb name
......@@ -151,4 +163,3 @@ cp ${pkg_dir}/*.deb ${output_dir}
# clean tmep dir
rm -rf ${pkg_dir}
......@@ -25,7 +25,7 @@ GROUP="root"
DAEMON="/usr/local/taos/bin/taosd"
DAEMON_OPTS=""
HTTPD_NAME="blm3"
HTTPD_NAME="taosadapter"
DAEMON_HTTPD_NAME=$HTTPD_NAME
DAEMON_HTTPD="/usr/local/taos/bin/$HTTPD_NAME"
......
......@@ -45,24 +45,32 @@ echo "version=${version}"
#docker manifest rm tdengine/tdengine:${version}
if [ "$verType" == "beta" ]; then
docker manifest inspect tdengine/tdengine-beta:latest
docker manifest inspect tdengine/tdengine-beta:${version}
docker manifest create -a tdengine/tdengine-beta:${version} tdengine/tdengine-amd64-beta:${version} tdengine/tdengine-aarch64-beta:${version} tdengine/tdengine-aarch32-beta:${version}
docker manifest create -a tdengine/tdengine-beta:latest tdengine/tdengine-amd64-beta:latest tdengine/tdengine-aarch64-beta:latest tdengine/tdengine-aarch32-beta:latest
sleep 30
docker manifest rm tdengine/tdengine-beta:${version}
docker manifest rm tdengine/tdengine-beta:latest
docker manifest create -a tdengine/tdengine-beta:${version} tdengine/tdengine-amd64-beta:${version} tdengine/tdengine-aarch64-beta:${version} tdengine/tdengine-aarch32-beta:${version}
docker manifest create -a tdengine/tdengine-beta:latest tdengine/tdengine-amd64-beta:latest tdengine/tdengine-aarch64-beta:latest tdengine/tdengine-aarch32-beta:latest
docker login -u tdengine -p ${passWord} #replace the docker registry username and password
docker manifest push tdengine/tdengine-beta:latest
docker manifest push tdengine/tdengine-beta:${version}
docker manifest push tdengine/tdengine-beta:latest
elif [ "$verType" == "stable" ]; then
docker manifest inspect tdengine/tdengine:latest
docker manifest inspect tdengine/tdengine:${version}
docker manifest create -a tdengine/tdengine:${version} tdengine/tdengine-amd64:${version} tdengine/tdengine-aarch64:${version} tdengine/tdengine-aarch32:${version}
docker manifest create -a tdengine/tdengine:latest tdengine/tdengine-amd64:latest tdengine/tdengine-aarch64:latest tdengine/tdengine-aarch32:latest
sleep 30
docker manifest rm tdengine/tdengine:latest
docker manifest rm tdengine/tdengine:${version}
docker manifest inspect tdengine/tdengine:latest
docker manifest inspect tdengine/tdengine:${version}
docker manifest create -a tdengine/tdengine:${version} tdengine/tdengine-amd64:${version} tdengine/tdengine-aarch64:${version} tdengine/tdengine-aarch32:${version}
docker manifest create -a tdengine/tdengine:latest tdengine/tdengine-amd64:latest tdengine/tdengine-aarch64:latest tdengine/tdengine-aarch32:latest
docker login -u tdengine -p ${passWord} #replace the docker registry username and password
docker manifest push tdengine/tdengine:latest
docker manifest push tdengine/tdengine:${version}
docker manifest push tdengine/tdengine:latest
else
echo "unknow verType, nor stabel or beta"
exit 1
......
......@@ -151,7 +151,7 @@ function vercomp () {
}
# 1. check version information
if (( ! is_valid_version $verNumber ) || ( ! is_valid_version $verNumberComp ) || [[ "$(vercomp $verNumber $verNumberComp)" == '2' ]]); then
if ( ( ! is_valid_version $verNumber ) || ( ! is_valid_version $verNumberComp ) || [[ "$(vercomp $verNumber $verNumberComp)" == '2' ]] ); then
echo "please enter correct version"
exit 0
fi
......@@ -213,7 +213,7 @@ else
exit 1
fi
make -j8
make -j8 && ${csudo} make install
cd ${curr_dir}
......
......@@ -32,11 +32,11 @@ if command -v sudo > /dev/null; then
fi
function cp_rpm_package() {
local cur_dir
cd $1
cur_dir=$(pwd)
local cur_dir
cd $1
cur_dir=$(pwd)
for dirlist in $(ls ${cur_dir}); do
for dirlist in "$(ls ${cur_dir})"; do
if test -d ${dirlist}; then
cd ${dirlist}
cp_rpm_package ${cur_dir}/${dirlist}
......@@ -45,7 +45,7 @@ for dirlist in $(ls ${cur_dir}); do
if test -e ${dirlist}; then
cp ${cur_dir}/${dirlist} ${output_dir}/TDengine-${tdengine_ver}.rpm
fi
done
done
}
if [ -d ${pkg_dir} ]; then
......@@ -56,6 +56,10 @@ cd ${pkg_dir}
${csudo} mkdir -p BUILD BUILDROOT RPMS SOURCES SPECS SRPMS
if [ -f ${compile_dir}/build/lib/libavro.so.23.0.0 ]; then
sed -i.bak 's/#Requires:/Requires: jansson snappy/g' ${spec_file}
fi
${csudo} rpmbuild --define="_version ${tdengine_ver}" --define="_topdir ${pkg_dir}" --define="_compiledir ${compile_dir}" -bb ${spec_file}
# copy rpm package to output_dir, and modify package name, then clean temp dir
......
......@@ -54,8 +54,11 @@ mkdir -p %{buildroot}%{homepath}/init.d
mkdir -p %{buildroot}%{homepath}/script
cp %{_compiledir}/../packaging/cfg/taos.cfg %{buildroot}%{homepath}/cfg
if [ -f %{_compiledir}/test/cfg/blm.toml ]; then
cp %{_compiledir}/test/cfg/blm.toml %{buildroot}%{homepath}/cfg
if [ -f %{_compiledir}/test/cfg/taosadapter.toml ]; then
cp %{_compiledir}/test/cfg/taosadapter.toml %{buildroot}%{homepath}/cfg
fi
if [ -f %{_compiledir}/test/cfg/taosadapter.service ]; then
cp %{_compiledir}/test/cfg/taosadapter.service %{buildroot}%{homepath}/cfg
fi
cp %{_compiledir}/../packaging/rpm/taosd %{buildroot}%{homepath}/init.d
cp %{_compiledir}/../packaging/tools/post.sh %{buildroot}%{homepath}/script
......@@ -65,26 +68,28 @@ cp %{_compiledir}/../packaging/tools/set_core.sh %{buildroot}%{homepath}/bin
cp %{_compiledir}/../packaging/tools/taosd-dump-cfg.gdb %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taos %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taosd %{buildroot}%{homepath}/bin
if [ -f %{_compiledir}/build/bin/blm3 ]; then
cp %{_compiledir}/build/bin/blm3 %{buildroot}%{homepath}/bin ||:
if [ -f %{_compiledir}/build/bin/taosadapter ]; then
cp %{_compiledir}/build/bin/taosadapter %{buildroot}%{homepath}/bin ||:
fi
cp %{_compiledir}/build/bin/taosdemo %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taosdump %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/lib/${libfile} %{buildroot}%{homepath}/driver
cp %{_compiledir}/../src/inc/taos.h %{buildroot}%{homepath}/include
cp %{_compiledir}/../src/inc/taoserror.h %{buildroot}%{homepath}/include
if [ -d %{_compiledir}/../src/connector/grafanaplugin/dist ]; then
cp -r %{_compiledir}/../src/connector/grafanaplugin/dist %{buildroot}%{homepath}/connector/grafanaplugin
else
echo grafanaplugin bundled directory not found!
exit 1
fi
cp -r %{_compiledir}/../src/connector/python %{buildroot}%{homepath}/connector
cp -r %{_compiledir}/../src/connector/go %{buildroot}%{homepath}/connector
cp -r %{_compiledir}/../src/connector/nodejs %{buildroot}%{homepath}/connector
cp %{_compiledir}/build/lib/taos-jdbcdriver*.* %{buildroot}%{homepath}/connector ||:
cp -r %{_compiledir}/../tests/examples/* %{buildroot}%{homepath}/examples
if [ -f %{_compiledir}/build/lib/libavro.so.23.0.0 ]; then
cp %{_compiledir}/build/lib/libavro.so.23.0.0 %{buildroot}%{homepath}/driver
ln -sf libavro.so.23.0.0 %{buildroot}%{homepath}/driver/libavro.so.23
ln -sf libavro.so.23 %{buildroot}%{homepath}/driver/libavro.so
fi
if [ -f %{_compiledir}/build/lib/libavro.a ]; then
cp %{_compiledir}/build/lib/libavro.a %{buildroot}%{homepath}/driver
fi
if [ -f %{_compiledir}/build/bin/jemalloc-config ]; then
mkdir -p %{buildroot}%{userlocalpath}/bin
......@@ -151,14 +156,14 @@ if pidof taosd &> /dev/null; then
echo "Stop taosd service success!"
sleep 1
fi
# if taos.cfg already softlink, remove it
# if taos.cfg already exist, remove it
if [ -f %{cfg_install_dir}/taos.cfg ]; then
${csudo} rm -f %{homepath}/cfg/taos.cfg || :
${csudo} rm -f %{cfg_install_dir}/cfg/taos.cfg || :
fi
# if blm.toml already softlink, remove it
if [ -f %{cfg_install_dir}/blm.toml ]; then
${csudo} rm -f %{homepath}/cfg/blm.toml || :
# if taosadapter.toml already exist, remove it
if [ -f %{cfg_install_dir}/taosadapter.toml ]; then
${csudo} rm -f %{cfg_install_dir}/cfg/taosadapter.toml || :
fi
# there can not libtaos.so*, otherwise ln -s error
......@@ -199,7 +204,7 @@ if [ $1 -eq 0 ];then
# Remove all links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${cfg_link_dir}/* || :
......
# /bin/bash
#!/bin/bash
#
CSI=$(echo -e "\033[")
CRED="${CSI}1;31m"
......
......@@ -185,7 +185,7 @@ function install_bin() {
# Remove links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/rmtaos || :
......@@ -197,7 +197,7 @@ function install_bin() {
#Make link
[ -x ${install_main_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || :
[ -x ${install_main_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || :
[ -x ${install_main_dir}/bin/blm3 ] && ${csudo} ln -s ${install_main_dir}/bin/blm3 ${bin_link_dir}/blm3 || :
[ -x ${install_main_dir}/bin/taosadapter ] && ${csudo} ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter || :
[ -x ${install_main_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || :
[ -x ${install_main_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || :
[ -x ${install_main_dir}/bin/remove.sh ] && ${csudo} ln -s ${install_main_dir}/bin/remove.sh ${bin_link_dir}/rmtaos || :
......@@ -303,7 +303,7 @@ function add_newHostname_to_hosts() {
iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
arr=($iphost)
IFS="$OLD_IFS"
for s in ${arr[@]}
for s in "${arr[@]}"
do
if [[ "$s" == "$localIp" ]]; then
return
......@@ -358,7 +358,7 @@ function is_correct_ipaddr() {
IFS=" "
arr=($iplist)
IFS="$OLD_IFS"
for s in ${arr[@]}
for s in "${arr[@]}"
do
if [[ "$s" == "$newIp" ]]; then
return 0
......@@ -447,18 +447,18 @@ function local_fqdn_check() {
fi
}
function install_blm3_config() {
if [ ! -f "${cfg_install_dir}/blm.toml" ]; then
function install_taosadapter_config() {
if [ ! -f "${cfg_install_dir}/taosadapter.toml" ]; then
${csudo} mkdir -p ${cfg_install_dir}
[ -f ${script_dir}/cfg/blm.toml ] && ${csudo} cp ${script_dir}/cfg/blm.toml ${cfg_install_dir}
[ -f ${cfg_install_dir}/blm.toml ] && ${csudo} chmod 644 ${cfg_install_dir}/blm.toml
[ -f ${script_dir}/cfg/taosadapter.toml ] && ${csudo} cp ${script_dir}/cfg/taosadapter.toml ${cfg_install_dir}
[ -f ${cfg_install_dir}/taosadapter.toml ] && ${csudo} chmod 644 ${cfg_install_dir}/taosadapter.toml
fi
[ -f ${script_dir}/cfg/blm.toml ] &&
${csudo} cp -f ${script_dir}/cfg/blm.toml ${install_main_dir}/cfg/blm.toml.org
[ -f ${script_dir}/cfg/taosadapter.toml ] &&
${csudo} cp -f ${script_dir}/cfg/taosadapter.toml ${cfg_install_dir}/taosadapter.toml.new
[ -f ${cfg_install_dir}/blm.toml ] &&
${csudo} ln -s ${cfg_install_dir}/blm.toml ${install_main_dir}/cfg/blm.toml
[ -f ${cfg_install_dir}/taosadapter.toml ] &&
${csudo} ln -s ${cfg_install_dir}/taosadapter.toml ${install_main_dir}/cfg/taosadapter.toml
[ ! -z $1 ] && return 0 || : # only install client
......@@ -473,7 +473,7 @@ function install_config() {
${csudo} chmod 644 ${cfg_install_dir}/*
fi
${csudo} cp -f ${script_dir}/cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org
${csudo} cp -f ${script_dir}/cfg/taos.cfg ${cfg_install_dir}/taos.cfg.new
${csudo} ln -s ${cfg_install_dir}/taos.cfg ${install_main_dir}/cfg
[ ! -z $1 ] && return 0 || : # only install client
......@@ -679,8 +679,8 @@ function install_service_on_systemd() {
taosd_service_config="${service_config_dir}/taosd.service"
${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'After=network-online.target' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Wants=network-online.target' >> ${taosd_service_config}"
${csudo} bash -c "echo 'After=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Wants=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo >> ${taosd_service_config}"
${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}"
......@@ -756,6 +756,11 @@ function install_service_on_systemd() {
fi
}
function install_taosadapter_service() {
[ -f ${script_dir}/cfg/taosadapter.service ] &&\
${csudo} cp ${script_dir}/cfg/taosadapter.service ${service_config_dir}/
}
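install_taosadapter_service only copies the unit file into ${service_config_dir}; it does not enable or start anything. Assuming systemd is the active service manager and the unit is named taosadapter.service, it could be brought up afterwards with, for example:

    sudo systemctl daemon-reload
    sudo systemctl enable taosadapter
    sudo systemctl start taosadapter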
function install_service() {
if ((${service_mod}==0)); then
install_service_on_systemd
......@@ -878,8 +883,9 @@ function update_TDengine() {
if [ -z $1 ]; then
install_bin
install_service
install_taosadapter_service
install_config
install_blm3_config
install_taosadapter_config
openresty_work=false
if [ "$verMode" == "cluster" ]; then
......@@ -959,6 +965,7 @@ function install_TDengine() {
# For installing new
install_bin
install_service
install_taosadapter_service
openresty_work=false
if [ "$verMode" == "cluster" ]; then
......
......@@ -287,7 +287,7 @@ function add_newHostname_to_hosts() {
iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
arr=($iphost)
IFS="$OLD_IFS"
for s in ${arr[@]}
for s in "${arr[@]}"
do
if [[ "$s" == "$localIp" ]]; then
return
......@@ -342,7 +342,7 @@ function is_correct_ipaddr() {
IFS=" "
arr=($iplist)
IFS="$OLD_IFS"
for s in ${arr[@]}
for s in "${arr[@]}"
do
if [[ "$s" == "$newIp" ]]; then
return 0
......
......@@ -278,7 +278,7 @@ function add_newHostname_to_hosts() {
iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
arr=($iphost)
IFS="$OLD_IFS"
for s in ${arr[@]}
for s in "${arr[@]}"
do
if [[ "$s" == "$localIp" ]]; then
return
......@@ -330,7 +330,7 @@ function is_correct_ipaddr() {
IFS=" "
arr=($iplist)
IFS="$OLD_IFS"
for s in ${arr[@]}
for s in "${arr[@]}"
do
if [[ "$s" == "$newIp" ]]; then
return 0
......
......@@ -287,7 +287,7 @@ function add_newHostname_to_hosts() {
iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
arr=($iphost)
IFS="$OLD_IFS"
for s in ${arr[@]}
for s in "${arr[@]}"
do
if [[ "$s" == "$localIp" ]]; then
return
......@@ -342,7 +342,7 @@ function is_correct_ipaddr() {
IFS=" "
arr=($iplist)
IFS="$OLD_IFS"
for s in ${arr[@]}
for s in "${arr[@]}"
do
if [[ "$s" == "$newIp" ]]; then
return 0
......
......@@ -114,8 +114,8 @@ if [ "$osType" != "Darwin" ]; then
fi
fi
function kill_blm3() {
pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}')
function kill_taosadapter() {
pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then
${csudo} kill -9 $pid || :
fi
......@@ -156,7 +156,7 @@ function install_bin() {
# Remove links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
......@@ -176,7 +176,7 @@ function install_bin() {
#Make link
[ -x ${install_main_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || :
[ -x ${install_main_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || :
[ -x ${install_main_dir}/bin/blm3 ] && ${csudo} ln -s ${install_main_dir}/bin/blm3 ${bin_link_dir}/blm3 || :
[ -x ${install_main_dir}/bin/taosadapter ] && ${csudo} ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter || :
[ -x ${install_main_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || :
[ -x ${install_main_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || :
[ -x ${install_main_dir}/bin/perfMonitor ] && ${csudo} ln -s ${install_main_dir}/bin/perfMonitor ${bin_link_dir}/perfMonitor || :
......@@ -191,7 +191,7 @@ function install_bin() {
#Make link
[ -x ${install_main_dir}/bin/taos ] || [ -x ${install_main_2_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || ${csudo} ln -s ${install_main_2_dir}/bin/taos || :
[ -x ${install_main_dir}/bin/taosd ] || [ -x ${install_main_2_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || ${csudo} ln -s ${install_main_2_dir}/bin/taosd || :
[ -x ${install_main_dir}/bin/blm3 ] || [ -x ${install_main_2_dir}/bin/blm3 ] && ${csudo} ln -s ${install_main_dir}/bin/blm3 ${bin_link_dir}/blm3 || ${csudo} ln -s ${install_main_2_dir}/bin/blm3 || :
[ -x ${install_main_dir}/bin/taosadapter ] || [ -x ${install_main_2_dir}/bin/taosadapter ] && ${csudo} ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter || ${csudo} ln -s ${install_main_2_dir}/bin/taosadapter || :
[ -x ${install_main_dir}/bin/taosdump ] || [ -x ${install_main_2_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || ln -s ${install_main_2_dir}/bin/taosdump ${bin_link_dir}/taosdump || :
[ -x ${install_main_dir}/bin/taosdemo ] || [ -x ${install_main_2_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || ln -s ${install_main_2_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || :
fi
......@@ -212,7 +212,8 @@ function install_jemalloc() {
fi
if [ -f "${binary_dir}/build/include/jemalloc/jemalloc.h" ]; then
/usr/bin/install -c -d /usr/local/include/jemalloc
/usr/bin/install -c -m 644 ${binary_dir}/build/include/jemalloc/jemalloc.h /usr/local/include/jemalloc
/usr/bin/install -c -m 644 ${binary_dir}/build/include/jemalloc/jemalloc.h\
/usr/local/include/jemalloc
fi
if [ -f "${binary_dir}/build/lib/libjemalloc.so.2" ]; then
/usr/bin/install -c -d /usr/local/lib
......@@ -225,25 +226,49 @@ function install_jemalloc() {
/usr/bin/install -c -m 755 ${binary_dir}/build/lib/libjemalloc_pic.a /usr/local/lib
if [ -f "${binary_dir}/build/lib/pkgconfig/jemalloc.pc" ]; then
/usr/bin/install -c -d /usr/local/lib/pkgconfig
/usr/bin/install -c -m 644 ${binary_dir}/build/lib/pkgconfig/jemalloc.pc /usr/local/lib/pkgconfig
/usr/bin/install -c -m 644 ${binary_dir}/build/lib/pkgconfig/jemalloc.pc\
/usr/local/lib/pkgconfig
fi
if [ -d /etc/ld.so.conf.d ]; then
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
fi
fi
if [ -f "${binary_dir}/build/share/doc/jemalloc/jemalloc.html" ]; then
/usr/bin/install -c -d /usr/local/share/doc/jemalloc
/usr/bin/install -c -m 644 ${binary_dir}/build/share/doc/jemalloc/jemalloc.html /usr/local/share/doc/jemalloc
/usr/bin/install -c -m 644 ${binary_dir}/build/share/doc/jemalloc/jemalloc.html\
/usr/local/share/doc/jemalloc
fi
if [ -f "${binary_dir}/build/share/man/man3/jemalloc.3" ]; then
/usr/bin/install -c -d /usr/local/share/man/man3
/usr/bin/install -c -m 644 ${binary_dir}/build/share/man/man3/jemalloc.3 /usr/local/share/man/man3
/usr/bin/install -c -m 644 ${binary_dir}/build/share/man/man3/jemalloc.3\
/usr/local/share/man/man3
fi
fi
}
function install_avro() {
if [ "$osType" != "Darwin" ]; then
if [ -f "${binary_dir}/build/$1/libavro.so.23.0.0" ]; then
/usr/bin/install -c -d /usr/local/$1
/usr/bin/install -c -m 755 ${binary_dir}/build/$1/libavro.so.23.0.0 /usr/local/$1
ln -sf libavro.so.23.0.0 /usr/local/$1/libavro.so.23
ln -sf libavro.so.23 /usr/local/$1/libavro.so
/usr/bin/install -c -d /usr/local/$1
[ -f ${binary_dir}/build/$1/libavro.a ] &&
/usr/bin/install -c -m 755 ${binary_dir}/build/$1/libavro.a /usr/local/$1
if [ -d /etc/ld.so.conf.d ]; then
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/$1" | ${csudo} tee /etc/ld.so.conf.d/libavro.conf
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
fi
fi
fi
}
function install_lib() {
......@@ -292,6 +317,8 @@ function install_lib() {
fi
install_jemalloc
install_avro lib
install_avro lib64
if [ "$osType" != "Darwin" ]; then
${csudo} ldconfig
......@@ -324,39 +351,33 @@ function install_config() {
[ -f ${script_dir}/../cfg/taos.cfg ] &&
${csudo} cp ${script_dir}/../cfg/taos.cfg ${cfg_install_dir}
${csudo} chmod 644 ${cfg_install_dir}/taos.cfg
${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org
${csudo} ln -s ${cfg_install_dir}/taos.cfg ${install_main_dir}/cfg/taos.cfg
${csudo} cp -f ${script_dir}/../cfg/taos.cfg \
${cfg_install_dir}/taos.cfg.${verNumber}
${csudo} ln -s ${cfg_install_dir}/taos.cfg \
${install_main_dir}/cfg/taos.cfg
else
if [ "$osType" != "Darwin" ]; then
${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org
else
${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org\
|| ${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_2_dir}/cfg/taos.cfg.org
fi
${csudo} cp -f ${script_dir}/../cfg/taos.cfg \
${cfg_install_dir}/taos.cfg.${verNumber}
fi
}
function install_blm3_config() {
if [ ! -f "${cfg_install_dir}/blm.toml" ]; then
function install_taosadapter_config() {
if [ ! -f "${cfg_install_dir}/taosadapter.toml" ]; then
${csudo} mkdir -p ${cfg_install_dir}
[ -f ${binary_dir}/test/cfg/blm.toml ] &&
${csudo} cp ${binary_dir}/test/cfg/blm.toml ${cfg_install_dir}
[ -f ${cfg_install_dir}/blm.toml ] &&
${csudo} chmod 644 ${cfg_install_dir}/blm.toml
[ -f ${binary_dir}/test/cfg/blm.toml ] &&
${csudo} cp -f ${binary_dir}/test/cfg/blm.toml ${install_main_dir}/cfg/blm.toml.org
[ -f ${cfg_install_dir}/blm.toml ] &&
${csudo} ln -s ${cfg_install_dir}/blm.toml ${install_main_dir}/cfg/blm.toml
[ -f ${binary_dir}/test/cfg/taosadapter.toml ] &&
${csudo} cp ${binary_dir}/test/cfg/taosadapter.toml ${cfg_install_dir}
[ -f ${cfg_install_dir}/taosadapter.toml ] &&
${csudo} chmod 644 ${cfg_install_dir}/taosadapter.toml
[ -f ${binary_dir}/test/cfg/taosadapter.toml ] &&
${csudo} cp -f ${binary_dir}/test/cfg/taosadapter.toml \
${cfg_install_dir}/taosadapter.toml.${verNumber}
[ -f ${cfg_install_dir}/taosadapter.toml ] && \
${csudo} ln -s ${cfg_install_dir}/taosadapter.toml \
${install_main_dir}/cfg/taosadapter.toml
else
if [ -f "${binary_dir}/test/cfg/blm.toml" ]; then
if [ "$osType" != "Darwin" ]; then
${csudo} cp -f ${binary_dir}/test/cfg/blm.toml \
${install_main_dir}/cfg/blm.toml.org
else
${csudo} cp -f ${binary_dir}/test/cfg/blm.toml ${install_main_dir}/cfg/blm.toml.org \
|| ${csudo} cp -f ${binary_dir}/test/cfg/blm.toml \
${install_main_2_dir}/cfg/blm.toml.org
fi
if [ -f "${binary_dir}/test/cfg/taosadapter.toml" ]; then
${csudo} cp -f ${binary_dir}/test/cfg/taosadapter.toml \
${cfg_install_dir}/taosadapter.toml.${verNumber}
fi
fi
}
......@@ -381,11 +402,6 @@ function install_data() {
}
function install_connector() {
if [ -d "${source_dir}/src/connector/grafanaplugin/dist" ]; then
${csudo} cp -rf ${source_dir}/src/connector/grafanaplugin/dist ${install_main_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bundled dir not found, please check if want to use it!"
fi
if find ${source_dir}/src/connector/go -mindepth 1 -maxdepth 1 | read; then
${csudo} cp -r ${source_dir}/src/connector/go ${install_main_dir}/connector
else
......@@ -481,8 +497,8 @@ function install_service_on_systemd() {
${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'After=network-online.target' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Wants=network-online.target' >> ${taosd_service_config}"
${csudo} bash -c "echo 'After=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Wants=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo >> ${taosd_service_config}"
${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}"
......@@ -503,6 +519,16 @@ function install_service_on_systemd() {
${csudo} systemctl enable taosd
}
function install_taosadapter_service() {
if ((${service_mod}==0)); then
[ -f ${binary_dir}/test/cfg/taosadapter.service ] &&\
${csudo} cp ${binary_dir}/test/cfg/taosadapter.service\
${service_config_dir}/ || :
else
kill_taosadapter
fi
}
function install_service() {
if ((${service_mod}==0)); then
install_service_on_systemd
......@@ -510,7 +536,6 @@ function install_service() {
install_service_on_sysvinit
else
# must manual stop taosd
kill_blm3
kill_taosd
fi
}
......@@ -526,7 +551,7 @@ function update_TDengine() {
elif ((${service_mod}==1)); then
${csudo} service taosd stop || :
else
kill_blm3
kill_taosadapter
kill_taosd
fi
sleep 1
......@@ -544,10 +569,11 @@ function update_TDengine() {
if [ "$osType" != "Darwin" ]; then
install_service
install_taosadapter_service
fi
install_config
install_blm3_config
install_taosadapter_config
if [ "$osType" != "Darwin" ]; then
echo
......@@ -555,7 +581,7 @@ function update_TDengine() {
echo
echo -e "${GREEN_DARK}To configure TDengine ${NC}: edit /etc/taos/taos.cfg"
echo -e "${GREEN_DARK}To configure blm3 (if has) ${NC}: edit /etc/taos/blm.toml"
echo -e "${GREEN_DARK}To configure taosadapter (if has) ${NC}: edit /etc/taos/taosadapter.toml"
if ((${service_mod}==0)); then
echo -e "${GREEN_DARK}To start TDengine ${NC}: ${csudo} systemctl start taosd${NC}"
elif ((${service_mod}==1)); then
......@@ -598,10 +624,11 @@ function install_TDengine() {
if [ "$osType" != "Darwin" ]; then
install_service
install_taosadapter_service
fi
install_config
install_blm3_config
install_taosadapter_config
if [ "$osType" != "Darwin" ]; then
# Ask if to start the service
......@@ -609,7 +636,7 @@ function install_TDengine() {
echo -e "\033[44;32;1mTDengine is installed successfully!${NC}"
echo
echo -e "${GREEN_DARK}To configure TDengine ${NC}: edit /etc/taos/taos.cfg"
echo -e "${GREEN_DARK}To configure blm (if has) ${NC}: edit /etc/taos/blm.toml"
echo -e "${GREEN_DARK}To configure taosadapter (if has) ${NC}: edit /etc/taos/taosadapter.toml"
if ((${service_mod}==0)); then
echo -e "${GREEN_DARK}To start TDengine ${NC}: ${csudo} systemctl start taosd${NC}"
elif ((${service_mod}==1)); then
......
......@@ -150,11 +150,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
if [ "$osType" != "Darwin" ]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
fi
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bundled dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector
else
......
......@@ -210,11 +210,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
if [ "$osType" != "Darwin" ]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
fi
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bunlded dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector
else
......
......@@ -172,11 +172,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
if [ "$osType" != "Darwin" ]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
fi
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bunlded dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector
else
......
......@@ -177,11 +177,6 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
if [ "$osType" != "Darwin" ]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
fi
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bunlded dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector
else
......
......@@ -35,12 +35,12 @@ fi
if [ "$pagMode" == "lite" ]; then
strip ${build_dir}/bin/taosd
strip ${build_dir}/bin/taos
# lite version doesn't include blm3, which will lead to no restful interface
# lite version doesn't include taosadapter, so the RESTful interface is unavailable
bin_files="${build_dir}/bin/taosd ${build_dir}/bin/taos ${script_dir}/remove.sh ${script_dir}/startPre.sh"
else
bin_files="${build_dir}/bin/taosd \
${build_dir}/bin/taos \
${build_dir}/bin/blm3 \
${build_dir}/bin/taosadapter \
${build_dir}/bin/taosdump \
${build_dir}/bin/taosdemo \
${build_dir}/bin/tarbitrator\
......@@ -78,7 +78,7 @@ mkdir -p ${install_dir}
mkdir -p ${install_dir}/inc && cp ${header_files} ${install_dir}/inc
mkdir -p ${install_dir}/cfg && cp ${cfg_dir}/taos.cfg ${install_dir}/cfg/taos.cfg
[ -f ${cfg_dir}/blm.toml ] && cp ${cfg_dir}/blm.toml ${install_dir}/cfg/blm.toml
[ -f ${cfg_dir}/taosadapter.toml ] && cp ${cfg_dir}/taosadapter.toml ${install_dir}/cfg/taosadapter.toml
mkdir -p ${install_dir}/bin && cp ${bin_files} ${install_dir}/bin && chmod a+x ${install_dir}/bin/* || :
mkdir -p ${install_dir}/init.d && cp ${init_file_deb} ${install_dir}/init.d/taosd.deb
......@@ -195,11 +195,6 @@ connector_dir="${code_dir}/connector"
mkdir -p ${install_dir}/connector
if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bundled dir not found, please check if you want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector
else
......
......@@ -81,7 +81,7 @@ else
# bin_files="${build_dir}/bin/powerd ${build_dir}/bin/power ${build_dir}/bin/powerdemo ${build_dir}/bin/tarbitrator ${script_dir}/remove_power.sh ${script_dir}/set_core.sh"
cp ${build_dir}/bin/taos ${install_dir}/bin/power
cp ${build_dir}/bin/taosd ${install_dir}/bin/powerd
cp ${build_dir}/bin/blm3 ${install_dir}/bin/blm3 ||:
cp ${build_dir}/bin/taosadapter ${install_dir}/bin/taosadapter ||:
cp ${script_dir}/remove_power.sh ${install_dir}/bin
cp ${build_dir}/bin/taosdemo ${install_dir}/bin/powerdemo
cp ${build_dir}/bin/taosdump ${install_dir}/bin/powerdump
......@@ -168,11 +168,6 @@ mkdir -p ${install_dir}/connector
if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bundled dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector
else
......
......@@ -62,7 +62,7 @@ else
fi
cp ${build_dir}/bin/taos ${install_dir}/bin/prodbc
cp ${build_dir}/bin/taosd ${install_dir}/bin/prodbs
cp ${build_dir}/bin/blm3 ${install_dir}/bin/blm3 ||:
cp ${build_dir}/bin/taosadapter ${install_dir}/bin/taosadapter ||:
cp ${script_dir}/remove_pro.sh ${install_dir}/bin
chmod a+x ${install_dir}/bin/* || :
......@@ -154,11 +154,6 @@ mkdir -p ${install_dir}/driver && cp ${lib_files} ${install_dir}/driver && echo
#if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
# cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
# if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
# cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
# else
# echo "WARNING: grafanaplugin bundled dir not found, please check if want to use it!"
# fi
# if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
# cp -r ${connector_dir}/go ${install_dir}/connector
# else
......
......@@ -82,7 +82,7 @@ else
cp ${build_dir}/bin/taos ${install_dir}/bin/tq
cp ${build_dir}/bin/taosd ${install_dir}/bin/tqd
cp ${script_dir}/remove_tq.sh ${install_dir}/bin
cp ${build_dir}/bin/blm3 ${install_dir}/bin/blm3 ||:
cp ${build_dir}/bin/taosadapter ${install_dir}/bin/taosadapter ||:
cp ${build_dir}/bin/taosdemo ${install_dir}/bin/tqdemo
cp ${build_dir}/bin/taosdump ${install_dir}/bin/tqdump
cp ${build_dir}/bin/tarbitrator ${install_dir}/bin
......@@ -168,11 +168,6 @@ mkdir -p ${install_dir}/connector
if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector ||:
if [ -d "${connector_dir}/grafanaplugin/dist" ]; then
cp -r ${connector_dir}/grafanaplugin/dist ${install_dir}/connector/grafanaplugin
else
echo "WARNING: grafanaplugin bundled dir not found, please check if want to use it!"
fi
if find ${connector_dir}/go -mindepth 1 -maxdepth 1 | read; then
cp -r ${connector_dir}/go ${install_dir}/connector
else
......
......@@ -2,7 +2,7 @@
#
# This file is used to install tdengine rpm package on centos systems. The operating system
# is required to use systemd to manage services at boot
#set -x
# set -x
iplist=""
serverFqdn=""
......@@ -64,9 +64,9 @@ else
service_mod=2
fi
function kill_blm3() {
# ${csudo} pkill -f blm3 || :
pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}')
function kill_taosadapter() {
# ${csudo} pkill -f taosadapter || :
pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then
${csudo} kill -9 $pid || :
fi
......@@ -86,6 +86,24 @@ function install_include() {
${csudo} ln -s ${inc_dir}/taoserror.h ${inc_link_dir}/taoserror.h
}
function install_avro_lib() {
${csudo} rm -f ${lib_link_dir}/libavro* || :
${csudo} rm -f ${lib64_link_dir}/libavro* || :
if [[ -f ${lib_dir}/libavro.so.23.0.0 ]]; then
${csudo} ln -s ${lib_dir}/libavro.so.23.0.0 ${lib_link_dir}/libavro.so.23.0.0
${csudo} ln -s ${lib_link_dir}/libavro.so.23.0.0 ${lib_link_dir}/libavro.so.23
${csudo} ln -s ${lib_link_dir}/libavro.so.23 ${lib_link_dir}/libavro.so
if [[ -d ${lib64_link_dir} && ! -e ${lib64_link_dir}/libavro.so ]]; then
${csudo} ln -s ${lib_dir}/libavro.so.23.0.0 ${lib64_link_dir}/libavro.so.23.0.0 || :
${csudo} ln -s ${lib64_link_dir}/libavro.so.23.0.0 ${lib64_link_dir}/libavro.so.23 || :
${csudo} ln -s ${lib64_link_dir}/libavro.so.23 ${lib64_link_dir}/libavro.so || :
fi
fi
${csudo} ldconfig
}
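Assuming ${lib_link_dir} resolves to /usr/lib (an illustration, not taken from this diff), install_avro_lib leaves the conventional shared-library symlink chain behind and refreshes the linker cache:

    /usr/lib/libavro.so          -> /usr/lib/libavro.so.23
    /usr/lib/libavro.so.23       -> /usr/lib/libavro.so.23.0.0
    /usr/lib/libavro.so.23.0.0   -> ${lib_dir}/libavro.so.23.0.0

The same chain is repeated under ${lib64_link_dir} when that directory exists and does not already contain libavro.so.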
function install_lib() {
${csudo} rm -f ${lib_link_dir}/libtaos* || :
${csudo} rm -f ${lib64_link_dir}/libtaos* || :
......@@ -97,13 +115,15 @@ function install_lib() {
${csudo} ln -s ${lib_dir}/libtaos.* ${lib64_link_dir}/libtaos.so.1 || :
${csudo} ln -s ${lib64_link_dir}/libtaos.so.1 ${lib64_link_dir}/libtaos.so || :
fi
${csudo} ldconfig
}
function install_bin() {
# Remove links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/rmtaos || :
......@@ -114,7 +134,7 @@ function install_bin() {
#Make link
[ -x ${bin_dir}/taos ] && ${csudo} ln -s ${bin_dir}/taos ${bin_link_dir}/taos || :
[ -x ${bin_dir}/taosd ] && ${csudo} ln -s ${bin_dir}/taosd ${bin_link_dir}/taosd || :
[ -x ${bin_dir}/blm3 ] && ${csudo} ln -s ${bin_dir}/blm3 ${bin_link_dir}/blm3 || :
[ -x ${bin_dir}/taosadapter ] && ${csudo} ln -s ${bin_dir}/taosadapter ${bin_link_dir}/taosadapter || :
[ -x ${bin_dir}/taosdemo ] && ${csudo} ln -s ${bin_dir}/taosdemo ${bin_link_dir}/taosdemo || :
[ -x ${bin_dir}/taosdump ] && ${csudo} ln -s ${bin_dir}/taosdump ${bin_link_dir}/taosdump || :
[ -x ${bin_dir}/set_core.sh ] && ${csudo} ln -s ${bin_dir}/set_core.sh ${bin_link_dir}/set_core || :
......@@ -127,7 +147,7 @@ function add_newHostname_to_hosts() {
iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
arr=($iphost)
IFS="$OLD_IFS"
for s in ${arr[@]}
for s in "${arr[@]}"
do
if [[ "$s" == "$localIp" ]]; then
return
......@@ -182,7 +202,7 @@ function is_correct_ipaddr() {
IFS=" "
arr=($iplist)
IFS="$OLD_IFS"
for s in ${arr[@]}
for s in "${arr[@]}"
do
if [[ "$s" == "$newIp" ]]; then
return 0
......@@ -271,20 +291,20 @@ function local_fqdn_check() {
fi
}
function install_blm3_config() {
if [ ! -f "${cfg_install_dir}/blm.toml" ]; then
function install_taosadapter_config() {
if [ ! -f "${cfg_install_dir}/taosadapter.toml" ]; then
[ ! -d ${cfg_install_dir} ] &&
${csudo} mkdir -p ${cfg_install_dir}
[ -f ${cfg_dir}/blm.toml ] && ${csudo} cp ${cfg_dir}/blm.toml ${cfg_install_dir}
[ -f ${cfg_install_dir}/blm.toml ] &&
${csudo} chmod 644 ${cfg_install_dir}/blm.toml
[ -f ${cfg_dir}/taosadapter.toml ] && ${csudo} cp ${cfg_dir}/taosadapter.toml ${cfg_install_dir}
[ -f ${cfg_install_dir}/taosadapter.toml ] &&
${csudo} chmod 644 ${cfg_install_dir}/taosadapter.toml
fi
[ -f ${cfg_dir}/blm.toml ] &&
${csudo} mv ${cfg_dir}/blm.toml ${cfg_dir}/blm.toml.org
[ -f ${cfg_dir}/taosadapter.toml ] &&
${csudo} mv ${cfg_dir}/taosadapter.toml ${cfg_dir}/taosadapter.toml.new
[ -f ${cfg_install_dir}/blm.toml ] &&
${csudo} ln -s ${cfg_install_dir}/blm.toml ${cfg_dir}
[ -f ${cfg_install_dir}/taosadapter.toml ] &&
${csudo} ln -s ${cfg_install_dir}/taosadapter.toml ${cfg_dir}
}
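The net effect of install_taosadapter_config, assuming for illustration that cfg_install_dir is /etc/taos (the path echoed later in these scripts), is a layout along these lines:

    /etc/taos/taosadapter.toml                                   # live config, copied and chmod 644 on first install
    ${cfg_dir}/taosadapter.toml.new                               # packaged default, kept for reference
    ${cfg_dir}/taosadapter.toml -> /etc/taos/taosadapter.toml     # symlink back into the install dir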
function install_config() {
......@@ -302,7 +322,7 @@ function install_config() {
# restore the backup standard input, and turn off 6
exec 0<&6 6<&-
${csudo} mv ${cfg_dir}/taos.cfg ${cfg_dir}/taos.cfg.org
${csudo} mv ${cfg_dir}/taos.cfg ${cfg_dir}/taos.cfg.new
${csudo} ln -s ${cfg_install_dir}/taos.cfg ${cfg_dir}
#FQDN_FORMAT="(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)"
#FQDN_FORMAT="(:[1-6][0-9][0-9][0-9][0-9]$)"
......@@ -424,8 +444,8 @@ function install_service_on_systemd() {
${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'After=network-online.target' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Wants=network-online.target' >> ${taosd_service_config}"
${csudo} bash -c "echo 'After=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Wants=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo >> ${taosd_service_config}"
${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}"
......@@ -446,6 +466,10 @@ function install_service_on_systemd() {
${csudo} systemctl enable taosd
}
function install_taosadapter_service() {
[ -f ${cfg_dir}/taosadapter.service ] && ${csudo} cp ${cfg_dir}/taosadapter.service ${service_config_dir}
}
function install_service() {
if ((${service_mod}==0)); then
install_service_on_systemd
......@@ -453,7 +477,7 @@ function install_service() {
install_service_on_sysvinit
else
# manual start taosd
kill_blm3
kill_taosadapter
kill_taosd
fi
}
......@@ -474,10 +498,12 @@ function install_TDengine() {
# Install include, lib, binary and service
install_include
install_lib
install_avro_lib
install_bin
install_service
install_config
install_blm3_config
install_taosadapter_config
install_taosadapter_service
install_service
# Ask if to start the service
#echo
......
......@@ -43,8 +43,8 @@ else
service_mod=2
fi
function kill_blm3() {
pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}')
function kill_taosadapter() {
pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then
${csudo} kill -9 $pid || :
fi
......@@ -58,6 +58,12 @@ function kill_taosd() {
}
function clean_service_on_systemd() {
taosadapter_service_config="${service_config_dir}/taosadapter.service"
if systemctl is-active --quiet taosadapter; then
echo "taosadapter is running, stopping it..."
${csudo} systemctl stop taosadapter &> /dev/null || echo &> /dev/null
fi
taosd_service_config="${service_config_dir}/${taos_service_name}.service"
if systemctl is-active --quiet ${taos_service_name}; then
......@@ -67,6 +73,9 @@ function clean_service_on_systemd() {
${csudo} systemctl disable ${taos_service_name} &> /dev/null || echo &> /dev/null
${csudo} rm -f ${taosd_service_config}
[ -f ${taosadapter_service_config} ] && ${csudo} rm -f ${taosadapter_service_config}
}
function clean_service_on_sysvinit() {
......@@ -100,7 +109,7 @@ function clean_service() {
clean_service_on_sysvinit
else
# must manual stop taosd
kill_blm3
kill_taosadapter
kill_taosd
fi
}
......@@ -111,11 +120,11 @@ clean_service
# Remove all links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/set_core || :
${csudo} rm -f ${cfg_link_dir}/* || :
${csudo} rm -f ${cfg_link_dir}/*.new || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${lib_link_dir}/libtaos.* || :
......@@ -125,7 +134,7 @@ ${csudo} rm -f ${log_link_dir} || :
${csudo} rm -f ${data_link_dir} || :
if ((${service_mod}==2)); then
kill_blm3
kill_taosadapter
kill_taosd
fi
......
......@@ -54,8 +54,8 @@ else
service_mod=2
fi
function kill_blm3() {
pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}')
function kill_taosadapter() {
pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then
${csudo} kill -9 $pid || :
fi
......@@ -78,7 +78,7 @@ function clean_bin() {
# Remove link
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/blm3 || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/rmtaos || :
......@@ -111,12 +111,14 @@ function clean_log() {
function clean_service_on_systemd() {
taosd_service_config="${service_config_dir}/${taos_service_name}.service"
taosadapter_service_config="${service_config_dir}/taosadapter.service"
if systemctl is-active --quiet ${taos_service_name}; then
echo "TDengine taosd is running, stopping it..."
${csudo} systemctl stop ${taos_service_name} &> /dev/null || echo &> /dev/null
fi
${csudo} systemctl disable ${taos_service_name} &> /dev/null || echo &> /dev/null
${csudo} rm -f ${taosd_service_config}
[ -f ${taosadapter_service_config} ] && ${csudo} rm -f ${taosadapter_service_config}
tarbitratord_service_config="${service_config_dir}/${tarbitrator_service_name}.service"
if systemctl is-active --quiet ${tarbitrator_service_name}; then
......@@ -191,7 +193,7 @@ function clean_service() {
clean_service_on_sysvinit
else
# must manual stop taosd
kill_blm3
kill_taosadapter
kill_taosd
kill_tarbitrator
fi
......
......@@ -9,8 +9,8 @@ line=`grep StartLimitBurst ${taosd}`
num=${line##*=}
#echo "burst num: ${num}"
startSeqFile=/usr/local/taos/.startSeq
recordFile=/usr/local/taos/.startRecord
startSeqFile=/var/log/taos/.startSeq
recordFile=/var/log/taos/.startRecord
startSeq=0
......@@ -48,4 +48,3 @@ if [ ${coreFlag} = "unlimited" ];then
fi
fi
/usr/bin/blm3 &
name: tdengine
base: core20
version: '2.3.0.0'
version: '2.3.1.0'
icon: snap/gui/t-dengine.svg
summary: an open-source big data platform designed and optimized for IoT.
description: |
......
......@@ -66,8 +66,7 @@ typedef struct {
int32_t affectedRows;
} SSmlLinesInfo;
void addEscapeCharToString(char *str, int32_t len);
char* addEscapeCharToString(char *str, int32_t len);
int tscSmlInsert(TAOS* taos, TAOS_SML_DATA_POINT* points, int numPoint, SSmlLinesInfo* info);
bool checkDuplicateKey(char *key, SHashObj *pHash, SSmlLinesInfo* info);
bool isValidInteger(char *str);
......
......@@ -358,9 +358,13 @@ static int32_t tscGetTableTagValue(SCreateBuilder *builder, char *result) {
int num_fields = taos_num_fields(pSql);
TAOS_FIELD *fields = taos_fetch_fields(pSql);
char buf[TSDB_COL_NAME_LEN + 16];
for (int i = 0; i < num_fields; i++) {
memset(buf, 0, sizeof(buf));
char *buf = calloc(1, lengths[i] + 1);
if (buf == NULL) {
return TSDB_CODE_TSC_OUT_OF_MEMORY;
}
memset(buf, 0, lengths[i] + 1);
int32_t ret = tscGetNthFieldResult(row, fields, lengths, i, buf);
if (i == 0) {
......@@ -373,6 +377,9 @@ static int32_t tscGetTableTagValue(SCreateBuilder *builder, char *result) {
} else {
snprintf(result + strlen(result), TSDB_MAX_BINARY_LEN - strlen(result), "%s,", buf);
}
free(buf);
if (i == num_fields - 1) {
sprintf(result + strlen(result) - 1, "%s", ")");
}
......
......@@ -631,11 +631,11 @@ static int32_t modifyDBSchemas(TAOS* taos, SArray* stableSchemas, SSmlLinesInfo*
if (code != 0) {
tscError("SML:0x%"PRIx64" reconcile point schema failed. can not create %s", info->id, pointSchema->sTableName);
return code;
} else {
pointSchema->precision = dbSchema.precision;
destroySmlSTableSchema(&dbSchema);
}
} else if (code == TSDB_CODE_SUCCESS) {
}
if (code == TSDB_CODE_SUCCESS) {
pointSchema->precision = dbSchema.precision;
size_t pointTagSize = taosArrayGetSize(pointSchema->tags);
size_t pointFieldSize = taosArrayGetSize(pointSchema->fields);
......@@ -1177,13 +1177,14 @@ static void escapeSpecialCharacter(uint8_t field, const char **pos) {
*pos = cur;
}
void addEscapeCharToString(char *str, int32_t len) {
char* addEscapeCharToString(char *str, int32_t len) {
if (str == NULL) {
return;
return NULL;
}
memmove(str + 1, str, len);
str[0] = str[len + 1] = TS_ESCAPE_CHAR;
str[len + 2] = '\0';
return str;
}
bool isValidInteger(char *str) {
......@@ -1511,9 +1512,9 @@ static bool convertStrToNumber(TAOS_SML_KV *pVal, char *str, SSmlLinesInfo* info
errno = 0;
uint8_t type = pVal->type;
int16_t length = pVal->length;
int64_t val_s;
uint64_t val_u;
double val_d;
int64_t val_s = 0;
uint64_t val_u = 0;
double val_d = 0.0;
strntolower_s(str, str, (int32_t)strlen(str));
if (IS_FLOAT_TYPE(type)) {
......@@ -1813,7 +1814,7 @@ static int32_t getTimeStampValue(char *value, uint16_t len,
int32_t convertSmlTimeStamp(TAOS_SML_KV *pVal, char *value,
uint16_t len, SSmlLinesInfo* info) {
int32_t ret;
SMLTimeStampType type;
SMLTimeStampType type = SML_TIME_STAMP_NOW;
int64_t tsVal;
ret = isTimeStamp(value, len, &type, info);
......@@ -1907,8 +1908,6 @@ static int32_t parseSmlKey(TAOS_SML_KV *pKV, const char **index, SHashObj *pHash
}
//Escape special character
if (*cur == '\\') {
//TODO: escape will work after column & tag
//support spcial characters
escapeSpecialCharacter(2, &cur);
}
key[len] = *cur;
......@@ -1985,6 +1984,7 @@ static int32_t parseSmlValue(TAOS_SML_KV *pKV, const char **index,
//Escape special character
if (*cur == '\\') {
escapeSpecialCharacter(isTag ? 2 : 3, &cur);
len++;
}
cur++;
len++;
......@@ -2107,6 +2107,13 @@ static int32_t parseSmlKvPairs(TAOS_SML_KV **pKVs, int *num_kvs,
pkv = *pKVs;
}
size_t childTableNameLen = strlen(tsSmlChildTableName);
char childTableName[TSDB_TABLE_NAME_LEN + TS_ESCAPE_CHAR_SIZE] = {0};
if (childTableNameLen != 0) {
memcpy(childTableName, tsSmlChildTableName, childTableNameLen);
addEscapeCharToString(childTableName, (int32_t)(childTableNameLen));
}
while (*cur != '\0') {
ret = parseSmlKey(pkv, &cur, pHash, info);
if (ret) {
......@@ -2118,7 +2125,8 @@ static int32_t parseSmlKvPairs(TAOS_SML_KV **pKVs, int *num_kvs,
tscError("SML:0x%"PRIx64" Unable to parse value", info->id);
goto error;
}
if (!isField && (strcasecmp(pkv->key, "`ID`") == 0)) {
if (!isField && childTableNameLen != 0 && strcasecmp(pkv->key, childTableName) == 0) {
smlData->childTableName = malloc(pkv->length + TS_ESCAPE_CHAR_SIZE + 1);
memcpy(smlData->childTableName, pkv->value, pkv->length);
strntolower_s(smlData->childTableName, smlData->childTableName, (int32_t)pkv->length);
......@@ -2405,7 +2413,7 @@ static SSqlObj* createSmlQueryObj(TAOS* taos, int32_t affected_rows, int32_t cod
TAOS_RES* taos_schemaless_insert(TAOS* taos, char* lines[], int numLines, int protocol, int precision) {
int code = TSDB_CODE_SUCCESS;
int affected_rows = 0;
SMLTimeStampType tsType;
SMLTimeStampType tsType = SML_TIME_STAMP_NOW;
if (protocol == TSDB_SML_LINE_PROTOCOL) {
code = convertPrecisionType(precision, &tsType);
......
......@@ -305,6 +305,12 @@ static int32_t parseTelnetTagKvs(TAOS_SML_KV **pKVs, int *num_kvs,
*pKVs = tcalloc(capacity, sizeof(TAOS_SML_KV));
pkv = *pKVs;
size_t childTableNameLen = strlen(tsSmlChildTableName);
char childTbName[TSDB_TABLE_NAME_LEN + TS_ESCAPE_CHAR_SIZE] = {0};
if (childTableNameLen != 0) {
memcpy(childTbName, tsSmlChildTableName, childTableNameLen);
addEscapeCharToString(childTbName, (int32_t)(childTableNameLen));
}
while (*cur != '\0') {
ret = parseTelnetTagKey(pkv, &cur, pHash, info);
if (ret) {
......@@ -316,7 +322,7 @@ static int32_t parseTelnetTagKvs(TAOS_SML_KV **pKVs, int *num_kvs,
tscError("OTD:0x%"PRIx64" Unable to parse value", info->id);
return ret;
}
if ((strcasecmp(pkv->key, "`ID`") == 0)) {
if (childTableNameLen != 0 && strcasecmp(pkv->key, childTbName) == 0) {
*childTableName = tcalloc(pkv->length + TS_ESCAPE_CHAR_SIZE + 1, 1);
memcpy(*childTableName, pkv->value, pkv->length);
(*childTableName)[pkv->length] = '\0';
......@@ -892,8 +898,13 @@ static int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs,
if (tags == NULL || tags->type != cJSON_Object) {
return TSDB_CODE_TSC_INVALID_JSON;
}
//only pick up the first ID value as child table name
cJSON *id = cJSON_GetObjectItem(tags, "ID");
//handle child table name
size_t childTableNameLen = strlen(tsSmlChildTableName);
char childTbName[TSDB_TABLE_NAME_LEN] = {0};
if (childTableNameLen != 0) {
memcpy(childTbName, tsSmlChildTableName, childTableNameLen);
cJSON *id = cJSON_GetObjectItem(tags, childTbName);
if (id != NULL) {
if (!cJSON_IsString(id)) {
tscError("OTD:0x%"PRIx64" ID must be JSON string", info->id);
......@@ -906,12 +917,14 @@ static int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs,
addEscapeCharToString(*childTableName, (int32_t)idLen);
//check duplicate IDs
cJSON_DeleteItemFromObject(tags, "ID");
id = cJSON_GetObjectItem(tags, "ID");
cJSON_DeleteItemFromObject(tags, childTbName);
id = cJSON_GetObjectItem(tags, childTbName);
if (id != NULL) {
return TSDB_CODE_TSC_DUP_TAG_NAMES;
}
}
}
int32_t tagNum = cJSON_GetArraySize(tags);
//at least one tag pair required
if (tagNum <= 0) {
......
......@@ -2454,6 +2454,7 @@ int32_t addExprAndResultField(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, int32_t col
const char* msg12 = "parameter is out of range [1, 100]";
const char* msg13 = "parameter list required";
const char* msg14 = "third parameter algorithm must be 'default' or 't-digest'";
const char* msg15 = "parameter is out of range [1, 1000]";
switch (functionId) {
case TSDB_FUNC_COUNT: {
......@@ -2901,11 +2902,15 @@ int32_t addExprAndResultField(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, int32_t col
}
}
} else if (functionId == TSDB_FUNC_MAVG || functionId == TSDB_FUNC_SAMPLE) {
if (pVariant->nType != TSDB_DATA_TYPE_BIGINT) {
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg2);
}
tVariantDump(pVariant, val, TSDB_DATA_TYPE_BIGINT, true);
int64_t numRowsSelected = GET_INT32_VAL(val);
int64_t numRowsSelected = GET_INT64_VAL(val);
if (numRowsSelected <= 0 || numRowsSelected > 1000) {
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg12);
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg15);
}
// todo REFACTOR
......@@ -5748,6 +5753,7 @@ int32_t validateOrderbyNode(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SSqlNode* pSq
const char* msg9 = "orderby column must projected in subquery";
const char* msg10 = "not support distinct mixed with order by";
const char* msg11 = "not support order with udf";
const char* msg12 = "order by tags not supported with diff/derivative/csum/mavg";
setDefaultOrderInfo(pQueryInfo);
STableMetaInfo* pTableMetaInfo = tscGetMetaInfo(pQueryInfo, 0);
......@@ -5846,6 +5852,9 @@ int32_t validateOrderbyNode(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SSqlNode* pSq
size_t s = taosArrayGetSize(pSortOrder);
if (s == 1) {
if (orderByTags) {
if (tscIsDiffDerivLikeQuery(pQueryInfo)) {
return invalidOperationMsg(pMsgBuf, msg12);
}
pQueryInfo->groupbyExpr.orderIndex = index.columnIndex - tscGetNumOfColumns(pTableMetaInfo->pTableMeta);
tVariantListItem* p1 = taosArrayGet(pSqlNode->pSortOrder, 0);
......
......@@ -2956,6 +2956,7 @@ int32_t tscGetTableMetaImpl(SSqlObj* pSql, STableMetaInfo *pTableMetaInfo, bool
if (pMeta->tableType == TSDB_CHILD_TABLE) {
int32_t code = tscCreateTableMetaFromSTableMeta(pSql, &pTableMetaInfo->pTableMeta, name, &pTableMetaInfo->tableMetaCapacity, (STableMeta **)(&pSTMeta));
pSql->pBuf = (void *)(pSTMeta);
pMeta = pTableMetaInfo->pTableMeta;
if (code != TSDB_CODE_SUCCESS) {
return getTableMetaFromMnode(pSql, pTableMetaInfo, autocreate);
}
......
......@@ -781,6 +781,16 @@ bool taos_is_null(TAOS_RES *res, int32_t row, int32_t col) {
return isNull(((char*) pSql->res.urow[col]) + row * pInfo->field.bytes, pInfo->field.type);
}
bool taos_is_update_query(TAOS_RES *res) {
SSqlObj *pSql = (SSqlObj *)res;
if (pSql == NULL || pSql->signature != pSql) {
return false;
}
SSqlCmd* pCmd = &pSql->cmd;
return ((pCmd->command >= TSDB_SQL_INSERT && pCmd->command <= TSDB_SQL_DROP_DNODE) || TSDB_SQL_RESET_CACHE == pCmd->command || TSDB_SQL_USE_DB == pCmd->command);
}
int taos_print_row(char *str, TAOS_ROW row, TAOS_FIELD *fields, int num_fields) {
int len = 0;
......@@ -909,7 +919,6 @@ int taos_validate_sql(TAOS *taos, const char *sql) {
strtolower(pSql->sqlstr, sql);
// pCmd->curSql = NULL;
if (NULL != pCmd->insertParam.pTableBlockHashList) {
taosHashCleanup(pCmd->insertParam.pTableBlockHashList);
pCmd->insertParam.pTableBlockHashList = NULL;
......@@ -934,6 +943,17 @@ int taos_validate_sql(TAOS *taos, const char *sql) {
return code;
}
void taos_reset_current_db(TAOS *taos) {
STscObj* pObj = (STscObj*) taos;
if (pObj == NULL || pObj->signature != pObj) {
return;
}
pthread_mutex_lock(&pObj->mutex);
memset(pObj->db, 0, tListLen(pObj->db));
pthread_mutex_unlock(&pObj->mutex);
}
void loadMultiTableMetaCallback(void *param, TAOS_RES *res, int code) {
SSqlObj* pSql = (SSqlObj*)taosAcquireRef(tscObjRef, (int64_t)param);
if (pSql == NULL) {
......
......@@ -64,10 +64,11 @@ extern int32_t tsCompressColData;
extern int32_t tsMaxNumOfDistinctResults;
extern char tsTempDir[];
//query buffer management
// query buffer management
extern int32_t tsQueryBufferSize; // maximum allowed usage buffer size in MB for each data node during query processing
extern int64_t tsQueryBufferSizeBytes; // maximum allowed usage buffer size in byte for each data node during query processing
extern int32_t tsRetrieveBlockingModel;// retrieve threads will be blocked
extern int64_t
tsQueryBufferSizeBytes; // maximum allowed usage buffer size in byte for each data node during query processing
extern int32_t tsRetrieveBlockingModel; // retrieve threads will be blocked
extern int8_t tsKeepOriginalColumnName;
......@@ -108,7 +109,7 @@ extern int32_t tsQuorum;
extern int8_t tsUpdate;
extern int8_t tsCacheLastRow;
//tsdb
// tsdb
extern bool tsdbForceKeepFile;
extern bool tsdbForceCompactFile;
extern int32_t tsdbWalFlushSize;
......@@ -134,6 +135,7 @@ extern int8_t tsHttpEnableCompress;
extern int8_t tsHttpEnableRecordSql;
extern int8_t tsTelegrafUseFieldNum;
extern int8_t tsHttpDbNameMandatory;
extern int32_t tsHttpKeepAlive;
// mqtt
extern int8_t tsEnableMqttModule;
......@@ -233,6 +235,7 @@ extern int8_t tsDeadLockKillQuery;
// schemaless
extern char tsDefaultJSONStrType[];
extern char tsSmlChildTableName[];
typedef struct {
......
......@@ -14,18 +14,18 @@
*/
#define _DEFAULT_SOURCE
#include "tglobal.h"
#include "monitor.h"
#include "os.h"
#include "taosdef.h"
#include "taoserror.h"
#include "tulog.h"
#include "tcompare.h"
#include "tconfig.h"
#include "tglobal.h"
#include "monitor.h"
#include "tsocket.h"
#include "tutil.h"
#include "tlocale.h"
#include "tsocket.h"
#include "ttimezone.h"
#include "tcompare.h"
#include "tulog.h"
#include "tutil.h"
// cluster
char tsFirst[TSDB_EP_LEN] = {0};
......@@ -51,7 +51,7 @@ int64_t tsDnodeStartTime = 0;
// common
int32_t tsRpcTimer = 300;
int32_t tsRpcMaxTime = 600; // seconds;
int32_t tsRpcForceTcp = 0; //disable this, means query, show command use udp protocol as default
int32_t tsRpcForceTcp = 0; // disable this, means query, show command use udp protocol as default
int32_t tsMaxShellConns = 50000;
int32_t tsMaxConnections = 5000;
int32_t tsShellActivityTimer = 3; // second
......@@ -109,7 +109,7 @@ int32_t tsMaxStreamComputDelay = 20000;
int32_t tsStreamCompStartDelay = 10000;
// the stream computing delay time after executing failed, change accordingly
int32_t tsRetryStreamCompDelay = 10*1000;
int32_t tsRetryStreamCompDelay = 10 * 1000;
// The delayed computing ration. 10% of the whole computing time window by default.
float tsStreamComputDelayRatio = 0.1f;
......@@ -180,6 +180,7 @@ int8_t tsHttpEnableCompress = 1;
int8_t tsHttpEnableRecordSql = 0;
int8_t tsTelegrafUseFieldNum = 0;
int8_t tsHttpDbNameMandatory = 0;
int32_t tsHttpKeepAlive = 30000;
// mqtt
int8_t tsEnableMqttModule = 0; // not finished yet, not started it by default
......@@ -274,7 +275,8 @@ int8_t tsClientMerge = 0;
//
// lossy compress 6
//
char lossyColumns[32] = ""; // "float|double" means all float and double columns can be lossy compressed. set empty can close lossy compress.
char lossyColumns[32] = ""; // "float|double" means all float and double columns can be lossy compressed. set empty
// can close lossy compress.
// below option can take effect when tsLossyColumns not empty
double fPrecision = 1E-8; // float column precision
double dPrecision = 1E-16; // double column precision
......@@ -288,6 +290,7 @@ int8_t tsDeadLockKillQuery = 0;
// default JSON string type
char tsDefaultJSONStrType[7] = "binary";
char tsSmlChildTableName[TSDB_TABLE_NAME_LEN] = ""; // user-defined child table name can be specified in a tag value; if empty, the system generates the table name using an MD5 hash.
int32_t (*monStartSystemFp)() = NULL;
void (*monStopSystemFp)() = NULL;
......@@ -326,7 +329,8 @@ bool taosCfgDynamicOptions(char *msg) {
int32_t vint = 0;
paGetToken(msg, &option, &olen);
if (olen == 0) return false;;
if (olen == 0) return false;
;
paGetToken(option + olen + 1, &value, &vlen);
if (vlen == 0)
......@@ -339,7 +343,7 @@ bool taosCfgDynamicOptions(char *msg) {
for (int32_t i = 0; i < tsGlobalConfigNum; ++i) {
SGlobalCfg *cfg = tsGlobalConfig + i;
//if (!(cfg->cfgType & TSDB_CFG_CTYPE_B_LOG)) continue;
// if (!(cfg->cfgType & TSDB_CFG_CTYPE_B_LOG)) continue;
if (cfg->valType != TAOS_CFG_VTYPE_INT32 && cfg->valType != TAOS_CFG_VTYPE_INT8) continue;
int32_t cfgLen = (int32_t)strlen(cfg->option);
......@@ -427,7 +431,7 @@ static void taosCheckDataDirCfg() {
}
static int32_t taosCheckTmpDir(void) {
if (strlen(tsTempDir) <= 0){
if (strlen(tsTempDir) <= 0) {
uError("tempDir is not set");
return -1;
}
......@@ -577,8 +581,8 @@ static void doInitGlobalConfig(void) {
cfg.ptr = &tsMaxNumOfDistinctResults;
cfg.valType = TAOS_CFG_VTYPE_INT32;
cfg.cfgType = TSDB_CFG_CTYPE_B_CONFIG | TSDB_CFG_CTYPE_B_SHOW | TSDB_CFG_CTYPE_B_CLIENT;
cfg.minValue = 10*10000;
cfg.maxValue = 10000*10000;
cfg.minValue = 10 * 10000;
cfg.maxValue = 10000 * 10000;
cfg.ptrLength = 0;
cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg);
......@@ -1320,6 +1324,17 @@ static void doInitGlobalConfig(void) {
cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg);
// pContext in cache
cfg.option = "httpKeepAlive";
cfg.ptr = &tsHttpKeepAlive;
cfg.valType = TAOS_CFG_VTYPE_INT32;
cfg.cfgType = TSDB_CFG_CTYPE_B_CONFIG;
cfg.minValue = 3000;
cfg.maxValue = 3600000;
cfg.ptrLength = 0;
cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg);
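With the option registered above, the HTTP keep-alive time (presumably in milliseconds, given the 30000 default, and clamped to the 3000..3600000 range by minValue/maxValue) becomes tunable from taos.cfg, for example:

    # taos.cfg -- illustrative value only
    httpKeepAlive   60000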
// debug flag
cfg.option = "numOfLogLines";
cfg.ptr = &tsNumOfLogLines;
......@@ -1401,7 +1416,6 @@ static void doInitGlobalConfig(void) {
cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg);
cfg.option = "sdbDebugFlag";
cfg.ptr = &sdbDebugFlag;
cfg.valType = TAOS_CFG_VTYPE_INT32;
......@@ -1665,6 +1679,17 @@ static void doInitGlobalConfig(void) {
cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg);
// child table name specified in schemaless tag value
cfg.option = "smlChildTableName";
cfg.ptr = tsSmlChildTableName;
cfg.valType = TAOS_CFG_VTYPE_STRING;
cfg.cfgType = TSDB_CFG_CTYPE_B_CONFIG | TSDB_CFG_CTYPE_B_SHOW | TSDB_CFG_CTYPE_B_CLIENT;
cfg.minValue = 0;
cfg.maxValue = 0;
cfg.ptrLength = tListLen(tsSmlChildTableName);
cfg.unitType = TAOS_CFG_UTYPE_NONE;
taosInitConfigOption(cfg);
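Together with the schemaless parser changes earlier in this diff, this lets a user nominate which tag supplies the child table name. A sketch of how it might be used (the tag key and values here are invented for illustration):

    # taos.cfg
    smlChildTableName   tname

    # schemaless line whose `tname` tag value is taken as the child table name
    st,tname=cpu1,t1=3 c1=4 1626006833639000000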
// flush vnode wal file if walSize > walFlushSize and walSize > cache*0.5*blocks
cfg.option = "walFlushSize";
cfg.ptr = &tsdbWalFlushSize;
......@@ -1731,12 +1756,9 @@ static void doInitGlobalConfig(void) {
#else
assert(tsGlobalConfigNum < TSDB_CFG_MAX_NUM);
#endif
}
void taosInitGlobalCfg() {
pthread_once(&tsInitGlobalCfgOnce, doInitGlobalConfig);
}
void taosInitGlobalCfg() { pthread_once(&tsInitGlobalCfgOnce, doInitGlobalConfig); }
int32_t taosCheckGlobalCfg() {
char fqdn[TSDB_FQDN_LEN];
......@@ -1794,8 +1816,8 @@ int32_t taosCheckGlobalCfg() {
}
if (tsMaxTablePerVnode < tsMinTablePerVnode) {
uError("maxTablesPerVnode(%d) < minTablesPerVnode(%d), reset to minTablesPerVnode(%d)",
tsMaxTablePerVnode, tsMinTablePerVnode, tsMinTablePerVnode);
uError("maxTablesPerVnode(%d) < minTablesPerVnode(%d), reset to minTablesPerVnode(%d)", tsMaxTablePerVnode,
tsMinTablePerVnode, tsMinTablePerVnode);
tsMaxTablePerVnode = tsMinTablePerVnode;
}
......@@ -1839,7 +1861,7 @@ int taosGetFqdnPortFromEp(const char *ep, char *fqdn, uint16_t *port) {
char *temp = strchr(fqdn, ':');
if (temp) {
*temp = 0;
*port = atoi(temp+1);
*port = atoi(temp + 1);
}
if (*port == 0) {
......
Subproject commit 9ae793ad2d567eb11d10627b65698f612542e988
Subproject commit 792ef7c3036f15068796e09883d3f4d47a038fe2
......@@ -8,7 +8,7 @@ IF (TD_MVN_INSTALLED)
ADD_CUSTOM_COMMAND(OUTPUT ${JDBC_CMD_NAME}
POST_BUILD
COMMAND mvn -Dmaven.test.skip=true install -f ${CMAKE_CURRENT_SOURCE_DIR}/pom.xml
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/target/taos-jdbcdriver-2.0.35-dist.jar ${LIBRARY_OUTPUT_PATH}
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/target/taos-jdbcdriver-2.0.36-dist.jar ${LIBRARY_OUTPUT_PATH}
COMMAND mvn -Dmaven.test.skip=true clean -f ${CMAKE_CURRENT_SOURCE_DIR}/pom.xml
COMMENT "build jdbc driver")
ADD_CUSTOM_TARGET(${JDBC_TARGET_NAME} ALL WORKING_DIRECTORY ${EXECUTABLE_OUTPUT_PATH} DEPENDS ${JDBC_CMD_NAME})
......
......@@ -5,7 +5,7 @@
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>2.0.35</version>
<version>2.0.36</version>
<packaging>jar</packaging>
<name>JDBCDriver</name>
......
......@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>2.0.35</version>
<version>2.0.36</version>
<packaging>jar</packaging>
<name>JDBCDriver</name>
<url>https://github.com/taosdata/TDengine/tree/master/src/connector/jdbc</url>
......@@ -58,6 +58,13 @@
<version>4.13.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.2</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
......@@ -70,6 +77,18 @@
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
......
......@@ -8,16 +8,35 @@ import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
/**
* @author huolibo@qq.com
* @version v1.0.0
* @JDK: 1.8
* @description: this class is an extension of {@link Statement}. Usage:
* Statement statement = conn.createStatement();
* SchemalessStatement schemalessStatement = new SchemalessStatement(statement);
* schemalessStatement.execute(sql);
* schemalessStatement.executeSchemaless(lines, SchemalessProtocolType, SchemalessTimestampType);
* @since 2021-11-03 17:10
*/
public class SchemalessStatement extends AbstractStatementWrapper {
public SchemalessStatement(Statement statement) {
super(statement);
}
public void executeSchemaless(String[] strings, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException {
/**
* batch insert schemaless lines
*
* @param lines schemaless data
* @param protocolType schemaless type {@link SchemalessProtocolType}
* @param timestampType Time precision {@link SchemalessTimestampType}
* @throws SQLException execute insert exception
*/
public void executeSchemaless(String[] lines, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException {
Connection connection = this.getConnection();
if (connection instanceof TSDBConnection) {
TSDBConnection tsdbConnection = (TSDBConnection) connection;
tsdbConnection.getConnector().insertLines(strings, protocolType, timestampType);
tsdbConnection.getConnector().insertLines(lines, protocolType, timestampType);
} else if (connection instanceof RestfulConnection) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD, "restful connection is not supported currently");
} else {
......@@ -25,7 +44,15 @@ public class SchemalessStatement extends AbstractStatementWrapper {
}
}
public void executeSchemaless(String sql, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException {
executeSchemaless(new String[]{sql}, protocolType, timestampType);
/**
* only one insert
*
* @param line schemaless line
* @param protocolType schemaless type {@link SchemalessProtocolType}
* @param timestampType Time precision {@link SchemalessTimestampType}
* @throws SQLException execute insert exception
*/
public void executeSchemaless(String line, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException {
executeSchemaless(new String[]{line}, protocolType, timestampType);
}
}
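For reference, the call pattern described in the class javadoc above can be pulled together into a small end-to-end sketch. This is illustrative only: the JDBC URL, database name, sample line, and the assumption that SchemalessStatement lives in the com.taosdata.jdbc package are not taken from this diff; the enum constants come from the enums added later in this commit.

```java
import com.taosdata.jdbc.SchemalessStatement;
import com.taosdata.jdbc.enums.SchemalessProtocolType;
import com.taosdata.jdbc.enums.SchemalessTimestampType;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class SchemalessDemo {
    public static void main(String[] args) throws Exception {
        // URL and database name are illustrative only.
        String url = "jdbc:TAOS://127.0.0.1:6030/?user=root&password=taosdata";
        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement()) {
            stmt.execute("create database if not exists sml_demo");
            stmt.execute("use sml_demo");

            SchemalessStatement schemaless = new SchemalessStatement(stmt);
            // Single-line overload added in this commit; timestamp precision is left to the server.
            schemaless.executeSchemaless(
                    "meters,location=beijing current=10.3,voltage=219",
                    SchemalessProtocolType.LINE, SchemalessTimestampType.NOT_CONFIGURED);
        }
    }
}
```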
......@@ -135,7 +135,6 @@ public class TSDBDriver extends AbstractDriver {
TSDBJNIConnector.init(props);
return new TSDBConnection(props, this.dbMetaData);
} catch (SQLWarning sqlWarning) {
sqlWarning.printStackTrace();
return new TSDBConnection(props, this.dbMetaData);
} catch (SQLException sqlEx) {
throw sqlEx;
......
......@@ -36,15 +36,15 @@ import java.util.regex.Pattern;
* compatibility needs.
*/
public class TSDBPreparedStatement extends TSDBStatement implements PreparedStatement {
// for jdbc preparedStatement interface
private String rawSql;
private Object[] parameters;
private ArrayList<ColumnInfo> colData;
// for parameter binding
private long nativeStmtHandle = 0;
private String tableName;
private ArrayList<TableTagInfo> tableTags;
private int tagValueLength;
private String tableName;
private long nativeStmtHandle = 0;
private ArrayList<ColumnInfo> colData;
TSDBPreparedStatement(TSDBConnection connection, String sql) {
super(connection);
......@@ -72,10 +72,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
preprocessSql();
}
/*
*
*/
/**
* Some of the SQL statements sent by other popular frameworks or tools like Spark contain syntax that cannot be parsed by
* the TDengine client. Thus, some simple parsers/filters are intentionally added in this JDBC implementation in
......@@ -250,13 +246,10 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
@Override
public void setObject(int parameterIndex, Object x) throws SQLException {
if (isClosed()) {
if (isClosed())
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
}
if (parameterIndex < 1 && parameterIndex >= parameters.length) {
if (parameterIndex < 1 && parameterIndex >= parameters.length)
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_PARAMETER_INDEX_OUT_RANGE);
}
parameters[parameterIndex - 1] = x;
}
......@@ -335,7 +328,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException {
if (isClosed())
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
// TODO:
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD);
}
......@@ -419,7 +411,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException {
if (isClosed())
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
//TODO:
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD);
}
......@@ -477,7 +468,6 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
if (isClosed())
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD);
}
@Override
......@@ -496,7 +486,7 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
///////////////////////////////////////////////////////////////////////
// NOTE: the following APIs are not JDBC compatible
// set the bind table name
// parameter binding
private static class ColumnInfo {
@SuppressWarnings("rawtypes")
private ArrayList data;
......@@ -539,7 +529,11 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
}
}
public void setTableName(String name) {
public void setTableName(String name) throws SQLException {
if (this.tableName != null) {
this.columnDataExecuteBatch();
this.columnDataClearBatchInternal();
}
this.tableName = name;
}
......@@ -960,17 +954,22 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
public void columnDataExecuteBatch() throws SQLException {
TSDBJNIConnector connector = ((TSDBConnection) this.getConnection()).getConnector();
connector.executeBatch(this.nativeStmtHandle);
this.columnDataClearBatch();
this.columnDataClearBatchInternal();
}
@Deprecated
public void columnDataClearBatch() {
columnDataClearBatchInternal();
}
private void columnDataClearBatchInternal() {
int size = this.colData.size();
this.colData.clear();
this.colData.addAll(Collections.nCopies(size, null));
this.tableName = null; // clear the table name
}
public void columnDataCloseBatch() throws SQLException {
TSDBJNIConnector connector = ((TSDBConnection) this.getConnection()).getConnector();
connector.closeBatch(this.nativeStmtHandle);
......@@ -978,4 +977,11 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
this.nativeStmtHandle = 0L;
this.tableName = null;
}
@Override
public void close() throws SQLException {
this.columnDataClearBatchInternal();
this.columnDataCloseBatch();
super.close();
}
}
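The comment above marks the start of the non-JDBC parameter-binding APIs. Below is a minimal sketch of how the new setTableName() behaviour plays out when binding to several sub-tables: database, table names and values are illustrative, and the call sequence follows ParameterBindTest further down in this commit.

```java
import com.taosdata.jdbc.TSDBPreparedStatement;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;

public class MultiTableBindDemo {
    public static void main(String[] args) throws Exception {
        // URL, database and table names are illustrative only.
        String url = "jdbc:TAOS://127.0.0.1:6030/?user=root&password=taosdata";
        try (Connection conn = DriverManager.getConnection(url)) {
            try (Statement stmt = conn.createStatement()) {
                stmt.execute("create database if not exists bind_demo");
                stmt.execute("use bind_demo");
                stmt.execute("create table if not exists weather(ts timestamp, f1 int) tags(t1 int)");
            }
            long now = System.currentTimeMillis();
            String sql = "insert into ? using weather tags(?) values(?, ?)";
            try (TSDBPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSDBPreparedStatement.class)) {
                for (int i = 0; i < 2; i++) {
                    // With this commit, binding a second table via setTableName() implicitly
                    // executes and clears the batch accumulated for the previous table.
                    pstmt.setTableName("d" + i);
                    pstmt.setTagInt(0, i);
                    pstmt.setTimestamp(0, new ArrayList<>(Arrays.asList(now + i, now + i + 1)));
                    pstmt.setInt(1, new ArrayList<>(Arrays.asList(10, 20)));
                    pstmt.columnDataAddBatch();
                }
                pstmt.columnDataExecuteBatch(); // flush the batch of the last table
            }
        }
    }
}
```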
package com.taosdata.jdbc.enums;
import java.util.Arrays;
public enum SchemalessProtocolType {
UNKNOWN,
LINE,
......@@ -7,4 +9,10 @@ public enum SchemalessProtocolType {
JSON,
;
public static SchemalessProtocolType parse(String type) {
return Arrays.stream(SchemalessProtocolType.values())
.filter(protocol -> type.equalsIgnoreCase(protocol.name()))
.findFirst().orElse(UNKNOWN);
}
}
package com.taosdata.jdbc.enums;
public enum SchemalessTimestampType {
// Let the database decide
NOT_CONFIGURED,
HOURS,
MINUTES,
......
......@@ -50,9 +50,13 @@ public class RestfulDriver extends AbstractDriver {
String password = URLEncoder.encode(props.getProperty(TSDBDriver.PROPERTY_KEY_PASSWORD), StandardCharsets.UTF_8.displayName());
loginUrl = "http://" + props.getProperty(TSDBDriver.PROPERTY_KEY_HOST) + ":" + props.getProperty(TSDBDriver.PROPERTY_KEY_PORT) + "/rest/login/" + user + "/" + password + "";
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_INVALID_VARIABLE, "unsupported UTF-8 encoding, user: " + props.getProperty(TSDBDriver.PROPERTY_KEY_USER) + ", password: " + props.getProperty(TSDBDriver.PROPERTY_KEY_PASSWORD));
}
int poolSize = Integer.valueOf(props.getProperty("httpPoolSize", HttpClientPoolUtil.DEFAULT_MAX_PER_ROUTE));
boolean keepAlive = Boolean.valueOf(props.getProperty("httpKeepAlive", HttpClientPoolUtil.DEFAULT_HTTP_KEEP_ALIVE));
HttpClientPoolUtil.init(poolSize, keepAlive);
String result = HttpClientPoolUtil.execute(loginUrl);
JSONObject jsonResult = JSON.parseObject(result);
String status = jsonResult.getString("status");
......
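A minimal sketch of how a client could pass the two new connection properties read by RestfulDriver above; the URL, pool size and keep-alive values are illustrative, not part of this diff.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

public class RestfulPoolDemo {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Property names as read by RestfulDriver above; values are illustrative.
        props.setProperty("httpPoolSize", "20");    // per-route connection pool size
        props.setProperty("httpKeepAlive", "true"); // reuse HTTP connections across requests
        String url = "jdbc:TAOS-RS://127.0.0.1:6041/?user=root&password=taosdata";
        try (Connection conn = DriverManager.getConnection(url, props)) {
            System.out.println("connected: " + !conn.isClosed());
        }
    }
}
```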
......@@ -5,12 +5,11 @@ import com.taosdata.jdbc.TSDBErrorNumbers;
import org.apache.http.HeaderElement;
import org.apache.http.HeaderElementIterator;
import org.apache.http.HttpEntity;
import org.apache.http.NoHttpResponseException;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.*;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
......@@ -21,21 +20,20 @@ import org.apache.http.protocol.HTTP;
import org.apache.http.protocol.HttpContext;
import org.apache.http.util.EntityUtils;
import javax.net.ssl.SSLException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.concurrent.TimeUnit;
public class HttpClientPoolUtil {
private static final String DEFAULT_CONTENT_TYPE = "application/json";
private static final int DEFAULT_MAX_RETRY_COUNT = 5;
private static final int DEFAULT_MAX_TOTAL = 50;
private static final int DEFAULT_MAX_PER_ROUTE = 5;
public static final String DEFAULT_HTTP_KEEP_ALIVE = "true";
public static final String DEFAULT_MAX_PER_ROUTE = "20";
private static final int DEFAULT_HTTP_KEEP_TIME = -1;
private static String isKeepAlive;
private static final ConnectionKeepAliveStrategy DEFAULT_KEEP_ALIVE_STRATEGY = (response, context) -> {
HeaderElementIterator it = new BasicHeaderElementIterator(response.headerIterator(HTTP.CONN_KEEP_ALIVE));
......@@ -55,36 +53,39 @@ public class HttpClientPoolUtil {
private static CloseableHttpClient httpClient;
static {
public static void init(Integer connPoolSize, boolean keepAlive) {
if (httpClient == null) {
synchronized (HttpClientPoolUtil.class) {
if (httpClient == null) {
isKeepAlive = keepAlive ? HTTP.CONN_KEEP_ALIVE : HTTP.CONN_CLOSE;
PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager();
connectionManager.setMaxTotal(DEFAULT_MAX_TOTAL);
connectionManager.setDefaultMaxPerRoute(DEFAULT_MAX_PER_ROUTE);
connectionManager.setMaxTotal(connPoolSize * 10);
connectionManager.setDefaultMaxPerRoute(connPoolSize);
httpClient = HttpClients.custom()
.setKeepAliveStrategy(DEFAULT_KEEP_ALIVE_STRATEGY)
.setConnectionManager(connectionManager)
.setRetryHandler((exception, executionCount, httpContext) -> executionCount < DEFAULT_MAX_RETRY_COUNT)
.build();
}
}
}
}
/*** execute GET request ***/
public static String execute(String uri) throws SQLException {
HttpEntity httpEntity = null;
String responseBody = "";
try {
HttpRequestBase method = getRequest(uri, HttpGet.METHOD_NAME);
HttpContext context = HttpClientContext.create();
CloseableHttpResponse httpResponse = httpClient.execute(method, context);
try (CloseableHttpResponse httpResponse = httpClient.execute(method, context)) {
httpEntity = httpResponse.getEntity();
if (httpEntity != null) {
responseBody = EntityUtils.toString(httpEntity, StandardCharsets.UTF_8);
}
} catch (ClientProtocolException e) {
e.printStackTrace();
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_Protocol_Exception, e.getMessage());
} catch (IOException exception) {
exception.printStackTrace();
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_IOException, exception.getMessage());
} finally {
if (httpEntity != null) {
......@@ -94,30 +95,27 @@ public class HttpClientPoolUtil {
return responseBody;
}
/*** execute POST request ***/
public static String execute(String uri, String data, String token) throws SQLException {
HttpEntity httpEntity = null;
String responseBody = "";
try {
HttpEntityEnclosingRequestBase method = (HttpEntityEnclosingRequestBase) getRequest(uri, HttpPost.METHOD_NAME);
method.setHeader(HTTP.CONTENT_TYPE, "text/plain");
method.setHeader(HTTP.CONN_DIRECTIVE, HTTP.CONN_KEEP_ALIVE);
method.setHeader(HTTP.CONN_DIRECTIVE, isKeepAlive);
method.setHeader("Authorization", "Taosd " + token);
method.setEntity(new StringEntity(data, StandardCharsets.UTF_8));
HttpContext context = HttpClientContext.create();
CloseableHttpResponse httpResponse = httpClient.execute(method, context);
HttpEntity httpEntity = null;
String responseBody = "";
try (CloseableHttpResponse httpResponse = httpClient.execute(method, context)) {
httpEntity = httpResponse.getEntity();
if (httpEntity == null) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_HTTP_ENTITY_IS_NULL, "httpEntity is null, sql: " + data);
}
responseBody = EntityUtils.toString(httpEntity, StandardCharsets.UTF_8);
} catch (ClientProtocolException e) {
e.printStackTrace();
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_Protocol_Exception, e.getMessage());
} catch (IOException exception) {
exception.printStackTrace();
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESTFul_Client_IOException, exception.getMessage());
} finally {
if (httpEntity != null) {
......@@ -148,4 +146,12 @@ public class HttpClientPoolUtil {
return method;
}
public static void reset() {
synchronized (HttpClientPoolUtil.class) {
ClientConnectionManager cm = httpClient.getConnectionManager();
cm.closeExpiredConnections();
cm.closeIdleConnections(100, TimeUnit.MILLISECONDS);
}
}
}
\ No newline at end of file
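Since the static initializer is replaced by the public init() method above, callers now have to initialize the pool before the first execute() call (RestfulDriver and the tests further below do this). A minimal sketch under that assumption; host, port and pool settings are illustrative.

```java
import com.taosdata.jdbc.utils.HttpClientPoolUtil;

public class PoolUtilDemo {
    public static void main(String[] args) throws Exception {
        // With this commit the pool is no longer built in a static initializer;
        // init() must run once before any execute() call. Arguments are illustrative.
        HttpClientPoolUtil.init(20, true);
        // GET form: e.g. the REST login endpoint used by RestfulDriver above.
        String result = HttpClientPoolUtil.execute("http://127.0.0.1:6041/rest/login/root/taosdata");
        System.out.println(result);
    }
}
```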
......@@ -16,7 +16,6 @@ public class TaosInfo implements TaosInfoMBean {
MBeanServer server = ManagementFactory.getPlatformMBeanServer();
ObjectName name = new ObjectName("TaosInfoMBean:name=TaosInfo");
server.registerMBean(TaosInfo.getInstance(), name);
} catch (MalformedObjectNameException | InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e) {
e.printStackTrace();
}
......
......@@ -49,14 +49,9 @@ public class Utils {
try {
return parseMicroSecTimestamp(timeStampStr);
} catch (DateTimeParseException ee) {
try {
return parseNanoSecTimestamp(timeStampStr);
} catch (DateTimeParseException eee) {
eee.printStackTrace();
}
}
}
return null;
}
private static LocalDateTime parseMilliSecTimestamp(String timeStampStr) throws DateTimeParseException {
......
package com.taosdata.jdbc;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.sql.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
public class ParameterBindTest {
private static final String host = "127.0.0.1";
private static final String stable = "weather";
private Connection conn;
private final Random random = new Random(System.currentTimeMillis());
@Test
public void test() {
// given
String[] tbnames = {"t1", "t2", "t3"};
int rows = 10;
// when
insertIntoTables(tbnames, 10);
// then
assertRows(stable, tbnames.length * rows);
for (String t : tbnames) {
assertRows(t, rows);
}
}
@Test
public void testMultiThreads() {
// given
String[][] tables = {{"t1", "t2", "t3"}, {"t4", "t5", "t6"}, {"t7", "t8", "t9"}, {"t10"}};
int rows = 10;
// when
List<Thread> threads = Arrays.stream(tables).map(tbnames -> new Thread(() -> insertIntoTables(tbnames, rows))).collect(Collectors.toList());
threads.forEach(Thread::start);
for (Thread thread : threads) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
// then
for (String[] table : tables) {
for (String t : table) {
assertRows(t, rows);
}
}
}
private void assertRows(String tbname, int rows) {
try (Statement stmt = conn.createStatement()) {
ResultSet rs = stmt.executeQuery("select count(*) from " + tbname);
while (rs.next()) {
int count = rs.getInt(1);
Assert.assertEquals(rows, count);
}
} catch (SQLException e) {
e.printStackTrace();
}
}
private void insertIntoTables(String[] tbnames, int rowsEachTable) {
long current = System.currentTimeMillis();
String sql = "insert into ? using " + stable + " tags(?, ?) values(?, ?, ?)";
try (TSDBPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSDBPreparedStatement.class)) {
for (int i = 0; i < tbnames.length; i++) {
pstmt.setTableName(tbnames[i]);
pstmt.setTagInt(0, random.nextInt(100));
pstmt.setTagInt(1, random.nextInt(100));
ArrayList<Long> timestampList = new ArrayList<>();
for (int j = 0; j < rowsEachTable; j++) {
timestampList.add(current + i * 1000 + j);
}
pstmt.setTimestamp(0, timestampList);
ArrayList<Integer> f1List = new ArrayList<>();
for (int j = 0; j < rowsEachTable; j++) {
f1List.add(random.nextInt(100));
}
pstmt.setInt(1, f1List);
ArrayList<Integer> f2List = new ArrayList<>();
for (int j = 0; j < rowsEachTable; j++) {
f2List.add(random.nextInt(100));
}
pstmt.setInt(2, f2List);
pstmt.columnDataAddBatch();
}
pstmt.columnDataExecuteBatch();
} catch (SQLException e) {
e.printStackTrace();
}
}
@Before
public void before() {
String url = "jdbc:TAOS://" + host + ":6030/?user=root&password=taosdata";
try {
conn = DriverManager.getConnection(url);
Statement stmt = conn.createStatement();
stmt.execute("drop database if exists test_pd");
stmt.execute("create database if not exists test_pd");
stmt.execute("use test_pd");
stmt.execute("create table " + stable + "(ts timestamp, f1 int, f2 int) tags(t1 int, t2 int)");
} catch (SQLException e) {
e.printStackTrace();
}
}
@After
public void after() {
try {
// Statement stmt = conn.createStatement();
// stmt.execute("drop database if exists test_pd");
if (conn != null)
conn.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
package com.taosdata.jdbc;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.taosdata.jdbc.enums.SchemalessProtocolType;
import com.taosdata.jdbc.enums.SchemalessTimestampType;
import org.junit.After;
......@@ -10,10 +12,14 @@ import org.junit.Test;
import java.sql.*;
public class SchemalessInsertTest {
private String host = "127.0.0.1";
private String dbname = "test_schemaless_insert";
private final String dbname = "test_schemaless_insert";
private Connection conn;
/**
* schemaless insert compatible with influxdb
*
* @throws SQLException execute error
*/
@Test
public void schemalessInsert() throws SQLException {
// given
......@@ -41,6 +47,11 @@ public class SchemalessInsertTest {
statement.close();
}
/**
* telnet insert compatible with opentsdb
*
* @throws SQLException execute error
*/
@Test
public void telnetInsert() throws SQLException {
// given
......@@ -71,6 +82,11 @@ public class SchemalessInsertTest {
statement.close();
}
/**
* json insert compatible with opentsdb json format
*
* @throws SQLException execute error
*/
@Test
public void jsonInsert() throws SQLException {
// given
......@@ -113,13 +129,15 @@ public class SchemalessInsertTest {
while (rs.next()) {
rowCnt++;
}
// Assert.assertEquals(json.length, rowCnt);
Assert.assertEquals(((JSONArray) JSONObject.parse(json)).size(), rowCnt);
rs.close();
statement.close();
}
@Before
public void before() {
String host = "127.0.0.1";
final String url = "jdbc:TAOS://" + host + ":6030/?user=root&password=taosdata";
try {
conn = DriverManager.getConnection(url);
......
package com.taosdata.jdbc.rs;
import org.junit.Assert;
import org.junit.Test;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class HttpKeepAliveTest {
private static final String host = "127.0.0.1";
@Test
public void test() throws SQLException {
//given
int multi = 4000;
AtomicInteger exceptionCount = new AtomicInteger();
//when
Properties props = new Properties();
props.setProperty("httpKeepAlive", "false");
props.setProperty("httpPoolSize", "20");
Connection connection = DriverManager.getConnection("jdbc:TAOS-RS://" + host + ":6041/?user=root&password=taosdata", props);
List<Thread> threads = IntStream.range(0, multi).mapToObj(i -> new Thread(
() -> {
try (Statement stmt = connection.createStatement()) {
stmt.execute("insert into log.tb_not_exists values(now, 1)");
stmt.execute("select last(*) from log.dn");
} catch (SQLException throwables) {
exceptionCount.getAndIncrement();
}
}
)).collect(Collectors.toList());
threads.forEach(Thread::start);
for (Thread thread : threads) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
//then
Assert.assertEquals(multi, exceptionCount.get());
}
}
......@@ -6,8 +6,7 @@ import java.sql.*;
public class WasNullTest {
// private static final String host = "127.0.0.1";
private static final String host = "master";
private static final String host = "127.0.0.1";
private Connection conn;
......
......@@ -2,8 +2,6 @@ package com.taosdata.jdbc.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.taosdata.jdbc.TSDBDriver;
import com.taosdata.jdbc.TSDBError;
import org.junit.Test;
import java.io.UnsupportedEncodingException;
......@@ -11,7 +9,6 @@ import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
......@@ -20,18 +17,21 @@ public class HttpClientPoolUtilTest {
String user = "root";
String password = "taosdata";
String host = "127.0.0.1";
String dbname = "log";
// String host = "master";
@Test
public void test() {
public void useLog() {
// given
List<Thread> threads = IntStream.range(0, 4000).mapToObj(i -> new Thread(() -> {
useDB();
// try {
// TimeUnit.SECONDS.sleep(10);
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
int multi = 10;
// when
List<Thread> threads = IntStream.range(0, multi).mapToObj(i -> new Thread(() -> {
try {
String token = login(multi);
executeOneSql("use log", token);
} catch (SQLException | UnsupportedEncodingException e) {
e.printStackTrace();
}
})).collect(Collectors.toList());
threads.forEach(Thread::start);
......@@ -43,14 +43,41 @@ public class HttpClientPoolUtilTest {
e.printStackTrace();
}
}
}
@Test
public void tableNotExist() {
// given
int multi = 20;
// when
List<Thread> threads = IntStream.range(0, multi * 25).mapToObj(i -> new Thread(() -> {
try {
// String token = "/KfeAzX/f9na8qdtNZmtONryp201ma04bEl8LcvLUd7a8qdtNZmtONryp201ma04";
String token = login(multi);
executeOneSql("insert into log.tb_not_exist values(now, 1)", token);
executeOneSql("select last(*) from log.dn", token);
} catch (SQLException | UnsupportedEncodingException e) {
e.printStackTrace();
}
})).collect(Collectors.toList());
threads.forEach(Thread::start);
private void useDB() {
for (Thread thread : threads) {
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
private String login(int connPoolSize) throws SQLException, UnsupportedEncodingException {
user = URLEncoder.encode(user, StandardCharsets.UTF_8.displayName());
password = URLEncoder.encode(password, StandardCharsets.UTF_8.displayName());
String loginUrl = "http://" + host + ":" + 6041 + "/rest/login/" + user + "/" + password + "";
HttpClientPoolUtil.init(connPoolSize, false);
String result = HttpClientPoolUtil.execute(loginUrl);
JSONObject jsonResult = JSON.parseObject(result);
String status = jsonResult.getString("status");
......@@ -58,18 +85,19 @@ public class HttpClientPoolUtilTest {
if (!status.equals("succ")) {
throw new SQLException(jsonResult.getString("desc"));
}
return token;
}
private boolean executeOneSql(String sql, String token) throws SQLException {
String url = "http://" + host + ":6041/rest/sql";
String sql = "use " + dbname;
result = HttpClientPoolUtil.execute(url, sql, token);
String result = HttpClientPoolUtil.execute(url, sql, token);
JSONObject resultJson = JSON.parseObject(result);
if (resultJson.getString("status").equals("error")) {
throw TSDBError.createSQLException(resultJson.getInteger("code"), resultJson.getString("desc"));
}
} catch (UnsupportedEncodingException | SQLException e) {
e.printStackTrace();
// HttpClientPoolUtil.reset();
// throw TSDBError.createSQLException(resultJson.getInteger("code"), resultJson.getString("desc"));
return false;
}
return true;
}
......
#org.apache.commons.logging.Log=org.apache.commons.logging.impl.SimpleLog
org.apache.commons.logging.simplelog.defaultlog=TRACE
org.apache.commons.logging.simplelog.showlogname=true
org.apache.commons.logging.simplelog.showShortLogname=restful
org.apache.commons.logging.simplelog.showdatetime=true
org.apache.commons.logging.simplelog.dateTimeFormat=yyyy-MM-dd HH:mm:ss.SSS
\ No newline at end of file
......@@ -12,6 +12,7 @@ const FieldTypes = require('./constants');
const errors = require('./error');
const TaosObjects = require('./taosobjects');
const { NULL_POINTER } = require('ref-napi');
const { Console } = require('console');
module.exports = CTaosInterface;
......@@ -53,6 +54,18 @@ function convertTinyint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0
}
return res;
}
function convertTinyintUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
let currOffset = 0;
while (currOffset < data.length) {
let d = data.readUIntLE(currOffset, 1);
res.push(d == FieldTypes.C_TINYINT_UNSIGNED_NULL ? null : d);
currOffset += nbytes;
}
return res;
}
function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
......@@ -64,6 +77,18 @@ function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, precision =
}
return res;
}
function convertSmallintUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
let currOffset = 0;
while (currOffset < data.length) {
let d = data.readUIntLE(currOffset, 2);
res.push(d == FieldTypes.C_SMALLINT_UNSIGNED_NULL ? null : d);
currOffset += nbytes;
}
return res;
}
function convertInt(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
......@@ -75,6 +100,19 @@ function convertInt(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
}
return res;
}
function convertIntUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
let currOffset = 0;
while (currOffset < data.length) {
let d = data.readUInt32LE(currOffset);
res.push(d == FieldTypes.C_INT_UNSIGNED_NULL ? null : d);
currOffset += nbytes;
}
return res;
}
function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
......@@ -86,6 +124,19 @@ function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0)
}
return res;
}
function convertBigintUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
let currOffset = 0;
while (currOffset < data.length) {
let d = data.readUInt64LE(currOffset);
res.push(d == FieldTypes.C_BIGINT_UNSIGNED_NULL ? null : BigInt(d));
currOffset += nbytes;
}
return res;
}
function convertFloat(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
......@@ -156,7 +207,11 @@ let convertFunctions = {
[FieldTypes.C_DOUBLE]: convertDouble,
[FieldTypes.C_BINARY]: convertBinary,
[FieldTypes.C_TIMESTAMP]: convertTimestamp,
[FieldTypes.C_NCHAR]: convertNchar
[FieldTypes.C_NCHAR]: convertNchar,
[FieldTypes.C_TINYINT_UNSIGNED]: convertTinyintUnsigned,
[FieldTypes.C_SMALLINT_UNSIGNED]: convertSmallintUnsigned,
[FieldTypes.C_INT_UNSIGNED]: convertIntUnsigned,
[FieldTypes.C_BIGINT_UNSIGNED]: convertBigintUnsigned
}
// Define TaosField structure
......@@ -321,6 +376,7 @@ CTaosInterface.prototype.close = function close(connection) {
CTaosInterface.prototype.query = function query(connection, sql) {
return this.libtaos.taos_query(connection, ref.allocCString(sql));
}
CTaosInterface.prototype.affectedRows = function affectedRows(result) {
return this.libtaos.taos_affected_rows(result);
}
......@@ -413,6 +469,7 @@ CTaosInterface.prototype.query_a = function query_a(connection, sql, callback, p
this.libtaos.taos_query_a(connection, ref.allocCString(sql), callback, param);
return param;
}
/** Asynchronously fetches the next block of rows. Wraps callback and transfers a 4th argument to the cursor, the row data as blocks in javascript form
* Note: This isn't a recursive function; in order to fetch all data either use the TDengine cursor object, TaosQuery object, or implement a recursive
* function yourself using the libtaos.taos_fetch_rows_a function
......
......@@ -36,13 +36,21 @@ module.exports = {
C_BINARY : 8,
C_TIMESTAMP : 9,
C_NCHAR : 10,
C_TINYINT_UNSIGNED : 11,
C_SMALLINT_UNSIGNED : 12,
C_INT_UNSIGNED : 13,
C_BIGINT_UNSIGNED : 14,
// NULL value definition
// NOTE: These values should change according to C definition in tsdb.h
C_BOOL_NULL : 2,
C_TINYINT_NULL : -128,
C_TINYINT_UNSIGNED_NULL : 255,
C_SMALLINT_NULL : -32768,
C_SMALLINT_UNSIGNED_NULL : 65535,
C_INT_NULL : -2147483648,
C_BIGINT_NULL : -9223372036854775808,
C_INT_UNSIGNED_NULL : 4294967295,
C_BIGINT_NULL : -9223372036854775808n,
C_BIGINT_UNSIGNED_NULL : 18446744073709551615n,
C_FLOAT_NULL : 2146435072,
C_DOUBLE_NULL : -9223370937343148032,
C_NCHAR_NULL : 4294967295,
......@@ -64,6 +72,10 @@ const typeCodesToName = {
8 : 'Binary',
9 : 'Timestamp',
10 : 'Nchar',
11 : 'TINYINT_UNSIGNED',
12 : 'SMALLINT_UNSIGNED',
13 : 'INT_UNSIGNED',
14 : 'BIGINT_UNSIGNED',
}
/**
......
......@@ -7,7 +7,7 @@
"test": "test"
},
"scripts": {
"test": "node test/test.js && node test/testMicroseconds.js && node test/testNanoseconds.js"
"test": "node test/test.js && node test/testMicroseconds.js && node test/testNanoseconds.js && node test/testUnsignedType.js "
},
"repository": {
"type": "git",
......
......@@ -90,7 +90,7 @@ c1.execute("create table if not exists td_connector_test.weather(ts timestamp, t
c1.execute("insert into t1 using weather tags('北京') values(now, 11.11, 11)");
c1.execute("insert into t1(ts, temperature) values(now, 22.22)");
c1.execute("insert into t1(ts, humidity) values(now, 33)");
c1.query('select * from test.t1', true).then(function (result) {
c1.query('select * from td_connector_test.t1', true).then(function (result) {
result.pretty();
});
......
const taos = require('../tdengine');
var conn = taos.connect({ host: "127.0.0.1", user: "root", password: "taosdata", config: "/etc/taos", port: 10 });
var c1 = conn.cursor();
executeUpdate("create database nodedb;");
executeUpdate("use nodedb;");
executeUpdate("create table unsigntest(ts timestamp,ut tinyint unsigned,us smallint unsigned,ui int unsigned,ub bigint unsigned,bi bigint);");
executeUpdate("insert into unsigntest values (now, 254,65534,4294967294,18446744073709551614,9223372036854775807);");
executeUpdate("insert into unsigntest values (now, 0,0,0,0,-9223372036854775807);");
executeQuery("select * from unsigntest;");
executeUpdate("drop database nodedb;");
function executeUpdate(sql) {
console.log(sql);
c1.execute(sql);
}
function executeQuery(sql) {
c1.execute(sql)
var data = c1.fetchall();
// Latest query's Field metadata is stored in cursor.fields
console.log(c1.fields);
// Latest query's result data is stored in cursor.data, also returned by fetchall.
console.log(c1.data);
}
setTimeout(()=>conn.close(),2000);
......@@ -5,14 +5,27 @@
## Install
```sh
git clone --depth 1 https://github.com/taosdata/TDengine.git
pip install ./TDengine/src/connector/python
You can use `pip` to install the connector from PyPI:
```bash
pip install taospy
```
Or with git url:
```bash
pip install git+https://github.com/taosdata/taos-connector-python.git
```
If you have installed TDengine server or client with prebuilt packages, then you can install the connector from path:
```bash
pip install /usr/local/taos/connector/python
```
## Source Code
[TDengine](https://github.com/taosdata/TDengine) connector for Python source code is hosted on [GitHub](https://github.com/taosdata/TDengine/tree/develop/src/connector/python).
[TDengine](https://github.com/taosdata/TDengine) connector for Python source code is hosted on [GitHub](https://github.com/taosdata/taos-connector-python).
## Examples
......
[tool.poetry]
name = "taos"
version = "2.1.1"
name = "taospy"
version = "2.1.2"
description = "TDengine connector for python"
authors = ["Taosdata Inc. <support@taosdata.com>"]
license = "AGPL-3.0"
readme = "README.md"
packages = [
{include = "taos"}
]
[tool.poetry.dependencies]
python = "^2.7 || ^3.4"
......@@ -12,12 +15,12 @@ typing = "*"
[tool.poetry.dev-dependencies]
pytest = [
{ version = "^4.6", python = "^2.7" },
{ version = "^6.2", python = "^3.7" }
{ version = "^4.6", python = ">=2.7,<3.0" },
{ version = "^6.2", python = ">=3.7,<4.0" }
]
pdoc = { version = "^7.1.1", python = "^3.7" }
mypy = { version = "^0.910", python = "^3.6" }
black = { version = "^21.7b0", python = "^3.6" }
black = [{ version = "^21.*", python = ">=3.6.2,<4.0" }]
[build-system]
requires = ["poetry-core>=1.0.0"]
......
......@@ -442,18 +442,14 @@ from .statement import *
from .subscription import *
from .schemaless import *
try:
import importlib.metadata
__version__ = importlib.metadata.version("taos")
except:
None
from taos._version import __version__
# Globals
threadsafety = 0
paramstyle = "pyformat"
__all__ = [
"__version__",
# functions
"connect",
"new_bind_param",
......
......@@ -2,8 +2,9 @@
import ctypes
import platform
import sys
import inspect
from ctypes import *
try:
from typing import Any
except:
......@@ -14,6 +15,7 @@ from .bind import *
from .field import *
from .schemaless import *
_UNSUPPORTED = {}
# stream callback
stream_callback_type = CFUNCTYPE(None, c_void_p, c_void_p, c_void_p)
......@@ -47,10 +49,13 @@ def _load_taos():
"Darwin": _load_taos_darwin,
"Windows": _load_taos_windows,
}
pf = platform.system()
if load_func[pf] is None:
raise InterfaceError("unsupported platform: %s" % pf)
try:
return load_func[platform.system()]()
except:
raise InterfaceError('unsupported platform or failed to load taos client library')
return load_func[pf]()
except Exception as err:
raise InterfaceError("unable to load taos C library: %s" % err)
_libtaos = _load_taos()
......@@ -65,7 +70,6 @@ _libtaos.taos_consume.restype = ctypes.c_void_p
_libtaos.taos_fetch_lengths.restype = ctypes.POINTER(ctypes.c_int)
_libtaos.taos_free_result.restype = None
_libtaos.taos_query.restype = ctypes.POINTER(ctypes.c_void_p)
_libtaos.taos_schemaless_insert.restype = ctypes.c_void_p
try:
_libtaos.taos_stmt_errstr.restype = c_char_p
......@@ -181,6 +185,7 @@ def taos_connect(host=None, user="root", password="taosdata", db=None, port=0):
raise ConnectionError("connect to TDengine failed")
return connection
_libtaos.taos_connect_auth.restype = c_void_p
_libtaos.taos_connect_auth.argtypes = c_char_p, c_char_p, c_char_p, c_char_p, c_uint16
......@@ -236,6 +241,7 @@ def taos_connect_auth(host=None, user="root", auth="", db=None, port=0):
raise ConnectionError("connect to TDengine failed")
return connection
_libtaos.taos_query.restype = c_void_p
_libtaos.taos_query.argtypes = c_void_p, c_char_p
......@@ -287,6 +293,7 @@ def taos_affected_rows(result):
"""The affected rows after runing query"""
return _libtaos.taos_affected_rows(result)
subscribe_callback_type = CFUNCTYPE(None, c_void_p, c_void_p, c_void_p, c_int)
_libtaos.taos_subscribe.restype = c_void_p
# _libtaos.taos_subscribe.argtypes = c_void_p, c_int, c_char_p, c_char_p, subscribe_callback_type, c_void_p, c_int
......@@ -317,7 +324,7 @@ def taos_subscribe(connection, restart, topic, sql, interval, callback=None, par
_libtaos.taos_consume.restype = c_void_p
_libtaos.taos_consume.argstype = c_void_p,
_libtaos.taos_consume.argstype = (c_void_p,)
def taos_consume(sub):
......@@ -503,13 +510,17 @@ def taos_stop_query(result):
return _libtaos.taos_stop_query(result)
_libtaos.taos_load_table_info.restype = c_int
_libtaos.taos_load_table_info.argstype = (c_void_p, c_char_p)
try:
_libtaos.taos_load_table_info.restype = c_int
_libtaos.taos_load_table_info.argstype = (c_void_p, c_char_p)
except Exception as err:
_UNSUPPORTED["taos_open_stream"] = err
def taos_load_table_info(connection, tables):
# type: (ctypes.c_void_p, str) -> None
"""Stop current query"""
_check_if_supported()
errno = _libtaos.taos_load_table_info(connection, c_char_p(tables.encode("utf-8")))
if errno != 0:
msg = taos_errstr()
......@@ -562,12 +573,13 @@ def taos_select_db(connection, db):
try:
_libtaos.taos_open_stream.restype = c_void_p
_libtaos.taos_open_stream.argstype = c_void_p, c_char_p, stream_callback_type, c_int64, c_void_p, Any
except:
pass
except Exception as err:
_UNSUPPORTED["taos_open_stream"] = err
def taos_open_stream(connection, sql, callback, stime=0, param=None, callback2=None):
# type: (ctypes.c_void_p, str, stream_callback_type, c_int64, c_void_p, c_void_p) -> ctypes.pointer
_check_if_supported()
if callback2 != None:
callback2 = stream_callback2_type(callback2)
"""Open an stream"""
......@@ -600,6 +612,7 @@ def taos_stmt_init(connection):
"""
return c_void_p(_libtaos.taos_stmt_init(connection))
_libtaos.taos_stmt_prepare.restype = c_int
_libtaos.taos_stmt_prepare.argstype = (c_void_p, c_char_p, c_int)
......@@ -618,6 +631,7 @@ def taos_stmt_prepare(stmt, sql):
_libtaos.taos_stmt_close.restype = c_int
_libtaos.taos_stmt_close.argstype = (c_void_p,)
def taos_stmt_close(stmt):
# type: (ctypes.c_void_p) -> None
"""Close a statement query
......@@ -627,17 +641,12 @@ def taos_stmt_close(stmt):
if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
try:
_libtaos.taos_stmt_errstr.restype = c_char_p
_libtaos.taos_stmt_errstr.argstype = (c_void_p,)
except AttributeError:
print("WARNING: libtaos(%s) does not support taos_stmt_errstr" % taos_get_client_info())
try:
_libtaos.taos_stmt_errstr.restype = c_char_p
_libtaos.taos_stmt_errstr.argstype = (c_void_p,)
except AttributeError:
print("WARNING: libtaos(%s) does not support taos_stmt_errstr" % taos_get_client_info())
except Exception as err:
_UNSUPPORTED["taos_stmt_set_tbname"] = err
def taos_stmt_errstr(stmt):
......@@ -645,16 +654,17 @@ def taos_stmt_errstr(stmt):
"""Get error message from stetement query
@stmt: c_void_p TAOS_STMT*
"""
_check_if_supported()
err = c_char_p(_libtaos.taos_stmt_errstr(stmt))
if err:
return err.value.decode("utf-8")
try:
_libtaos.taos_stmt_set_tbname.restype = c_int
_libtaos.taos_stmt_set_tbname.argstype = (c_void_p, c_char_p)
except AttributeError:
print("WARNING: libtaos(%s) does not support taos_stmt_set_tbname" % taos_get_client_info())
except Exception as err:
_UNSUPPORTED["taos_stmt_set_tbname"] = err
def taos_stmt_set_tbname(stmt, name):
......@@ -662,15 +672,17 @@ def taos_stmt_set_tbname(stmt, name):
"""Set table name of a statement query if exists.
@stmt: c_void_p TAOS_STMT*
"""
_check_if_supported()
res = _libtaos.taos_stmt_set_tbname(stmt, c_char_p(name.encode("utf-8")))
if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
try:
_libtaos.taos_stmt_set_tbname_tags.restype = c_int
_libtaos.taos_stmt_set_tbname_tags.argstype = (c_void_p, c_char_p, c_void_p)
except AttributeError:
print("WARNING: libtaos(%s) does not support taos_stmt_set_tbname_tags" % taos_get_client_info())
except Exception as err:
_UNSUPPORTED["taos_stmt_set_tbname_tags"] = err
def taos_stmt_set_tbname_tags(stmt, name, tags):
......@@ -678,11 +690,13 @@ def taos_stmt_set_tbname_tags(stmt, name, tags):
"""Set table name with tags bind params.
@stmt: c_void_p TAOS_STMT*
"""
_check_if_supported()
res = _libtaos.taos_stmt_set_tbname_tags(stmt, ctypes.c_char_p(name.encode("utf-8")), tags)
if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
_libtaos.taos_stmt_is_insert.restype = c_int
_libtaos.taos_stmt_is_insert.argstype = (c_void_p, POINTER(c_int))
......@@ -702,6 +716,7 @@ def taos_stmt_is_insert(stmt):
_libtaos.taos_stmt_num_params.restype = c_int
_libtaos.taos_stmt_num_params.argstype = (c_void_p, POINTER(c_int))
def taos_stmt_num_params(stmt):
# type: (ctypes.c_void_p) -> int
"""Params number of the current statement query.
......@@ -713,6 +728,7 @@ def taos_stmt_num_params(stmt):
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
return num_params.value
_libtaos.taos_stmt_bind_param.restype = c_int
_libtaos.taos_stmt_bind_param.argstype = (c_void_p, c_void_p)
......@@ -729,12 +745,12 @@ def taos_stmt_bind_param(stmt, bind):
if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
try:
_libtaos.taos_stmt_bind_param_batch.restype = c_int
_libtaos.taos_stmt_bind_param_batch.argstype = (c_void_p, c_void_p)
except AttributeError:
print("WARNING: libtaos(%s) does not support taos_stmt_bind_param_batch" % taos_get_client_info())
except Exception as err:
_UNSUPPORTED["taos_stmt_bind_param_batch"] = err
def taos_stmt_bind_param_batch(stmt, bind):
......@@ -745,15 +761,17 @@ def taos_stmt_bind_param_batch(stmt, bind):
"""
# ptr = ctypes.cast(bind, POINTER(TaosMultiBind))
# ptr = pointer(bind)
_check_if_supported()
res = _libtaos.taos_stmt_bind_param_batch(stmt, bind)
if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
try:
_libtaos.taos_stmt_bind_single_param_batch.restype = c_int
_libtaos.taos_stmt_bind_single_param_batch.argstype = (c_void_p, c_void_p, c_int)
except AttributeError:
print("WARNING: libtaos(%s) does not support taos_stmt_bind_single_param_batch" % taos_get_client_info())
except Exception as err:
_UNSUPPORTED["taos_stmt_bind_single_param_batch"] = err
def taos_stmt_bind_single_param_batch(stmt, bind, col):
......@@ -763,6 +781,7 @@ def taos_stmt_bind_single_param_batch(stmt, bind, col):
@bind: TAOS_MULTI_BIND*
@col: column index
"""
_check_if_supported()
res = _libtaos.taos_stmt_bind_single_param_batch(stmt, bind, col)
if res != 0:
raise StatementError(msg=taos_stmt_errstr(stmt), errno=res)
......@@ -810,14 +829,17 @@ def taos_stmt_use_result(stmt):
raise StatementError(taos_stmt_errstr(stmt))
return result
try:
_libtaos.taos_schemaless_insert.restype = c_void_p
_libtaos.taos_schemaless_insert.argstype = c_void_p, c_void_p, c_int, c_int, c_int
except AttributeError:
print("WARNING: libtaos(%s) does not support taos_schemaless_insert" % taos_get_client_info())
except Exception as err:
_UNSUPPORTED["taos_schemaless_insert"] = err
def taos_schemaless_insert(connection, lines, protocol, precision):
# type: (c_void_p, list[str] | tuple(str), SmlProtocol, SmlPrecision) -> int
_check_if_supported()
num_of_lines = len(lines)
lines = (c_char_p(line.encode("utf-8")) for line in lines)
lines_type = ctypes.c_char_p * num_of_lines
......@@ -833,6 +855,18 @@ def taos_schemaless_insert(connection, lines, protocol, precision):
taos_free_result(res)
return affected_rows
def _check_if_supported():
func = inspect.stack()[1][3]
if func in _UNSUPPORTED:
raise InterfaceError("C function %s is not supported in v%s: %s" % (func, taos_get_client_info(), _UNSUPPORTED[func]))
def unsupported_methods():
for m, e in _UNSUPPORTED.items():
print("unsupported %s: %s" % (m, e))
class CTaosInterface(object):
def __init__(self, config=None):
"""
......
......@@ -175,11 +175,13 @@ DLL_EXPORT int taos_select_db(TAOS *taos, const char *db);
DLL_EXPORT int taos_print_row(char *str, TAOS_ROW row, TAOS_FIELD *fields, int num_fields);
DLL_EXPORT void taos_stop_query(TAOS_RES *res);
DLL_EXPORT bool taos_is_null(TAOS_RES *res, int32_t row, int32_t col);
DLL_EXPORT bool taos_is_update_query(TAOS_RES *res);
DLL_EXPORT int taos_fetch_block(TAOS_RES *res, TAOS_ROW *rows);
DLL_EXPORT int taos_validate_sql(TAOS *taos, const char *sql);
DLL_EXPORT int* taos_fetch_lengths(TAOS_RES *res);
DLL_EXPORT int taos_validate_sql(TAOS *taos, const char *sql);
DLL_EXPORT void taos_reset_current_db(TAOS *taos);
// TAOS_RES *taos_list_tables(TAOS *mysql, const char *wild);
// TAOS_RES *taos_list_dbs(TAOS *mysql, const char *wild);
......@@ -192,7 +194,6 @@ DLL_EXPORT int taos_errno(TAOS_RES *tres);
DLL_EXPORT void taos_query_a(TAOS *taos, const char *sql, void (*fp)(void *param, TAOS_RES *, int code), void *param);
DLL_EXPORT void taos_fetch_rows_a(TAOS_RES *res, void (*fp)(void *param, TAOS_RES *, int numOfRows), void *param);
//DLL_EXPORT void taos_fetch_row_a(TAOS_RES *res, void (*fp)(void *param, TAOS_RES *, TAOS_ROW row), void *param);
typedef void (*TAOS_SUBSCRIBE_CALLBACK)(TAOS_SUB* tsub, TAOS_RES *res, void* param, int code);
DLL_EXPORT TAOS_SUB *taos_subscribe(TAOS* taos, int restart, const char* topic, const char *sql, TAOS_SUBSCRIBE_CALLBACK fp, void *param, int interval);
......
......@@ -400,7 +400,7 @@ typedef struct SColIndex {
int16_t colId; // column id
int16_t colIndex; // column index in colList if it is a normal column or index in tagColList if a tag
uint16_t flag; // denote if it is a tag or a normal column
char name[TSDB_COL_NAME_LEN + TSDB_DB_NAME_LEN + 1];
char name[TSDB_COL_NAME_LEN + TSDB_TABLE_NAME_LEN + 1];
} SColIndex;
typedef struct SColumnFilterInfo {
......
......@@ -25,7 +25,7 @@
static char **shellSQLFiles = NULL;
static int32_t shellSQLFileNum = 0;
static char shellTablesSQLFile[TSDB_FILENAME_LEN] = {0};
static char shellTablesSQLFile[4096] = {0};
typedef struct {
pthread_t threadID;
......
(36 additional file diffs are collapsed and not shown in this view.)