diff --git a/.gitmodules b/.gitmodules
index 4b0b8dcab54c3dcd0bdbd75a4f4a2871ce3218a7..7edcdff5d3dd805ec6b222915688940c7bd7dcb9 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -16,9 +16,9 @@
[submodule "deps/TSZ"]
path = deps/TSZ
url = https://github.com/taosdata/TSZ.git
-[submodule "src/plugins/blm3"]
- path = src/plugins/blm3
- url = https://github.com/taosdata/blm3
[submodule "deps/avro"]
path = deps/avro
url = https://github.com/apache/avro
+[submodule "src/plugins/taosadapter"]
+ path = src/plugins/taosadapter
+ url = https://github.com/taosdata/taosadapter
diff --git a/README.md b/README.md
index c821bdc031fc3125e7afdfd2f8a9c2878e51f505..edca04afd486687ea8653e955ae50da457f77ab9 100644
--- a/README.md
+++ b/README.md
@@ -129,7 +129,7 @@ mkdir debug && cd debug
cmake .. && cmake --build .
```
-Note TDengine 2.3.0.0 and later use a component named 'blm3' to play http daemon role by default instead of the http daemon embedded in the early version of TDengine. The blm3 is programmed by go language. If you pull TDengine source code to the latest from an existing codebase, please execute 'git submodule update --init --recursive' to pull blm3 source code. Please install go language 1.14 or above for compiling blm3. If you meet difficulties regarding 'go mod', especially you are from China, you can use a proxy to solve the problem.
+Note that TDengine 2.3.x.0 and later versions use a component named 'taosadapter' as the HTTP daemon by default, instead of the HTTP daemon embedded in earlier versions of TDengine. taosadapter is written in Go. If you pull the latest TDengine source code into an existing codebase, please run 'git submodule update --init --recursive' to fetch the taosadapter source code. Go 1.14 or above is required to compile taosadapter. If you run into problems with 'go mod', especially from within China, you can use a proxy to work around them.
```
go env -w GO111MODULE=on
go env -w GOPROXY=https://goproxy.cn,direct
diff --git a/cmake/define.inc b/cmake/define.inc
index 8d1d3f306febf481140f270f55ef2cd45de01db9..c14f7f65a5c176e9fb4a366d69ea7927f80c082d 100755
--- a/cmake/define.inc
+++ b/cmake/define.inc
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
+CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
IF (TD_ACCOUNT)
diff --git a/cmake/env.inc b/cmake/env.inc
index 5ee0b2983c0394c3e3aad26a622bdd2e6247c4be..1c594cd4be229cf259d76f9612b35fafde46221c 100755
--- a/cmake/env.inc
+++ b/cmake/env.inc
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
+CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
SET(CMAKE_C_STANDARD 11)
diff --git a/cmake/input.inc b/cmake/input.inc
index d18aa56ce1c684cd54286421c975ddf485129cb5..0812711a5824ce0b328374fcdd04fc5f229ad01c 100755
--- a/cmake/input.inc
+++ b/cmake/input.inc
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
+CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
IF (${ACCOUNT} MATCHES "true")
diff --git a/cmake/platform.inc b/cmake/platform.inc
index a78082a1fc62a8ad66c54dcf005e3e15edf5f5f0..e8bf7b0c2a1ee9cb71d43f93cf05fdf48ad12d3b 100755
--- a/cmake/platform.inc
+++ b/cmake/platform.inc
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
+CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
#
diff --git a/cmake/version.inc b/cmake/version.inc
index 2405f84104ebd7597d2e509034847eb78d31aabc..94ff39f5e655d89b16b57a4b8c8fbe275c82a49a 100755
--- a/cmake/version.inc
+++ b/cmake/version.inc
@@ -1,4 +1,4 @@
-CMAKE_MINIMUM_REQUIRED(VERSION 2.8...3.20)
+CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
IF (DEFINED VERNUMBER)
diff --git a/documentation20/cn/00.index/docs.md b/documentation20/cn/00.index/docs.md
index a16154443c96cfd31cbc7c5d4b49caf3ccbeab9e..70a6b7c5281e1a96f8348ff3a3bb81892b80c93c 100644
--- a/documentation20/cn/00.index/docs.md
+++ b/documentation20/cn/00.index/docs.md
@@ -81,6 +81,7 @@ TDengine是一个高效的存储、查询、分析时序大数据的平台,专
* [Node.js Connector](/connector#nodejs):给node应用提供一个连接TDengine服务器的驱动
* [C# Connector](/connector#csharp):给C#应用提供一个连接TDengine服务器的驱动
* [Windows客户端](https://www.taosdata.com/blog/2019/07/26/514.html):自行编译windows客户端,Windows环境的各种连接器都需要它
+* [Rust Connector](/connector/rust): Rust语言下通过libtaos客户端或RESTful接口,连接TDengine服务器。
## [与其他工具的连接](/connections)
diff --git a/documentation20/cn/05.insert/docs.md b/documentation20/cn/05.insert/docs.md
index 9a0e9b388e639d5e6c6e5094682f07a223c01ada..f503bd84a38a29f8d843205058be19cfdd546da2 100644
--- a/documentation20/cn/05.insert/docs.md
+++ b/documentation20/cn/05.insert/docs.md
@@ -27,13 +27,18 @@ INSERT INTO d1001 VALUES (1538548685000, 10.3, 219, 0.31) (1538548695000, 12.6,
- 对同一张表,如果新插入记录的时间戳已经存在,默认情形下(UPDATE=0)新记录将被直接抛弃,也就是说,在一张表里,时间戳必须是唯一的。如果应用自动生成记录,很有可能生成的时间戳是一样的,这样,成功插入的记录条数会小于应用插入的记录条数。如果在创建数据库时使用了 UPDATE 1 选项,插入相同时间戳的新记录将覆盖原有记录。
- 写入的数据的时间戳必须大于当前时间减去配置参数keep的时间。如果keep配置为3650天,那么无法写入比3650天还早的数据。写入数据的时间戳也不能大于当前时间加配置参数days。如果days为2,那么无法写入比当前时间还晚2天的数据。
-## Schemaless 写入
+## 无模式(Schemaless)写入
+**前言**
+
在物联网应用中,常会采集比较多的数据项,用于实现智能控制、业务分析、设备监控等。由于应用逻辑的版本升级,或者设备自身的硬件调整等原因,数据采集项就有可能比较频繁地出现变动。为了在这种情况下方便地完成数据记录工作,TDengine 从 2.2.0.0 版本开始,提供 Schemaless 写入方式,可以免于预先创建超级表/子表的步骤,写入接口会随着数据写入自动创建与数据对应的存储结构。并且在必要时,Schemaless 将自动增加必要的数据列,保证用户写入的数据可以被正确存储。
+
目前,TDengine 的 C/C++ Connector 提供支持 Schemaless 的操作接口,详情请参见 [Schemaless 方式写入接口](https://www.taosdata.com/cn/documentation/connector#schemaless) 章节。本节对 Schemaless 的数据表达格式进行描述。
+
无模式写入方式建立的超级表及其对应的子表与通过 SQL 直接建立的超级表和子表完全没有区别,您也可以通过 SQL 语句直接向其中写入数据。需要注意的是,通过无模式写入方式建立的表,其表名是基于标签值按照固定的映射规则生成的,本身不具有明确含义,可读性较差。
-在物联网应用中,常会采集比较多的数据项,用于实现智能控制、业务分析、设备监控等。由于应用逻辑的版本升级,或者设备自身的硬件调整等原因,数据采集项就有可能比较频繁地出现变动。为了在这种情况下方便地完成数据记录工作,TDengine 从 2.2.0.0 版本开始,提供 Schemaless 写入方式,可以免于预先创建超级表/数据子表,而是随着数据写入,自动创建与数据对应的存储结构。并且在必要时,Schemaless 将自动增加必要的数据列,保证用户写入的数据可以被正确存储。目前,TDengine 的 C/C++ Connector 提供支持 Schemaless 的操作接口,详情请参见 [Schemaless 方式写入接口](https://www.taosdata.com/cn/documentation/connector#schemaless) 章节。这里对 Schemaless 的数据表达格式进行描述。
+**无模式写入行协议**
+
TDengine 的无模式写入的行协议兼容 InfluxDB 的行协议(Line Protocol)、OpenTSDB 的 telnet 行协议、OpenTSDB 的 JSON 格式协议。但是使用这三种协议的时候,需要在 API 中指定输入内容所使用的协议标准。
-### Schemaless 数据行协议
+对于 InfluxDB、OpenTSDB 的标准写入协议,请参考各自的文档。下面首先以 InfluxDB 的行协议为基础,介绍 TDengine 扩展的协议内容,允许用户采用更加精细的方式控制(超级表)模式。
-Schemaless 采用一个字符串来表达最终存储的一个数据行(可以向 Schemaless 写入 API 中一次传入多个字符串来实现多个数据行的批量写入),其格式约定如下:
+Schemaless 采用一个字符串来表达一个数据行(可以向写入 API 中一次传入多行字符串来实现多个数据行的批量写入),其格式约定如下:
```json
measurement,tag_set field_set timestamp
```
@@ -44,70 +49,122 @@ measurement,tag_set field_set timestamp
* field_set 将作为普通列数据,其格式形如 `field_key1=field_value1,field_key2=field_value2`,同样是使用英文逗号来分隔多个普通列的数据。它与 timestamp 之间使用一个半角空格来分隔。
* timestamp 即本行数据对应的主键时间戳。
-在 Schemaless 的数据行协议中,tag_set、field_set 中的每个数据项都需要对自身的数据类型进行描述。具体来说:
+tag_set 中所有的数据自动转化为 NCHAR 数据类型,并不需要使用双引号(")。
+
在无模式写入数据行协议中,field_set 中的每个数据项都需要对自身的数据类型进行描述。具体来说:
* 如果两边有英文双引号,表示 BINARY(32) 类型。例如 `"abc"`。
* 如果两边有英文双引号而且带有 L 前缀,表示 NCHAR(32) 类型。例如 `L"报错信息"`。
* 对空格、等号(=)、逗号(,)、双引号("),前面需要使用反斜杠(\)进行转义。(都指的是英文半角符号)
* 数值类型将通过后缀来区分数据类型:
- - 没有后缀,为 FLOAT 类型;
- - 后缀为 f32,为 FLOAT 类型;
- - 后缀为 f64,为 DOUBLE 类型;
- - 后缀为 i8,表示为 TINYINT (INT8) 类型;
- - 后缀为 i16,表示为 SMALLINT (INT16) 类型;
- - 后缀为 i32,表示为 INT (INT32) 类型;
- - 后缀为 i64,表示为 BIGINT (INT64) 类型;
+
+| **序号** | **后缀** | **映射类型** | **大小(字节)** |
+| ---- | ------------------- | ------------ | -------- |
+| 1 | 无或 f64 | DOUBLE | 8 |
+| 2 | f32 | FLOAT | 4 |
+| 3 | i8 | TINYINT | 1 |
+| 4 | i16 | SMALLINT | 2 |
+| 5 | i32 | INT | 4 |
+| 6 | i64 或 i | BIGINT | 8 |
* t, T, true, True, TRUE, f, F, false, False 将直接作为 BOOL 型来处理。
+
例如如下数据行表示:向名为 st 的超级表下的 t1 标签为 "3"(NCHAR)、t2 标签为 "4"(NCHAR)、t3 标签为 "t3"(NCHAR)的数据子表,写入 c1 列为 3(BIGINT)、c2 列为 false(BOOL)、c3 列为 "passit"(BINARY)、c4 列为 4(DOUBLE)、主键时间戳为 1626006833639000000 的一行数据。
+```json
+st,t1=3,t2=4,t3=t3 c1=3i64,c3="passit",c2=false,c4=4f64 1626006833639000000
+```
+需要注意的是,如果描述数据类型后缀时使用了错误的大小写,或者为数据指定的数据类型有误,均可能引发报错提示而导致数据写入失败。
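+
+例如,下面这行数据(仅为构造的示意)将后缀写成了大写的 "I64",属于错误的大小写,按照上述规则可能引发报错并导致该行写入失败:
+```json
+st,t1=3,t2=4,t3=t3 c1=3I64 1626006833639000000
+```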
-timestamp 位置的时间戳通过后缀来声明时间精度,具体如下:
-* 不带任何后缀的长整数会被当作微秒来处理;
-* 当后缀为 s 时,表示秒时间戳;
-* 当后缀为 ms 时,表示毫秒时间戳;
-* 当后缀为 us 时,表示微秒时间戳;
-* 当后缀为 ns 时,表示纳秒时间戳;
-* 当时间戳为 0 时,表示采用客户端的当前时间(因此,同一批提交的数据中,时间戳 0 会被解释为同一个时间点,于是就有可能导致时间戳重复)。
+### 无模式写入的主要处理逻辑
-例如,如下 Schemaless 数据行表示:向名为 st 的超级表下的 t1 标签为 3(BIGINT 类型)、t2 标签为 4(DOUBLE 类型)、t3 标签为 "t3"(BINARY 类型)的数据子表,写入 c1 列为 3(BIGINT 类型)、c2 列为 false(BOOL 类型)、c3 列为 "passit"(NCHAR 类型)、c4 列为 4(DOUBLE 类型)、主键时间戳为 1626006833639000000(纳秒精度)的一行数据。
+无模式写入按照如下原则来处理行数据:
+1. 当 tag_set 中有 ID 字段时,该字段的值将作为子表的表名。
+2. 没有 ID 字段时,将使用如下规则来生成子表名:
+首先将 measurement 的名称和标签的 key 和 value 组合成如下的字符串:
+```json
+"measurement,tag_key1=tag_value1,tag_key2=tag_value2"
+```
+需要注意的是,这里的 tag_key1、tag_key2 并不是用户输入的标签的原始顺序,而是使用标签名称按照字符串升序排列后的结果。所以,tag_key1 并不一定是行协议中输入的第一个标签。
+排列完成以后计算该字符串的 MD5 散列值 "md5_val",然后将计算结果与固定前缀组合生成表名:"t_md5_val"。其中的 "t_" 是固定的前缀,每个通过该映射关系自动生成的表都具有该前缀(该映射关系的一个简单计算示意,见下面处理逻辑列表之后的示例)。
+
3. 如果解析行协议获得的超级表不存在,则会创建这个超级表。
+
4. 如果解析行协议获得子表不存在,则 Schemaless 会按照步骤 1 或 2 确定的子表名来创建子表。
+
5. 如果数据行中指定的标签列或普通列不存在,则在超级表中增加对应的标签列或普通列(只增不减)。
+
6. 如果超级表中存在一些标签列或普通列未在一个数据行中被指定取值,那么这些列的值在这一行中会被置为 NULL。
+
7. 对 BINARY 或 NCHAR 列,如果数据行中所提供值的长度超出了列类型的限制,自动增加该列允许存储的字符长度上限(只增不减),以保证数据的完整保存。
+
8. 如果指定的数据子表已经存在,而且本次指定的标签列取值跟已保存的值不一样,那么最新的数据行中的值会覆盖旧的标签列取值。
+
9. 整个处理过程中遇到的错误会中断写入过程,并返回错误代码。
+
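+下面用 shell 命令给出上述子表名映射关系的一个简单示意(仅演示计算方式,标签取值沿用上文示例,实际结果以 TDengine 内部实现为准):
+```bash
+# 按标签名升序把 measurement 与标签键值对拼接成字符串,计算 MD5 后加上固定前缀 t_
+key='st,t1=3,t2=4,t3=t3'
+echo "t_$(echo -n "${key}" | md5sum | cut -d ' ' -f 1)"
+```
+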
+**备注:**
+
无模式所有的处理逻辑,仍会遵循 TDengine 对数据结构的底层限制,例如每行数据的总长度不能超过 16k 字节。这方面的具体限制约束请参见 [TAOS SQL 边界限制](https://www.taosdata.com/cn/documentation/taos-sql#limitation) 章节。
+
+**时间分辨率识别**
+
无模式写入过程中支持以下三种协议模式,具体如下:
+
+| **序号** | **值** | **说明** |
+| ---- | ------------------- | ------------ |
+| 1 | SML_LINE_PROTOCOL | InfluxDB行协议(Line Protocol) |
+| 2 | SML_TELNET_PROTOCOL | OpenTSDB文本行协议 |
+| 3 | SML_JSON_PROTOCOL | JSON 协议格式 |
+
+在 SML_LINE_PROTOCOL 解析模式下,需要用户指定输入的时间戳的时间分辨率。可用的时间分辨率如下表所示:
+| **序号** | **时间分辨率定义** | **含义** |
+| ---- | ----------------------------- | --------- |
+| 1 | TSDB_SML_TIMESTAMP_NOT_CONFIGURED | 未定义(无效) |
+| 2 | TSDB_SML_TIMESTAMP_HOURS | 小时 |
+| 3 | TSDB_SML_TIMESTAMP_MINUTES | 分钟 |
+| 4 | TSDB_SML_TIMESTAMP_SECONDS | 秒 |
+| 5 | TSDB_SML_TIMESTAMP_MILLI_SECONDS | 毫秒 |
+| 6 | TSDB_SML_TIMESTAMP_MICRO_SECONDS | 微秒 |
+| 7 | TSDB_SML_TIMESTAMP_NANO_SECONDS | 纳秒 |
+
+在 SML_TELNET_PROTOCOL 和 SML_JSON_PROTOCOL 模式下,根据时间戳的长度来确定时间精度(与 OpenTSDB 标准操作方式相同),此时会忽略用户指定的时间分辨率。
+
+**数据模式变更处理**
+
本节将说明不同的行数据写入情况对数据模式产生的影响。
+
+在使用行协议写入一个类型被明确标识的字段后,如果后续更改该字段的类型定义,会出现明确的数据模式错误,即会触发写入 API 报告错误。如下所示:
```json
-st,t1=3i64,t2=4f64,t3="t3" c1=3i64,c3=L"passit",c2=false,c4=4f64 1626006833639000000ns
+st,t1=3,t2=4,t3=t3 c1=3i64,c3="passit",c2=false,c4=4 1626006833639000000
+st,t1=3,t2=4,t3=t3 c1=3i64,c3="passit",c2=false,c4=4i 1626006833640000000
```
+第一行的数据类型映射将 c4 列定义为 DOUBLE,但是第二行的数据又通过数值后缀方式声明该列为 BIGINT,由此会触发无模式写入的解析错误。
-需要注意的是,如果描述数据类型后缀时使用了错误的大小写,或者为数据指定的数据类型有误,均可能引发报错提示而导致数据写入失败。
+如果前面的行协议将某个数据列声明为 binary 类型,而后续行中该列需要更长的 binary 长度,此时会触发超级表模式的变更。
+```json
+st,t1=3,t2=4,t3=t3 c1=3i64,c5="pass" 1626006833639000000
+st,t1=3,t2=4,t3=t3 c1=3i64,c5="passit" 1626006833640000000
+```
+第一行中行协议解析会声明 c5 列是一个 binary(4) 的字段,第二行数据写入时解析出的 c5 列仍然是 binary 类型,但其宽度为 6,此时需要将 binary 列的宽度增加到能够容纳新字符串的宽度。
+```json
+st,t1=3,t2=4,t3=t3 c1=3i64 1626006833639000000
+st,t1=3,t2=4,t3=t3 c1=3i64,c6="passit" 1626006833640000000
+```
+第二行数据相对于第一行来说增加了一个列 c6,类型为 binary(6)。此时超级表会自动增加一个列 c6,类型为 binary(6)。
-### Schemaless 的处理逻辑
+**写入完整性**
+
TDengine 提供数据写入的幂等性保证,即您可以反复调用 API 进行出错数据的写入操作。但是不提供多行数据写入的原子性保证。即在多行数据一批次写入过程中,会出现部分数据写入成功,部分数据写入失败的情况。
-Schemaless 按照如下原则来处理行数据:
-1. 当 tag_set 中有 ID 字段时,该字段的值将作为数据子表的表名。
-2. 没有 ID 字段时,将使用 `measurement + tag_value1 + tag_value2 + ...` 的 md5 值来作为子表名。
-3. 如果指定的超级表名不存在,则 Schemaless 会创建这个超级表。
-4. 如果指定的数据子表不存在,则 Schemaless 会按照步骤 1 或 2 确定的子表名来创建子表。
-5. 如果数据行中指定的标签列或普通列不存在,则 Schemaless 会在超级表中增加对应的标签列或普通列(只增不减)。
-6. 如果超级表中存在一些标签列或普通列未在一个数据行中被指定取值,那么这些列的值在这一行中会被置为 NULL。
-7. 对 BINARY 或 NCHAR 列,如果数据行中所提供值的长度超出了列类型的限制,那么 Schemaless 会增加该列允许存储的字符长度上限(只增不减),以保证数据的完整保存。
-8. 如果指定的数据子表已经存在,而且本次指定的标签列取值跟已保存的值不一样,那么最新的数据行中的值会覆盖旧的标签列取值。
-9. 整个处理过程中遇到的错误会中断写入过程,并返回错误代码。
+**错误码**
+
如果是无模式写入过程中的数据本身错误,应用会得到 TSDB_CODE_TSC_LINE_SYNTAX_ERROR 错误信息,该错误信息表明错误发生在写入文本中。其他的错误码与原系统一致,可以通过 taos_errstr 获取具体的错误原因。
-**注意:**Schemaless 所有的处理逻辑,仍会遵循 TDengine 对数据结构的底层限制,例如每行数据的总长度不能超过 16k 字节。这方面的具体限制约束请参见 [TAOS SQL 边界限制](https://www.taosdata.com/cn/documentation/taos-sql#limitation) 章节。
+**后续升级计划**
+
当前版本只提供了 C 版本的 API,后续将提供其他高级语言的 API,例如 Java/Go/Python/C# 等。此外,在 TDengine v2.3 及后续版本中,您还可以通过 taosadapter 采用 REST 的方式直接写入无模式数据。
-关于 Schemaless 的字符串编码处理、时区设置等,均会沿用 TAOSC 客户端的设置。
## Prometheus 直接写入
[Prometheus](https://www.prometheus.io/)作为Cloud Native Computing Fundation毕业的项目,在性能监控以及K8S性能监控领域有着非常广泛的应用。TDengine提供一个小工具[Bailongma](https://github.com/taosdata/Bailongma),只需对Prometheus做简单配置,无需任何代码,就可将Prometheus采集的数据直接写入TDengine,并按规则在TDengine自动创建库和相关表项。博文[用Docker容器快速搭建一个Devops监控Demo](https://www.taosdata.com/blog/2020/02/03/1189.html)即是采用Bailongma将Prometheus和Telegraf的数据写入TDengine中的示例,可以参考。
-### 从源代码编译 blm_prometheus
+### 从源代码编译 taosadapter_prometheus
用户需要从github下载[Bailongma](https://github.com/taosdata/Bailongma)的源码,使用Golang语言编译器编译生成可执行文件。在开始编译前,需要准备好以下条件:
- Linux操作系统的服务器
- 安装好Golang,1.14版本以上
- 对应的TDengine版本。因为用到了TDengine的客户端动态链接库,因此需要安装好和服务端相同版本的TDengine程序;比如服务端版本是TDengine 2.0.0,则在Bailongma所在的Linux服务器(可以与TDengine在同一台服务器,或者不同服务器)上也需要安装TDengine 2.0.0版本的客户端。
-Bailongma项目中有一个文件夹blm_prometheus,存放了prometheus的写入API程序。编译过程如下:
+Bailongma项目中有一个文件夹taosadapter_prometheus,存放了prometheus的写入API程序。编译过程如下:
```bash
-cd blm_prometheus
+cd taosadapter_prometheus
go build
```
-一切正常的情况下,就会在对应的目录下生成一个blm_prometheus的可执行程序。
+一切正常的情况下,就会在对应的目录下生成一个taosadapter_prometheus的可执行程序。
### 安装 Prometheus
@@ -118,23 +175,23 @@ go build
参考Prometheus的[配置文档](https://prometheus.io/docs/prometheus/latest/configuration/configuration/),在Prometheus的配置文件中的 remote_write 部分,增加以下配置:
```
- - url: "bailongma API服务提供的URL"(参考下面的blm_prometheus启动示例章节)
+ - url: "bailongma API服务提供的URL"(参考下面的taosadapter_prometheus启动示例章节)
```
启动Prometheus后,可以通过taos客户端查询确认数据是否成功写入。
-### 启动 blm_prometheus 程序
+### 启动 taosadapter_prometheus 程序
-blm_prometheus程序有以下选项,在启动blm_prometheus程序时可以通过设定这些选项来设定blm_prometheus的配置。
+taosadapter_prometheus程序有以下选项,在启动taosadapter_prometheus程序时可以通过设定这些选项来设定taosadapter_prometheus的配置。
```bash
--tdengine-name
如果TDengine安装在一台具备域名的服务器上,也可以通过配置TDengine的域名来访问TDengine。在K8S环境下,可以配置成TDengine所运行的service name。
--batch-size
-blm_prometheus会将收到的prometheus的数据拼装成TDengine的写入请求,这个参数控制一次发给TDengine的写入请求中携带的数据条数。
+taosadapter_prometheus会将收到的prometheus的数据拼装成TDengine的写入请求,这个参数控制一次发给TDengine的写入请求中携带的数据条数。
--dbname
-设置在TDengine中创建的数据库名称,blm_prometheus会自动在TDengine中创建一个以dbname为名称的数据库,缺省值是prometheus。
+设置在TDengine中创建的数据库名称,taosadapter_prometheus会自动在TDengine中创建一个以dbname为名称的数据库,缺省值是prometheus。
--dbuser
设置访问TDengine的用户名,缺省值是'root'。
@@ -143,16 +200,16 @@ blm_prometheus会将收到的prometheus的数据拼装成TDengine的写入请求
设置访问TDengine的密码,缺省值是'taosdata'。
--port
-blm_prometheus对prometheus提供服务的端口号。
+taosadapter_prometheus对prometheus提供服务的端口号。
```
### 启动示例
-通过以下命令启动一个blm_prometheus的API服务
+通过以下命令启动一个taosadapter_prometheus的API服务
```bash
-./blm_prometheus -port 8088
+./taosadapter_prometheus -port 8088
```
-假设blm_prometheus所在服务器的IP地址为"10.1.2.3",则在prometheus的配置文件中部分增加url为
+假设taosadapter_prometheus所在服务器的IP地址为"10.1.2.3",则在prometheus的配置文件的 remote_write 部分增加url为
```yaml
remote_write:
- url: "http://10.1.2.3:8088/receive"
@@ -177,16 +234,16 @@ prometheus产生的数据格式如下:
}
}
```
-其中,apiserver_request_latencies_bucket为prometheus采集的时序数据的名称,后面{}中的为该时序数据的标签。blm_prometheus会以时序数据的名称在TDengine中自动创建一个超级表,并将{}中的标签转换成TDengine的tag值,Timestamp作为时间戳,value作为该时序数据的值。因此在TDengine的客户端中,可以通过以下指令查到这个数据是否成功写入。
+其中,apiserver_request_latencies_bucket为prometheus采集的时序数据的名称,后面{}中的为该时序数据的标签。taosadapter_prometheus会以时序数据的名称在TDengine中自动创建一个超级表,并将{}中的标签转换成TDengine的tag值,Timestamp作为时间戳,value作为该时序数据的值。因此在TDengine的客户端中,可以通过以下指令查到这个数据是否成功写入。
```mysql
use prometheus;
select * from apiserver_request_latencies_bucket;
```
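+
+除了使用 taos 客户端,也可以通过 RESTful 接口验证数据是否成功写入。以下命令仅为示意,假设 RESTful 服务(默认端口 6041,见 FAQ 中的端口说明)地址为 10.1.2.3,用户名和密码为缺省的 root/taosdata,库名与超级表名沿用上文示例:
+```bash
+curl -u root:taosdata \
+  -d "select count(*) from prometheus.apiserver_request_latencies_bucket" \
+  http://10.1.2.3:6041/rest/sql
+```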
-## Telegraf 直接写入(通过 BLM v3)
+## Telegraf 直接写入(通过 taosadapter)
安装 Telegraf 请参考[官方文档](https://portal.influxdata.com/downloads/)。
-TDengine 新版本(2.3.0.0+)包含一个 BLM3 独立程序,负责接收包括 Telegraf 的多种应用的数据写入。
+TDengine 新版本(2.3.0.0+)包含一个 taosadapter 独立程序,负责接收包括 Telegraf 的多种应用的数据写入。
配置方法,在 /etc/telegraf/telegraf.conf 增加如下文字,其中 database name 请填写希望在 TDengine 保存 Telegraf 数据的数据库名,TDengine server/cluster host、username和 password 填写 TDengine 实际值:
```
@@ -206,14 +263,14 @@ sudo systemctl start telegraf
```
即可在 TDengine 中查询 metrics 数据库中 Telegraf 写入的数据。
-BLM v3 相关配置参数请参考 blm3 --help 命令输出以及相关文档。
+taosadapter 相关配置参数请参考 taosadapter --help 命令输出以及相关文档。
-## collectd 直接写入(通过 BLM v3)
+## collectd 直接写入(通过 taosadapter)
安装 collectd,请参考[官方文档](https://collectd.org/download.shtml)。
-TDengine 新版本(2.3.0.0+)包含一个 BLM3 独立程序,负责接收包括 collectd 的多种应用的数据写入。
+TDengine 新版本(2.3.0.0+)包含一个 taosadapter 独立程序,负责接收包括 collectd 的多种应用的数据写入。
-在 /etc/collectd/collectd.conf 文件中增加如下内容,其中 host 和 port 请填写 TDengine 和 BLM3 配置的实际值:
+在 /etc/collectd/collectd.conf 文件中增加如下内容,其中 host 和 port 请填写 TDengine 和 taosadapter 配置的实际值:
```
LoadPlugin network
@@ -224,15 +281,15 @@ LoadPlugin network
```
sudo systemctl start collectd
```
-BLM v3 相关配置参数请参考 blm3 --help 命令输出以及相关文档。
+taosadapter 相关配置参数请参考 taosadapter --help 命令输出以及相关文档。
-## StatsD 直接写入(通过 BLM v3)
+## StatsD 直接写入(通过 taosadapter)
安装 StatsD
请参考[官方文档](https://github.com/statsd/statsd)。
-TDengine 新版本(2.3.0.0+)包含一个 BLM3 独立程序,负责接收包括 StatsD 的多种应用的数据写入。
+TDengine 新版本(2.3.0.0+)包含一个 taosadapter 独立程序,负责接收包括 StatsD 的多种应用的数据写入。
-在 config.js 文件中增加如下内容后启动 StatsD,其中 host 和 port 请填写 TDengine 和 BLM3 配置的实际值:
+在 config.js 文件中增加如下内容后启动 StatsD,其中 host 和 port 请填写 TDengine 和 taosadapter 配置的实际值:
```
backends 部分添加 "./backends/repeater"
repeater 部分添加 { host:'', port: }
@@ -247,16 +304,16 @@ port: 8125
}
```
-BLM v3 相关配置参数请参考 blm3 --help 命令输出以及相关文档。
+taosadapter 相关配置参数请参考 taosadapter --help 命令输出以及相关文档。
-## 使用 Bailongma 2.0 接入 Telegraf 数据写入
+## 使用 Bailongma 2.0 接入 Telegraf 数据写入
-*注意:TDengine 新版本(2.3.0.0+)提供新版本 Bailongma ,命名为 BLM v3,提供更简便的 Telegraf 数据写入以及其他更强大的功能,Bailongma v2 即之前版本将逐步不再维护。
+*注意:TDengine 新版本(2.3.0.0+)提供新版本 Bailongma,命名为 taosadapter,提供更简便的 Telegraf 数据写入以及其他更强大的功能,Bailongma v2 即之前版本将逐步不再维护。*
[Telegraf](https://www.influxdata.com/time-series-platform/telegraf/)是一流行的IT运维数据采集开源工具,TDengine提供一个小工具[Bailongma](https://github.com/taosdata/Bailongma),只需在Telegraf做简单配置,无需任何代码,就可将Telegraf采集的数据直接写入TDengine,并按规则在TDengine自动创建库和相关表项。博文[用Docker容器快速搭建一个Devops监控Demo](https://www.taosdata.com/blog/2020/02/03/1189.html)即是采用bailongma将Prometheus和Telegraf的数据写入TDengine中的示例,可以参考。
-### 从源代码编译 blm_telegraf
+### 从源代码编译 taosadapter_telegraf
用户需要从github下载[Bailongma](https://github.com/taosdata/Bailongma)的源码,使用Golang语言编译器编译生成可执行文件。在开始编译前,需要准备好以下条件:
@@ -264,14 +321,14 @@ BLM v3 相关配置参数请参考 blm3 --help 命令输出以及相关文档。
- 安装好Golang,1.10版本以上
- 对应的TDengine版本。因为用到了TDengine的客户端动态链接库,因此需要安装好和服务端相同版本的TDengine程序;比如服务端版本是TDengine 2.0.0,则在Bailongma所在的Linux服务器(可以与TDengine在同一台服务器,或者不同服务器)上也需要安装TDengine 2.0.0版本的客户端。
-Bailongma项目中有一个文件夹blm_telegraf,存放了Telegraf的写入API程序。编译过程如下:
+Bailongma项目中有一个文件夹taosadapter_telegraf,存放了Telegraf的写入API程序。编译过程如下:
```bash
-cd blm_telegraf
+cd taosadapter_telegraf
go build
```
-一切正常的情况下,就会在对应的目录下生成一个blm_telegraf的可执行程序。
+一切正常的情况下,就会在对应的目录下生成一个taosadapter_telegraf的可执行程序。
### 安装 Telegraf
@@ -294,19 +351,19 @@ go build
关于如何使用Telegraf采集数据以及更多有关使用Telegraf的信息,请参考Telegraf官方的[文档](https://docs.influxdata.com/telegraf/v1.11/)。
-### 启动 blm_telegraf 程序
+### 启动 taosadapter_telegraf 程序
-blm_telegraf程序有以下选项,在启动blm_telegraf程序时可以通过设定这些选项来设定blm_telegraf的配置。
+taosadapter_telegraf程序有以下选项,在启动taosadapter_telegraf程序时可以通过设定这些选项来设定taosadapter_telegraf的配置。
```bash
--host
TDengine服务端的IP地址,缺省值为空。
--batch-size
-blm_telegraf会将收到的telegraf的数据拼装成TDengine的写入请求,这个参数控制一次发给TDengine的写入请求中携带的数据条数。
+taosadapter_telegraf会将收到的telegraf的数据拼装成TDengine的写入请求,这个参数控制一次发给TDengine的写入请求中携带的数据条数。
--dbname
-设置在TDengine中创建的数据库名称,blm_telegraf会自动在TDengine中创建一个以dbname为名称的数据库,缺省值是prometheus。
+设置在TDengine中创建的数据库名称,taosadapter_telegraf会自动在TDengine中创建一个以dbname为名称的数据库,缺省值是prometheus。
--dbuser
设置访问TDengine的用户名,缺省值是'root'。
@@ -315,17 +372,17 @@ blm_telegraf会将收到的telegraf的数据拼装成TDengine的写入请求,
设置访问TDengine的密码,缺省值是'taosdata'。
--port
-blm_telegraf对telegraf提供服务的端口号。
+taosadapter_telegraf对telegraf提供服务的端口号。
```
### 启动示例
-通过以下命令启动一个blm_telegraf的API服务:
+通过以下命令启动一个taosadapter_telegraf的API服务:
```bash
-./blm_telegraf -host 127.0.0.1 -port 8089
+./taosadapter_telegraf -host 127.0.0.1 -port 8089
```
-假设blm_telegraf所在服务器的IP地址为"10.1.2.3",则在telegraf的配置文件中, 在output plugins部分,增加[[outputs.http]]配置项:
+假设taosadapter_telegraf所在服务器的IP地址为"10.1.2.3",则在telegraf的配置文件中, 在output plugins部分,增加[[outputs.http]]配置项:
```yaml
url = "http://10.1.2.3:8089/telegraf"
@@ -358,7 +415,7 @@ telegraf产生的数据格式如下:
}
```
-其中,name字段为telegraf采集的时序数据的名称,tags字段为该时序数据的标签。blm_telegraf会以时序数据的名称在TDengine中自动创建一个超级表,并将tags字段中的标签转换成TDengine的tag值,timestamp作为时间戳,fields字段中的值作为该时序数据的值。因此在TDengine的客户端中,可以通过以下指令查到这个数据是否成功写入。
+其中,name字段为telegraf采集的时序数据的名称,tags字段为该时序数据的标签。taosadapter_telegraf会以时序数据的名称在TDengine中自动创建一个超级表,并将tags字段中的标签转换成TDengine的tag值,timestamp作为时间戳,fields字段中的值作为该时序数据的值。因此在TDengine的客户端中,可以通过以下指令查到这个数据是否成功写入。
```mysql
use telegraf;
diff --git a/documentation20/cn/08.connector/02.rust/docs.md b/documentation20/cn/08.connector/02.rust/docs.md
new file mode 100644
index 0000000000000000000000000000000000000000..01d4087e3acf2eed2dbea207d6d48ff360b5aece
--- /dev/null
+++ b/documentation20/cn/08.connector/02.rust/docs.md
@@ -0,0 +1,110 @@
+# Rust 连接器
+
+ 
+
+> Rust 连接器仍然在快速开发中,1.0 之前版本间的 API 变动在所难免,也无法保证向后兼容,请使用时注意版本及对应的文档。
+
+感谢 [@songtianyi](https://github.com/songtianyi) 对 [libtdengine](https://github.com/songtianyi/tdengine-rust-bindings) 的贡献,使 Rust 社区能够使用 Rust 连接 [TDengine]。[libtaos-rs] 项目旨在为 Rust 开发者提供官方支持,基于 taosc 接口及 HTTP 接口构建兼容的 API,以便于用户在两种接口方式之间切换。
+
+## 依赖
+
+- [Rust](https://www.rust-lang.org/learn/get-started)
+
+默认情况下,[libtaos-rs] 使用 C 接口连接数据库,所以您需要:
+
+- [TDengine] [客户端](https://www.taosdata.com/cn/getting-started/#%E9%80%9A%E8%BF%87%E5%AE%89%E8%A3%85%E5%8C%85%E5%AE%89%E8%A3%85)
+- `clang`: `bindgen` 使用 libclang 的 AST 来生成对应的 Rust 绑定。
+
+## 特性列表
+
+- [x] C 接口的Rust绑定
+- [x] 使用 `rest` feature 来启用 RESTful API.
+- [x] [r2d2] 连接池支持(feature `r2d2`)
+- [ ] 迭代器接口
+- [ ] 流式计算接口
+- [ ] 订阅支持
+
+## 构建和测试
+
+```sh
+cargo build
+cargo test
+```
+
+测试使用默认用户名密码和本地连接。您可以根据具体情况设置环境变量:
+
+- `TEST_TAOS_IP`
+- `TEST_TAOS_PORT`
+- `TEST_TAOS_USER`
+- `TEST_TAOS_PASS`
+- `TEST_TAOS_DB`
+
+## 使用
+
+使用默认的taosc 连接方式,可以在 `Cargo.toml` 中直接添加 `libtaos` 依赖:
+
+```toml
+[dependencies]
+libtaos = "v0.3.8"
+```
+
+添加 feature `r2d2` 来启用连接池:
+
+```toml
+[dependencies]
+libtaos = { version = "*", features = ["r2d2"] }
+```
+
+对于RESTful接口,可使用 `rest` 特性来替代taosc,免去安装TDengine客户端。
+
+```toml
+[dependencies]
+libtaos = { version = "*", features = ["rest"] }
+```
+
+本项目提供一个[示例程序](https://github.com/taosdata/libtaos-rs/blob/main/examples/demo.rs),如下:
+
+```rust
+// ...
+#[tokio::main]
+async fn main() -> Result<(), Error> {
+ init();
+ let taos = taos_connect()?;
+
+ assert_eq!(
+ taos.query("drop database if exists demo").await.is_ok(),
+ true
+ );
+ assert_eq!(taos.query("create database demo").await.is_ok(), true);
+ assert_eq!(taos.query("use demo").await.is_ok(), true);
+ assert_eq!(
+ taos.query("create table m1 (ts timestamp, speed int)")
+ .await
+ .is_ok(),
+ true
+ );
+
+ for i in 0..10i32 {
+ assert_eq!(
+ taos.query(format!("insert into m1 values (now+{}s, {})", i, i).as_str())
+ .await
+ .is_ok(),
+ true
+ );
+ }
+ let rows = taos.query("select * from m1").await?;
+
+ println!("{}", rows.column_meta.into_iter().map(|col| col.name).join(","));
+ for row in rows.rows {
+ println!("{}", row.into_iter().join(","));
+ }
+ Ok(())
+}
+```
+
+您可以在 [bailongma-rs] 中看到如何在具体应用中使用 Rust 连接器,它是一个用 Rust 编写的 Prometheus 远程存储 API 适配器。
+
+[libtaos-rs]: https://github.com/taosdata/libtaos-rs
+[TDengine]: https://github.com/taosdata/TDengine
+[bailongma-rs]: https://github.com/taosdata/bailongma-rs
+[r2d2]: https://crates.io/crates/r2d2
\ No newline at end of file
diff --git a/documentation20/cn/12.taos-sql/docs.md b/documentation20/cn/12.taos-sql/docs.md
index 64020208abe45d589058414fb123d1616c67f2c7..4ba496d575e0f680c2dbd2820d3dfc062c56cb1c 100644
--- a/documentation20/cn/12.taos-sql/docs.md
+++ b/documentation20/cn/12.taos-sql/docs.md
@@ -729,17 +729,17 @@ Query OK, 1 row(s) in set (0.001091s)
| **Operation** | **Note** | **Applicable Data Types** |
| ------------- | ------------------------ | ----------------------------------------- |
-| > | larger than | **`timestamp`** and all numeric types |
-| < | smaller than | **`timestamp`** and all numeric types |
-| >= | larger than or equal to | **`timestamp`** and all numeric types |
-| <= | smaller than or equal to | **`timestamp`** and all numeric types |
+| > | larger than | all types except bool |
+| < | smaller than | all types except bool |
+| >= | larger than or equal to | all types except bool |
+| <= | smaller than or equal to | all types except bool |
| = | equal to | all types |
| <> | not equal to | all types |
| is [not] null | is null or is not null | all types |
-| between and | within a certain range | **`timestamp`** and all numeric types |
+| between and | within a certain range | all types except bool |
| in | match any value in a set | all types except first column `timestamp` |
| like | match a wildcard string | **`binary`** **`nchar`** |
-| match/nmatch | filter regex | **regex** |
+| match/nmatch | filter regex | **`binary`** **`nchar`** |
1. <> 算子也可以写为 != ,请注意,这个算子不能用于数据表第一列的 timestamp 字段。
2. like 算子使用通配符字符串进行匹配检查。
@@ -766,15 +766,10 @@ Query OK, 1 row(s) in set (0.001091s)
**使用限制**
- 只能针对表名(即 tbname 筛选)和标签的名称和binary类型标签值 进行正则表达式过滤,不支持针对普通列使用正则表达式过滤。
-
- 只能在 WHERE 子句中作为过滤条件存在。
+ 只能针对表名(即 tbname 筛选)、binary/nchar类型标签值进行正则表达式过滤,不支持普通列的过滤。
正则匹配字符串长度不能超过 128 字节。可以通过参数 *maxRegexStringLen* 设置和调整最大允许的正则匹配字符串,该参数是客户端配置参数,需要重启才能生效。
- **嵌套查询支持**
-
- 可以在内层查询和外层查询中使用。
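+
+下面给出一个按表名进行正则过滤的简单示意(假设存在超级表 st,且其子表名形如 d0、d1 等,表名仅为举例):
+```bash
+# 通过 taos 客户端执行带 match 过滤的查询(示意)
+taos -s "select tbname from st where tbname match '^d[0-9]+'"
+```
+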
### JOIN 子句
diff --git a/documentation20/cn/13.faq/docs.md b/documentation20/cn/13.faq/docs.md
index 7483c972eebe26d0b010724ea699cd94906f382c..eb5f20e708bb4bb592a1ab2d535fcf261457b989 100644
--- a/documentation20/cn/13.faq/docs.md
+++ b/documentation20/cn/13.faq/docs.md
@@ -185,23 +185,23 @@ TDengine 中时间戳的时区总是由客户端进行处理,而与服务端
| TCP | 6041 | 客户端与服务端之间的 RESTful 通讯。 | 随 serverPort 端口变化。 |
| TCP | 6042 | Arbitrator 的服务端口。 | 随 Arbitrator 启动参数设置变化。 |
| TCP | 6043 | TaosKeeper 监控服务端口。 | 随 TaosKeeper 启动参数设置变化。 |
-| TCP | 6044 | 支持 StatsD 的数据接入端口。 | 随 BLM3 启动参数设置变化(2.3.0.1+以上版本)。 |
-| TCP | 6045 | 支持 collectd 数据接入端口。 | 随 BLM3 启动参数设置变化(2.3.0.1+以上版本)。 |
+| TCP | 6044 | 支持 StatsD 的数据接入端口。 | 随 taosadapter 启动参数设置变化(2.3.0.1+以上版本)。 |
+| TCP | 6045 | 支持 collectd 数据接入端口。 | 随 taosadapter 启动参数设置变化(2.3.0.1+以上版本)。 |
| TCP | 6060 | 企业版内 Monitor 服务的网络端口。 | |
| UDP | 6030-6034 | 客户端与服务端之间通讯。 | 随 serverPort 端口变化。 |
| UDP | 6035-6039 | 多节点集群的节点间通讯。 | 随 serverPort 端口变化。 |
## 20. go 语言编写组件编译失败怎样解决?
-新版本 TDengine 2.3.0.0 包含一个使用 go 语言开发的 BLM3 组件,取代之前内置的 httpd ,提供包含原 httpd 功能以及支持多种其他软件(Prometheus、Telegraf、collectd、StatsD等)的数据接入功能。
-使用最新 develop 分支代码编译需要先 `git submodule update --init --recursive` 下载 blm3 仓库代码后再编译。
+新版本 TDengine 2.3.0.0 包含一个使用 go 语言开发的 taosadapter 组件,取代之前内置的 httpd ,提供包含原 httpd 功能以及支持多种其他软件(Prometheus、Telegraf、collectd、StatsD等)的数据接入功能。
+使用最新 develop 分支代码编译需要先 `git submodule update --init --recursive` 下载 taosadapter 仓库代码后再编译。
-目前编译方式默认自动编译 blm3。go 语言版本要求 1.14 以上,如果发生 go 编译错误,往往是国内访问 go mod 问题,可以通过设置 go 环境变量来解决:
+目前编译方式默认自动编译 taosadapter。go 语言版本要求 1.14 以上,如果发生 go 编译错误,往往是国内访问 go mod 问题,可以通过设置 go 环境变量来解决:
```sh
go env -w GO111MODULE=on
go env -w GOPROXY=https://goproxy.cn,direct
```
-如果希望继续使用之前的内置 httpd,可以关闭 blm3 编译,使用
+如果希望继续使用之前的内置 httpd,可以关闭 taosadapter 编译,使用
`cmake .. -DBUILD_HTTP=true` 使用原来内置的 httpd。
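+
+一个默认包含 taosadapter 的完整编译流程示意如下(与 README 中的编译步骤一致,目录名仅为示例):
+```sh
+git submodule update --init --recursive   # 拉取 taosadapter 等子模块源码
+mkdir -p debug && cd debug
+cmake .. && cmake --build .               # 默认会一并编译 taosadapter
+```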
diff --git a/documentation20/en/00.index/docs.md b/documentation20/en/00.index/docs.md
index 258b2f718feb87a2fa8d92b17a403919ac2e8f56..0cb6612700a9211ee30fc51fed8a3b3fa77f3342 100644
--- a/documentation20/en/00.index/docs.md
+++ b/documentation20/en/00.index/docs.md
@@ -77,6 +77,7 @@ TDengine is a highly efficient platform to store, query, and analyze time-series
- [Node.js Connector](/connector#nodejs): driver for connecting to TDengine server from Node.js applications
- [C# Connector](/connector#csharp): driver for connecting to TDengine server from C# applications
- [Windows Client](https://www.taosdata.com/blog/2019/07/26/514.html): compile your own Windows client, which is required by various connectors on the Windows environment
+- [Rust Connector](/connector/rust): A taosc/RESTful API based TDengine client for Rust
## [Connections with Other Tools](/connections)
diff --git a/documentation20/en/08.connector/02.rust/docs.md b/documentation20/en/08.connector/02.rust/docs.md
new file mode 100644
index 0000000000000000000000000000000000000000..235a7d074955024989f60e6e689aead276f184f2
--- /dev/null
+++ b/documentation20/en/08.connector/02.rust/docs.md
@@ -0,0 +1,114 @@
+# Rust Connector
+
+ 
+
+> Note that the Rust connector is under active development and the APIs may change significantly between versions. We promise to ensure backward compatibility after version 1.0.
+
+Thanks to [@songtianyi](https://github.com/songtianyi) for [libtdengine](https://github.com/songtianyi/tdengine-rust-bindings), a Rust bindings project for [TDengine]. [libtaos-rs] is a new design for a [TDengine] Rust client based on the C interface or the REST API. It will provide Rust-idiomatic APIs and all the usual Rust things (async, streams, iterators, and so on).
+
+## Dependencies
+
+- [Rust](https://www.rust-lang.org/learn/get-started) of course.
+
+If you use the default features, it will depend on:
+
+- [TDengine] Client library and headers.
+- `clang`, because `bindgen` requires the clang AST library.
+
+## Features
+
+Implemented and planned features:
+
+- [x] Rust bindings for the C interface
+- [x] REST API support by feature `rest`.
+- [x] [r2d2] Pool support by feature `r2d2`
+- [ ] Iterators for fields fetching
+- [ ] Stream support
+- [ ] Subscribe support
+
+## Build and test
+
+```sh
+cargo build
+cargo test
+```
+
+`cargo test` uses the default TDengine user and password and connects to localhost (the TDengine defaults).
+
+Set the following environment variables if your setup differs from the defaults:
+
+- `TEST_TAOS_IP`
+- `TEST_TAOS_PORT`
+- `TEST_TAOS_USER`
+- `TEST_TAOS_PASS`
+- `TEST_TAOS_DB`
+
+## Usage
+
+For the default C-based client API, add the dependency in `Cargo.toml`:
+
+```toml
+[dependencies]
+libtaos = "v0.3.8"
+```
+
+For r2d2 support:
+
+```toml
+[dependencies]
+libtaos = { version = "*", features = ["r2d2"] }
+```
+
+For REST client:
+
+```toml
+[dependencies]
+libtaos = { version = "*", features = ["rest"] }
+```
+
+There is a [demo app](https://github.com/taosdata/libtaos-rs/blob/main/examples/demo.rs) in the examples directory; it looks like this:
+
+```rust
+// ...
+#[tokio::main]
+async fn main() -> Result<(), Error> {
+ init();
+ let taos = taos_connect()?;
+
+ assert_eq!(
+ taos.query("drop database if exists demo").await.is_ok(),
+ true
+ );
+ assert_eq!(taos.query("create database demo").await.is_ok(), true);
+ assert_eq!(taos.query("use demo").await.is_ok(), true);
+ assert_eq!(
+ taos.query("create table m1 (ts timestamp, speed int)")
+ .await
+ .is_ok(),
+ true
+ );
+
+ for i in 0..10i32 {
+ assert_eq!(
+ taos.query(format!("insert into m1 values (now+{}s, {})", i, i).as_str())
+ .await
+ .is_ok(),
+ true
+ );
+ }
+ let rows = taos.query("select * from m1").await?;
+
+ println!("{}", rows.column_meta.into_iter().map(|col| col.name).join(","));
+ for row in rows.rows {
+ println!("{}", row.into_iter().join(","));
+ }
+ Ok(())
+}
+```
+
+You can check out the experimental [bailongma-rs](https://github.com/taosdata/bailongma-rs), a Prometheus remote storage API adapter for TDengine written in Rust, as a more complete code example.
+
+[libtaos-rs]: https://github.com/taosdata/libtaos-rs
+[TDengine]: https://github.com/taosdata/TDengine
+[bailongma-rs]: https://github.com/taosdata/bailongma-rs
+[r2d2]: https://crates.io/crates/r2d2
\ No newline at end of file
diff --git a/packaging/check_package.sh b/packaging/check_package.sh
index 9728f9b964732195970708fbf9fb61361768143b..0870e8c8eccc1a745ae5b081e2726ed8d809cf2b 100755
--- a/packaging/check_package.sh
+++ b/packaging/check_package.sh
@@ -142,11 +142,11 @@ function check_main_path() {
function check_bin_path() {
# check install bin dir and all sub dir
- bin_dir=("taos" "taosd" "blm3" "taosdemo" "taosdump" "remove.sh" "tarbitrator" "set_core.sh")
+ bin_dir=("taos" "taosd" "taosadapter" "taosdemo" "taosdump" "remove.sh" "tarbitrator" "set_core.sh")
for i in "${bin_dir[@]}";do
check_file ${sbin_dir} $i
done
- lbin_dir=("taos" "taosd" "blm3" "taosdemo" "taosdump" "rmtaos" "tarbitrator" "set_core")
+ lbin_dir=("taos" "taosd" "taosadapter" "taosdemo" "taosdump" "rmtaos" "tarbitrator" "set_core")
for i in "${lbin_dir[@]}";do
check_link ${bin_link_dir}/$i
done
@@ -177,11 +177,11 @@ function check_header_path() {
echo -e "Check bin path:\033[32mOK\033[0m!"
}
-function check_blm3_config_dir() {
+function check_taosadapter_config_dir() {
# check all config
- check_file ${cfg_install_dir} blm.toml
- check_file ${cfg_install_dir} blm3.service
- check_file ${install_main_dir}/cfg blm.toml.org
+ check_file ${cfg_install_dir} taosadapter.toml
+ check_file ${cfg_install_dir} taosadapter.service
+ check_file ${install_main_dir}/cfg taosadapter.toml.org
echo -e "Check conf path:\033[32mOK\033[0m!"
}
@@ -222,7 +222,7 @@ function test_TDengine() {
check_lib_path
check_header_path
check_config_dir
- check_blm3_config_dir
+ check_taosadapter_config_dir
check_log_path
check_data_path
result=`taos -s 'create database test ;create table test.tt(ts timestamp ,i int);insert into test.tt values(now,11);select * from test.tt' 2>&1 ||:`
diff --git a/packaging/deb/DEBIAN/preinst b/packaging/deb/DEBIAN/preinst
index aaa052639ba5a95884accdf9c09a9351a0400cc5..4b8b72e9abd9e12d9f669cf5658be2468ebab40b 100644
--- a/packaging/deb/DEBIAN/preinst
+++ b/packaging/deb/DEBIAN/preinst
@@ -28,12 +28,12 @@ if [ -f "${install_main_dir}/taos.cfg" ]; then
${csudo} rm -f ${install_main_dir}/cfg/taos.cfg || :
fi
-if [ -f "${install_main_dir}/blm.toml" ]; then
- ${csudo} rm -f ${install_main_dir}/cfg/blm.toml || :
+if [ -f "${install_main_dir}/taosadapter.toml" ]; then
+ ${csudo} rm -f ${install_main_dir}/cfg/taosadapter.toml || :
fi
-if [ -f "${install_main_dir}/blm3.service" ]; then
- ${csudo} rm -f ${install_main_dir}/cfg/blm3.service || :
+if [ -f "${install_main_dir}/taosadapter.service" ]; then
+ ${csudo} rm -f ${install_main_dir}/cfg/taosadapter.service || :
fi
# there can not libtaos.so*, otherwise ln -s error
diff --git a/packaging/deb/DEBIAN/prerm b/packaging/deb/DEBIAN/prerm
index e2043ba54cef0db4f4fd729f2c2285c342b6b109..235834a747e82886eef6c4540877307aa4dd3996 100644
--- a/packaging/deb/DEBIAN/prerm
+++ b/packaging/deb/DEBIAN/prerm
@@ -25,7 +25,7 @@ else
# Remove all links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
- ${csudo} rm -f ${bin_link_dir}/blm3 || :
+ ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${cfg_link_dir}/* || :
diff --git a/packaging/deb/makedeb.sh b/packaging/deb/makedeb.sh
index cefdcdb1f1aab081ac286ecf199539abd7fcfa3b..f753668b3b1a83d15c126ae6b0d94c06e97c80aa 100755
--- a/packaging/deb/makedeb.sh
+++ b/packaging/deb/makedeb.sh
@@ -44,11 +44,11 @@ mkdir -p ${pkg_dir}${install_home_path}/init.d
mkdir -p ${pkg_dir}${install_home_path}/script
cp ${compile_dir}/../packaging/cfg/taos.cfg ${pkg_dir}${install_home_path}/cfg
-if [ -f "${compile_dir}/test/cfg/blm.toml" ]; then
- cp ${compile_dir}/test/cfg/blm.toml ${pkg_dir}${install_home_path}/cfg
+if [ -f "${compile_dir}/test/cfg/taosadapter.toml" ]; then
+ cp ${compile_dir}/test/cfg/taosadapter.toml ${pkg_dir}${install_home_path}/cfg
fi
-if [ -f "${compile_dir}/test/cfg/blm3.service" ]; then
- cp ${compile_dir}/test/cfg/blm3.service ${pkg_dir}${install_home_path}/cfg ||:
+if [ -f "${compile_dir}/test/cfg/taosadapter.service" ]; then
+ cp ${compile_dir}/test/cfg/taosadapter.service ${pkg_dir}${install_home_path}/cfg ||:
fi
cp ${compile_dir}/../packaging/deb/taosd ${pkg_dir}${install_home_path}/init.d
@@ -62,8 +62,8 @@ cp ${compile_dir}/build/bin/taosdemo ${pkg_dir}${install_home_pat
cp ${compile_dir}/build/bin/taosdump ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/bin/taosd ${pkg_dir}${install_home_path}/bin
-if [ -f "${compile_dir}/build/bin/blm3" ]; then
- cp ${compile_dir}/build/bin/blm3 ${pkg_dir}${install_home_path}/bin ||:
+if [ -f "${compile_dir}/build/bin/taosadapter" ]; then
+ cp ${compile_dir}/build/bin/taosadapter ${pkg_dir}${install_home_path}/bin ||:
fi
cp ${compile_dir}/build/bin/taos ${pkg_dir}${install_home_path}/bin
diff --git a/packaging/deb/taosd b/packaging/deb/taosd
index a14e61ac8cfb67b970ee89a2fd4cda9d7937b23f..5002607da20b621ca69a8a2a25e713879d0308af 100644
--- a/packaging/deb/taosd
+++ b/packaging/deb/taosd
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/bash
#
# Modified from original source: Elastic Search
# https://github.com/elasticsearch/elasticsearch
@@ -25,7 +25,7 @@ GROUP="root"
DAEMON="/usr/local/taos/bin/taosd"
DAEMON_OPTS=""
-HTTPD_NAME="blm3"
+HTTPD_NAME="taosadapter"
DAEMON_HTTPD_NAME=$HTTPD_NAME
DAEMON_HTTPD="/usr/local/taos/bin/$HTTPD_NAME"
diff --git a/packaging/rpm/tdengine.spec b/packaging/rpm/tdengine.spec
index 85aa7e072476b089352d3e5da4d2abc801d8e24b..f7b8462dbedc74a270a8560bb51a853e292cff27 100644
--- a/packaging/rpm/tdengine.spec
+++ b/packaging/rpm/tdengine.spec
@@ -54,11 +54,11 @@ mkdir -p %{buildroot}%{homepath}/init.d
mkdir -p %{buildroot}%{homepath}/script
cp %{_compiledir}/../packaging/cfg/taos.cfg %{buildroot}%{homepath}/cfg
-if [ -f %{_compiledir}/test/cfg/blm.toml ]; then
- cp %{_compiledir}/test/cfg/blm.toml %{buildroot}%{homepath}/cfg
+if [ -f %{_compiledir}/test/cfg/taosadapter.toml ]; then
+ cp %{_compiledir}/test/cfg/taosadapter.toml %{buildroot}%{homepath}/cfg
fi
-if [ -f %{_compiledir}/test/cfg/blm3.service ]; then
- cp %{_compiledir}/test/cfg/blm3.service %{buildroot}%{homepath}/cfg
+if [ -f %{_compiledir}/test/cfg/taosadapter.service ]; then
+ cp %{_compiledir}/test/cfg/taosadapter.service %{buildroot}%{homepath}/cfg
fi
cp %{_compiledir}/../packaging/rpm/taosd %{buildroot}%{homepath}/init.d
cp %{_compiledir}/../packaging/tools/post.sh %{buildroot}%{homepath}/script
@@ -68,8 +68,8 @@ cp %{_compiledir}/../packaging/tools/set_core.sh %{buildroot}%{homepath}/bin
cp %{_compiledir}/../packaging/tools/taosd-dump-cfg.gdb %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taos %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taosd %{buildroot}%{homepath}/bin
-if [ -f %{_compiledir}/build/bin/blm3 ]; then
- cp %{_compiledir}/build/bin/blm3 %{buildroot}%{homepath}/bin ||:
+if [ -f %{_compiledir}/build/bin/taosadapter ]; then
+ cp %{_compiledir}/build/bin/taosadapter %{buildroot}%{homepath}/bin ||:
fi
cp %{_compiledir}/build/bin/taosdemo %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taosdump %{buildroot}%{homepath}/bin
@@ -158,17 +158,12 @@ if pidof taosd &> /dev/null; then
fi
# if taos.cfg already exist, remove it
if [ -f %{cfg_install_dir}/taos.cfg ]; then
- ${csudo} rm -f %{homepath}/cfg/taos.cfg || :
+ ${csudo} rm -f %{cfg_install_dir}/cfg/taos.cfg || :
fi
-# if blm.toml already exist, remove it
-if [ -f %{cfg_install_dir}/blm.toml ]; then
- ${csudo} rm -f %{homepath}/cfg/blm.toml || :
-fi
-
-# if blm3.service already softlink, remove it
-if [ -f %{cfg_install_dir}/blm3.service ]; then
- ${csudo} rm -f %{homepath}/cfg/blm3.service || :
+# if taosadapter.toml already exist, remove it
+if [ -f %{cfg_install_dir}/taosadapter.toml ]; then
+ ${csudo} rm -f %{cfg_install_dir}/cfg/taosadapter.toml || :
fi
# there can not libtaos.so*, otherwise ln -s error
@@ -209,7 +204,7 @@ if [ $1 -eq 0 ];then
# Remove all links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
- ${csudo} rm -f ${bin_link_dir}/blm3 || :
+ ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${cfg_link_dir}/* || :
diff --git a/packaging/tools/install.sh b/packaging/tools/install.sh
index 33097bd5411f0fc0239b7e571a69de4a6f8408fc..61fcd3e51982dab6a72245fe0ffb9de5ac51a664 100755
--- a/packaging/tools/install.sh
+++ b/packaging/tools/install.sh
@@ -185,7 +185,7 @@ function install_bin() {
# Remove links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
- ${csudo} rm -f ${bin_link_dir}/blm3 || :
+ ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/rmtaos || :
@@ -197,7 +197,7 @@ function install_bin() {
#Make link
[ -x ${install_main_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || :
[ -x ${install_main_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || :
- [ -x ${install_main_dir}/bin/blm3 ] && ${csudo} ln -s ${install_main_dir}/bin/blm3 ${bin_link_dir}/blm3 || :
+ [ -x ${install_main_dir}/bin/taosadapter ] && ${csudo} ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter || :
[ -x ${install_main_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || :
[ -x ${install_main_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || :
[ -x ${install_main_dir}/bin/remove.sh ] && ${csudo} ln -s ${install_main_dir}/bin/remove.sh ${bin_link_dir}/rmtaos || :
@@ -447,18 +447,18 @@ function local_fqdn_check() {
fi
}
-function install_blm3_config() {
- if [ ! -f "${cfg_install_dir}/blm.toml" ]; then
+function install_taosadapter_config() {
+ if [ ! -f "${cfg_install_dir}/taosadapter.toml" ]; then
${csudo} mkdir -p ${cfg_install_dir}
- [ -f ${script_dir}/cfg/blm.toml ] && ${csudo} cp ${script_dir}/cfg/blm.toml ${cfg_install_dir}
- [ -f ${cfg_install_dir}/blm.toml ] && ${csudo} chmod 644 ${cfg_install_dir}/blm.toml
+ [ -f ${script_dir}/cfg/taosadapter.toml ] && ${csudo} cp ${script_dir}/cfg/taosadapter.toml ${cfg_install_dir}
+ [ -f ${cfg_install_dir}/taosadapter.toml ] && ${csudo} chmod 644 ${cfg_install_dir}/taosadapter.toml
fi
- [ -f ${script_dir}/cfg/blm.toml ] &&
- ${csudo} cp -f ${script_dir}/cfg/blm.toml ${install_main_dir}/cfg/blm.toml.org
+ [ -f ${script_dir}/cfg/taosadapter.toml ] &&
+ ${csudo} cp -f ${script_dir}/cfg/taosadapter.toml ${cfg_install_dir}/taosadapter.toml.new
- [ -f ${cfg_install_dir}/blm.toml ] &&
- ${csudo} ln -s ${cfg_install_dir}/blm.toml ${install_main_dir}/cfg/blm.toml
+ [ -f ${cfg_install_dir}/taosadapter.toml ] &&
+ ${csudo} ln -s ${cfg_install_dir}/taosadapter.toml ${install_main_dir}/cfg/taosadapter.toml
[ ! -z $1 ] && return 0 || : # only install client
@@ -473,7 +473,7 @@ function install_config() {
${csudo} chmod 644 ${cfg_install_dir}/*
fi
- ${csudo} cp -f ${script_dir}/cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org
+ ${csudo} cp -f ${script_dir}/cfg/taos.cfg ${cfg_install_dir}/taos.cfg.new
${csudo} ln -s ${cfg_install_dir}/taos.cfg ${install_main_dir}/cfg
[ ! -z $1 ] && return 0 || : # only install client
@@ -679,8 +679,8 @@ function install_service_on_systemd() {
taosd_service_config="${service_config_dir}/taosd.service"
${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}"
- ${csudo} bash -c "echo 'After=network-online.target blm3.service' >> ${taosd_service_config}"
- ${csudo} bash -c "echo 'Wants=network-online.target blm3.service' >> ${taosd_service_config}"
+ ${csudo} bash -c "echo 'After=network-online.target taosadapter.service' >> ${taosd_service_config}"
+ ${csudo} bash -c "echo 'Wants=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo >> ${taosd_service_config}"
${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}"
@@ -756,9 +756,9 @@ function install_service_on_systemd() {
fi
}
-function install_blm3_service() {
- [ -f ${script_dir}/cfg/blm3.service ] &&\
- ${csudo} cp ${script_dir}/cfg/blm3.service ${service_config_dir}/
+function install_taosadapter_service() {
+ [ -f ${script_dir}/cfg/taosadapter.service ] &&\
+ ${csudo} cp ${script_dir}/cfg/taosadapter.service ${service_config_dir}/
}
function install_service() {
@@ -883,9 +883,9 @@ function update_TDengine() {
if [ -z $1 ]; then
install_bin
install_service
- install_blm3_service
+ install_taosadapter_service
install_config
- install_blm3_config
+ install_taosadapter_config
openresty_work=false
if [ "$verMode" == "cluster" ]; then
@@ -965,7 +965,7 @@ function install_TDengine() {
# For installing new
install_bin
install_service
- install_blm3_service
+ install_taosadapter_service
openresty_work=false
if [ "$verMode" == "cluster" ]; then
diff --git a/packaging/tools/make_install.sh b/packaging/tools/make_install.sh
index c29c1cd665a11596b83234d1b0343bbab1cf5dc1..8309fa516c4ffdcd9e5a17056304427543dad0a9 100755
--- a/packaging/tools/make_install.sh
+++ b/packaging/tools/make_install.sh
@@ -114,8 +114,8 @@ if [ "$osType" != "Darwin" ]; then
fi
fi
-function kill_blm3() {
- pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}')
+function kill_taosadapter() {
+ pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then
${csudo} kill -9 $pid || :
fi
@@ -156,7 +156,7 @@ function install_bin() {
# Remove links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
- ${csudo} rm -f ${bin_link_dir}/blm3 || :
+ ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
@@ -176,7 +176,7 @@ function install_bin() {
#Make link
[ -x ${install_main_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || :
[ -x ${install_main_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || :
- [ -x ${install_main_dir}/bin/blm3 ] && ${csudo} ln -s ${install_main_dir}/bin/blm3 ${bin_link_dir}/blm3 || :
+ [ -x ${install_main_dir}/bin/taosadapter ] && ${csudo} ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter || :
[ -x ${install_main_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || :
[ -x ${install_main_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || :
[ -x ${install_main_dir}/bin/perfMonitor ] && ${csudo} ln -s ${install_main_dir}/bin/perfMonitor ${bin_link_dir}/perfMonitor || :
@@ -191,7 +191,7 @@ function install_bin() {
#Make link
[ -x ${install_main_dir}/bin/taos ] || [ -x ${install_main_2_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || ${csudo} ln -s ${install_main_2_dir}/bin/taos || :
[ -x ${install_main_dir}/bin/taosd ] || [ -x ${install_main_2_dir}/bin/taosd ] && ${csudo} ln -s ${install_main_dir}/bin/taosd ${bin_link_dir}/taosd || ${csudo} ln -s ${install_main_2_dir}/bin/taosd || :
- [ -x ${install_main_dir}/bin/blm3 ] || [ -x ${install_main_2_dir}/bin/blm3 ] && ${csudo} ln -s ${install_main_dir}/bin/blm3 ${bin_link_dir}/blm3 || ${csudo} ln -s ${install_main_2_dir}/bin/blm3 || :
+ [ -x ${install_main_dir}/bin/taosadapter ] || [ -x ${install_main_2_dir}/bin/taosadapter ] && ${csudo} ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter || ${csudo} ln -s ${install_main_2_dir}/bin/taosadapter || :
[ -x ${install_main_dir}/bin/taosdump ] || [ -x ${install_main_2_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || ln -s ${install_main_2_dir}/bin/taosdump ${bin_link_dir}/taosdump || :
[ -x ${install_main_dir}/bin/taosdemo ] || [ -x ${install_main_2_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || ln -s ${install_main_2_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || :
fi
@@ -351,39 +351,33 @@ function install_config() {
[ -f ${script_dir}/../cfg/taos.cfg ] &&
${csudo} cp ${script_dir}/../cfg/taos.cfg ${cfg_install_dir}
${csudo} chmod 644 ${cfg_install_dir}/taos.cfg
- ${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org
- ${csudo} ln -s ${cfg_install_dir}/taos.cfg ${install_main_dir}/cfg/taos.cfg
+ ${csudo} cp -f ${script_dir}/../cfg/taos.cfg \
+ ${cfg_install_dir}/taos.cfg.${verNumber}
+ ${csudo} ln -s ${cfg_install_dir}/taos.cfg \
+ ${install_main_dir}/cfg/taos.cfg
else
- if [ "$osType" != "Darwin" ]; then
- ${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org
- else
- ${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_dir}/cfg/taos.cfg.org\
- || ${csudo} cp -f ${script_dir}/../cfg/taos.cfg ${install_main_2_dir}/cfg/taos.cfg.org
- fi
+ ${csudo} cp -f ${script_dir}/../cfg/taos.cfg \
+ ${cfg_install_dir}/taos.cfg.${verNumber}
fi
}
-function install_blm3_config() {
- if [ ! -f "${cfg_install_dir}/blm.toml" ]; then
+function install_taosadapter_config() {
+ if [ ! -f "${cfg_install_dir}/taosadapter.toml" ]; then
${csudo} mkdir -p ${cfg_install_dir}
- [ -f ${binary_dir}/test/cfg/blm.toml ] &&
- ${csudo} cp ${binary_dir}/test/cfg/blm.toml ${cfg_install_dir}
- [ -f ${cfg_install_dir}/blm.toml ] &&
- ${csudo} chmod 644 ${cfg_install_dir}/blm.toml
- [ -f ${binary_dir}/test/cfg/blm.toml ] &&
- ${csudo} cp -f ${binary_dir}/test/cfg/blm.toml ${install_main_dir}/cfg/blm.toml.org
- [ -f ${cfg_install_dir}/blm.toml ] &&
- ${csudo} ln -s ${cfg_install_dir}/blm.toml ${install_main_dir}/cfg/blm.toml
+ [ -f ${binary_dir}/test/cfg/taosadapter.toml ] &&
+ ${csudo} cp ${binary_dir}/test/cfg/taosadapter.toml ${cfg_install_dir}
+ [ -f ${cfg_install_dir}/taosadapter.toml ] &&
+ ${csudo} chmod 644 ${cfg_install_dir}/taosadapter.toml
+ [ -f ${binary_dir}/test/cfg/taosadapter.toml ] &&
+ ${csudo} cp -f ${binary_dir}/test/cfg/taosadapter.toml \
+ ${cfg_install_dir}/taosadapter.toml.${verNumber}
+ [ -f ${cfg_install_dir}/taosadapter.toml ] && \
+ ${csudo} ln -s ${cfg_install_dir}/taosadapter.toml \
+ ${install_main_dir}/cfg/taosadapter.toml
else
- if [ -f "${binary_dir}/test/cfg/blm.toml" ]; then
- if [ "$osType" != "Darwin" ]; then
- ${csudo} cp -f ${binary_dir}/test/cfg/blm.toml \
- ${install_main_dir}/cfg/blm.toml.org
- else
- ${csudo} cp -f ${binary_dir}/test/cfg/blm.toml ${install_main_dir}/cfg/blm.toml.org \
- || ${csudo} cp -f ${binary_dir}/test/cfg/blm.toml \
- ${install_main_2_dir}/cfg/blm.toml.org
- fi
+ if [ -f "${binary_dir}/test/cfg/taosadapter.toml" ]; then
+ ${csudo} cp -f ${binary_dir}/test/cfg/taosadapter.toml \
+ ${cfg_install_dir}/taosadapter.toml.${verNumber}
fi
fi
}
@@ -503,8 +497,8 @@ function install_service_on_systemd() {
${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}"
- ${csudo} bash -c "echo 'After=network-online.target blm3.service' >> ${taosd_service_config}"
- ${csudo} bash -c "echo 'Wants=network-online.target blm3.service' >> ${taosd_service_config}"
+ ${csudo} bash -c "echo 'After=network-online.target taosadapter.service' >> ${taosd_service_config}"
+ ${csudo} bash -c "echo 'Wants=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo >> ${taosd_service_config}"
${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}"
@@ -525,9 +519,14 @@ function install_service_on_systemd() {
${csudo} systemctl enable taosd
}
-function install_blm3_service() {
- [ -f ${script_dir}/cfg/blm3.service ] &&\
- ${csudo} cp ${script_dir}/cfg/blm3.service ${service_config_dir}/
+function install_taosadapter_service() {
+ if ((${service_mod}==0)); then
+ [ -f ${binary_dir}/test/cfg/taosadapter.service ] &&\
+ ${csudo} cp ${binary_dir}/test/cfg/taosadapter.service\
+ ${service_config_dir}/ || :
+ else
+ kill_taosadapter
+ fi
}
function install_service() {
@@ -537,7 +536,6 @@ function install_service() {
install_service_on_sysvinit
else
# must manual stop taosd
- kill_blm3
kill_taosd
fi
}
@@ -553,7 +551,7 @@ function update_TDengine() {
elif ((${service_mod}==1)); then
${csudo} service taosd stop || :
else
- kill_blm3
+ kill_taosadapter
kill_taosd
fi
sleep 1
@@ -571,11 +569,11 @@ function update_TDengine() {
if [ "$osType" != "Darwin" ]; then
install_service
- install_blm3_service
+ install_taosadapter_service
fi
install_config
- install_blm3_config
+ install_taosadapter_config
if [ "$osType" != "Darwin" ]; then
echo
@@ -583,7 +581,7 @@ function update_TDengine() {
echo
echo -e "${GREEN_DARK}To configure TDengine ${NC}: edit /etc/taos/taos.cfg"
- echo -e "${GREEN_DARK}To configure blm3 (if has) ${NC}: edit /etc/taos/blm.toml"
+ echo -e "${GREEN_DARK}To configure taosadapter (if has) ${NC}: edit /etc/taos/taosadapter.toml"
if ((${service_mod}==0)); then
echo -e "${GREEN_DARK}To start TDengine ${NC}: ${csudo} systemctl start taosd${NC}"
elif ((${service_mod}==1)); then
@@ -626,11 +624,11 @@ function install_TDengine() {
if [ "$osType" != "Darwin" ]; then
install_service
- install_blm3_service
+ install_taosadapter_service
fi
install_config
- install_blm3_config
+ install_taosadapter_config
if [ "$osType" != "Darwin" ]; then
# Ask if to start the service
@@ -638,7 +636,7 @@ function install_TDengine() {
echo -e "\033[44;32;1mTDengine is installed successfully!${NC}"
echo
echo -e "${GREEN_DARK}To configure TDengine ${NC}: edit /etc/taos/taos.cfg"
- echo -e "${GREEN_DARK}To configure blm (if has) ${NC}: edit /etc/taos/blm.toml"
+ echo -e "${GREEN_DARK}To configure taosadapter (if has) ${NC}: edit /etc/taos/taosadapter.toml"
if ((${service_mod}==0)); then
echo -e "${GREEN_DARK}To start TDengine ${NC}: ${csudo} systemctl start taosd${NC}"
elif ((${service_mod}==1)); then
diff --git a/packaging/tools/makepkg.sh b/packaging/tools/makepkg.sh
index 7071912fc8133fb2bf1b15f992ff61c514bb79a1..05b49ff6a9599c6050d2ccad778f63d285981420 100755
--- a/packaging/tools/makepkg.sh
+++ b/packaging/tools/makepkg.sh
@@ -35,12 +35,12 @@ fi
if [ "$pagMode" == "lite" ]; then
strip ${build_dir}/bin/taosd
strip ${build_dir}/bin/taos
- # lite version doesn't include blm3, which will lead to no restful interface
+ # lite version doesn't include taosadapter, which will lead to no restful interface
bin_files="${build_dir}/bin/taosd ${build_dir}/bin/taos ${script_dir}/remove.sh ${script_dir}/startPre.sh"
else
bin_files="${build_dir}/bin/taosd \
${build_dir}/bin/taos \
- ${build_dir}/bin/blm3 \
+ ${build_dir}/bin/taosadapter \
${build_dir}/bin/taosdump \
${build_dir}/bin/taosdemo \
${build_dir}/bin/tarbitrator\
@@ -78,7 +78,7 @@ mkdir -p ${install_dir}
mkdir -p ${install_dir}/inc && cp ${header_files} ${install_dir}/inc
mkdir -p ${install_dir}/cfg && cp ${cfg_dir}/taos.cfg ${install_dir}/cfg/taos.cfg
-[ -f ${cfg_dir}/blm.toml ] && cp ${cfg_dir}/blm.toml ${install_dir}/cfg/blm.toml
+[ -f ${cfg_dir}/taosadapter.toml ] && cp ${cfg_dir}/taosadapter.toml ${install_dir}/cfg/taosadapter.toml
mkdir -p ${install_dir}/bin && cp ${bin_files} ${install_dir}/bin && chmod a+x ${install_dir}/bin/* || :
mkdir -p ${install_dir}/init.d && cp ${init_file_deb} ${install_dir}/init.d/taosd.deb
diff --git a/packaging/tools/makepkg_power.sh b/packaging/tools/makepkg_power.sh
index 0b24100c3eb6be74ee4b415759a263647a395da3..65200ddd047358f92f8e3a612c08eedb60053311 100755
--- a/packaging/tools/makepkg_power.sh
+++ b/packaging/tools/makepkg_power.sh
@@ -81,7 +81,7 @@ else
# bin_files="${build_dir}/bin/powerd ${build_dir}/bin/power ${build_dir}/bin/powerdemo ${build_dir}/bin/tarbitrator ${script_dir}/remove_power.sh ${script_dir}/set_core.sh"
cp ${build_dir}/bin/taos ${install_dir}/bin/power
cp ${build_dir}/bin/taosd ${install_dir}/bin/powerd
- cp ${build_dir}/bin/blm3 ${install_dir}/bin/blm3 ||:
+ cp ${build_dir}/bin/taosadapter ${install_dir}/bin/taosadapter ||:
cp ${script_dir}/remove_power.sh ${install_dir}/bin
cp ${build_dir}/bin/taosdemo ${install_dir}/bin/powerdemo
cp ${build_dir}/bin/taosdump ${install_dir}/bin/powerdump
diff --git a/packaging/tools/makepkg_pro.sh b/packaging/tools/makepkg_pro.sh
index a69e542c3c5969d609f8d5a00b6428add15fd950..457cb0de6f02f7000dc7437cde61bfec28c7205c 100755
--- a/packaging/tools/makepkg_pro.sh
+++ b/packaging/tools/makepkg_pro.sh
@@ -62,7 +62,7 @@ else
fi
cp ${build_dir}/bin/taos ${install_dir}/bin/prodbc
cp ${build_dir}/bin/taosd ${install_dir}/bin/prodbs
-cp ${build_dir}/bin/blm3 ${install_dir}/bin/blm3 ||:
+cp ${build_dir}/bin/taosadapter ${install_dir}/bin/taosadapter ||:
cp ${script_dir}/remove_pro.sh ${install_dir}/bin
chmod a+x ${install_dir}/bin/* || :
diff --git a/packaging/tools/makepkg_tq.sh b/packaging/tools/makepkg_tq.sh
index ccf42a8aab090b95de8e889b3a8186be9a6cba7a..07032379d7e4bab2636f3685b6edb620780a124a 100755
--- a/packaging/tools/makepkg_tq.sh
+++ b/packaging/tools/makepkg_tq.sh
@@ -82,7 +82,7 @@ else
cp ${build_dir}/bin/taos ${install_dir}/bin/tq
cp ${build_dir}/bin/taosd ${install_dir}/bin/tqd
cp ${script_dir}/remove_tq.sh ${install_dir}/bin
- cp ${build_dir}/bin/blm3 ${install_dir}/bin/blm3 ||:
+ cp ${build_dir}/bin/taosadapter ${install_dir}/bin/taosadapter ||:
cp ${build_dir}/bin/taosdemo ${install_dir}/bin/tqdemo
cp ${build_dir}/bin/taosdump ${install_dir}/bin/tqdump
cp ${build_dir}/bin/tarbitrator ${install_dir}/bin
diff --git a/packaging/tools/post.sh b/packaging/tools/post.sh
index 418b9bd0f9b5ea82f49ad5c8165f628c90f472d2..c3db7e417adb11b92d55464b69c715e3aee2d6bb 100755
--- a/packaging/tools/post.sh
+++ b/packaging/tools/post.sh
@@ -64,9 +64,9 @@ else
service_mod=2
fi
-function kill_blm3() {
-# ${csudo} pkill -f blm3 || :
- pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}')
+function kill_taosadapter() {
+# ${csudo} pkill -f taosadapter || :
+ pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then
${csudo} kill -9 $pid || :
fi
@@ -123,7 +123,7 @@ function install_bin() {
# Remove links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
- ${csudo} rm -f ${bin_link_dir}/blm3 || :
+ ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/rmtaos || :
@@ -134,7 +134,7 @@ function install_bin() {
#Make link
[ -x ${bin_dir}/taos ] && ${csudo} ln -s ${bin_dir}/taos ${bin_link_dir}/taos || :
[ -x ${bin_dir}/taosd ] && ${csudo} ln -s ${bin_dir}/taosd ${bin_link_dir}/taosd || :
- [ -x ${bin_dir}/blm3 ] && ${csudo} ln -s ${bin_dir}/blm3 ${bin_link_dir}/blm3 || :
+ [ -x ${bin_dir}/taosadapter ] && ${csudo} ln -s ${bin_dir}/taosadapter ${bin_link_dir}/taosadapter || :
[ -x ${bin_dir}/taosdemo ] && ${csudo} ln -s ${bin_dir}/taosdemo ${bin_link_dir}/taosdemo || :
[ -x ${bin_dir}/taosdump ] && ${csudo} ln -s ${bin_dir}/taosdump ${bin_link_dir}/taosdump || :
[ -x ${bin_dir}/set_core.sh ] && ${csudo} ln -s ${bin_dir}/set_core.sh ${bin_link_dir}/set_core || :
@@ -291,20 +291,20 @@ function local_fqdn_check() {
fi
}
-function install_blm3_config() {
- if [ ! -f "${cfg_install_dir}/blm.toml" ]; then
+function install_taosadapter_config() {
+ if [ ! -f "${cfg_install_dir}/taosadapter.toml" ]; then
[ ! -d %{cfg_install_dir} ] &&
${csudo} ${csudo} mkdir -p ${cfg_install_dir}
- [ -f ${cfg_dir}/blm.toml ] && ${csudo} cp ${cfg_dir}/blm.toml ${cfg_install_dir}
- [ -f ${cfg_install_dir}/blm.toml ] &&
- ${csudo} chmod 644 ${cfg_install_dir}/blm.toml
+ [ -f ${cfg_dir}/taosadapter.toml ] && ${csudo} cp ${cfg_dir}/taosadapter.toml ${cfg_install_dir}
+ [ -f ${cfg_install_dir}/taosadapter.toml ] &&
+ ${csudo} chmod 644 ${cfg_install_dir}/taosadapter.toml
fi
- [ -f ${cfg_dir}/blm.toml ] &&
- ${csudo} mv ${cfg_dir}/blm.toml ${cfg_dir}/blm.toml.org
+ [ -f ${cfg_dir}/taosadapter.toml ] &&
+ ${csudo} mv ${cfg_dir}/taosadapter.toml ${cfg_dir}/taosadapter.toml.new
- [ -f ${cfg_install_dir}/blm.toml ] &&
- ${csudo} ln -s ${cfg_install_dir}/blm.toml ${cfg_dir}
+ [ -f ${cfg_install_dir}/taosadapter.toml ] &&
+ ${csudo} ln -s ${cfg_install_dir}/taosadapter.toml ${cfg_dir}
}
function install_config() {
@@ -322,7 +322,7 @@ function install_config() {
# restore the backup standard input, and turn off 6
exec 0<&6 6<&-
- ${csudo} mv ${cfg_dir}/taos.cfg ${cfg_dir}/taos.cfg.org
+ ${csudo} mv ${cfg_dir}/taos.cfg ${cfg_dir}/taos.cfg.new
${csudo} ln -s ${cfg_install_dir}/taos.cfg ${cfg_dir}
#FQDN_FORMAT="(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)"
#FQDN_FORMAT="(:[1-6][0-9][0-9][0-9][0-9]$)"
@@ -444,8 +444,8 @@ function install_service_on_systemd() {
${csudo} bash -c "echo '[Unit]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Description=TDengine server service' >> ${taosd_service_config}"
- ${csudo} bash -c "echo 'After=network-online.target blm3.service' >> ${taosd_service_config}"
- ${csudo} bash -c "echo 'Wants=network-online.target blm3.service' >> ${taosd_service_config}"
+ ${csudo} bash -c "echo 'After=network-online.target taosadapter.service' >> ${taosd_service_config}"
+ ${csudo} bash -c "echo 'Wants=network-online.target taosadapter.service' >> ${taosd_service_config}"
${csudo} bash -c "echo >> ${taosd_service_config}"
${csudo} bash -c "echo '[Service]' >> ${taosd_service_config}"
${csudo} bash -c "echo 'Type=simple' >> ${taosd_service_config}"
@@ -466,9 +466,8 @@ function install_service_on_systemd() {
${csudo} systemctl enable taosd
}
-function install_blm3_service() {
- [ -f ${script_dir}/cfg/blm3.service ] &&\
- ${csudo} cp ${script_dir}/cfg/blm3.service ${service_config_dir}/
+function install_taosadapter_service() {
+ [ -f ${cfg_dir}/taosadapter.service ] && ${csudo} cp ${cfg_dir}/taosadapter.service ${service_config_dir}
}
function install_service() {
@@ -478,7 +477,7 @@ function install_service() {
install_service_on_sysvinit
else
# manual start taosd
- kill_blm3
+ kill_taosadapter
kill_taosd
fi
}
@@ -501,10 +500,10 @@ function install_TDengine() {
install_lib
install_avro_lib
install_bin
- install_service
- install_blm3_service
install_config
- install_blm3_config
+ install_taosadapter_config
+ install_taosadapter_service
+ install_service
# Ask if to start the service
#echo
diff --git a/packaging/tools/preun.sh b/packaging/tools/preun.sh
index 7f6ef5c27d182fcaa1f9ea80f1169b389db8b014..d2d36364208f23492d2ba6aefa783c85ad6d5572 100755
--- a/packaging/tools/preun.sh
+++ b/packaging/tools/preun.sh
@@ -43,8 +43,8 @@ else
service_mod=2
fi
-function kill_blm3() {
- pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}')
+function kill_taosadapter() {
+ pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then
${csudo} kill -9 $pid || :
fi
@@ -58,10 +58,10 @@ function kill_taosd() {
}
function clean_service_on_systemd() {
- blm3_service_config="${service_config_dir}/blm3.service"
- if systemctl is-active --quiet blm3; then
- echo "blm3 is running, stopping it..."
- ${csudo} systemctl stop blm3 &> /dev/null || echo &> /dev/null
+ taosadapter_service_config="${service_config_dir}/taosadapter.service"
+ if systemctl is-active --quiet taosadapter; then
+ echo "taosadapter is running, stopping it..."
+ ${csudo} systemctl stop taosadapter &> /dev/null || echo &> /dev/null
fi
taosd_service_config="${service_config_dir}/${taos_service_name}.service"
@@ -74,7 +74,7 @@ function clean_service_on_systemd() {
${csudo} rm -f ${taosd_service_config}
- [ -f ${blm3_service_config} ] && ${csudo} rm -f ${blm3_service_config}
+ [ -f ${taosadapter_service_config} ] && ${csudo} rm -f ${taosadapter_service_config}
}
@@ -109,7 +109,7 @@ function clean_service() {
clean_service_on_sysvinit
else
# must manual stop taosd
- kill_blm3
+ kill_taosadapter
kill_taosd
fi
}
@@ -120,11 +120,11 @@ clean_service
# Remove all links
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
-${csudo} rm -f ${bin_link_dir}/blm3 || :
+${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/set_core || :
-${csudo} rm -f ${cfg_link_dir}/* || :
+${csudo} rm -f ${cfg_link_dir}/*.new || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${lib_link_dir}/libtaos.* || :
@@ -134,7 +134,7 @@ ${csudo} rm -f ${log_link_dir} || :
${csudo} rm -f ${data_link_dir} || :
if ((${service_mod}==2)); then
- kill_blm3
+ kill_taosadapter
kill_taosd
fi
diff --git a/packaging/tools/remove.sh b/packaging/tools/remove.sh
index be5163c4540e04e5f381357a984175904dadccef..07a8362b2c45676986513020da668ff9235f00fa 100755
--- a/packaging/tools/remove.sh
+++ b/packaging/tools/remove.sh
@@ -54,8 +54,8 @@ else
service_mod=2
fi
-function kill_blm3() {
- pid=$(ps -ef | grep "blm3" | grep -v "grep" | awk '{print $2}')
+function kill_taosadapter() {
+ pid=$(ps -ef | grep "taosadapter" | grep -v "grep" | awk '{print $2}')
if [ -n "$pid" ]; then
${csudo} kill -9 $pid || :
fi
@@ -78,7 +78,7 @@ function clean_bin() {
# Remove link
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
- ${csudo} rm -f ${bin_link_dir}/blm3 || :
+ ${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/rmtaos || :
@@ -111,14 +111,14 @@ function clean_log() {
function clean_service_on_systemd() {
taosd_service_config="${service_config_dir}/${taos_service_name}.service"
- blm3_service_config="${service_config_dir}/blm3.service"
+ taosadapter_service_config="${service_config_dir}/taosadapter.service"
if systemctl is-active --quiet ${taos_service_name}; then
echo "TDengine taosd is running, stopping it..."
${csudo} systemctl stop ${taos_service_name} &> /dev/null || echo &> /dev/null
fi
${csudo} systemctl disable ${taos_service_name} &> /dev/null || echo &> /dev/null
${csudo} rm -f ${taosd_service_config}
- [ -f ${blm3_service_config} ] && ${sudo} rm -f ${blm3_service_config}
+ [ -f ${taosadapter_service_config} ] && ${csudo} rm -f ${taosadapter_service_config}
tarbitratord_service_config="${service_config_dir}/${tarbitrator_service_name}.service"
if systemctl is-active --quiet ${tarbitrator_service_name}; then
@@ -193,7 +193,7 @@ function clean_service() {
clean_service_on_sysvinit
else
# must manual stop taosd
- kill_blm3
+ kill_taosadapter
kill_taosd
kill_tarbitrator
fi
diff --git a/packaging/tools/startPre.sh b/packaging/tools/startPre.sh
index 731f5b396f4bed78488a659dbb2b13e832deccf6..8a0ab11a4d37ffb9ad244faa2946cbbf10ce2026 100755
--- a/packaging/tools/startPre.sh
+++ b/packaging/tools/startPre.sh
@@ -9,8 +9,8 @@ line=`grep StartLimitBurst ${taosd}`
num=${line##*=}
#echo "burst num: ${num}"
-startSeqFile=/usr/local/taos/.startSeq
-recordFile=/usr/local/taos/.startRecord
+startSeqFile=/var/log/taos/.startSeq
+recordFile=/var/log/taos/.startRecord
startSeq=0
diff --git a/src/client/src/tscLocal.c b/src/client/src/tscLocal.c
index da51961d0ce8cd1a73cbef3272bc4d4471858cdc..c3c65018a50aea8e7f36d89c15c6b7faa12f2047 100644
--- a/src/client/src/tscLocal.c
+++ b/src/client/src/tscLocal.c
@@ -358,9 +358,13 @@ static int32_t tscGetTableTagValue(SCreateBuilder *builder, char *result) {
int num_fields = taos_num_fields(pSql);
TAOS_FIELD *fields = taos_fetch_fields(pSql);
- char buf[TSDB_COL_NAME_LEN + 16];
for (int i = 0; i < num_fields; i++) {
- memset(buf, 0, sizeof(buf));
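+ // allocate the value buffer per field from the reported column length rather than a fixed-size stack buffer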
+ char *buf = calloc(1, lengths[i] + 1);
+ if (buf == NULL) {
+ return TSDB_CODE_TSC_OUT_OF_MEMORY;
+ }
+
+ memset(buf, 0, lengths[i] + 1);
int32_t ret = tscGetNthFieldResult(row, fields, lengths, i, buf);
if (i == 0) {
@@ -373,10 +377,13 @@ static int32_t tscGetTableTagValue(SCreateBuilder *builder, char *result) {
} else {
snprintf(result + strlen(result), TSDB_MAX_BINARY_LEN - strlen(result), "%s,", buf);
}
+
+ free(buf);
+
if (i == num_fields - 1) {
sprintf(result + strlen(result) - 1, "%s", ")");
}
- }
+ }
if (0 == strlen(result)) {
return TSDB_CODE_TSC_INVALID_TABLE_NAME;
diff --git a/src/client/src/tscSQLParser.c b/src/client/src/tscSQLParser.c
index 0465c2aa15832192b4340e8531e644d2d48c054c..f2708025c194b868595aa198001f16257b3332df 100644
--- a/src/client/src/tscSQLParser.c
+++ b/src/client/src/tscSQLParser.c
@@ -5939,6 +5939,7 @@ int32_t validateOrderbyNode(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SSqlNode* pSq
const char* msg9 = "orderby column must projected in subquery";
const char* msg10 = "not support distinct mixed with order by";
const char* msg11 = "not support order with udf";
+ const char* msg12 = "order by tags not supported with diff/derivative/csum/mavg";
setDefaultOrderInfo(pQueryInfo);
STableMetaInfo* pTableMetaInfo = tscGetMetaInfo(pQueryInfo, 0);
@@ -6037,6 +6038,9 @@ int32_t validateOrderbyNode(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SSqlNode* pSq
size_t s = taosArrayGetSize(pSortOrder);
if (s == 1) {
if (orderByTags) {
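+ // diff/derivative/csum/mavg results cannot be ordered by tags; reject with msg12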
+ if (tscIsDiffDerivLikeQuery(pQueryInfo)) {
+ return invalidOperationMsg(pMsgBuf, msg12);
+ }
pQueryInfo->groupbyExpr.orderIndex = index.columnIndex - tscGetNumOfColumns(pTableMetaInfo->pTableMeta);
tVariantListItem* p1 = taosArrayGet(pSqlNode->pSortOrder, 0);
diff --git a/src/client/src/tscSql.c b/src/client/src/tscSql.c
index bb3bddeefd798366fe205eb67b55b3b4a7301df4..89da3c5640c6523d4d2a816b8ae0293310c5830a 100644
--- a/src/client/src/tscSql.c
+++ b/src/client/src/tscSql.c
@@ -781,6 +781,16 @@ bool taos_is_null(TAOS_RES *res, int32_t row, int32_t col) {
return isNull(((char*) pSql->res.urow[col]) + row * pInfo->field.bytes, pInfo->field.type);
}
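+// Return true when res comes from a statement that produces no row set (INSERT, DDL and administrative commands).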
+bool taos_is_update_query(TAOS_RES *res) {
+ SSqlObj *pSql = (SSqlObj *)res;
+ if (pSql == NULL || pSql->signature != pSql) {
+ return false;
+ }
+
+ SSqlCmd* pCmd = &pSql->cmd;
+ return ((pCmd->command >= TSDB_SQL_INSERT && pCmd->command <= TSDB_SQL_DROP_DNODE) || TSDB_SQL_RESET_CACHE == pCmd->command || TSDB_SQL_USE_DB == pCmd->command);
+}
+
int taos_print_row(char *str, TAOS_ROW row, TAOS_FIELD *fields, int num_fields) {
int len = 0;
@@ -909,7 +919,6 @@ int taos_validate_sql(TAOS *taos, const char *sql) {
strtolower(pSql->sqlstr, sql);
-// pCmd->curSql = NULL;
if (NULL != pCmd->insertParam.pTableBlockHashList) {
taosHashCleanup(pCmd->insertParam.pTableBlockHashList);
pCmd->insertParam.pTableBlockHashList = NULL;
@@ -934,6 +943,17 @@ int taos_validate_sql(TAOS *taos, const char *sql) {
return code;
}
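+// Clear the connection's cached current-database name; guarded by the connection mutex.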
+void taos_reset_current_db(TAOS *taos) {
+ STscObj* pObj = (STscObj*) taos;
+ if (pObj == NULL || pObj->signature != pObj) {
+ return;
+ }
+
+ pthread_mutex_lock(&pObj->mutex);
+ memset(pObj->db, 0, tListLen(pObj->db));
+ pthread_mutex_unlock(&pObj->mutex);
+}
+
void loadMultiTableMetaCallback(void *param, TAOS_RES *res, int code) {
SSqlObj* pSql = (SSqlObj*)taosAcquireRef(tscObjRef, (int64_t)param);
if (pSql == NULL) {
diff --git a/src/connector/grafanaplugin b/src/connector/grafanaplugin
index 9ae793ad2d567eb11d10627b65698f612542e988..792ef7c3036f15068796e09883d3f4d47a038fe2 160000
--- a/src/connector/grafanaplugin
+++ b/src/connector/grafanaplugin
@@ -1 +1 @@
-Subproject commit 9ae793ad2d567eb11d10627b65698f612542e988
+Subproject commit 792ef7c3036f15068796e09883d3f4d47a038fe2
diff --git a/src/connector/nodejs/nodetaos/cinterface.js b/src/connector/nodejs/nodetaos/cinterface.js
index 5ba2739c35b1f0aef61ba3e52ae5d2f3a901df77..3c395ec205a9c39b3c6e62532de536feef093544 100644
--- a/src/connector/nodejs/nodetaos/cinterface.js
+++ b/src/connector/nodejs/nodetaos/cinterface.js
@@ -12,6 +12,7 @@ const FieldTypes = require('./constants');
const errors = require('./error');
const TaosObjects = require('./taosobjects');
const { NULL_POINTER } = require('ref-napi');
+const { Console } = require('console');
module.exports = CTaosInterface;
@@ -53,6 +54,18 @@ function convertTinyint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0
}
return res;
}
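+// Unsigned integer converters mirror the signed ones above: read little-endian unsigned values and map each type's NULL sentinel to null.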
+function convertTinyintUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
+ data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
+ let res = [];
+ let currOffset = 0;
+ while (currOffset < data.length) {
+ let d = data.readUIntLE(currOffset, 1);
+ res.push(d == FieldTypes.C_TINYINT_UNSIGNED_NULL ? null : d);
+ currOffset += nbytes;
+ }
+ return res;
+}
+
function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
@@ -64,6 +77,18 @@ function convertSmallint(data, num_of_rows, nbytes = 0, offset = 0, precision =
}
return res;
}
+function convertSmallintUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
+ data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
+ let res = [];
+ let currOffset = 0;
+ while (currOffset < data.length) {
+ let d = data.readUIntLE(currOffset, 2);
+ res.push(d == FieldTypes.C_SMALLINT_UNSIGNED_NULL ? null : d);
+ currOffset += nbytes;
+ }
+ return res;
+}
+
function convertInt(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
@@ -75,6 +100,19 @@ function convertInt(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
}
return res;
}
+function convertIntUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
+ data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
+ let res = [];
+ let currOffset = 0;
+ while (currOffset < data.length) {
+ let d = data.readUInt32LE(currOffset);
+ res.push(d == FieldTypes.C_INT_UNSIGNED_NULL ? null : d);
+ currOffset += nbytes;
+ }
+ return res;
+}
+
+
function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
@@ -86,6 +124,19 @@ function convertBigint(data, num_of_rows, nbytes = 0, offset = 0, precision = 0)
}
return res;
}
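+// Unsigned BIGINT can exceed Number's safe integer range, so non-null values are returned as BigInt.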
+function convertBigintUnsigned(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
+ data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
+ let res = [];
+ let currOffset = 0;
+ while (currOffset < data.length) {
+ let d = data.readUInt64LE(currOffset);
+ res.push(d == FieldTypes.C_BIGINT_UNSIGNED_NULL ? null : BigInt(d));
+ currOffset += nbytes;
+ }
+ return res;
+}
+
+
function convertFloat(data, num_of_rows, nbytes = 0, offset = 0, precision = 0) {
data = ref.reinterpret(data.deref(), nbytes * num_of_rows, offset);
let res = [];
@@ -156,7 +207,11 @@ let convertFunctions = {
[FieldTypes.C_DOUBLE]: convertDouble,
[FieldTypes.C_BINARY]: convertBinary,
[FieldTypes.C_TIMESTAMP]: convertTimestamp,
- [FieldTypes.C_NCHAR]: convertNchar
+ [FieldTypes.C_NCHAR]: convertNchar,
+ [FieldTypes.C_TINYINT_UNSIGNED]: convertTinyintUnsigned,
+ [FieldTypes.C_SMALLINT_UNSIGNED]: convertSmallintUnsigned,
+ [FieldTypes.C_INT_UNSIGNED]: convertIntUnsigned,
+ [FieldTypes.C_BIGINT_UNSIGNED]: convertBigintUnsigned
}
// Define TaosField structure
@@ -321,6 +376,7 @@ CTaosInterface.prototype.close = function close(connection) {
CTaosInterface.prototype.query = function query(connection, sql) {
return this.libtaos.taos_query(connection, ref.allocCString(sql));
}
+
CTaosInterface.prototype.affectedRows = function affectedRows(result) {
return this.libtaos.taos_affected_rows(result);
}
@@ -413,6 +469,7 @@ CTaosInterface.prototype.query_a = function query_a(connection, sql, callback, p
this.libtaos.taos_query_a(connection, ref.allocCString(sql), callback, param);
return param;
}
+
/** Asynchrnously fetches the next block of rows. Wraps callback and transfers a 4th argument to the cursor, the row data as blocks in javascript form
* Note: This isn't a recursive function, in order to fetch all data either use the TDengine cursor object, TaosQuery object, or implement a recrusive
* function yourself using the libtaos.taos_fetch_rows_a function
diff --git a/src/connector/nodejs/nodetaos/constants.js b/src/connector/nodejs/nodetaos/constants.js
index cd6a0c9fbaff51e7f0ecd3ab06907b7b1fb7dcb1..3a866315507371fdfc69efb6de550b7c21f660b7 100644
--- a/src/connector/nodejs/nodetaos/constants.js
+++ b/src/connector/nodejs/nodetaos/constants.js
@@ -36,13 +36,21 @@ module.exports = {
C_BINARY : 8,
C_TIMESTAMP : 9,
C_NCHAR : 10,
+ C_TINYINT_UNSIGNED : 11,
+ C_SMALLINT_UNSIGNED : 12,
+ C_INT_UNSIGNED : 13,
+ C_BIGINT_UNSIGNED : 14,
// NULL value definition
// NOTE: These values should change according to C definition in tsdb.h
C_BOOL_NULL : 2,
C_TINYINT_NULL : -128,
+ C_TINYINT_UNSIGNED_NULL : 255,
C_SMALLINT_NULL : -32768,
+ C_SMALLINT_UNSIGNED_NULL : 65535,
C_INT_NULL : -2147483648,
- C_BIGINT_NULL : -9223372036854775808,
+ C_INT_UNSIGNED_NULL : 4294967295,
+ C_BIGINT_NULL : -9223372036854775808n,
+ C_BIGINT_UNSIGNED_NULL : 18446744073709551615n,
C_FLOAT_NULL : 2146435072,
C_DOUBLE_NULL : -9223370937343148032,
C_NCHAR_NULL : 4294967295,
@@ -64,6 +72,10 @@ const typeCodesToName = {
8 : 'Binary',
9 : 'Timestamp',
10 : 'Nchar',
+ 11 : 'TINYINT_UNSIGNED',
+ 12 : 'SMALLINT_UNSIGNED',
+ 13 : 'INT_UNSIGNED',
+ 14 : 'BIGINT_UNSIGNED',
}
/**
diff --git a/src/connector/nodejs/package.json b/src/connector/nodejs/package.json
index 6a2c66100b3d1921b3ce8997e70d33f024e5c3f2..711db94b84fab40d8d1809a44c45b24a9ab5bafb 100644
--- a/src/connector/nodejs/package.json
+++ b/src/connector/nodejs/package.json
@@ -7,7 +7,7 @@
"test": "test"
},
"scripts": {
- "test": "node test/test.js && node test/testMicroseconds.js && node test/testNanoseconds.js"
+ "test": "node test/test.js && node test/testMicroseconds.js && node test/testNanoseconds.js && node test/testUnsignedType.js "
},
"repository": {
"type": "git",
diff --git a/src/connector/nodejs/test/test.js b/src/connector/nodejs/test/test.js
index caf05955da4c960ebedc872f400c17d18be767dd..06adf912a57bfa369b9567d0b5b3a1c8fb105ce8 100644
--- a/src/connector/nodejs/test/test.js
+++ b/src/connector/nodejs/test/test.js
@@ -90,7 +90,7 @@ c1.execute("create table if not exists td_connector_test.weather(ts timestamp, t
c1.execute("insert into t1 using weather tags('北京') values(now, 11.11, 11)");
c1.execute("insert into t1(ts, temperature) values(now, 22.22)");
c1.execute("insert into t1(ts, humidity) values(now, 33)");
-c1.query('select * from test.t1', true).then(function (result) {
+c1.query('select * from td_connector_test.t1', true).then(function (result) {
result.pretty();
});
diff --git a/src/connector/nodejs/test/testUnsignedType.js b/src/connector/nodejs/test/testUnsignedType.js
new file mode 100644
index 0000000000000000000000000000000000000000..82413afebad0b75116fe3ea46e50716843d87c84
--- /dev/null
+++ b/src/connector/nodejs/test/testUnsignedType.js
@@ -0,0 +1,26 @@
+const taos = require('../tdengine');
+var conn = taos.connect({ host: "127.0.0.1", user: "root", password: "taosdata", config: "/etc/taos", port: 10 });
+var c1 = conn.cursor();
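+// exercise the unsigned tinyint/smallint/int/bigint converters with boundary and zero values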
+executeUpdate("create database nodedb;");
+executeUpdate("use nodedb;");
+executeUpdate("create table unsigntest(ts timestamp,ut tinyint unsigned,us smallint unsigned,ui int unsigned,ub bigint unsigned,bi bigint);");
+executeUpdate("insert into unsigntest values (now, 254,65534,4294967294,18446744073709551614,9223372036854775807);");
+executeUpdate("insert into unsigntest values (now, 0,0,0,0,-9223372036854775807);");
+executeQuery("select * from unsigntest;");
+executeUpdate("drop database nodedb;");
+
+
+function executeUpdate(sql) {
+ console.log(sql);
+ c1.execute(sql);
+}
+function executeQuery(sql) {
+ c1.execute(sql)
+ var data = c1.fetchall();
+ // Latest query's Field metadata is stored in cursor.fields
+ console.log(c1.fields);
+ // Latest query's result data is stored in cursor.data, also returned by fetchall.
+ console.log(c1.data);
+}
+setTimeout(()=>conn.close(),2000);
+
diff --git a/src/inc/taos.h b/src/inc/taos.h
index 4afec942ff991ce1009cb8c54113562f93f9c92d..6cd62d3177d2490c5c89bf910e258c956c2f69fc 100644
--- a/src/inc/taos.h
+++ b/src/inc/taos.h
@@ -175,11 +175,13 @@ DLL_EXPORT int taos_select_db(TAOS *taos, const char *db);
DLL_EXPORT int taos_print_row(char *str, TAOS_ROW row, TAOS_FIELD *fields, int num_fields);
DLL_EXPORT void taos_stop_query(TAOS_RES *res);
DLL_EXPORT bool taos_is_null(TAOS_RES *res, int32_t row, int32_t col);
+DLL_EXPORT bool taos_is_update_query(TAOS_RES *res);
DLL_EXPORT int taos_fetch_block(TAOS_RES *res, TAOS_ROW *rows);
-DLL_EXPORT int taos_validate_sql(TAOS *taos, const char *sql);
-
DLL_EXPORT int* taos_fetch_lengths(TAOS_RES *res);
+DLL_EXPORT int taos_validate_sql(TAOS *taos, const char *sql);
+DLL_EXPORT void taos_reset_current_db(TAOS *taos);
+
// TAOS_RES *taos_list_tables(TAOS *mysql, const char *wild);
// TAOS_RES *taos_list_dbs(TAOS *mysql, const char *wild);
@@ -192,7 +194,6 @@ DLL_EXPORT int taos_errno(TAOS_RES *tres);
DLL_EXPORT void taos_query_a(TAOS *taos, const char *sql, void (*fp)(void *param, TAOS_RES *, int code), void *param);
DLL_EXPORT void taos_fetch_rows_a(TAOS_RES *res, void (*fp)(void *param, TAOS_RES *, int numOfRows), void *param);
-//DLL_EXPORT void taos_fetch_row_a(TAOS_RES *res, void (*fp)(void *param, TAOS_RES *, TAOS_ROW row), void *param);
typedef void (*TAOS_SUBSCRIBE_CALLBACK)(TAOS_SUB* tsub, TAOS_RES *res, void* param, int code);
DLL_EXPORT TAOS_SUB *taos_subscribe(TAOS* taos, int restart, const char* topic, const char *sql, TAOS_SUBSCRIBE_CALLBACK fp, void *param, int interval);
diff --git a/src/kit/taosdump/taosdump.c b/src/kit/taosdump/taosdump.c
index 41394c6b0fbdb81d05c464a966b15fa122a05549..c8f880662e35448adcb71806c04a39235c053651 100644
--- a/src/kit/taosdump/taosdump.c
+++ b/src/kit/taosdump/taosdump.c
@@ -1167,7 +1167,7 @@ static int getTableDes(
while ((row = taos_fetch_row(res)) != NULL) {
tstrncpy(tableDes->cols[colCount].field,
(char *)row[TSDB_DESCRIBE_METRIC_FIELD_INDEX],
- min(TSDB_COL_NAME_LEN + 1,
+ min(TSDB_COL_NAME_LEN,
fields[TSDB_DESCRIBE_METRIC_FIELD_INDEX].bytes + 1));
tstrncpy(tableDes->cols[colCount].type,
(char *)row[TSDB_DESCRIBE_METRIC_TYPE_INDEX],
@@ -1226,7 +1226,6 @@ static int getTableDes(
int32_t* length = taos_fetch_lengths(res);
- //int32_t* length = taos_fetch_lengths(tmpResult);
switch (fields[0].type) {
case TSDB_DATA_TYPE_BOOL:
sprintf(tableDes->cols[i].value, "%d",
@@ -1261,11 +1260,11 @@ static int getTableDes(
case TSDB_DATA_TYPE_BINARY:
memset(tableDes->cols[i].value, 0,
sizeof(tableDes->cols[i].value));
- int len = strlen((char *)row[0]);
+ int len = strlen((char *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
// FIXME for long value
if (len < (COL_VALUEBUF_LEN - 2)) {
converStringToReadable(
- (char *)row[0],
+ (char *)row[TSDB_SHOW_TABLES_NAME_INDEX],
length[0],
tableDes->cols[i].value,
len);
@@ -1284,27 +1283,44 @@ static int getTableDes(
break;
case TSDB_DATA_TYPE_NCHAR:
- {
- memset(tableDes->cols[i].value, 0, sizeof(tableDes->cols[i].note));
- char tbuf[COMMAND_SIZE-2]; // need reserve 2 bytes for ' '
+ memset(tableDes->cols[i].value, 0,
+ sizeof(tableDes->cols[i].note));
+ int nlen = strlen((char *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
+ if (nlen < (COL_VALUEBUF_LEN-2)) {
+ char tbuf[COL_VALUEBUF_LEN-2]; // need reserve 2 bytes for ' '
convertNCharToReadable(
(char *)row[TSDB_SHOW_TABLES_NAME_INDEX],
- length[0], tbuf, COMMAND_SIZE-2);
- sprintf(tableDes->cols[i].value, "%.*s", COL_VALUEBUF_LEN-1, tbuf);
- break;
+ length[0], tbuf, COL_VALUEBUF_LEN-2);
+ sprintf(tableDes->cols[i].value, "%s", tbuf);
+ } else {
+ tableDes->cols[i].var_value = calloc(1, nlen * 4);
+ if (tableDes->cols[i].var_value == NULL) {
+ errorPrint("%s() LN%d, memory alalocation failed!\n",
+ __func__, __LINE__);
+ taos_free_result(res);
+ return -1;
+ }
+ converStringToReadable(
+ (char *)row[TSDB_SHOW_TABLES_NAME_INDEX],
+ length[0],
+ (char *)(tableDes->cols[i].var_value), nlen);
}
+ break;
case TSDB_DATA_TYPE_TIMESTAMP:
- sprintf(tableDes->cols[i].value, "%" PRId64 "", *(int64_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
+ sprintf(tableDes->cols[i].value, "%" PRId64 "",
+ *(int64_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
#if 0
if (!g_args.mysqlFlag) {
- sprintf(tableDes->cols[i].value, "%" PRId64 "", *(int64_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
+ sprintf(tableDes->cols[i].value, "%" PRId64 "",
+ *(int64_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
} else {
char buf[64] = "\0";
int64_t ts = *((int64_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
time_t tt = (time_t)(ts / 1000);
struct tm *ptm = localtime(&tt);
strftime(buf, 64, "%y-%m-%d %H:%M:%S", ptm);
- sprintf(tableDes->cols[i].value, "\'%s.%03d\'", buf, (int)(ts % 1000));
+ sprintf(tableDes->cols[i].value, "\'%s.%03d\'", buf,
+ (int)(ts % 1000));
}
#endif
break;
diff --git a/src/plugins/CMakeLists.txt b/src/plugins/CMakeLists.txt
index 66d15e48ed13e1dce9a38bd2db65e9e610209e50..f2889b73daa8101200b31328b27d766734049190 100644
--- a/src/plugins/CMakeLists.txt
+++ b/src/plugins/CMakeLists.txt
@@ -10,42 +10,42 @@ IF (TD_BUILD_HTTP)
ADD_SUBDIRECTORY(http)
ELSE ()
MESSAGE("")
- MESSAGE("${Green} use blm3 as httpd ${ColourReset}")
+ MESSAGE("${Green} use taosadapter as httpd ${ColourReset}")
EXECUTE_PROCESS(
- COMMAND cd ${CMAKE_CURRENT_SOURCE_DIR}/blm3
+ COMMAND cd ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter
)
EXECUTE_PROCESS(
COMMAND git rev-parse --short HEAD
RESULT_VARIABLE commit_sha1
- OUTPUT_VARIABLE blm3_commit_sha1
+ OUTPUT_VARIABLE taosadapter_commit_sha1
)
- IF ("${blm3_commit_sha1}" STREQUAL "")
- SET(blm3_commit_sha1 "unknown")
+ IF ("${taosadapter_commit_sha1}" STREQUAL "")
+ SET(taosadapter_commit_sha1 "unknown")
ELSE ()
- STRING(SUBSTRING "${blm3_commit_sha1}" 0 7 blm3_commit_sha1)
- STRING(STRIP "${blm3_commit_sha1}" blm3_commit_sha1)
+ STRING(SUBSTRING "${taosadapter_commit_sha1}" 0 7 taosadapter_commit_sha1)
+ STRING(STRIP "${taosadapter_commit_sha1}" taosadapter_commit_sha1)
ENDIF ()
- MESSAGE("${Green} blm3 commit: ${blm3_commit_sha1} ${ColourReset}")
+ MESSAGE("${Green} taosadapter commit: ${taosadapter_commit_sha1} ${ColourReset}")
EXECUTE_PROCESS(
COMMAND cd ..
)
include(ExternalProject)
- ExternalProject_Add(blm3
- PREFIX "blm3"
- SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/blm3
+ ExternalProject_Add(taosadapter
+ PREFIX "taosadapter"
+ SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter
BUILD_ALWAYS off
DEPENDS taos
BUILD_IN_SOURCE 1
- CONFIGURE_COMMAND cmake -E echo "blm3 no need cmake to config"
+ CONFIGURE_COMMAND cmake -E echo "taosadapter does not need cmake to configure"
PATCH_COMMAND
COMMAND git clean -f -d
- BUILD_COMMAND CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../inc CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib go build -ldflags "-s -w -X github.com/taosdata/blm3/version.CommitID=${blm3_commit_sha1}"
+ BUILD_COMMAND CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../inc CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib go build -ldflags "-s -w -X github.com/taosdata/taosadapter/version.CommitID=${taosadapter_commit_sha1}"
INSTALL_COMMAND
- COMMAND curl -sL https://github.com/upx/upx/releases/download/v3.96/upx-3.96-amd64_linux.tar.xz -o upx.tar.xz && tar xvJf upx.tar.xz --strip-components 1 && ./upx blm3
- COMMAND cmake -E copy blm3 ${CMAKE_BINARY_DIR}/build/bin
+ COMMAND curl -sL https://github.com/upx/upx/releases/download/v3.96/upx-3.96-amd64_linux.tar.xz -o upx.tar.xz && tar xvJf upx.tar.xz --strip-components 1 && ./upx taosadapter
+ COMMAND cmake -E copy taosadapter ${CMAKE_BINARY_DIR}/build/bin
COMMAND cmake -E make_directory ${CMAKE_BINARY_DIR}/test/cfg/
- COMMAND cmake -E copy ./example/config/blm.toml ${CMAKE_BINARY_DIR}/test/cfg/
- COMMAND cmake -E copy ./blm3.service ${CMAKE_BINARY_DIR}/test/cfg/
+ COMMAND cmake -E copy ./example/config/taosadapter.toml ${CMAKE_BINARY_DIR}/test/cfg/
+ COMMAND cmake -E copy ./taosadapter.service ${CMAKE_BINARY_DIR}/test/cfg/
)
ENDIF ()
diff --git a/src/plugins/blm3 b/src/plugins/blm3
deleted file mode 160000
index 598cb96ee60ec6a16c5b8b07ea8ca9748799e7e1..0000000000000000000000000000000000000000
--- a/src/plugins/blm3
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 598cb96ee60ec6a16c5b8b07ea8ca9748799e7e1
diff --git a/src/plugins/taosadapter b/src/plugins/taosadapter
new file mode 160000
index 0000000000000000000000000000000000000000..6397bf5963f62f0aa5c4b9b961b16ed5c62579f1
--- /dev/null
+++ b/src/plugins/taosadapter
@@ -0,0 +1 @@
+Subproject commit 6397bf5963f62f0aa5c4b9b961b16ed5c62579f1
diff --git a/src/query/inc/qExecutor.h b/src/query/inc/qExecutor.h
index 7867c1692d48548e4aea720bf8af9d67bd25ed9a..ed54723adeafdcd3cdff8b438d2f823a73a04a33 100644
--- a/src/query/inc/qExecutor.h
+++ b/src/query/inc/qExecutor.h
@@ -355,16 +355,16 @@ enum OPERATOR_TYPE_E {
typedef struct SOperatorInfo {
uint8_t operatorType;
- bool blockingOptr; // block operator or not
- uint8_t status; // denote if current operator is completed
- int32_t numOfOutput; // number of columns of the current operator results
- char *name; // name, used to show the query execution plan
- void *info; // extension attribution
+ bool blockingOptr; // block operator or not
+ uint8_t status; // denote if current operator is completed
+ int32_t numOfOutput; // number of columns of the current operator results
+ char *name; // name, used to show the query execution plan
+ void *info; // extension attribution
SExprInfo *pExpr;
SQueryRuntimeEnv *pRuntimeEnv;
- struct SOperatorInfo **upstream; // upstream pointer list
- int32_t numOfUpstream; // number of upstream. The value is always ONE expect for join operator
+ struct SOperatorInfo **upstream; // upstream pointer list
+ int32_t numOfUpstream; // number of upstream operators; always ONE except for the join operator
__operator_fn_t exec;
__optr_cleanup_fn_t cleanup;
} SOperatorInfo;
diff --git a/src/query/src/qAggMain.c b/src/query/src/qAggMain.c
index 3240368aa8c49db8377a59a26f2ca5518cd1dd99..a65798fcce3cbf5e03ac10e550a55d43abaa1703 100644
--- a/src/query/src/qAggMain.c
+++ b/src/query/src/qAggMain.c
@@ -3208,7 +3208,14 @@ static void deriv_function(SQLFunctionCtx *pCtx) {
default:
qError("error input type");
}
-
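+ // also run the tag projection for this block so tag/tbname columns are emitted alongside the derivative output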
+ if (notNullElems > 0) {
+ for (int t = 0; t < pCtx->tagInfo.numOfTagCols; ++t) {
+ SQLFunctionCtx* tagCtx = pCtx->tagInfo.pTagCtxList[t];
+ if (tagCtx->functionId == TSDB_FUNC_TAG_DUMMY) {
+ aAggs[TSDB_FUNC_TAGPRJ].xFunction(tagCtx);
+ }
+ }
+ }
GET_RES_INFO(pCtx)->numOfRes += notNullElems;
}
@@ -3383,6 +3390,12 @@ static void diff_function(SQLFunctionCtx *pCtx) {
*/
assert(pCtx->hasNull);
} else {
+ for (int t = 0; t < pCtx->tagInfo.numOfTagCols; ++t) {
+ SQLFunctionCtx* tagCtx = pCtx->tagInfo.pTagCtxList[t];
+ if (tagCtx->functionId == TSDB_FUNC_TAG_DUMMY) {
+ aAggs[TSDB_FUNC_TAGPRJ].xFunction(tagCtx);
+ }
+ }
int32_t forwardStep = (isFirstBlock) ? notNullElems - 1 : notNullElems;
GET_RES_INFO(pCtx)->numOfRes += forwardStep;
@@ -4758,8 +4771,6 @@ static void csum_function(SQLFunctionCtx *pCtx) {
TSKEY* pTimestamp = pCtx->ptsOutputBuf;
TSKEY* tsList = GET_TS_LIST(pCtx);
- qDebug("%p csum_function() size:%d, hasNull:%d", pCtx, pCtx->size, pCtx->hasNull);
-
for (; i < pCtx->size && i >= 0; i += step) {
char* pData = GET_INPUT_DATA(pCtx, i);
if (pCtx->hasNull && isNull(pData, pCtx->inputType)) {
@@ -4801,6 +4812,12 @@ static void csum_function(SQLFunctionCtx *pCtx) {
if (notNullElems == 0) {
assert(pCtx->hasNull);
} else {
+ for (int t = 0; t < pCtx->tagInfo.numOfTagCols; ++t) {
+ SQLFunctionCtx* tagCtx = pCtx->tagInfo.pTagCtxList[t];
+ if (tagCtx->functionId == TSDB_FUNC_TAG_DUMMY) {
+ aAggs[TSDB_FUNC_TAGPRJ].xFunction(tagCtx);
+ }
+ }
GET_RES_INFO(pCtx)->numOfRes += notNullElems;
GET_RES_INFO(pCtx)->hasResult = DATA_SET_FLAG;
}
@@ -4874,6 +4891,12 @@ static void mavg_function(SQLFunctionCtx *pCtx) {
if (notNullElems <= 0) {
assert(pCtx->hasNull);
} else {
+ for (int t = 0; t < pCtx->tagInfo.numOfTagCols; ++t) {
+ SQLFunctionCtx* tagCtx = pCtx->tagInfo.pTagCtxList[t];
+ if (tagCtx->functionId == TSDB_FUNC_TAG_DUMMY) {
+ aAggs[TSDB_FUNC_TAGPRJ].xFunction(tagCtx);
+ }
+ }
GET_RES_INFO(pCtx)->numOfRes += notNullElems;
GET_RES_INFO(pCtx)->hasResult = DATA_SET_FLAG;
}
diff --git a/src/query/src/qExecutor.c b/src/query/src/qExecutor.c
index 5b82c7d5ef64c8df15844d185666021b93d7fdf5..d77ef2e0ff6f9037fe7f68405fdf5c395f401911 100644
--- a/src/query/src/qExecutor.c
+++ b/src/query/src/qExecutor.c
@@ -4263,7 +4263,7 @@ static int32_t doCopyToSDataBlock(SQueryRuntimeEnv* pRuntimeEnv, SGroupResInfo*
int32_t start = 0;
int32_t step = -1;
- qDebug("QInfo:0x%"PRIx64" start to copy data from windowResInfo to output buf", GET_QID(pRuntimeEnv));
+ qDebug("QInfo:0x%"PRIx64" start to copy data from resultrowInfo to output buf", GET_QID(pRuntimeEnv));
assert(orderType == TSDB_ORDER_ASC || orderType == TSDB_ORDER_DESC);
if (orderType == TSDB_ORDER_ASC) {
@@ -5252,7 +5252,6 @@ SOperatorInfo* createTableScanOperator(void* pTsdbQueryHandle, SQueryRuntimeEnv*
pInfo->reverseTimes = 0;
pInfo->order = pRuntimeEnv->pQueryAttr->order.order;
pInfo->current = 0;
-// pInfo->prevGroupId = -1;
SOperatorInfo* pOperator = calloc(1, sizeof(SOperatorInfo));
pOperator->name = "TableScanOperator";
diff --git a/src/query/src/qUtil.c b/src/query/src/qUtil.c
index 3b5f6a9d439f827da66cf829050b4e1d4440d69d..a150f3a717afaa0ddd79a33a9c8be5285c327574 100644
--- a/src/query/src/qUtil.c
+++ b/src/query/src/qUtil.c
@@ -448,7 +448,9 @@ int32_t tsDescOrder(const void* p1, const void* p2) {
}
}
void orderTheResultRows(SQueryRuntimeEnv* pRuntimeEnv) {
__compar_fn_t fn = NULL;
if (pRuntimeEnv->pQueryAttr->order.order == TSDB_ORDER_ASC) {
fn = tsAscOrder;
diff --git a/src/tsdb/src/tsdbRead.c b/src/tsdb/src/tsdbRead.c
index 9ae73f9b92a5ef04c4b57c34ffa166e939b572e9..8a902851f6db75b4e82f366e892fa1123cea6bf9 100644
--- a/src/tsdb/src/tsdbRead.c
+++ b/src/tsdb/src/tsdbRead.c
@@ -3452,9 +3452,12 @@ void filterPrepare(void* expr, void* param) {
int dummy = -1;
SHashObj *pObj = NULL;
if (pInfo->sch.colId == TSDB_TBNAME_COLUMN_INDEX) {
- pObj = taosHashInit(256, taosGetDefaultHashFunction(pInfo->sch.type), true, false);
SArray *arr = (SArray *)(pCond->arr);
- for (size_t i = 0; i < taosArrayGetSize(arr); i++) {
+
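+ // size the tbname IN-list hash from the array length instead of a fixed 256 buckets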
+ size_t size = taosArrayGetSize(arr);
+ pObj = taosHashInit(size * 2, taosGetDefaultHashFunction(pInfo->sch.type), true, false);
+
+ for (size_t i = 0; i < size; i++) {
char* p = taosArrayGetP(arr, i);
strntolower_s(varDataVal(p), varDataVal(p), varDataLen(p));
taosHashPut(pObj, varDataVal(p), varDataLen(p), &dummy, sizeof(dummy));
@@ -3462,12 +3465,14 @@ void filterPrepare(void* expr, void* param) {
} else {
buildFilterSetFromBinary((void **)&pObj, pCond->pz, pCond->nLen);
}
+
pInfo->q = (char *)pObj;
} else if (pCond != NULL) {
uint32_t size = pCond->nLen * TSDB_NCHAR_SIZE;
if (size < (uint32_t)pSchema->bytes) {
size = pSchema->bytes;
}
+
// to make sure tonchar does not cause invalid write, since the '\0' needs at least sizeof(wchar_t) space.
pInfo->q = calloc(1, size + TSDB_NCHAR_SIZE + VARSTR_HEADER_SIZE);
tVariantDump(pCond, pInfo->q, pSchema->type, true);
@@ -3615,7 +3620,7 @@ SArray* createTableGroup(SArray* pTableList, STSchema* pTagSchema, SColIndex* pC
return pTableGroup;
}
-int32_t tsdbQuerySTableByTagCond(STsdbRepo* tsdb, uint64_t uid, TSKEY skey, const char* pTagCond, size_t len,
+int32_t tsdbQuerySTableByTagCond(STsdbRepo* tsdb, uint64_t uid, TSKEY skey, const char* pTagCond, size_t len,
STableGroupInfo* pGroupInfo, SColIndex* pColIndex, int32_t numOfCols) {
if (tsdbRLockRepoMeta(tsdb) < 0) goto _error;
@@ -3677,19 +3682,19 @@ int32_t tsdbQuerySTableByTagCond(STsdbRepo* tsdb, uint64_t uid, TSKEY skey, cons
} END_TRY
void *filterInfo = NULL;
-
+
ret = filterInitFromTree(expr, &filterInfo, 0);
if (ret != TSDB_CODE_SUCCESS) {
terrno = ret;
goto _error;
}
-
+
tsdbQueryTableList(pTable, res, filterInfo);
filterFreeInfo(filterInfo);
tExprTreeDestroy(expr, NULL);
-
+
pGroupInfo->numOfTables = (uint32_t)taosArrayGetSize(res);
pGroupInfo->pGroupList = createTableGroup(res, pTagSchema, pColIndex, numOfCols, skey);
@@ -3876,7 +3881,7 @@ void tsdbDestroyTableGroup(STableGroupInfo *pGroupList) {
static FORCE_INLINE int32_t tsdbGetTagDataFromId(void *param, int32_t id, void **data) {
STable* pTable = (STable*)(SL_GET_NODE_DATA((SSkipListNode *)param));
-
+
if (id == TSDB_TBNAME_COLUMN_INDEX) {
*data = TABLE_NAME(pTable);
} else {
@@ -3909,7 +3914,7 @@ static void queryIndexedColumn(SSkipList* pSkipList, void* filterInfo, SArray* r
iter = tSkipListCreateIterFromVal(pSkipList, startVal, pSkipList->type, TSDB_ORDER_DESC);
FILTER_CLR_FLAG(order, TSDB_ORDER_DESC);
}
-
+
while (tSkipListIterNext(iter)) {
SSkipListNode *pNode = tSkipListIterGet(iter);
@@ -3918,7 +3923,7 @@ static void queryIndexedColumn(SSkipList* pSkipList, void* filterInfo, SArray* r
filterSetColFieldData(filterInfo, pNode, tsdbGetTagDataFromId);
all = filterExecute(filterInfo, 1, &addToResult, NULL, 0);
}
-
+
char *pData = SL_GET_NODE_DATA(pNode);
tsdbDebug("filter index column, table:%s, result:%d", ((STable *)pData)->name->data, all);
@@ -3950,7 +3955,7 @@ static void queryIndexlessColumn(SSkipList* pSkipList, void* filterInfo, SArray*
SSkipListNode *pNode = tSkipListIterGet(iter);
filterSetColFieldData(filterInfo, pNode, tsdbGetTagDataFromId);
-
+
char *pData = SL_GET_NODE_DATA(pNode);
bool all = filterExecute(filterInfo, 1, &addToResult, NULL, 0);
@@ -3958,7 +3963,7 @@ static void queryIndexlessColumn(SSkipList* pSkipList, void* filterInfo, SArray*
if (all || (addToResult && *addToResult)) {
STableKeyInfo info = {.pTable = (void*)pData, .lastKey = TSKEY_INITIAL_VAL};
taosArrayPush(res, &info);
- }
+ }
}
tfree(addToResult);
@@ -3971,9 +3976,9 @@ static int32_t tsdbQueryTableList(STable* pTable, SArray* pRes, void* filterInfo
STSchema* pTSSchema = pTable->tagSchema;
bool indexQuery = false;
SSkipList *pSkipList = pTable->pIndex;
-
+
filterIsIndexedColumnQuery(filterInfo, pTSSchema->columns->colId, &indexQuery);
-
+
if (indexQuery) {
queryIndexedColumn(pSkipList, filterInfo, pRes);
} else {
diff --git a/tests/pytest/query/query.py b/tests/pytest/query/query.py
index 1d9d6e5ea4d5c41c13222ceb4e23b165f0062837..8e79fc5f686d77aa276da5bca7d9493ff1a00ffb 100644
--- a/tests/pytest/query/query.py
+++ b/tests/pytest/query/query.py
@@ -149,6 +149,22 @@ class TDTestCase:
tdLog.info("case for bug_6387")
self.bug_6387()
+ #JIRA TS-583
+ tdLog.info("case for JIRA TS-583")
+ tdSql.execute("create database test2")
+ tdSql.execute("use test2")
+ tdSql.execute("create table stb(ts timestamp, c1 int) tags(t1 binary(120))")
+ tdSql.execute("create table t0 using stb tags('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz')")
+
+ tdSql.query("show create table t0")
+ tdSql.checkRows(1)
+
+ tdSql.execute("create table stb2(ts timestamp, c1 int) tags(t1 nchar(120))")
+ tdSql.execute("create table t1 using stb2 tags('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz')")
+
+ tdSql.query("show create table t1")
+ tdSql.checkRows(1)
+
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
diff --git a/tests/pytest/util/dnodes.py b/tests/pytest/util/dnodes.py
index 2d854643b8a2980bf38d4aacc3c20ab8843abdf8..55c964c2557eff3204cf31bfb63cd5e3f3dd5501 100644
--- a/tests/pytest/util/dnodes.py
+++ b/tests/pytest/util/dnodes.py
@@ -275,7 +275,7 @@ class TDDnode:
tdLog.info("taosd found in %s" % buildPath)
binPath = buildPath + "/build/bin/taosd"
- blm3BinPath = buildPath + "/build/bin/blm3"
+ taosadapterBinPath = buildPath + "/build/bin/taosadapter"
if self.deployed == 0:
tdLog.exit("dnode:%d is not deployed" % (self.index))
@@ -291,10 +291,10 @@ class TDDnode:
print(cmd)
- blm3Cmd = "nohup %s > /dev/null 2>&1 & " % (
- blm3BinPath)
- if os.system(blm3Cmd) != 0:
- tdLog.exit(blm3Cmd)
+ taosadapterCmd = "nohup %s > /dev/null 2>&1 & " % (
+ taosadapterBinPath)
+ if os.system(taosadapterCmd) != 0:
+ tdLog.exit(taosadapterCmd)
if os.system(cmd) != 0:
tdLog.exit(cmd)
@@ -340,7 +340,7 @@ class TDDnode:
tdLog.info("taosd found in %s" % buildPath)
binPath = buildPath + "/build/bin/taosd"
- blm3BinPath = buildPath + "/build/bin/blm3"
+ taosadapterBinPath = buildPath + "/build/bin/taosadapter"
if self.deployed == 0:
tdLog.exit("dnode:%d is not deployed" % (self.index))
@@ -356,9 +356,9 @@ class TDDnode:
print(cmd)
- blm3Cmd = "%s > /dev/null 2>&1 & " % (blm3BinPath)
- if os.system(blm3Cmd) != 0:
- tdLog.exit(blm3Cmd)
+ taosadapterCmd = "%s > /dev/null 2>&1 & " % (taosadapterBinPath)
+ if os.system(taosadapterCmd) != 0:
+ tdLog.exit(taosadapterCmd)
if os.system(cmd) != 0:
tdLog.exit(cmd)
@@ -366,18 +366,18 @@ class TDDnode:
tdLog.debug("dnode:%d is running with %s " % (self.index, cmd))
def stop(self):
- blm3ToBeKilled = "blm3"
+ taosadapterToBeKilled = "taosadapter"
- blm3PsCmd = "ps -ef|grep -w %s| grep -v grep | awk '{print $2}'" % blm3ToBeKilled
- blm3ProcessID = subprocess.check_output(
- blm3PsCmd, shell=True).decode("utf-8")
+ taosadapterPsCmd = "ps -ef|grep -w %s| grep -v grep | awk '{print $2}'" % taosadapterToBeKilled
+ taosadapterProcessID = subprocess.check_output(
+ taosadapterPsCmd, shell=True).decode("utf-8")
- while(blm3ProcessID):
- blm3KillCmd = "kill -INT %s > /dev/null 2>&1" % blm3ProcessID
- os.system(blm3KillCmd)
+ while(taosadapterProcessID):
+ taosadapterKillCmd = "kill -INT %s > /dev/null 2>&1" % taosadapterProcessID
+ os.system(taosadapterKillCmd)
time.sleep(1)
- blm3ProcessID = subprocess.check_output(
- blm3PsCmd, shell=True).decode("utf-8")
+ taosadapterProcessID = subprocess.check_output(
+ taosadapterPsCmd, shell=True).decode("utf-8")
if self.valgrind == 0:
toBeKilled = "taosd"
diff --git a/tests/script/general/compute/csum.sim b/tests/script/general/compute/csum.sim
index 4d6f748566fdfedc3b6ac2ccf5fa6a22c7a5340f..b350e4f403a02702741e0f10ab91fb9799e776d3 100644
--- a/tests/script/general/compute/csum.sim
+++ b/tests/script/general/compute/csum.sim
@@ -100,6 +100,76 @@ if $data11 != -2 then
return -1
endi
+print ==========>TD10758
+sql create stable st(ts timestamp, c1 int) tags(t int);
+sql create table ct1 using st tags(1)
+sql insert into ct1 values(now, 1)(now+1s, 2)(now+2s, 3)
+sql select csum(c1),ts,tbname,t from ct1
+print $data10 , $data11 , $data12, $data13, $data14
+if $data13 != ct1 then
+ return -1
+endi
+if $data14 != 1 then
+ return -1
+endi
+sql select csum(c1),ts,tbname,t from st group by tbname
+print $data10 , $data11 , $data12, $data13, $data14
+if $data13 != ct1 then
+ return -1
+endi
+if $data14 != 1 then
+ return -1
+endi
+sql select diff(c1),ts,tbname,t from ct1
+print $data10 , $data11 , $data12, $data13, $data14
+if $data13 != ct1 then
+ return -1
+endi
+if $data14 != 1 then
+ return -1
+endi
+sql select diff(c1),ts,tbname,t from st group by tbname
+print $data10 , $data11 , $data12, $data13, $data14
+if $data13 != ct1 then
+ return -1
+endi
+if $data14 != 1 then
+ return -1
+endi
+sql select mavg(c1,2),ts,tbname,t from ct1
+print $data10 , $data11 , $data12, $data13, $data14
+if $data13 != ct1 then
+ return -1
+endi
+if $data14 != 1 then
+ return -1
+endi
+sql select mavg(c1,2),ts,tbname,t from st group by tbname
+print $data10 , $data11 , $data12, $data13, $data14
+if $data13 != ct1 then
+ return -1
+endi
+if $data14 != 1 then
+ return -1
+endi
+sql select derivative(c1,1s,0),ts,tbname,t from ct1
+print $data10 , $data11 , $data12, $data13, $data14
+if $data13 != ct1 then
+ return -1
+endi
+if $data14 != 1 then
+ return -1
+endi
+sql select derivative(c1,1s,0),ts,tbname,t from st group by tbname
+print $data10 , $data11 , $data12, $data13, $data14
+if $data13 != ct1 then
+ return -1
+endi
+if $data14 != 1 then
+ return -1
+endi
+
+
print =============== clear
sql drop database $db
sql show databases