diff --git a/Jenkinsfile b/Jenkinsfile
index 35a2bf82606313fe015457cda3a6a57c23e2ef4d..03af9ba24408deb9bfa1a5baa1e924b262ccbd77 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -367,6 +367,7 @@ pipeline {
else{
sh'''
cd ${WKC}
+ git reset --hard HEAD~10
git fetch
git checkout ${CHANGE_BRANCH}
git pull
diff --git a/cmake/define.inc b/cmake/define.inc
index 9e39dc9463f63c935ec67dbb712377dc3d0ac96f..00fd015b4ccfc5e06e40f963d8f19598b071a74e 100755
--- a/cmake/define.inc
+++ b/cmake/define.inc
@@ -136,7 +136,6 @@ IF (TD_ALPINE)
MESSAGE(STATUS "aplhine is defined")
ENDIF ()
-
IF ("${BUILD_HTTP}" STREQUAL "")
IF (TD_LINUX)
IF (TD_ARM_32)
@@ -153,6 +152,9 @@ ELSEIF (${BUILD_HTTP} MATCHES "false")
SET(TD_BUILD_HTTP FALSE)
ELSEIF (${BUILD_HTTP} MATCHES "true")
SET(TD_BUILD_HTTP TRUE)
+ELSEIF (${BUILD_HTTP} MATCHES "internal")
+ SET(TD_BUILD_HTTP FALSE)
+ SET(TD_BUILD_TAOSA_INTERNAL TRUE)
ELSE ()
SET(TD_BUILD_HTTP TRUE)
ENDIF ()
diff --git a/deps/jemalloc b/deps/jemalloc
index 9015e129bd7de389afa4196495451669700904d0..ea6b3e973b477b8061e0076bb257dbd7f3faa756 160000
--- a/deps/jemalloc
+++ b/deps/jemalloc
@@ -1 +1 @@
-Subproject commit 9015e129bd7de389afa4196495451669700904d0
+Subproject commit ea6b3e973b477b8061e0076bb257dbd7f3faa756
diff --git a/documentation20/cn/03.architecture/docs.md b/documentation20/cn/03.architecture/docs.md
index 5eafea00c8ca84dff466d835e3016d5818e2a1d5..84a3e45014eb08f0e1b2333f5478db61e5a784e7 100644
--- a/documentation20/cn/03.architecture/docs.md
+++ b/documentation20/cn/03.architecture/docs.md
@@ -210,7 +210,7 @@ TDengine 分布式架构的逻辑结构图如下:
![TDengine典型的操作流程](../images/architecture/message.png)
图 2 TDengine 典型的操作流程
-1. 应用通过 JDBC、ODBC 或其他API接口发起插入数据的请求。
+1. 应用通过 JDBC 或其他API接口发起插入数据的请求。
2. taosc 会检查缓存,看是否保存有该表的 meta data。如果有,直接到第 4 步。如果没有,taosc 将向 mnode 发出 get meta-data 请求。
3. mnode 将该表的 meta-data 返回给 taosc。Meta-data 包含有该表的 schema, 而且还有该表所属的 vgroup信息(vnode ID 以及所在的 dnode 的 End Point,如果副本数为 N,就有 N 组 End Point)。如果 taosc 迟迟得不到 mnode 回应,而且存在多个 mnode, taosc 将向下一个 mnode 发出请求。
4. taosc 向 master vnode 发起插入请求。
@@ -301,20 +301,6 @@ Master Vnode 遵循下面的写入流程:
与 master vnode 相比,slave vnode 不存在转发环节,也不存在回复确认环节,少了两步。但写内存与 WAL 是完全一样的。
-### 异地容灾、IDC迁移
-
-从上述 master 和 slave 流程可以看出,TDengine 采用的是异步复制的方式进行数据同步。这种方式能够大幅提高写入性能,网络延时对写入速度不会有大的影响。通过配置每个物理节点的IDC和机架号,可以保证对于一个虚拟节点组,虚拟节点由来自不同 IDC、不同机架的物理节点组成,从而实现异地容灾。因此 TDengine 原生支持异地容灾,无需再使用其他工具。
-
-另一方面,TDengine 支持动态修改副本数,一旦副本数增加,新加入的虚拟节点将立即进入数据同步流程,同步结束后,新加入的虚拟节点即可提供服务。而在同步过程中,master 以及其他已经同步的虚拟节点都可以对外提供服务。利用这一特性,TDengine 可以实现无服务中断的 IDC 机房迁移。只需要将新 IDC 的物理节点加入现有集群,等数据同步完成后,再将老的 IDC 的物理节点从集群中剔除即可。
-
-但是,这种异步复制的方式,存在极小的时间窗口,丢失写入的数据。具体场景如下:
-
-1. master vnode 完成了它的 5 步操作,已经给 APP 确认写入成功,然后宕机
-2. slave vnode 收到写入请求后,在第 2 步写入日志之前,处理失败
-3. slave vnode 将成为新的 master,从而丢失了一条记录
-
-理论上,只要是异步复制,就无法保证 100% 不丢失。但是这个窗口极小,master 与 slave 要同时发生故障,而且发生在刚给应用确认写入成功之后。
-
### 主从选择
Vnode 会保持一个数据版本号(version),对内存数据进行持久化存储时,对该版本号也进行持久化存储。每个数据更新操作,无论是采集的时序数据还是元数据,这个版本号将增加 1。
diff --git a/documentation20/cn/08.connector/01.java/docs.md b/documentation20/cn/08.connector/01.java/docs.md
index dd3de6b0171212509c730364651af023dc50681d..f7d002bac4727cd58ea26e7fd201bcac26a2846f 100644
--- a/documentation20/cn/08.connector/01.java/docs.md
+++ b/documentation20/cn/08.connector/01.java/docs.md
@@ -53,33 +53,38 @@ INSERT INTO test.t1 USING test.weather (ts, temperature) TAGS('beijing') VALUES(
## TAOS-JDBCDriver 版本以及支持的 TDengine 版本和 JDK 版本
-| taos-jdbcdriver 版本 | TDengine 版本 | JDK 版本 |
-| -------------------- | ----------------- | -------- |
-| 2.0.33 - 2.0.34 | 2.0.3.0 及以上 | 1.8.x |
-| 2.0.31 - 2.0.32 | 2.1.3.0 及以上 | 1.8.x |
+| taos-jdbcdriver 版本 | TDengine 版本 | JDK 版本 |
+|--------------------|--------------------| -------- |
+| 2.0.36 | 2.4.0 及以上 | 1.8.x |
+| 2.0.35 | 2.3.0 及以上 | 1.8.x |
+| 2.0.33 - 2.0.34 | 2.0.3.0 及以上 | 1.8.x |
+| 2.0.31 - 2.0.32 | 2.1.3.0 及以上 | 1.8.x |
| 2.0.22 - 2.0.30 | 2.0.18.0 - 2.1.2.x | 1.8.x |
-| 2.0.12 - 2.0.21 | 2.0.8.0 - 2.0.17.x | 1.8.x |
-| 2.0.4 - 2.0.11 | 2.0.0.0 - 2.0.7.x | 1.8.x |
-| 1.0.3 | 1.6.1.x 及以上 | 1.8.x |
-| 1.0.2 | 1.6.1.x 及以上 | 1.8.x |
-| 1.0.1 | 1.6.1.x 及以上 | 1.8.x |
+| 2.0.12 - 2.0.21 | 2.0.8.0 - 2.0.17.x | 1.8.x |
+| 2.0.4 - 2.0.11 | 2.0.0.0 - 2.0.7.x | 1.8.x |
+| 1.0.3 | 1.6.1.x 及以上 | 1.8.x |
+| 1.0.2 | 1.6.1.x 及以上 | 1.8.x |
+| 1.0.1 | 1.6.1.x 及以上 | 1.8.x |
## TDengine DataType 和 Java DataType
TDengine 目前支持时间戳、数字、字符、布尔类型,与 Java 对应类型转换如下:
| TDengine DataType | JDBCType (driver 版本 < 2.0.24) | JDBCType (driver 版本 >= 2.0.24) |
-| ----------------- | ------------------ | ------------------ |
-| TIMESTAMP | java.lang.Long | java.sql.Timestamp |
-| INT | java.lang.Integer | java.lang.Integer |
-| BIGINT | java.lang.Long | java.lang.Long |
-| FLOAT | java.lang.Float | java.lang.Float |
-| DOUBLE | java.lang.Double | java.lang.Double |
-| SMALLINT | java.lang.Short | java.lang.Short |
-| TINYINT | java.lang.Byte | java.lang.Byte |
-| BOOL | java.lang.Boolean | java.lang.Boolean |
-| BINARY | java.lang.String | byte array |
-| NCHAR | java.lang.String | java.lang.String |
+|-------------------|-------------------------------| ------------------ |
+| TIMESTAMP | java.lang.Long | java.sql.Timestamp |
+| INT | java.lang.Integer | java.lang.Integer |
+| BIGINT | java.lang.Long | java.lang.Long |
+| FLOAT | java.lang.Float | java.lang.Float |
+| DOUBLE | java.lang.Double | java.lang.Double |
+| SMALLINT | java.lang.Short | java.lang.Short |
+| TINYINT | java.lang.Byte | java.lang.Byte |
+| BOOL | java.lang.Boolean | java.lang.Boolean |
+| BINARY | java.lang.String | byte array |
+| NCHAR | java.lang.String | java.lang.String |
+| JSON | - | java.lang.String |
+
+注意:JSON类型仅在tag中支持。
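+
+下面给出一个示意性的 SQL 例子(表名、标签名均为示例假设),说明 JSON 类型只能作为标签(tag)使用:
+
+```mysql
+-- info 为 JSON 类型的标签;普通列不支持 JSON 类型
+CREATE STABLE weather_js (ts TIMESTAMP, temperature FLOAT) TAGS (info JSON);
+CREATE TABLE t_js_1 USING weather_js TAGS ('{"city": "beijing"}');
+INSERT INTO t_js_1 VALUES (now, 23.5);
+```
+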
## 安装Java Connector
@@ -800,17 +805,16 @@ Query OK, 1 row(s) in set (0.000141s)
请参考:[JDBC example](https://github.com/taosdata/TDengine/tree/develop/tests/examples/JDBC)
## 常见问题
-
+* 使用Statement的addBatch和executeBatch来执行“批量写入/更新”,为什么没有带来性能上的提升?
+  **原因**:TDengine 的 JDBC 实现中,通过 addBatch 方法提交的 SQL 语句,会按照添加的顺序依次执行,这种方式没有减少与服务端的交互次数,因此不会带来性能上的提升。
+  **解决方法**:1. 在一条 insert 语句中拼接多个 values 值(拼接示例见下);2. 使用多线程的方式并发插入;3. 使用参数绑定的写入方式。
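+
+  下面给出“拼接多个 values 值”的一个示意写法(沿用本文前面的 test.weather 示例表):
+
+  ```mysql
+  INSERT INTO test.t1 USING test.weather (ts, temperature) TAGS('beijing')
+      VALUES (now, 23.5) (now+1s, 23.6) (now+2s, 23.7);
+  ```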
+
* java.lang.UnsatisfiedLinkError: no taos in java.library.path
-
**原因**:程序没有找到依赖的本地函数库 taos。
-
**解决方法**:Windows 下可以将 C:\TDengine\driver\taos.dll 拷贝到 C:\Windows\System32\ 目录下,Linux 下将建立如下软链 `ln -s /usr/local/taos/driver/libtaos.so.x.x.x.x /usr/lib/libtaos.so` 即可。
* java.lang.UnsatisfiedLinkError: taos.dll Can't load AMD 64 bit on a IA 32-bit platform
-
**原因**:目前 TDengine 只支持 64 位 JDK。
-
**解决方法**:重新安装 64 位 JDK。
* 其它问题请参考 [Issues](https://github.com/taosdata/TDengine/issues)
diff --git a/documentation20/cn/08.connector/docs.md b/documentation20/cn/08.connector/docs.md
index ee5d8e9f825e12bd65331736ecb23db62e5fe388..ecd9770e6a52ce06440d4788daaa527b194b5fef 100644
--- a/documentation20/cn/08.connector/docs.md
+++ b/documentation20/cn/08.connector/docs.md
@@ -749,6 +749,49 @@ conn.execute("drop database pytest")
conn.close()
```
+#### JSON 类型
+
+从 `taospy` `v2.2.0` 版本开始,Python 连接器支持 JSON 数据类型的标签(要求 TDengine 为 Beta 版 2.3.5+ 或稳定版 2.4.0+)。
+
+创建一个使用JSON类型标签的超级表及其子表:
+
+```python
+# encoding:UTF-8
+import taos
+
+conn = taos.connect()
+conn.execute("create database if not exists py_test_json_type")
+conn.execute("use py_test_json_type")
+
+conn.execute("create stable s1 (ts timestamp, v1 int) tags (info json)")
+conn.execute("create table s1_1 using s1 tags ('{\"k1\": \"v1\"}')")
+```
+
+查询子表标签及表名:
+
+```python
+tags = conn.query("select info, tbname from s1").fetch_all_into_dict()
+tags
+```
+
+`tags` 内容为:
+
+```python
+[{'info': '{"k1":"v1"}', 'tbname': 's1_1'}]
+```
+
+获取 JSON 中某值:
+
+```python
+k1 = conn.query("select info->'k1' as k1 from s1").fetch_all_into_dict()
+"""
+>>> k1
+[{'k1': '"v1"'}]
+"""
+```
+
+更多JSON类型的操作方式请参考 [JSON 类型使用说明](https://www.taosdata.com/cn/documentation/taos-sql)。
+
#### 关于纳秒 (nanosecond) 在 Python 连接器中的说明
由于目前 Python 对 nanosecond 支持的不完善(参见链接 1. 2. ),目前的实现方式是在 nanosecond 精度时返回整数,而不是 ms 和 us 返回的 datetime 类型,应用开发者需要自行处理,建议使用 pandas 的 to_datetime()。未来如果 Python 正式完整支持了纳秒,涛思数据可能会修改相关接口。
diff --git a/documentation20/cn/09.connections/docs.md b/documentation20/cn/09.connections/docs.md
index d79d07661b95f8a807bff226185d3804e0ce0f4d..69825e655940045669fedeafdc9ab709c7ed15d9 100644
--- a/documentation20/cn/09.connections/docs.md
+++ b/documentation20/cn/09.connections/docs.md
@@ -94,78 +94,3 @@ GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS=tdengine-datasource
在 2.3.3.0 及以上版本,您可以导入 TDinsight Dashboard (Grafana Dashboard ID: [15167](https://grafana.com/grafana/dashboards/15167)) 作为 TDengine 集群的监控可视化工具。安装和使用说明请见 [TDinsight 用户手册](https://www.taosdata.com/cn/documentation/tools/insight)。
-## MATLAB
-
-MATLAB 可以通过安装包内提供的 JDBC Driver 直接连接到 TDengine 获取数据到本地工作空间。
-
-### MATLAB 的 JDBC 接口适配
-
-MATLAB 的适配有下面几个步骤,下面以 Windows 10 上适配 MATLAB2021a 为例:
-
-- 将 TDengine 客户端安装路径下的 `\TDengine\connector\jdbc的驱动程序taos-jdbcdriver-2.0.25-dist.jar` 拷贝到 `${matlab_root}\MATLAB\R2021a\java\jar\toolbox`。
-- 将 TDengine 安装包内的 `taos.lib` 文件拷贝至 `${matlab_root_dir}\MATLAB\R2021\lib\win64`。
-- 将新添加的驱动 jar 包加入 MATLAB 的 classpath。在 `${matlab_root_dir}\MATLAB\R2021a\toolbox\local\classpath.txt` 文件中添加下面一行:
-```
-$matlabroot/java/jar/toolbox/taos-jdbcdriver-2.0.25-dist.jar
-```
-- 在 `${user_home}\AppData\Roaming\MathWorks\MATLAB\R2021a\` 下添加一个文件 `javalibrarypath.txt`,并在该文件中添加 taos.dll 的路径,比如您的 taos.dll 是在安装时拷贝到了 `C:\Windows\System32` 下,那么就应该在 `javalibrarypath.txt` 中添加如下一行:
-```
-C:\Windows\System32
-```
-
-### 在 MATLAB 中连接 TDengine 获取数据
-
-在成功进行了上述配置后,打开 MATLAB。
-
-- 创建一个连接:
-```matlab
-conn = database(‘test’, ‘root’, ‘taosdata’, ‘com.taosdata.jdbc.TSDBDriver’, ‘jdbc:TSDB://192.168.1.94:6030/’)
-```
-- 执行一次查询:
-```matlab
-sql0 = [‘select * from tb’]
-data = select(conn, sql0);
-```
-- 插入一条记录:
-```matlab
-sql1 = [‘insert into tb values (now, 1)’]
-exec(conn, sql1)
-```
-
-更多例子细节请参考安装包内 `examples\Matlab\TDengineDemo.m` 文件。
-
-## R
-
-R语言支持通过JDBC接口来连接TDengine数据库。首先需要安装R语言的JDBC包。启动R语言环境,然后执行以下命令安装R语言的JDBC支持库:
-
-```R
-install.packages('RJDBC', repos='http://cran.us.r-project.org')
-```
-
-安装完成以后,通过执行`library('RJDBC')`命令加载 _RJDBC_ 包:
-
-然后加载TDengine的JDBC驱动:
-
-```R
-drv<-JDBC("com.taosdata.jdbc.TSDBDriver","JDBCDriver-2.0.0-dist.jar", identifier.quote="\"")
-```
-如果执行成功,不会出现任何错误信息。之后通过以下命令尝试连接数据库:
-
-```R
-conn<-dbConnect(drv,"jdbc:TSDB://192.168.0.1:0/?user=root&password=taosdata","root","taosdata")
-```
-
-注意将上述命令中的IP地址替换成正确的IP地址。如果没有任务错误的信息,则连接数据库成功,否则需要根据错误提示调整连接的命令。TDengine支持以下的 _RJDBC_ 包中函数:
-
-
-- dbWriteTable(conn, "test", iris, overwrite=FALSE, append=TRUE):将数据框iris写入表test中,overwrite必须设置为false,append必须设为TRUE,且数据框iris要与表test的结构一致。
-- dbGetQuery(conn, "select count(*) from test"):查询语句。
-- dbSendUpdate(conn, "use db"):执行任何非查询sql语句。例如dbSendUpdate(conn, "use db"), 写入数据dbSendUpdate(conn, "insert into t1 values(now, 99)")等。
-- dbReadTable(conn, "test"):读取表test中数据。
-- dbDisconnect(conn):关闭连接。
-- dbRemoveTable(conn, "test"):删除表test。
-
-TDengine客户端暂不支持如下函数:
-- dbExistsTable(conn, "test"):是否存在表test。
-- dbListTables(conn):显示连接中的所有表。
-
diff --git a/documentation20/cn/11.administrator/docs.md b/documentation20/cn/11.administrator/docs.md
index 7b7b2262d470f5226eef780a9971894a65663579..b24b432fd4eee893b077a8a85306bfa9642851f5 100644
--- a/documentation20/cn/11.administrator/docs.md
+++ b/documentation20/cn/11.administrator/docs.md
@@ -222,7 +222,11 @@ taosd -C
| 104 | maxWildCardsLength | | **C** | bytes | 设定 LIKE 算子的通配符字符串允许的最大长度 | 0-16384 | 100 | 2.1.6.1 版本新增。 |
| 105 | compressColData | | **S** | bytes | 客户端与服务器之间进行消息通讯过程中,对服务器端查询结果进行列压缩的阈值。 | 0: 对所有查询结果均进行压缩 >0: 查询结果中任意列大小超过该值的消息才进行压缩 -1: 不压缩 | -1 | 2.3.0.0 版本新增。 |
| 106 | tsdbMetaCompactRatio | | **C** | | tsdb meta文件中冗余数据超过多少阈值,开启meta文件的压缩功能 | 0:不开启,[1-100]:冗余数据比例 | 0 | |
-| 107 | rpcForceTcp | | **SC**| | 强制使用TCP传输 | 0: 不开启 1: 开启 | 0 | 在网络比较差的环境中,建议开启。2.0版本新增。|
+| 107 | rpcForceTcp | | **SC**| | 强制使用TCP传输 | 0: 不开启 1: 开启 | 0 | 在网络比较差的环境中,建议开启。2.0版本新增。|
+| 108 | maxNumOfDistinctRes | | **S**| | 允许返回的 distinct 结果最大行数 | 最大值 1 亿 | 10 万 | 2.3 版本新增。|
+| 109 | clientMerge | | **C**| | 是否允许客户端对写入数据去重 | 0:不开启,1:开启 | 0 | 2.3 版本新增。|
+| 110 | httpDBNameMandatory | | **S**| | 是否要求在 URL 中指定数据库名称 | 0:不开启,1:开启 | 0 | 2.3 版本新增。|
+| 111 | maxRegexStringLen | | **C**| | 正则表达式最大允许长度 | 最大长度 16384 | 128 | 2.3 版本新增。|
**注意:**对于端口,TDengine会使用从serverPort起13个连续的TCP和UDP端口号,请务必在防火墙打开。因此如果是缺省配置,需要打开从6030到6042共13个端口,而且必须TCP和UDP都打开。(详细的端口情况请参见 [TDengine 2.0 端口说明](https://www.taosdata.com/cn/documentation/faq#port))
@@ -615,35 +619,6 @@ COMPACT 命令对指定的一个或多个 VGroup 启动碎片重整,系统会
需要注意的是,碎片重整操作会大幅消耗磁盘 I/O。因此在重整进行期间,有可能会影响节点的写入和查询性能,甚至在极端情况下导致短时间的阻写。
-
-## 浮点数有损压缩
-
-在车联网等物联网智能应用场景中,经常会采集和存储海量的浮点数类型数据,如果能更高效地对此类数据进行压缩,那么不但能够节省数据存储的硬件资源,也能够因降低磁盘 I/O 数据量而提升系统性能表现。
-
-从 2.1.6.0 版本开始,TDengine 提供一种名为 TSZ 的新型数据压缩算法,无论设置为有损压缩还是无损压缩,都能够显著提升浮点数类型数据的压缩率表现。目前该功能以可选模块的方式进行发布,可以通过添加特定的编译参数来启用该功能(也即常规安装包中暂未包含该功能)。
-
-**需要注意的是,该功能一旦启用,效果是全局的,也即会对系统中所有的 FLOAT、DOUBLE 类型的数据生效。同时,在启用了浮点数有损压缩功能后写入的数据,也无法被未启用该功能的版本载入,并有可能因此而导致数据库服务报错退出。**
-
-### 创建支持 TSZ 压缩算法的 TDengine 版本
-
-TSZ 模块保存在单独的代码仓库 https://github.com/taosdata/TSZ 中。可以通过以下步骤创建包含此模块的 TDengine 版本:
-1. TDengine 中的插件目前只支持通过 SSH 的方式拉取和编译,所以需要自己先配置好通过 SSH 拉取 GitHub 代码的环境。
-2. `git clone git@github.com:taosdata/TDengine -b your_branchname --recurse-submodules` 通过 `--recurse-submodules` 使依赖模块的源代码可以被一并下载。
-3. `mkdir debug && cd debug` 进入单独的编译目录。
-4. `cmake .. -DTSZ_ENABLED=true` 其中参数 `-DTSZ_ENABLED=true` 表示在编译过程中加入对 TSZ 插件功能的支持。如果成功激活对 TSZ 模块的编译,那么 CMAKE 过程中也会显示 `build with TSZ enabled` 字样。
-5. 编译成功后,包含 TSZ 浮点压缩功能的插件便已经编译进了 TDengine 中了,可以通过调整 taos.cfg 中的配置参数来使用此功能了。
-
-### 通过配置文件来启用 TSZ 压缩算法
-
-如果要启用 TSZ 压缩算法,除了在 TDengine 的编译过程需要声明启用 TSZ 模块之外,还需要在 taos.cfg 配置文件中对以下参数进行设置:
-* lossyColumns:配置要进行有损压缩的浮点数数据类型。参数值类型为字符串,含义为:空 - 关闭有损压缩;float - 只对 FLOAT 类型进行有损压缩;double - 只对 DOUBLE 类型进行有损压缩;float|double:对 FLOAT 和 DOUBLE 类型都进行有损压缩。默认值是“空”,也即关闭有损压缩。
-* fPrecision:设置 float 类型浮点数压缩精度,小于此值的浮点数尾数部分将被截断。参数值类型为 FLOAT,最小值为 0.0,最大值为 100,000.0。缺省值为 0.00000001(1E-8)。
-* dPrecision:设置 double 类型浮点数压缩精度,小于此值的浮点数尾数部分将被截断。参数值类型为 DOUBLE,最小值为 0.0,最大值为 100,000.0。缺省值为 0.0000000000000001(1E-16)。
-* maxRange:表示数据的最大浮动范围。一般无需调整,在数据具有特定特征时可以配合 range 参数来实现极高的数据压缩率。默认值为 500。
-* range:表示数据大体浮动范围。一般无需调整,在数据具有特定特征时可以配合 maxRange 参数来实现极高的数据压缩率。默认值为 100。
-
-**注意:**对 cfg 配置文件中参数值的任何调整,都需要重新启动 taosd 才能生效。并且以上选项为全局配置选项,配置后对所有数据库中所有表的 FLOAT 及 DOUBLE 类型的字段生效。
-
## 文件目录结构
安装TDengine后,默认会在操作系统中生成下列目录或文件:
@@ -900,10 +875,14 @@ taosd 服务端日志文件标志位 debugflag 默认为 131,在 debug 时往
一旦设定为 135 或 143,日志文件增长很快,特别是写入、查询请求量较大时,增长速度惊人。如合并保存日志,很容易把日志内的关键信息(如配置信息、错误信息等)冲掉。为此,服务端将重要信息日志与其他日志分开存放:
-- taosinfo 存放重要信息日志
-- taosdlog 存放其他日志
+- taosinfo 存放重要信息日志,包括 INFO/ERROR/WARNING 级别的日志信息,不记录 DEBUG、TRACE 级别的日志。
+- taosdlog 服务器端生成的日志,除记录 taosinfo 中的全部信息外,还会根据设置的日志输出级别,记录 DEBUG(日志级别 135)、TRACE(日志级别 143)级别的日志。
+
+### 客户端日志
+
+每个独立运行的客户端(一个进程)生成一个独立的客户端日志,日志文件采用 taoslog+<序号> 的方式命名。文件标志位 debugflag 默认为 131,在 debug 时往往需要将其提升到 135 或 143。
+- taoslog 客户端(driver)生成的日志,默认记录 INFO/ERROR/WARNING 级别的日志;还会根据设置的日志输出级别,记录 DEBUG(日志级别 135)、TRACE(日志级别 143)级别的日志。
-其中,taosinfo 日志文件最大长度由 numOfLogLines 来进行配置,一个 taosd 实例最多保留两个文件。
+其中,日志文件最大长度由 numOfLogLines 来进行配置,一个 taosd 实例最多保留两个文件。
taosd 服务端日志采用异步落盘写入机制,优点是可以避免硬盘写入压力太大,对性能造成很大影响。缺点是,在极端情况下,存在少量日志行数丢失的可能。
diff --git a/documentation20/cn/12.taos-sql/02.udf/docs.md b/documentation20/cn/12.taos-sql/02.udf/docs.md
index 5b068d43fda8d765c052582dc1bdda163d9d72e3..b247048c9e2e6fcb52405316b955be2a914528c0 100644
--- a/documentation20/cn/12.taos-sql/02.udf/docs.md
+++ b/documentation20/cn/12.taos-sql/02.udf/docs.md
@@ -83,7 +83,7 @@ TDengine 提供 3 个 UDF 的源代码示例,分别为:
gcc -g -O0 -fPIC -shared add_one.c -o add_one.so
```
-这样就准备好了动态链接库 add_one.so 文件,可以供后文创建 UDF 时使用了。
+这样就准备好了动态链接库 add_one.so 文件,可以供后文创建 UDF 时使用了。为了保证系统可靠运行,推荐使用 GCC 7.5 及以上版本进行编译。
## 在系统中管理和使用 UDF
diff --git a/documentation20/cn/12.taos-sql/docs.md b/documentation20/cn/12.taos-sql/docs.md
index 1ff25d062419fec95ab843e08f17ad92bc09d363..9f15d05cec005f9abe6c8f29a80361b6a8e111fe 100755
--- a/documentation20/cn/12.taos-sql/docs.md
+++ b/documentation20/cn/12.taos-sql/docs.md
@@ -59,6 +59,7 @@ TDengine 缺省的时间戳是毫秒精度,但通过在 CREATE DATABASE 时传
**Tips**:
1. TDengine 对 SQL 语句中的英文字符不区分大小写,自动转化为小写执行。因此用户大小写敏感的字符串及密码,需要使用单引号将字符串引起来。
2. **注意**,虽然 Binary 类型在底层存储上支持字节型的二进制字符,但不同编程语言对二进制数据的处理方式并不保证一致,因此建议在 Binary 类型中只存储 ASCII 可见字符,而避免存储不可见字符。多字节的数据,例如中文字符,则需要使用 nchar 类型进行保存。如果强行使用 Binary 类型保存中文字符,虽然有时也能正常读写,但并不带有字符集信息,很容易出现数据乱码甚至数据损坏等情况。
+3. **注意**,SQL 语句中的数值字面量将依据是否存在小数点或是否使用科学计数法表示,来判断其为整型还是浮点型,因此在使用时要注意相应类型越界的情况。例如,9999999999999999999 会被认为超过长整型的上边界而溢出,而 9999999999999999999.0 则会被认为是有效的浮点数(示例见下)。
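+
+下面是一个对应的示意例子(假设表 t1 的第二列为 DOUBLE 类型,表结构仅为示例):
+
+```mysql
+INSERT INTO t1 VALUES (now, 9999999999999999999);   -- 字面量按整型解析,超出长整型(BIGINT)上边界而溢出
+INSERT INTO t1 VALUES (now, 9999999999999999999.0); -- 字面量按浮点型解析,被视为有效的浮点数
+```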
## 数据库管理
@@ -683,6 +684,48 @@ taos> SELECT SERVER_STATUS() AS status;
Query OK, 1 row(s) in set (0.000081s)
```
+函数_block_dist()使用说明
+
+语法:
+
+SELECT _block_dist() FROM { tb_name | stb_name }
+
+功能说明:获得指定的(超级)表的数据块分布信息
+
+返回结果类型:字符串。
+
+
+适用数据类型:不能输入任何参数。
+
+嵌套子查询支持:不支持子查询或嵌套查询。
+
+
+说明:
+
+返回 FROM 子句中输入的表或超级表的数据块分布情况。不支持查询条件。
+
+返回的结果是该表或超级表的数据块所包含的行数的数据分布直方图。
+
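+
+例如,查看文档前文示例中超级表 meters 的数据块分布(表名仅沿用文档示例):
+
+```mysql
+SELECT _block_dist() FROM meters;
+```
+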
+返回结果如下:
+```
+summary:
+5th=[392], 10th=[392], 20th=[392], 30th=[392], 40th=[792], 50th=[792] 60th=[792], 70th=[792], 80th=[792], 90th=[792], 95th=[792], 99th=[792] Min=[392(Rows)] Max=[800(Rows)] Avg=[666(Rows)] Stddev=[2.17] Rows=[2000], Blocks=[3], Size=[5.440(Kb)] Comp=[0.23] RowsInMem=[0] SeekHeaderTime=[1(us)]
+```
+上述信息的说明如下:
+
+1、查询的(超级)表中存储在文件内的数据块(data block)所包含数据行数的分布直方图信息:5%, 10%, 20%, 30%, 40%, 50%, 60%, 70%, 80%, 90%, 95%, 99% 的数值;
+
+2、所有数据块中,包含行数最少的数据块所包含的行数量,其中的 Min 指标为 392 行。
+
+3、所有数据块中,包含行数最多的数据块所包含的行数量,其中的 Max 指标为 800 行。
+
+4、所有数据块行数的算术平均值为 666 行(其中的 Avg 项)。
+
+5、所有数据块中行数分布的均方差为 2.17(Stddev)。
+
+6、数据块包含的行的总数为 2000 行(Rows)。
+
+7、数据块总数是 3 个数据块(Blocks)。
+
+8、数据块占用磁盘空间大小为 5.44 Kb(Size)。
+
+9、压缩后的数据块大小与原始数据大小的比例为 23%(Comp),即压缩后的数据规模是原始数据规模的 23%。
+
+10、内存中存在的数据行数是 0,表示内存中没有数据缓存。
+
+11、获取数据块信息的过程中读取头文件的时间开销为 1 微秒(SeekHeaderTime)。
+
+支持版本:指定计算算法的功能从2.1.0.x 版本开始,2.1.0.0之前的版本不支持指定使用算法的功能。
+
+
#### TAOS SQL中特殊关键词
> TBNAME: 在超级表查询中可视为一个特殊的标签,代表查询涉及的子表名
@@ -799,6 +842,7 @@ WHERE t1.ts = t2.ts AND t1.deviceid = t2.deviceid AND t1.status=0;
3. 暂不支持参与 JOIN 操作的表之间聚合后的四则运算。
4. 不支持只对其中一部分表做 GROUP BY。
5. JOIN 查询的不同表的过滤条件之间不能为 OR。
+6. JOIN 查询要求连接条件不能是普通列,只能针对标签和主时间字段列(第一列);示意反例见下。
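+
+针对第 6 点,下面给出一个示意性的反例(假设 status 为普通列而非标签,表结构仅为示例):
+
+```mysql
+-- 连接条件使用了普通列 status,因此不被支持
+SELECT * FROM t1, t2 WHERE t1.ts = t2.ts AND t1.status = t2.status;
+```
+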
### 嵌套查询
@@ -1332,9 +1376,61 @@ TDengine支持针对数据的聚合查询。提供支持的聚合和选择函数
10.30000 |
Query OK, 1 row(s) in set (0.001042s)
```
+
+- **INTERP [2.3.1及之后的版本]**
+
+ ```mysql
+ SELECT INTERP(field_name) FROM { tb_name | stb_name } [WHERE where_condition] [ RANGE(timestamp1,timestamp2) ] [EVERY(interval)] [FILL ({ VALUE | PREV | NULL | LINEAR | NEXT})];
+ ```
+
+功能说明:返回表/超级表的指定时间截面指定列的记录值(插值)。
+
+返回结果数据类型:同字段类型。
+
+应用字段:数值型字段。
+
+适用于:**表、超级表、嵌套查询**。
+
+说明:
+1)INTERP用于在指定时间断面获取指定列的记录值,如果该时间断面不存在符合条件的行数据,那么会根据 FILL 参数的设定进行插值。
+
+2)INTERP的输入数据为指定列的数据,可以通过条件语句(where子句)来对原始列数据进行过滤,如果没有指定过滤条件则输入为全部数据。
+
+3)INTERP的输出时间范围根据RANGE(timestamp1,timestamp2)字段来指定,需满足timestamp1<=timestamp2。其中timestamp1(必选值)为输出时间范围的起始值,即如果timestamp1时刻符合插值条件则timestamp1为输出的第一条记录,timestamp2(必选值)为输出时间范围的结束值,即输出的最后一条记录的timestamp不能大于timestamp2。如果没有指定RANGE,那么满足过滤条件的输入数据中第一条记录的timestamp即为timestamp1,最后一条记录的timestamp即为timestamp2,同样也满足timestamp1 <= timestamp2。
+
+4)INTERP根据EVERY字段来确定输出时间范围内的结果条数,即从timestamp1开始每隔固定长度的时间(EVERY值)进行插值。如果没有指定EVERY,则默认窗口大小为无穷大,即从timestamp1开始只有一个窗口。
+
+5)INTERP根据FILL字段来决定在每个符合输出条件的时刻如何进行插值,如果没有FILL字段则默认不插值,即输出为原始记录值或不输出(原始记录不存在)。
+
+6)INTERP只能在一个时间序列内进行插值,因此当作用于超级表时必须跟group by tbname一起使用,当作用嵌套查询外层时内层子查询不能含GROUP BY信息。
+
+7)INTERP的插值结果不受ORDER BY timestamp的影响,ORDER BY timestamp只影响输出结果的排序。
+
+SQL示例:
+
+ 1) 单点线性插值
+ ```mysql
+ taos> SELECT INTERP(*) FROM t1 RANGE('2017-7-14 18:40:00','2017-7-14 18:40:00') FILL(LINEAR);
+ ```
+ 2) 在2017-07-14 18:00:00到2017-07-14 19:00:00间每隔5秒钟进行取值(不插值)
+ ```mysql
+ taos> SELECT INTERP(*) FROM t1 RANGE('2017-7-14 18:00:00','2017-7-14 19:00:00') EVERY(5s);
+ ```
+ 3) 在2017-07-14 18:00:00到2017-07-14 19:00:00间每隔5秒钟进行线性插值
+ ```mysql
+ taos> SELECT INTERP(*) FROM t1 RANGE('2017-7-14 18:00:00','2017-7-14 19:00:00') EVERY(5s) FILL(LINEAR);
+ ```
+ 4) 在所有时间范围内每隔5秒钟进行向后插值
+ ```mysql
+ taos> SELECT INTERP(*) FROM t1 EVERY(5s) FILL(NEXT);
+ ```
+ 5) 根据2017-07-14 17:00:00到2017-07-14 20:00:00间的数据,在2017-07-14 18:00:00到2017-07-14 19:00:00之间每隔5秒钟进行线性插值
+ ```mysql
+ taos> SELECT INTERP(*) FROM t1 where ts >= '2017-07-14 17:00:00' and ts <= '2017-07-14 20:00:00' RANGE('2017-7-14 18:00:00','2017-7-14 19:00:00') EVERY(5s) FILL(LINEAR);
+ ```
-- **INTERP**
+- **INTERP [2.3.1之前的版本]**
```mysql
SELECT INTERP(field_name) FROM { tb_name | stb_name } WHERE ts='timestamp' [FILL ({ VALUE | PREV | NULL | LINEAR | NEXT})];
@@ -1529,7 +1625,56 @@ TDengine支持针对数据的聚合查询。提供支持的聚合和选择函数
## 按窗口切分聚合
-TDengine 支持按时间段等窗口切分方式进行聚合结果查询,比如温度传感器每秒采集一次数据,但需查询每隔 10 分钟的温度平均值。这类聚合适合于降维(down sample)操作,语法如下:
+TDengine 支持按窗口切分方式进行聚合结果查询,比如温度传感器每秒采集一次数据,但需查询每隔 10 分钟的温度平均值。这种场景下可以使用窗口子句来获得需要的查询结果。
+窗口子句用于将查询的数据集合按照窗口切分成查询子集并分别进行聚合。窗口包含时间窗口(time window)、状态窗口(state window)、会话窗口(session window)三种。其中时间窗口又可划分为滑动时间窗口和翻转时间窗口。
+
+时间窗口
+
+INTERVAL 子句用于产生相等时间周期的窗口,SLIDING 用以指定窗口向前滑动的时间。每次执行的查询是一个时间窗口,时间窗口随着时间流动向前滑动。在定义连续查询的时候需要指定时间窗口(time window)大小和每次前向增量时间(forward sliding times)。如图,[t0s, t0e]、[t1s, t1e]、[t2s, t2e] 分别是执行三次连续查询的时间窗口范围,窗口的前向滑动的时间范围由 sliding time 标识。查询过滤、聚合等操作按照每个时间窗口为独立的单位执行。
+
+![时间窗口示意图](../images/sql/timewindow-1.png)
+
+INTERVAL和SLIDING子句需要配合聚合和选择函数来使用。以下SQL语句非法:
+```mysql
+SELECT * FROM temp_table INTERVAL(1S)
+```
+
+SLIDING的向前滑动的时间不能超过一个窗口的时间范围。以下语句非法:
+```mysql
+SELECT COUNT(*) FROM temp_table INTERVAL(1D) SLIDING(2D)
+```
+当 SLIDING 与 INTERVAL 取值相等的时候,滑动窗口即为翻转窗口。
+ * 聚合时间段的窗口宽度由关键词 INTERVAL 指定,最短时间间隔 10 毫秒(10a);并且支持偏移 offset(偏移必须小于间隔),也即时间窗口划分与“UTC 时刻 0”相比的偏移量。SLIDING 语句用于指定聚合时间段的前向增量,也即每次窗口向前滑动的时长。
+ * 从 2.1.5.0 版本开始,INTERVAL 语句允许的最短时间间隔调整为 1 微秒(1u),当然如果所查询的 DATABASE 的时间精度设置为毫秒级,那么允许的最短时间间隔为 1 毫秒(1a)。
+ * **注意**:用到 INTERVAL 语句时,除非极特殊的情况,都要求把客户端和服务端的 taos.cfg 配置文件中的 timezone 参数配置为相同的取值,以避免时间处理函数频繁进行跨时区转换而导致的严重性能影响。
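+
+下面给出一个合法的时间窗口聚合示例(沿用文档中的 meters 超级表与 current 列,时间范围仅为示意):
+
+```mysql
+SELECT AVG(current), MAX(current) FROM meters
+  WHERE ts >= '2019-04-28 00:00:00' AND ts < '2019-04-29 00:00:00'
+  INTERVAL(10m) SLIDING(5m);
+```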
+
+
+状态窗口
+
+使用整数(布尔值)或字符串来标识产生记录时候设备的状态量。产生的记录如果具有相同的状态量数值则归属于同一个状态窗口,数值改变后该窗口关闭。如下图所示,根据状态量确定的状态窗口分别是[2019-04-28 14:22:07,2019-04-28 14:22:10]和[2019-04-28 14:22:11,2019-04-28 14:22:12]两个。(状态窗口暂不支持对超级表使用)
+
+![时间窗口示意图](../images/sql/timewindow-2.png)
+
+
+使用STATE_WINDOW来确定状态窗口划分的列。例如:
+```mysql
+SELECT COUNT(*), FIRST(ts), status FROM temp_tb_1 STATE_WINDOW(status)
+```
+
+会话窗口
+
+会话窗口根据记录的时间戳主键的值来确定是否属于同一个会话。如下图所示,如果设置连续记录的时间戳间隔小于等于 12 秒,则以下 6 条记录构成 2 个会话窗口,分别是:[2019-04-28 14:22:10,2019-04-28 14:22:30]和[2019-04-28 14:23:10,2019-04-28 14:23:30]。因为 2019-04-28 14:22:30 与 2019-04-28 14:23:10 之间的时间间隔是 40 秒,超过了连续时间间隔(12 秒)。
+
+![时间窗口示意图](../images/sql/timewindow-3.png)
+
+在 tol_val 时间间隔范围内的结果都认为归属于同一个窗口,如果连续的两条记录的时间间隔超过 tol_val,则自动开启下一个窗口。(会话窗口暂不支持对超级表使用)
+
+```mysql
+SELECT COUNT(*), FIRST(ts) FROM temp_tb_1 SESSION_WINDOW(ts, tol_val)
+```
+
+这种类型的查询语法如下:
```mysql
SELECT function_list FROM tb_name
@@ -1547,12 +1692,8 @@ SELECT function_list FROM stb_name
```
- 在聚合查询中,function_list 位置允许使用聚合和选择函数,并要求每个函数仅输出单个结果(例如:COUNT、AVG、SUM、STDDEV、LEASTSQUARES、PERCENTILE、MIN、MAX、FIRST、LAST),而不能使用具有多行输出结果的函数(例如:TOP、BOTTOM、DIFF 以及四则运算)。
-- 查询过滤、聚合等操作按照每个切分窗口为独立的单位执行。聚合查询目前支持三种窗口的划分方式:
- 1. 时间窗口:聚合时间段的窗口宽度由关键词 INTERVAL 指定,最短时间间隔 10 毫秒(10a);并且支持偏移 offset(偏移必须小于间隔),也即时间窗口划分与“UTC 时刻 0”相比的偏移量。SLIDING 语句用于指定聚合时间段的前向增量,也即每次窗口向前滑动的时长。当 SLIDING 与 INTERVAL 取值相等的时候,滑动窗口即为翻转窗口。
- * 从 2.1.5.0 版本开始,INTERVAL 语句允许的最短时间间隔调整为 1 微秒(1u),当然如果所查询的 DATABASE 的时间精度设置为毫秒级,那么允许的最短时间间隔为 1 毫秒(1a)。
- * **注意:**用到 INTERVAL 语句时,除非极特殊的情况,都要求把客户端和服务端的 taos.cfg 配置文件中的 timezone 参数配置为相同的取值,以避免时间处理函数频繁进行跨时区转换而导致的严重性能影响。
- 2. 状态窗口:使用整数或布尔值来标识产生记录时设备的状态量,产生的记录如果具有相同的状态量取值则归属于同一个状态窗口,数值改变后该窗口关闭。状态量所对应的列作为 STATE_WINDOW 语句的参数来指定。(状态窗口暂不支持对超级表使用)
- 3. 会话窗口:时间戳所在的列由 SESSION 语句的 ts_col 参数指定,会话窗口根据相邻两条记录的时间戳差值来确定是否属于同一个会话——如果时间戳差异在 tol_val 以内,则认为记录仍属于同一个窗口;如果时间变化超过 tol_val,则自动开启下一个窗口。(会话窗口暂不支持对超级表使用)
+
+
- WHERE 语句可以指定查询的起止时间和其他过滤条件。
- FILL 语句指定某一窗口区间数据缺失的情况下的填充模式。填充模式包括以下几种:
1. 不进行填充:NONE(默认填充模式)。
@@ -1590,7 +1731,7 @@ SELECT AVG(current), MAX(current), APERCENTILE(current, 50) FROM meters
- 表名最大长度为 192,每行数据最大长度 16k 个字符, 从 2.1.7.0 版本开始,每行数据最大长度 48k 个字符(注意:数据行内每个 BINARY/NCHAR 类型的列还会额外占用 2 个字节的存储位置)。
- 列名最大长度为 64,最多允许 1024 列,最少需要 2 列,第一列必须是时间戳。(从 2.1.7.0 版本开始,改为最多允许 4096 列)
- 标签名最大长度为 64,最多允许 128 个,可以 1 个,一个表中标签值的总长度不超过 16k 个字符。
-- SQL 语句最大长度 1048576 个字符,也可通过系统配置参数 maxSQLLength 修改,取值范围 65480 ~ 1048576。
+- SQL 语句最大长度 1048576 个字符,也可通过客户端配置参数 maxSQLLength 修改,取值范围 65480 ~ 1048576。
- SELECT 语句的查询结果,最多允许返回 1024 列(语句中的函数调用可能也会占用一些列空间),超限时需要显式指定较少的返回数据列,以避免语句执行报错。(从 2.1.7.0 版本开始,改为最多允许 4096 列)
- 库的数目,超级表的数目、表的数目,系统不做限制,仅受系统资源限制。
diff --git a/documentation20/cn/13.faq/docs.md b/documentation20/cn/13.faq/docs.md
index 9132e8dca63c47e4b22ad87ef9fd4d4a1997077a..507ffc09ba954ed6acba39ece128ebbbe5a4142e 100644
--- a/documentation20/cn/13.faq/docs.md
+++ b/documentation20/cn/13.faq/docs.md
@@ -1,7 +1,6 @@
-# 常见问题
-
-## 0. 怎么报告问题?
+# 常见问题及反馈
+
+## 问题反馈
如果 FAQ 中的信息不能够帮到您,需要 TDengine 技术团队的技术支持与协助,请将以下两个目录中内容打包:
1. /var/log/taos (如果没有修改过默认路径)
2. /etc/taos
@@ -14,7 +13,9 @@
```
但系统正常运行时,请一定将debugFlag设置为131,否则会产生大量的日志信息,降低系统效率。
-## 1. TDengine2.0之前的版本升级到2.0及以上的版本应该注意什么?☆☆☆
+## 常见问题列表
+
+**1. TDengine2.0之前的版本升级到2.0及以上的版本应该注意什么?☆☆☆**
2.0版在之前版本的基础上,进行了完全的重构,配置文件和数据文件是不兼容的。在升级之前务必进行如下操作:
@@ -24,19 +25,19 @@
4. 安装最新稳定版本的 TDengine
5. 如果需要迁移数据或者数据文件损坏,请联系涛思数据官方技术支持团队,进行协助解决
-## 2. Windows平台下JDBCDriver找不到动态链接库,怎么办?
+**2. Windows平台下JDBCDriver找不到动态链接库,怎么办?**
请看为此问题撰写的[技术博客](https://www.taosdata.com/blog/2019/12/03/950.html)。
-## 3. 创建数据表时提示more dnodes are needed
+**3. 创建数据表时提示more dnodes are needed**
请看为此问题撰写的[技术博客](https://www.taosdata.com/blog/2019/12/03/965.html)。
-## 4. 如何让TDengine crash时生成core文件?
+**4. 如何让TDengine crash时生成core文件?**
请看为此问题撰写的[技术博客](https://www.taosdata.com/blog/2019/12/06/974.html)。
-## 5. 遇到错误“Unable to establish connection”, 我怎么办?
+**5. 遇到错误“Unable to establish connection”, 我怎么办?**
客户端遇到连接故障,请按照下面的步骤进行检查:
@@ -70,7 +71,7 @@
10. 也可以使用taos程序内嵌的网络连通检测功能,来验证服务器和客户端之间指定的端口连接是否通畅(包括TCP和UDP):[TDengine 内嵌网络检测工具使用指南](https://www.taosdata.com/blog/2020/09/08/1816.html)。
-## 6. 遇到错误“Unexpected generic error in RPC”或者“Unable to resolve FQDN”,我怎么办?
+**6. 遇到错误“Unexpected generic error in RPC”或者“Unable to resolve FQDN”,我怎么办?**
产生这个错误,是由于客户端或数据节点无法解析FQDN(Fully Qualified Domain Name)导致。对于TAOS Shell或客户端应用,请做如下检查:
@@ -79,16 +80,17 @@
3. 如果网络没有配置DNS server,请检查客户端所在机器的hosts文件,查看该FQDN是否配置,并是否有正确的IP地址
4. 如果网络配置OK,从客户端所在机器,你需要能Ping该连接的FQDN,否则客户端是无法连接服务器的
-## 7. 虽然语法正确,为什么我还是得到 "Invalid SQL" 错误
+**7. 虽然语法正确,为什么我还是得到 "Invalid SQL" 错误**
如果你确认语法正确,2.0之前版本,请检查SQL语句长度是否超过64K。如果超过,也会返回这个错误。
-## 8. 是否支持validation queries?
+**8. 是否支持validation queries?**
TDengine还没有一组专用的validation queries。然而建议你使用系统监测的数据库”log"来做。
-## 9. 我可以删除或更新一条记录吗?
+
+**9. 我可以删除或更新一条记录吗?**
TDengine 目前尚不支持删除功能,未来根据用户需求可能会支持。
@@ -98,15 +100,15 @@ TDengine 目前尚不支持删除功能,未来根据用户需求可能会支
此外,从 2.1.7.0 版本开始,支持将 UPDATE 参数设为 2,表示“支持部分列更新”。也即,当 UPDATE 设为 1 时,如果更新一个数据行,其中某些列没有提供取值,那么这些列会被设为 NULL;而当 UPDATE 设为 2 时,如果更新一个数据行,其中某些列没有提供取值,那么这些列会保持原有数据行中的对应值。
-## 10. 我怎么创建超过1024列的表?
+**10. 我怎么创建超过1024列的表?**
使用 2.0 及其以上版本,默认支持 1024 列;2.0 之前的版本,TDengine 最大允许创建 250 列的表。但是如果确实超过限值,建议按照数据特性,逻辑地将这个宽表分解成几个小表。(从 2.1.7.0 版本开始,表的最大列数增加到了 4096 列。)
-## 11. 最有效的写入数据的方法是什么?
+**11. 最有效的写入数据的方法是什么?**
批量插入。每条写入语句可以一张表同时插入多条记录,也可以同时插入多张表的多条记录。
-## 12. Windows系统下插入的nchar类数据中的汉字被解析成了乱码如何解决?
+**12. Windows系统下插入的nchar类数据中的汉字被解析成了乱码如何解决?**
Windows下插入nchar类的数据中如果有中文,请先确认系统的地区设置成了中国(在Control Panel里可以设置),这时cmd中的`taos`客户端应该已经可以正常工作了;如果是在IDE里开发Java应用,比如Eclipse, Intellij,请确认IDE里的文件编码为GBK(这是Java默认的编码类型),然后在生成Connection时,初始化客户端的配置,具体语句如下:
```JAVA
@@ -116,7 +118,7 @@ properties.setProperty(TSDBDriver.LOCALE_KEY, "UTF-8");
Connection = DriverManager.getConnection(url, properties);
```
+**13. JDBC报错: the excuted SQL is not a DML or a DDL?**
+**13.JDBC报错: the excuted SQL is not a DML or a DDL?**
请更新至最新的JDBC驱动
```xml
@@ -127,15 +129,15 @@ Connection = DriverManager.getConnection(url, properties);
```
-## 14. taos connect failed, reason: invalid timestamp
+**14. taos connect failed, reason: invalid timestamp**
常见原因是服务器和客户端时间没有校准,可以通过和时间服务器同步的方式(Linux 下使用 ntpdate 命令,Windows 在系统时间设置中选择自动同步)校准。
-## 15. 表名显示不全
+**15. 表名显示不全**
由于 taos shell 在终端中显示宽度有限,有可能比较长的表名显示不全,如果按照显示的不全的表名进行相关操作会发生 Table does not exist 错误。解决方法可以是通过修改 taos.cfg 文件中的设置项 maxBinaryDisplayWidth, 或者直接输入命令 set max_binary_display_width 100。或者在命令结尾使用 \G 参数来调整结果的显示方式。
-## 16. 如何进行数据迁移?
+**16. 如何进行数据迁移?**
TDengine是根据hostname唯一标志一台机器的,在数据文件从机器A移动机器B时,注意如下两件事:
@@ -143,7 +145,7 @@ TDengine是根据hostname唯一标志一台机器的,在数据文件从机器A
- 2.0.7.0 及以后的版本,到/var/lib/taos/dnode下,修复dnodeEps.json的dnodeId对应的FQDN,重启。确保机器内所有机器的此文件是完全相同的。
- 1.x 和 2.x 版本的存储结构不兼容,需要使用迁移工具或者自己开发应用导出导入数据。
-## 17. 如何在命令行程序 taos 中临时调整日志级别
+**17. 如何在命令行程序 taos 中临时调整日志级别**
为了调试方便,从 2.0.16 版本开始,命令行程序 taos 新增了与日志记录相关的两条指令:
@@ -162,7 +164,8 @@ ALTER LOCAL RESETLOG;
其含义是,清空本机所有由客户端生成的日志文件。
-## 18. 时间戳的时区信息是怎样处理的?
+
+**18. 时间戳的时区信息是怎样处理的?**
TDengine 中时间戳的时区总是由客户端进行处理,而与服务端无关。具体来说,客户端会对 SQL 语句中的时间戳进行时区转换,转为 UTC 时区(即 Unix 时间戳——Unix Timestamp)再交由服务端进行写入和查询;在读取数据时,服务端也是采用 UTC 时区提供原始数据,客户端收到后再根据本地设置,把时间戳转换为本地系统所要求的时区进行显示。
@@ -173,7 +176,8 @@ TDengine 中时间戳的时区总是由客户端进行处理,而与服务端
4. 在书写 SQL 语句时,也可以直接使用 Unix 时间戳(例如 `1554984068000`)或带有时区的时间戳字符串,也即以 RFC 3339 格式(例如 `2013-04-12T15:52:01.123+08:00`)或 ISO-8601 格式(例如 `2013-04-12T15:52:01.123+0800`)来书写时间戳,此时这些时间戳的取值将不再受其他时区设置的影响。
-## 19. TDengine 都会用到哪些网络端口?
+
+**19. TDengine 都会用到哪些网络端口?**
在 TDengine 2.0 版本中,会用到以下这些网络端口(以默认端口 6030 为前提进行说明,如果修改了配置文件中的设置,那么这里列举的端口都会出现变化),管理员可以参考这里的信息调整防火墙设置:
@@ -191,7 +195,7 @@ TDengine 中时间戳的时区总是由客户端进行处理,而与服务端
| UDP | 6030-6034 | 客户端与服务端之间通讯。 | 随 serverPort 端口变化。 |
| UDP | 6035-6039 | 多节点集群的节点间通讯。 | 随 serverPort 端口变化。 |
-## 20. go 语言编写组件编译失败怎样解决?
+**20. go 语言编写组件编译失败怎样解决?**
新版本 TDengine 2.3.0.0 包含一个使用 go 语言开发的 taosAdapter 组件,取代之前内置的 httpd ,提供包含原 httpd 功能以及支持多种其他软件(Prometheus、Telegraf、collectd、StatsD等)的数据接入功能。
使用最新 develop 分支代码编译需要先 `git submodule update --init --recursive` 下载 taosAdapter 仓库代码后再编译。
@@ -205,3 +209,5 @@ go env -w GOPROXY=https://goproxy.cn,direct
如果希望继续使用之前的内置 httpd,可以关闭 taosAdapter 编译,使用
`cmake .. -DBUILD_HTTP=true` 使用原来内置的 httpd。
+
+
diff --git a/documentation20/cn/images/sql/timewindow-1.png b/documentation20/cn/images/sql/timewindow-1.png
new file mode 100644
index 0000000000000000000000000000000000000000..5fda3b57cb7aa2c98e94d438a50a5d7a7cbfb40c
Binary files /dev/null and b/documentation20/cn/images/sql/timewindow-1.png differ
diff --git a/documentation20/cn/images/sql/timewindow-2.png b/documentation20/cn/images/sql/timewindow-2.png
new file mode 100644
index 0000000000000000000000000000000000000000..5cf77d2fd4bea45b41929634f18e6c4cb5d5f72e
Binary files /dev/null and b/documentation20/cn/images/sql/timewindow-2.png differ
diff --git a/documentation20/cn/images/sql/timewindow-3.png b/documentation20/cn/images/sql/timewindow-3.png
new file mode 100644
index 0000000000000000000000000000000000000000..cb257231aa77fc00a973b65923775c5ed4d6295e
Binary files /dev/null and b/documentation20/cn/images/sql/timewindow-3.png differ
diff --git a/documentation20/en/08.connector/01.java/docs.md b/documentation20/en/08.connector/01.java/docs.md
index 984560e82b17e84855e135e78a3586543e23175a..1aaeb824f7ba5df6c7ad4b778736148cf0f618c7 100644
--- a/documentation20/en/08.connector/01.java/docs.md
+++ b/documentation20/en/08.connector/01.java/docs.md
@@ -54,23 +54,25 @@ INSERT INTO test.t1 USING test.weather (ts, temperature) TAGS('beijing') VALUES(
## JDBC driver version and supported TDengine and JDK versions
-| taos-jdbcdriver | TDengine | JDK |
-| --------------- | ------------------ | ----- |
-| 2.0.33 - 2.0.34 | 2.0.3.0 and above | 1.8.x |
-| 2.0.31 - 2.0.32 | 2.1.3.0 and above | 1.8.x |
-| 2.0.22 - 2.0.30 | 2.0.18.0 - 2.1.2.x | 1.8.x |
-| 2.0.12 - 2.0.21 | 2.0.8.0 - 2.0.17.x | 1.8.x |
-| 2.0.4 - 2.0.11 | 2.0.0.0 - 2.0.7.x | 1.8.x |
-| 1.0.3 | 1.6.1.x and above | 1.8.x |
-| 1.0.2 | 1.6.1.x and above | 1.8.x |
-| 1.0.1 | 1.6.1.x and above | 1.8.x |
+| taos-jdbcdriver | TDengine | JDK |
+| --------------- |--------------------|--------|
+| 2.0.36 | 2.4.0 and above | 1.8.x |
+| 2.0.35 | 2.3.0 and above | 1.8.x |
+| 2.0.33 - 2.0.34 | 2.0.3.0 and above | 1.8.x |
+| 2.0.31 - 2.0.32 | 2.1.3.0 and above | 1.8.x |
+| 2.0.22 - 2.0.30 | 2.0.18.0 - 2.1.2.x | 1.8.x |
+| 2.0.12 - 2.0.21 | 2.0.8.0 - 2.0.17.x | 1.8.x |
+| 2.0.4 - 2.0.11 | 2.0.0.0 - 2.0.7.x | 1.8.x |
+| 1.0.3 | 1.6.1.x and above | 1.8.x |
+| 1.0.2 | 1.6.1.x and above | 1.8.x |
+| 1.0.1 | 1.6.1.x and above | 1.8.x |
## DataType in TDengine and Java connector
The TDengine supports the following data types and Java data types:
| TDengine DataType | JDBCType (driver version < 2.0.24) | JDBCType (driver version >= 2.0.24) |
-| ----------------- | ---------------------------------- | ----------------------------------- |
+|-------------------|------------------------------------| ----------------------------------- |
| TIMESTAMP | java.lang.Long | java.sql.Timestamp |
| INT | java.lang.Integer | java.lang.Integer |
| BIGINT | java.lang.Long | java.lang.Long |
@@ -81,7 +83,8 @@ The TDengine supports the following data types and Java data types:
| BOOL | java.lang.Boolean | java.lang.Boolean |
| BINARY | java.lang.String | byte array |
| NCHAR | java.lang.String | java.lang.String |
-
+| JSON              | -                                  | java.lang.String                    |
+
+**Note**: JSON type can only be used in tags.
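+
+A minimal, hypothetical SQL sketch (table and tag names are made up for illustration) showing that JSON can only appear as a tag:
+
+```mysql
+-- info is a JSON tag; ordinary columns cannot use the JSON type
+CREATE STABLE weather_js (ts TIMESTAMP, temperature FLOAT) TAGS (info JSON);
+CREATE TABLE t_js_1 USING weather_js TAGS ('{"city": "beijing"}');
+INSERT INTO t_js_1 VALUES (now, 23.5);
+```
+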
## Install Java connector
### Runtime Requirements
diff --git a/documentation20/en/08.connector/docs.md b/documentation20/en/08.connector/docs.md
index bc1197d2ed3c4c566e9618b505183123b1e31eb9..f8b444281587e03bb0b143d5ecd1c41abed9dd64 100644
--- a/documentation20/en/08.connector/docs.md
+++ b/documentation20/en/08.connector/docs.md
@@ -575,6 +575,49 @@ Close connection.
conn.close()
```
+#### JSON Type Support
+
+The Python connector `taospy` supports JSON data type in tags since `v2.2.0` (this requires TDengine beta v2.3.5+, or stable v2.4.0+).
+
+Create stable and table with JSON tag.
+
+```python
+# encoding:UTF-8
+import taos
+
+conn = taos.connect()
+conn.execute("create database if not exists py_test_json_type")
+conn.execute("use py_test_json_type")
+
+conn.execute("create stable s1 (ts timestamp, v1 int) tags (info json)")
+conn.execute("create table s1_1 using s1 tags ('{\"k1\": \"v1\"}')")
+```
+
+Query JSON tag and table name from a stable.
+
+```python
+tags = conn.query("select info, tbname from s1").fetch_all_into_dict()
+tags
+```
+
+The `tags` value is:
+
+```python
+[{'info': '{"k1":"v1"}', 'tbname': 's1_1'}]
+```
+
+To get value from JSON tag by key:
+
+```python
+k1 = conn.query("select info->'k1' as k1 from s1").fetch_all_into_dict()
+"""
+>>> k1
+[{'k1': '"v1"'}]
+"""
+```
+
+Refer to [JSON type instructions](https://www.taosdata.com/en/documentation/taos-sql) for more usage of JSON type.
+
#### Using nanosecond in Python connector
So far Python still does not completely support nanosecond type. Please refer to the link 1 and 2. The implementation of the python connector is to return an integer number for nanosecond value rather than datatime type as what ms and us do. The developer needs to handle it themselves. We recommend using pandas to_datetime() function. If Python officially support nanosecond in the future, TAOS Data might be possible to change the interface accordingly, which mean the application need change too.
diff --git a/documentation20/en/12.taos-sql/docs.md b/documentation20/en/12.taos-sql/docs.md
index 60f3ad44c62f9ba1123f8920ef626baa58cc1bb0..5cac5a78c79265b49b42225963cd097e49d60dbb 100755
--- a/documentation20/en/12.taos-sql/docs.md
+++ b/documentation20/en/12.taos-sql/docs.md
@@ -60,6 +60,7 @@ In TDengine, the following 10 data types can be used in data model of an ordinar
1. TDengine is case-insensitive to English characters in SQL statements and automatically converts them to lowercase for execution. Therefore, the user's case-sensitive strings and passwords need to be enclosed in single quotation marks.
2. Avoid using BINARY type to save non-ASCII type strings, which will easily lead to errors such as garbled data. The correct way is to use NCHAR type to save Chinese characters.
+3. Numeric literals in SQL statements are treated as integer or floating-point numbers, depending on whether the value contains a decimal point or uses scientific notation. Caution is therefore needed, since the corresponding data type may overflow. For example, 9999999999999999999 overflows because it exceeds the upper bound of the largest integer type, whereas 9999999999999999999.0 is treated as a valid floating-point number (see the sketch below).
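+
+A minimal illustration (assuming a table t1 whose second column is of type DOUBLE; the schema is hypothetical):
+
+```mysql
+INSERT INTO t1 VALUES (now, 9999999999999999999);   -- parsed as an integer literal; exceeds the BIGINT upper bound and overflows
+INSERT INTO t1 VALUES (now, 9999999999999999999.0); -- parsed as a floating-point literal and accepted
+```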
## Database Management
diff --git a/packaging/release.sh b/packaging/release.sh
index 866a21e552909ca9ad8e6083f4e571f5da91cc91..38e5dd929e78ce1a167464892089c42a044d94f6 100755
--- a/packaging/release.sh
+++ b/packaging/release.sh
@@ -418,6 +418,10 @@ else
BUILD_HTTP=false
fi
+if [[ "$verMode" == "cluster" ]]; then
+ BUILD_HTTP=internal
+fi
+
if [[ "$pagMode" == "full" ]]; then
BUILD_TOOLS=true
else
diff --git a/src/client/src/tscParseInsert.c b/src/client/src/tscParseInsert.c
index 05b8b031d99c2b0f4e54e9fc3392a20a9e1bcfcc..de974bec46426a892b9c645a95c7b959ac97d9ff 100644
--- a/src/client/src/tscParseInsert.c
+++ b/src/client/src/tscParseInsert.c
@@ -360,7 +360,7 @@ int32_t tsParseOneColumn(SSchema *pSchema, SStrToken *pToken, char *payload, cha
return tscInvalidOperationMsg(msg, "json tag length too long", pToken->z);
}
if (pToken->type == TK_NULL) {
- *(int8_t *)payload = TSDB_DATA_TINYINT_NULL;
+ *(int8_t *)payload = TSDB_DATA_JSON_PLACEHOLDER;
} else if (pToken->type != TK_STRING){
tscInvalidOperationMsg(msg, "invalid json data", pToken->z);
} else{
@@ -1063,7 +1063,7 @@ static int32_t tscCheckIfCreateTable(char **sqlstr, SSqlObj *pSql, char** boundC
sToken.n -= 2;
}
- char tagVal[TSDB_MAX_TAGS_LEN];
+ char tagVal[TSDB_MAX_TAGS_LEN] = {0};
code = tsParseOneColumn(pSchema, &sToken, tagVal, pInsertParam->msg, &sql, false, tinfo.precision);
if (code != TSDB_CODE_SUCCESS) {
tdDestroyKVRowBuilder(&kvRowBuilder);
diff --git a/src/client/src/tscParseLineProtocol.c b/src/client/src/tscParseLineProtocol.c
index 404c5f8b604d9ba7f72d1dbeefbc5622de15db14..9f69a8a66de5c71886e550115aa5168d54b248dc 100644
--- a/src/client/src/tscParseLineProtocol.c
+++ b/src/client/src/tscParseLineProtocol.c
@@ -1060,8 +1060,8 @@ static int32_t insertChildTablePointsBatch(TAOS* taos, char* cTableName, char* s
tscDebug("SML:0x%"PRIx64" insert child table table %s of super table %s : %s", info->id, cTableName, sTableName, sql);
+ size_t maxBatchSize = TSDB_MAX_WAL_SIZE/rowSize * 2 / 3;
size_t rows = taosArrayGetSize(rowsBind);
- size_t maxBatchSize = TSDB_MAX_WAL_SIZE/rowSize * 4 / 5;
size_t batchSize = MIN(maxBatchSize, rows);
tscDebug("SML:0x%"PRIx64" insert rows into child table %s. num of rows: %zu, batch size: %zu",
info->id, cTableName, rows, batchSize);
diff --git a/src/client/src/tscSQLParser.c b/src/client/src/tscSQLParser.c
index b045c566f1a72b8f0ca970d8e30d1b0e6486be68..4d63438c9a5582981b86e41599b6b3a30314c9c9 100644
--- a/src/client/src/tscSQLParser.c
+++ b/src/client/src/tscSQLParser.c
@@ -6871,7 +6871,7 @@ int32_t setAlterTableInfo(SSqlObj* pSql, struct SSqlInfo* pInfo) {
}
SKVRowBuilder kvRowBuilder = {0};
if (pTagsSchema->type == TSDB_DATA_TYPE_JSON) {
- if (pItem->pVar.nType != TSDB_DATA_TYPE_BINARY) {
+ if (pItem->pVar.nType != TSDB_DATA_TYPE_BINARY && pItem->pVar.nType != TSDB_DATA_TYPE_NULL) {
tscError("json type error, should be string");
return invalidOperationMsg(pMsg, msg25);
}
@@ -8714,7 +8714,7 @@ int32_t doCheckForCreateFromStable(SSqlObj* pSql, SSqlInfo* pInfo) {
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg5);
}
tVariantListItem* pItem = taosArrayGet(pValList, 0);
- if(pItem->pVar.nType != TSDB_DATA_TYPE_BINARY){
+ if(pItem->pVar.nType != TSDB_DATA_TYPE_BINARY && pItem->pVar.nType != TSDB_DATA_TYPE_NULL){
tscError("json type error, should be string");
tdDestroyKVRowBuilder(&kvRowBuilder);
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg7);
@@ -8724,6 +8724,7 @@ int32_t doCheckForCreateFromStable(SSqlObj* pSql, SSqlInfo* pInfo) {
tdDestroyKVRowBuilder(&kvRowBuilder);
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg3);
}
+
ret = parseJsontoTagData(pItem->pVar.pz, &kvRowBuilder, tscGetErrorMsgPayload(pCmd), pTagSchema[0].colId);
if (ret != TSDB_CODE_SUCCESS) {
tdDestroyKVRowBuilder(&kvRowBuilder);
diff --git a/src/client/src/tscServer.c b/src/client/src/tscServer.c
index fe7cc11d8892ae0075c794292b5a140306a2842a..57362499a4fcaaa1500b199de8f63c07a03af898 100644
--- a/src/client/src/tscServer.c
+++ b/src/client/src/tscServer.c
@@ -3124,11 +3124,12 @@ int tscRenewTableMeta(SSqlObj *pSql) {
pSql->rootObj->retryReason = pSql->retryReason;
- SSqlObj *tmpSql = pSql->rootObj;
- tscFreeSubobj(pSql->rootObj);
- tfree(tmpSql->pSubs);
+ SSqlObj *rootSql = pSql->rootObj;
+ tscFreeSubobj(rootSql);
+ tfree(rootSql->pSubs);
+ tscResetSqlCmd(&rootSql->cmd, true, rootSql->self);
- code = getMultiTableMetaFromMnode(tmpSql, pNameList, vgroupList, NULL, tscTableMetaCallBack, true);
+ code = getMultiTableMetaFromMnode(rootSql, pNameList, vgroupList, NULL, tscTableMetaCallBack, true);
taosArrayDestroyEx(&pNameList, freeElem);
taosArrayDestroyEx(&vgroupList, freeElem);
diff --git a/src/client/src/tscUtil.c b/src/client/src/tscUtil.c
index e96e3c16da84b3ffc25b33e3864c4e38dcc3977f..322413a3cd7e637c477903b09522f60c11056885 100644
--- a/src/client/src/tscUtil.c
+++ b/src/client/src/tscUtil.c
@@ -5459,7 +5459,7 @@ int parseJsontoTagData(char* json, SKVRowBuilder* kvRowBuilder, char* errMsg, in
varDataSetLen(nullTypeVal + CHAR_BYTES, INT_BYTES);
*(uint32_t*)(varDataVal(nullTypeKey)) = jsonNULL;
tdAddColToKVRow(kvRowBuilder, jsonIndex++, TSDB_DATA_TYPE_NCHAR, nullTypeKey, false); // add json null type
- if (strtrim(json) == 0 || strcasecmp(json, "null") == 0){
+ if (!json || strtrim(json) == 0 || strcasecmp(json, "null") == 0){
*(uint32_t*)(varDataVal(nullTypeVal + CHAR_BYTES)) = jsonNULL;
tdAddColToKVRow(kvRowBuilder, jsonIndex++, TSDB_DATA_TYPE_NCHAR, nullTypeVal, true); // add json null value
return TSDB_CODE_SUCCESS;
diff --git a/src/common/src/tvariant.c b/src/common/src/tvariant.c
index 0d00856f9be76ee917d12ff7435142d6d55ccecf..3c9d62294776bfa639620249416eee738fe24b99 100644
--- a/src/common/src/tvariant.c
+++ b/src/common/src/tvariant.c
@@ -953,7 +953,7 @@ int32_t tVariantDumpEx(tVariant *pVariant, char *payload, int16_t type, bool inc
break;
}
case TSDB_DATA_TYPE_JSON:{
- if (pVariant->nType == TSDB_DATA_TYPE_BINARY){
+ if (pVariant->nType == TSDB_DATA_TYPE_BINARY || pVariant->nType == TSDB_DATA_TYPE_NULL){
*((int8_t *)payload) = TSDB_DATA_JSON_PLACEHOLDER;
} else if (pVariant->nType == TSDB_DATA_TYPE_JSON){ // select * from stable, set tag type to json,from setTagValue/tag_project_function
memcpy(payload, pVariant->pz, pVariant->nLen);
diff --git a/src/connector/C#/src/TDengineDriver/TaosBind.cs b/src/connector/C#/src/TDengineDriver/TaosBind.cs
index 694dcd900bccedc913ce9d1956650f97957965da..3ac71e75396dcd8a0e517a35ed1282d826866b77 100644
--- a/src/connector/C#/src/TDengineDriver/TaosBind.cs
+++ b/src/connector/C#/src/TDengineDriver/TaosBind.cs
@@ -1,5 +1,6 @@
using System;
using System.Runtime.InteropServices;
+using System.Text;
namespace TDengineDriver
@@ -249,7 +250,8 @@ namespace TDengineDriver
TAOS_BIND bind = new TAOS_BIND();
IntPtr umanageBinary = Marshal.StringToHGlobalAnsi(val);
- int leng = val.Length;
+ var strToBytes = System.Text.Encoding.Default.GetBytes(val);
+ int leng = strToBytes.Length;
IntPtr lenPtr = Marshal.AllocHGlobal(sizeof(ulong));
Marshal.WriteInt64(lenPtr, leng);
@@ -264,9 +266,11 @@ namespace TDengineDriver
public static TAOS_BIND BindNchar(String val)
{
TAOS_BIND bind = new TAOS_BIND();
+ var strToBytes = System.Text.Encoding.Default.GetBytes(val);
IntPtr umanageNchar = (IntPtr)Marshal.StringToHGlobalAnsi(val);
- int leng = val.Length;
+
+ int leng = strToBytes.Length;
IntPtr lenPtr = Marshal.AllocHGlobal(sizeof(ulong));
Marshal.WriteInt64(lenPtr, leng);
diff --git a/src/connector/C#/src/TDengineDriver/TaosMultiBind.cs b/src/connector/C#/src/TDengineDriver/TaosMultiBind.cs
index e01558caeb0905826c77fe97ee6d7147ff8b923e..00ec336be636a10e895e77e3ce20c50b7d5648ab 100644
--- a/src/connector/C#/src/TDengineDriver/TaosMultiBind.cs
+++ b/src/connector/C#/src/TDengineDriver/TaosMultiBind.cs
@@ -449,28 +449,27 @@ namespace TDengineDriver
for (int i = 0; i < elementCount; i++)
{
int itemLength = 0;
+ byte[] decodeByte = GetStringEncodeByte(arr[i]);
+ itemLength = decodeByte.Length;
// if element if not null and element length is less then typeSize
// fill the memory with default char.Since arr element memory need align.
- if (!String.IsNullOrEmpty(arr[i]) && typeSize <= arr[i].Length)
+ if (!String.IsNullOrEmpty(arr[i]) && typeSize == itemLength)
{
- itemLength = arr[i].Length;
arrStrBuilder.Append(arr[i]);
}
- else if (!String.IsNullOrEmpty(arr[i]) && typeSize > arr[i].Length)
+ else if (!String.IsNullOrEmpty(arr[i]) && typeSize > itemLength)
{
- itemLength = arr[i].Length;
arrStrBuilder.Append(arr[i]);
- arrStrBuilder.Append(AlignCharArr(typeSize - arr[i].Length));
+ arrStrBuilder.Append(AlignCharArr(typeSize - itemLength));
}
else
{
// if is null value,fill the memory with default values.
- itemLength = 0;
arrStrBuilder.Append(AlignCharArr(typeSize));
}
//set TAOS_MULTI_BIND.length
- Marshal.WriteInt32(lengthArr, intSize * i, itemLength);
+ Marshal.WriteInt32(lengthArr, intSize * i, typeSize);
//set TAOS_MULTI_BIND.is_null
Marshal.WriteByte(nullArr, byteSize * i, Convert.ToByte(String.IsNullOrEmpty(arr[i]) ? 1 : 0));
}
@@ -505,28 +504,27 @@ namespace TDengineDriver
for (int i = 0; i < elementCount; i++)
{
int itemLength = 0;
+ byte[] decodeByte = GetStringEncodeByte(arr[i]);
+ itemLength = decodeByte.Length;
// if element if not null and element length is less then typeSize
// fill the memory with default char.Since arr element memory need align.
- if (!String.IsNullOrEmpty(arr[i]) && typeSize <= arr[i].Length)
+ if (!String.IsNullOrEmpty(arr[i]) && typeSize == itemLength)
{
- itemLength = arr[i].Length;
arrStrBuilder.Append(arr[i]);
}
- else if (!String.IsNullOrEmpty(arr[i]) && typeSize > arr[i].Length)
+ else if (!String.IsNullOrEmpty(arr[i]) && typeSize > itemLength)
{
- itemLength = arr[i].Length;
arrStrBuilder.Append(arr[i]);
- arrStrBuilder.Append(AlignCharArr(typeSize - arr[i].Length));
+ arrStrBuilder.Append(AlignCharArr(typeSize - itemLength));
}
else
{
// if is null value,fill the memory with default values.
- itemLength = 0;
arrStrBuilder.Append(AlignCharArr(typeSize));
}
//set TAOS_MULTI_BIND.length
- Marshal.WriteInt32(lengthArr, intSize * i, itemLength);
+ Marshal.WriteInt32(lengthArr, intSize * i, typeSize);
//set TAOS_MULTI_BIND.is_null
Marshal.WriteByte(nullArr, byteSize * i, Convert.ToByte(String.IsNullOrEmpty(arr[i]) ? 1 : 0));
}
@@ -604,13 +602,28 @@ namespace TDengineDriver
int max = 0;
for (int i = 0; i < strArr.Length; i++)
{
- if (!String.IsNullOrEmpty(strArr[i]) && max < strArr[i].Length)
+ int tmpLength = GetStringEncodeByte(strArr[i]).Length;
+ if (!String.IsNullOrEmpty(strArr[i]) && max < tmpLength)
{
- max = strArr[i].Length;
+ max = tmpLength;
}
}
return max;
}
+
+ private static Byte[] GetStringEncodeByte(string str)
+ {
+ Byte[] strToBytes = null;
+ if(String.IsNullOrEmpty(str))
+ {
+ strToBytes = System.Text.Encoding.Default.GetBytes(String.Empty);
+ }
+ else
+ {
+ strToBytes = System.Text.Encoding.Default.GetBytes(str);
+ }
+ return strToBytes;
+ }
}
}
\ No newline at end of file
diff --git a/src/connector/C#/src/test/Cases/DataSource.cs b/src/connector/C#/src/test/Cases/DataSource.cs
index e422c70bf1d4b45a752984e3290fa8751d8ff41c..25f639c9772ac656f1ba8effff798a05b370f9a0 100644
--- a/src/connector/C#/src/test/Cases/DataSource.cs
+++ b/src/connector/C#/src/test/Cases/DataSource.cs
@@ -21,7 +21,8 @@ namespace Test.UtilsTools.DataSource
public static string[] binaryArr = new string[5] { "1234567890~!@#$%^&*()_+=-`[]{}:,./<>?", String.Empty, null, "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM", "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM1234567890~!@#$%^&*()_+=-`[]{}:,./<>?" };
public static string[] ncharArr = new string[5] { "1234567890~!@#$%^&*()_+=-`[]{}:,./<>?", null, "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM", "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM1234567890~!@#$%^&*()_+=-`[]{}:,./<>?", string.Empty };
-
+ public static string[] binaryArrCn = new string[5] { "涛思数据", String.Empty, null, "taosdata涛思数据", "涛思数据TDengine" };
+ public static string[] NcharArrCn = new string[5] { "涛思数据", null, "taosdata涛思数据", "涛思数据TDengine", String.Empty };
public static TAOS_BIND[] getTags()
{
TAOS_BIND[] binds = new TAOS_BIND[13];
@@ -40,6 +41,47 @@ namespace Test.UtilsTools.DataSource
binds[12] = TaosBind.BindNchar("qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKZXCVBNM`1234567890-=+_)(*&^%$#@!~[];,./<>?:{}");
return binds;
}
+
+ public static TAOS_BIND[] getCNTags()
+ {
+ TAOS_BIND[] binds = new TAOS_BIND[13];
+ binds[0] = TaosBind.BindBool(true);
+ binds[1] = TaosBind.BindTinyInt(-2);
+ binds[2] = TaosBind.BindSmallInt(short.MaxValue - 1);
+ binds[3] = TaosBind.BindInt(int.MaxValue - 1);
+ binds[4] = TaosBind.BindBigInt(Int64.MaxValue - 1);
+ binds[5] = TaosBind.BindUTinyInt(byte.MaxValue - 1);
+ binds[6] = TaosBind.BindUSmallInt(UInt16.MaxValue - 1);
+ binds[7] = TaosBind.BindUInt(uint.MinValue + 1);
+ binds[8] = TaosBind.BindUBigInt(UInt64.MinValue + 1);
+ binds[9] = TaosBind.BindFloat(11.11F);
+ binds[10] = TaosBind.BindDouble(22.22D);
+ binds[11] = TaosBind.BindBinary("TDengine涛思数据");
+ binds[12] = TaosBind.BindNchar("涛思");
+ return binds;
+ }
+
+ public static TAOS_BIND[] getNtableCNRow()
+ {
+ TAOS_BIND[] binds = new TAOS_BIND[15];
+ binds[0] = TaosBind.BindTimestamp(1637064040000);
+ binds[1] = TaosBind.BindTinyInt(-2);
+ binds[2] = TaosBind.BindSmallInt(short.MaxValue);
+ binds[3] = TaosBind.BindInt(int.MaxValue);
+ binds[4] = TaosBind.BindBigInt(Int64.MaxValue);
+ binds[5] = TaosBind.BindUTinyInt(byte.MaxValue - 1);
+ binds[6] = TaosBind.BindUSmallInt(UInt16.MaxValue - 1);
+ binds[7] = TaosBind.BindUInt(uint.MinValue + 1);
+ binds[8] = TaosBind.BindUBigInt(UInt64.MinValue + 1);
+ binds[9] = TaosBind.BindFloat(11.11F);
+ binds[10] = TaosBind.BindDouble(22.22D);
+ binds[11] = TaosBind.BindBinary("TDengine数据");
+ binds[12] = TaosBind.BindNchar("taosdata涛思数据");
+ binds[13] = TaosBind.BindBool(true);
+ binds[14] = TaosBind.BindNil();
+ return binds;
+ }
+
public static TAOS_BIND[] getNtableRow()
{
TAOS_BIND[] binds = new TAOS_BIND[15];
@@ -60,7 +102,6 @@ namespace Test.UtilsTools.DataSource
binds[14] = TaosBind.BindNil();
return binds;
}
-
public static TAOS_MULTI_BIND[] GetMultiBindArr()
{
TAOS_MULTI_BIND[] mBinds = new TAOS_MULTI_BIND[14];
@@ -80,6 +121,26 @@ namespace Test.UtilsTools.DataSource
mBinds[13] = TaosMultiBind.MultiBindNchar(ncharArr);
return mBinds;
}
+ public static TAOS_MULTI_BIND[] GetMultiBindCNArr()
+ {
+ TAOS_MULTI_BIND[] mBinds = new TAOS_MULTI_BIND[14];
+ mBinds[0] = TaosMultiBind.MultiBindTimestamp(tsArr);
+ mBinds[1] = TaosMultiBind.MultiBindBool(boolArr);
+ mBinds[2] = TaosMultiBind.MultiBindTinyInt(tinyIntArr);
+ mBinds[3] = TaosMultiBind.MultiBindSmallInt(shortArr);
+ mBinds[4] = TaosMultiBind.MultiBindInt(intArr);
+ mBinds[5] = TaosMultiBind.MultiBindBigint(longArr);
+ mBinds[6] = TaosMultiBind.MultiBindFloat(floatArr);
+ mBinds[7] = TaosMultiBind.MultiBindDouble(doubleArr);
+ mBinds[8] = TaosMultiBind.MultiBindUTinyInt(uTinyIntArr);
+ mBinds[9] = TaosMultiBind.MultiBindUSmallInt(uShortArr);
+ mBinds[10] = TaosMultiBind.MultiBindUInt(uIntArr);
+ mBinds[11] = TaosMultiBind.MultiBindUBigInt(uLongArr);
+ mBinds[12] = TaosMultiBind.MultiBindBinary(binaryArrCn);
+ mBinds[13] = TaosMultiBind.MultiBindNchar(NcharArrCn);
+ return mBinds;
+ }
+
public static TAOS_BIND[] GetQueryCondition()
{
TAOS_BIND[] queryCondition = new TAOS_BIND[2];
diff --git a/src/connector/C#/src/test/Cases/Program.cs b/src/connector/C#/src/test/Cases/Program.cs
index 89f878e994aa35977fc69c5576bca0ec21c41882..a498cc21d50a4d8c2811d86a33677e4027e96993 100644
--- a/src/connector/C#/src/test/Cases/Program.cs
+++ b/src/connector/C#/src/test/Cases/Program.cs
@@ -1,4 +1,4 @@
-using System;
+using System;
using Test.UtilsTools;
using Cases;
@@ -14,9 +14,9 @@ namespace Cases.EntryPoint
IntPtr res = IntPtr.Zero;
conn = UtilsTools.TDConnection("127.0.0.1", "root", "taosdata", "", 0);
- UtilsTools.ExecuteQuery(conn, "drop database if exists csharp");
- UtilsTools.ExecuteQuery(conn, "create database if not exists csharp keep 3650");
- UtilsTools.ExecuteQuery(conn, "use csharp");
+ UtilsTools.ExecuteUpdate(conn, "drop database if exists csharp");
+ UtilsTools.ExecuteUpdate(conn, "create database if not exists csharp keep 3650");
+ UtilsTools.ExecuteUpdate(conn, "use csharp");
Console.WriteLine("====================StableColumnByColumn===================");
StableColumnByColumn columnByColumn = new StableColumnByColumn();
@@ -34,6 +34,8 @@ namespace Cases.EntryPoint
Console.WriteLine("====================NtableSingleLine===================");
NtableSingleLine ntableSingleLine = new NtableSingleLine();
ntableSingleLine.Test(conn, "stablesingleline");
+ IntPtr resPtr = UtilsTools.ExecuteQuery(conn, "select * from stablesingleline ");
+ UtilsTools.DisplayRes(resPtr);
Console.WriteLine("====================NtableMutipleLine===================");
NtableMutipleLine ntableMutipleLine = new NtableMutipleLine();
@@ -50,6 +52,27 @@ namespace Cases.EntryPoint
FetchFields fetchFields = new FetchFields();
fetchFields.Test(conn, "fetchfeilds");
+
+ StableStmtCases stableStmtCases = new StableStmtCases();
+ Console.WriteLine("====================stableStmtCases.TestBindSingleLineCn===================");
+ stableStmtCases.TestBindSingleLineCn(conn, "stablestmtcasestestbindsinglelinecn");
+
+ Console.WriteLine("====================stableStmtCases.TestBindColumnCn===================");
+            stableStmtCases.TestBindColumnCn(conn, "stablestmtcasestestbindcolumncn");
+
+ Console.WriteLine("====================stableStmtCases.TestBindMultiLineCn===================");
+ stableStmtCases.TestBindMultiLineCn(conn, "stablestmtcasestestbindmultilinecn");
+
+ NormalTableStmtCases normalTableStmtCases = new NormalTableStmtCases();
+ Console.WriteLine("====================normalTableStmtCases.TestBindSingleLineCn===================");
+ normalTableStmtCases.TestBindSingleLineCn(conn, "normaltablestmtcasestestbindsinglelinecn");
+
+ Console.WriteLine("====================normalTableStmtCases.TestBindColumnCn===================");
+ normalTableStmtCases.TestBindColumnCn(conn, "normaltablestmtcasestestbindcolumncn");
+
+ Console.WriteLine("====================normalTableStmtCases.TestBindMultiLineCn===================");
+ normalTableStmtCases.TestBindMultiLineCn(conn, "normaltablestmtcasestestbindmultilinecn");
+
Console.WriteLine("===================JsonTagTest====================");
JsonTagTest jsonTagTest = new JsonTagTest();
jsonTagTest.Test(conn);
diff --git a/src/connector/C#/src/test/Cases/StmtNormalTable.cs b/src/connector/C#/src/test/Cases/StmtNormalTable.cs
index a918f6bada153bc64d0c31d10597526503d696f8..19622fd1ddbc1760856630db4b9e91fb1bd9fe2b 100644
--- a/src/connector/C#/src/test/Cases/StmtNormalTable.cs
+++ b/src/connector/C#/src/test/Cases/StmtNormalTable.cs
@@ -7,7 +7,11 @@ namespace Cases
{
public class NtableSingleLine
{
-
+ /// xiaolei
+ /// NtableSingleLine.Test
+        /// Test stmt insert single line of data into normal table
+ /// StmtNormalTable.cs
+ /// pass or failed
public void Test(IntPtr conn, string tableName)
{
String createTb = "create table " + tableName + "(ts timestamp,tt tinyint,si smallint,ii int,bi bigint,tu tinyint unsigned,su smallint unsigned,iu int unsigned,bu bigint unsigned,ff float ,dd double ,bb binary(200),nc nchar(200),bo bool,nullVal int);";
@@ -23,21 +27,26 @@ namespace Cases
StmtUtilTools.StmtExecute(stmt);
StmtUtilTools.StmtClose(stmt);
DataSource.FreeTaosBind(valuesRow);
+
}
}
public class NtableMutipleLine
{
- TAOS_MULTI_BIND[] mbind = DataSource.GetMultiBindArr();
+ /// xiaolei
+ /// NtableMutipleLine.Test
+ /// Test stmt insert multiple rows of data into normal table
+ /// StmtNormalTable.cs
+ /// pass or failed
public void Test(IntPtr conn, string tableName)
{
+ TAOS_MULTI_BIND[] mbind = DataSource.GetMultiBindArr();
String createTb = "create table " + tableName + " (ts timestamp ,b bool,v1 tinyint,v2 smallint,v4 int,v8 bigint,f4 float,f8 double,u1 tinyint unsigned,u2 smallint unsigned,u4 int unsigned,u8 bigint unsigned,bin binary(200),blob nchar(200));";
String insertSql = "insert into ? values(?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
- UtilsTools.ExecuteQuery(conn, createTb);
- String[] loadList = { tableName };
+ UtilsTools.ExecuteUpdate(conn, createTb);
+
IntPtr stmt = StmtUtilTools.StmtInit(conn);
- StmtUtilTools.loadTableInfo(conn, loadList);
StmtUtilTools.StmtPrepare(stmt, insertSql);
StmtUtilTools.SetTableName(stmt, tableName);
StmtUtilTools.BindParamBatch(stmt, mbind);
@@ -49,15 +58,20 @@ namespace Cases
}
public class NtableColumnByColumn
{
- DataSource data = new DataSource();
- TAOS_MULTI_BIND[] mbind = DataSource.GetMultiBindArr();
+ /// xiaolei
+ /// NtableColumnByColumn.Test
+ /// Test stmt insert multiple rows of data into normal table by column after column
+ /// StmtNormalTable.cs
+ /// pass or failed
public void Test(IntPtr conn, string tableName)
{
+ DataSource data = new DataSource();
+ TAOS_MULTI_BIND[] mbind = DataSource.GetMultiBindArr();
String createTb = "create table " + tableName + " (ts timestamp ,b bool,v1 tinyint,v2 smallint,v4 int,v8 bigint,f4 float,f8 double,u1 tinyint unsigned,u2 smallint unsigned,u4 int unsigned,u8 bigint unsigned,bin binary(200),blob nchar(200));";
String insertSql = "insert into ? values(?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
- UtilsTools.ExecuteQuery(conn, createTb);
+ UtilsTools.ExecuteUpdate(conn, createTb);
IntPtr stmt = StmtUtilTools.StmtInit(conn);
StmtUtilTools.StmtPrepare(stmt, insertSql);
@@ -86,4 +100,106 @@ namespace Cases
}
}
+
+ public class NormalTableStmtCases
+ {
+ /// xiaolei
+ /// NormalTableStmtCases.TestBindSingleLineCn
+        /// Test stmt insert a single line of Chinese-character data into a normal table
+ /// StmtNormalTable.cs
+ /// pass or failed
+ public void TestBindSingleLineCn(IntPtr conn, string tableName)
+ {
+ String createTb = "create table " + tableName + "(ts timestamp,tt tinyint,si smallint,ii int,bi bigint,tu tinyint unsigned,su smallint unsigned,iu int unsigned,bu bigint unsigned,ff float ,dd double ,bb binary(200),nc nchar(200),bo bool,nullVal int);";
+ String insertSql = "insert into ? values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
+ TAOS_BIND[] valuesRow = DataSource.getNtableCNRow();
+ UtilsTools.ExecuteUpdate(conn, createTb);
+
+ IntPtr stmt = StmtUtilTools.StmtInit(conn);
+ StmtUtilTools.StmtPrepare(stmt, insertSql);
+ StmtUtilTools.SetTableName(stmt, tableName);
+ StmtUtilTools.BindParam(stmt, valuesRow);
+ StmtUtilTools.AddBatch(stmt);
+ StmtUtilTools.StmtExecute(stmt);
+ StmtUtilTools.StmtClose(stmt);
+ DataSource.FreeTaosBind(valuesRow);
+
+ string querySql = "select * from " + tableName;
+ IntPtr res = UtilsTools.ExecuteQuery(conn, querySql);
+ UtilsTools.DisplayRes(res);
+
+ }
+
+ /// xiaolei
+ /// NormalTableStmtCases.TestBindColumnCn
+        /// Test stmt insert Chinese-character data into a normal table, binding column by column
+ /// StmtNormalTable.cs
+ /// pass or failed
+ public void TestBindColumnCn(IntPtr conn,string tableName)
+ {
+ DataSource data = new DataSource();
+ TAOS_MULTI_BIND[] mbind = DataSource.GetMultiBindCNArr();
+ String createTb = "create table " + tableName + " (ts timestamp ,b bool,v1 tinyint,v2 smallint,v4 int,v8 bigint,f4 float,f8 double,u1 tinyint unsigned,u2 smallint unsigned,u4 int unsigned,u8 bigint unsigned,bin binary(200),blob nchar(200));";
+ String insertSql = "insert into ? values(?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
+
+
+ UtilsTools.ExecuteUpdate(conn, createTb);
+ IntPtr stmt = StmtUtilTools.StmtInit(conn);
+
+ StmtUtilTools.StmtPrepare(stmt, insertSql);
+
+ StmtUtilTools.SetTableName(stmt, tableName);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[0], 0);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[1], 1);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[2], 2);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[3], 3);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[4], 4);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[5], 5);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[6], 6);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[7], 7);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[8], 8);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[9], 9);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[10], 10);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[11], 11);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[12], 12);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[13], 13);
+
+ StmtUtilTools.AddBatch(stmt);
+ StmtUtilTools.StmtExecute(stmt);
+ StmtUtilTools.StmtClose(stmt);
+
+ DataSource.FreeTaosMBind(mbind);
+
+ string querySql = "select * from " + tableName;
+ IntPtr res = UtilsTools.ExecuteQuery(conn, querySql);
+ UtilsTools.DisplayRes(res);
+ }
+ /// xiaolei
+ /// NormalTableStmtCases.TestBindMultiLineCn
+        /// Test stmt insert multiple lines of Chinese-character data into a normal table in one batch
+ /// StmtNormalTable.cs
+ /// pass or failed
+ public void TestBindMultiLineCn(IntPtr conn, string tableName)
+ {
+ TAOS_MULTI_BIND[] mbind = DataSource.GetMultiBindCNArr();
+ String createTb = "create table " + tableName + " (ts timestamp ,b bool,v1 tinyint,v2 smallint,v4 int,v8 bigint,f4 float,f8 double,u1 tinyint unsigned,u2 smallint unsigned,u4 int unsigned,u8 bigint unsigned,bin binary(200),blob nchar(200));";
+ String insertSql = "insert into ? values(?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
+
+ UtilsTools.ExecuteUpdate(conn, createTb);
+
+ IntPtr stmt = StmtUtilTools.StmtInit(conn);
+ StmtUtilTools.StmtPrepare(stmt, insertSql);
+ StmtUtilTools.SetTableName(stmt, tableName);
+ StmtUtilTools.BindParamBatch(stmt, mbind);
+ StmtUtilTools.AddBatch(stmt);
+ StmtUtilTools.StmtExecute(stmt);
+ StmtUtilTools.StmtClose(stmt);
+
+ DataSource.FreeTaosMBind(mbind);
+
+ string querySql = "select * from " + tableName;
+ IntPtr res = UtilsTools.ExecuteQuery(conn, querySql);
+ UtilsTools.DisplayRes(res);
+ }
+ }
}
\ No newline at end of file
diff --git a/src/connector/C#/src/test/Cases/StmtStable.cs b/src/connector/C#/src/test/Cases/StmtStable.cs
index f6024909d04b2a239f0b49ba5bba65eba3d2a718..b47ef2226225977fa0d95aa6113d07dc8fb10f50 100644
--- a/src/connector/C#/src/test/Cases/StmtStable.cs
+++ b/src/connector/C#/src/test/Cases/StmtStable.cs
@@ -15,10 +15,8 @@ namespace Cases
String createTb = "create stable " + tableName + " (ts timestamp ,b bool,v1 tinyint,v2 smallint,v4 int,v8 bigint,f4 float,f8 double,u1 tinyint unsigned,u2 smallint unsigned,u4 int unsigned,u8 bigint unsigned,bin binary(200),blob nchar(200))tags(bo bool,tt tinyint,si smallint,ii int,bi bigint,tu tinyint unsigned,su smallint unsigned,iu int unsigned,bu bigint unsigned,ff float ,dd double ,bb binary(200),nc nchar(200));";
String insertSql = "insert into ? using " + tableName + " tags(?,?,?,?,?,?,?,?,?,?,?,?,?) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
- UtilsTools.ExecuteQuery(conn, createTb);
- String[] loadList = { tableName };
+ UtilsTools.ExecuteUpdate(conn, createTb);
IntPtr stmt = StmtUtilTools.StmtInit(conn);
- StmtUtilTools.loadTableInfo(conn, loadList);
StmtUtilTools.StmtPrepare(stmt, insertSql);
StmtUtilTools.SetTableNameTags(stmt, tableName + "_t1", tags);
@@ -43,9 +41,8 @@ namespace Cases
String insertSql = "insert into ? using " + tableName + " tags(?,?,?,?,?,?,?,?,?,?,?,?,?) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
- UtilsTools.ExecuteQuery(conn, createTb);
+ UtilsTools.ExecuteUpdate(conn, createTb);
IntPtr stmt = StmtUtilTools.StmtInit(conn);
-
StmtUtilTools.StmtPrepare(stmt, insertSql);
StmtUtilTools.SetTableNameTags(stmt, tableName + "_t1", tags);
@@ -73,4 +70,119 @@ namespace Cases
}
}
+ public class StableStmtCases
+ {
+ /// xiaolei
+ /// StableStmtCases.TestBindSingleLineCn
+        /// Test stmt insert a single line of Chinese-character data into a super table (stable)
+ /// StmtSTable.cs
+ /// pass or failed
+ public void TestBindSingleLineCn(IntPtr conn, string tableName)
+ {
+ TAOS_BIND[] tags = DataSource.getCNTags();
+ TAOS_BIND[] binds = DataSource.getNtableCNRow();
+ String createTb = "create stable " + tableName + " (ts timestamp,v1 tinyint,v2 smallint,v4 int,v8 bigint,u1 tinyint unsigned,u2 smallint unsigned,u4 int unsigned,u8 bigint unsigned,f4 float,f8 double,bin binary(200),blob nchar(200),b bool,nilcol int)tags(bo bool,tt tinyint,si smallint,ii int,bi bigint,tu tinyint unsigned,su smallint unsigned,iu int unsigned,bu bigint unsigned,ff float ,dd double ,bb binary(200),nc nchar(200));";
+ String insertSql = "insert into ? using " + tableName + " tags(?,?,?,?,?,?,?,?,?,?,?,?,?) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
+
+ UtilsTools.ExecuteUpdate(conn, createTb);
+ IntPtr stmt = StmtUtilTools.StmtInit(conn);
+
+ StmtUtilTools.StmtPrepare(stmt, insertSql);
+ StmtUtilTools.SetTableNameTags(stmt, tableName + "_t1", tags);
+ StmtUtilTools.BindParam(stmt, binds);
+ StmtUtilTools.AddBatch(stmt);
+ StmtUtilTools.StmtExecute(stmt);
+
+ StmtUtilTools.StmtClose(stmt);
+ DataSource.FreeTaosBind(tags);
+ DataSource.FreeTaosBind(binds);
+
+ string querySql = "select * from " + tableName;
+ IntPtr res = UtilsTools.ExecuteQuery(conn, querySql);
+ UtilsTools.DisplayRes(res);
+
+ }
+
+ /// xiaolei
+ /// StableStmtCases.TestBindColumnCn
+        /// Test stmt insert Chinese-character data into a super table (stable), binding column by column
+ /// StmtSTable.cs
+ /// pass or failed
+ public void TestBindColumnCn(IntPtr conn, string tableName)
+ {
+ DataSource data = new DataSource();
+ TAOS_BIND[] tags = DataSource.getCNTags();
+ TAOS_MULTI_BIND[] mbind = DataSource.GetMultiBindCNArr();
+
+ String createTb = "create stable " + tableName + " (ts timestamp ,b bool,v1 tinyint,v2 smallint,v4 int,v8 bigint,f4 float,f8 double,u1 tinyint unsigned,u2 smallint unsigned,u4 int unsigned,u8 bigint unsigned,bin binary(200),blob nchar(200))tags(bo bool,tt tinyint,si smallint,ii int,bi bigint,tu tinyint unsigned,su smallint unsigned,iu int unsigned,bu bigint unsigned,ff float ,dd double ,bb binary(200),nc nchar(200));";
+ String insertSql = "insert into ? using " + tableName + " tags(?,?,?,?,?,?,?,?,?,?,?,?,?) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
+
+
+ UtilsTools.ExecuteUpdate(conn, createTb);
+ IntPtr stmt = StmtUtilTools.StmtInit(conn);
+
+ StmtUtilTools.StmtPrepare(stmt, insertSql);
+ StmtUtilTools.SetTableNameTags(stmt, tableName + "_t1", tags);
+
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[0], 0);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[1], 1);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[2], 2);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[3], 3);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[4], 4);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[5], 5);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[6], 6);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[7], 7);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[8], 8);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[9], 9);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[10], 10);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[11], 11);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[12], 12);
+ StmtUtilTools.BindSingleParamBatch(stmt, mbind[13], 13);
+
+ StmtUtilTools.AddBatch(stmt);
+ StmtUtilTools.StmtExecute(stmt);
+ StmtUtilTools.StmtClose(stmt);
+
+ DataSource.FreeTaosBind(tags);
+ DataSource.FreeTaosMBind(mbind);
+
+ string querySql = "select * from " + tableName;
+ IntPtr res = UtilsTools.ExecuteQuery(conn, querySql);
+ UtilsTools.DisplayRes(res);
+
+
+ }
+
+ /// xiaolei
+ /// StableStmtCases.TestBindMultiLineCn
+        /// Test stmt insert multiple lines of Chinese-character data into a super table (stable) in one batch
+ /// StmtSTable.cs
+ /// pass or failed
+ public void TestBindMultiLineCn(IntPtr conn, string tableName)
+ {
+ TAOS_BIND[] tags = DataSource.getCNTags();
+ TAOS_MULTI_BIND[] mbind = DataSource.GetMultiBindCNArr();
+
+ String createTb = "create stable " + tableName + " (ts timestamp ,b bool,v1 tinyint,v2 smallint,v4 int,v8 bigint,f4 float,f8 double,u1 tinyint unsigned,u2 smallint unsigned,u4 int unsigned,u8 bigint unsigned,bin binary(200),blob nchar(200))tags(bo bool,tt tinyint,si smallint,ii int,bi bigint,tu tinyint unsigned,su smallint unsigned,iu int unsigned,bu bigint unsigned,ff float ,dd double ,bb binary(200),nc nchar(200));";
+ String insertSql = "insert into ? using " + tableName + " tags(?,?,?,?,?,?,?,?,?,?,?,?,?) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
+
+ UtilsTools.ExecuteUpdate(conn, createTb);
+ IntPtr stmt = StmtUtilTools.StmtInit(conn);
+
+ StmtUtilTools.StmtPrepare(stmt, insertSql);
+ StmtUtilTools.SetTableNameTags(stmt, tableName + "_t1", tags);
+ StmtUtilTools.BindParamBatch(stmt, mbind);
+ StmtUtilTools.AddBatch(stmt);
+ StmtUtilTools.StmtExecute(stmt);
+
+ StmtUtilTools.StmtClose(stmt);
+ DataSource.FreeTaosBind(tags);
+ DataSource.FreeTaosMBind(mbind);
+
+ string querySql = "select * from " + tableName;
+ IntPtr res = UtilsTools.ExecuteQuery(conn, querySql);
+ UtilsTools.DisplayRes(res);
+ }
+
+ }
}
\ No newline at end of file
diff --git a/src/connector/C#/src/test/Cases/Utils.cs b/src/connector/C#/src/test/Cases/Utils.cs
index 7877601e0adbc38c186bd44456ceb3005d806ff1..dd856db8eb2bfc4122ccdd80db2fe74e74af2760 100644
--- a/src/connector/C#/src/test/Cases/Utils.cs
+++ b/src/connector/C#/src/test/Cases/Utils.cs
@@ -8,7 +8,7 @@ namespace Test.UtilsTools
public class UtilsTools
{
- static string configDir = "C:/TDengine/cfg";
+ static string configDir = "/etc/taos";//"C:/TDengine/cfg";
public static IntPtr TDConnection(string ip, string user, string password, string db, short port)
{
@@ -60,6 +60,28 @@ namespace Test.UtilsTools
return res;
}
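+        /// <summary>
+        /// Run a non-query SQL statement (DDL/INSERT); log the outcome and exit the program on failure.
+        /// </summary>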
+ public static void ExecuteUpdate(IntPtr conn, String sql)
+ {
+ IntPtr res = TDengine.Query(conn, sql);
+ if ((res == IntPtr.Zero) || (TDengine.ErrorNo(res) != 0))
+ {
+ Console.Write(sql.ToString() + " failure, ");
+ if (res != IntPtr.Zero)
+ {
+ Console.Write("reason: " + TDengine.Error(res));
+
+ }
+ Console.WriteLine("");
+ ExitProgram();
+ }
+ else
+ {
+ Console.WriteLine(sql.ToString() + " success");
+
+ }
+ TDengine.FreeResult(res);
+ }
+
public static void DisplayRes(IntPtr res)
{
long queryRows = 0;
diff --git a/src/connector/C#/src/test/XUnitTest/TestTaosBind.cs b/src/connector/C#/src/test/XUnitTest/TestTaosBind.cs
index 208bdcc02cf84db4af149ddc314d67db7b92b848..1929d70a580744e6dcb57ee79699f18e295c3393 100644
--- a/src/connector/C#/src/test/XUnitTest/TestTaosBind.cs
+++ b/src/connector/C#/src/test/XUnitTest/TestTaosBind.cs
@@ -652,8 +652,8 @@ namespace TDengineDriver.Test
{
int bufferType = 8;
String buffer = "qwertyuiopasdghjklzxcvbnm<>?:\"{}+_)(*&^%$#@!~QWERTYUIOP[]\\ASDFGHJKL;'ZXCVBNM,./`1234567890-=";
- int bufferLength = buffer.Length;
- int length = buffer.Length;
+ int bufferLength = System.Text.Encoding.Default.GetBytes(buffer).Length;
+ int length = System.Text.Encoding.Default.GetBytes(buffer).Length;
TDengineDriver.TAOS_BIND bind = TaosBind.BindBinary("qwertyuiopasdghjklzxcvbnm<>?:\"{}+_)(*&^%$#@!~QWERTYUIOP[]\\ASDFGHJKL;'ZXCVBNM,./`1234567890-=");
int BindLengPtr = Marshal.ReadInt32(bind.length);
@@ -674,8 +674,8 @@ namespace TDengineDriver.Test
{
int bufferType = 8;
String buffer = "一二两三四五六七八九十廿毛另壹贰叁肆伍陆柒捌玖拾佰仟万亿元角分零整1234567890`~!@#$%^&*()_+[]{};':<>?,./";
- int bufferLength = buffer.Length;
- int length = buffer.Length;
+ int bufferLength = System.Text.Encoding.Default.GetBytes(buffer).Length;
+ int length = System.Text.Encoding.Default.GetBytes(buffer).Length;
TDengineDriver.TAOS_BIND bind = TaosBind.BindBinary("一二两三四五六七八九十廿毛另壹贰叁肆伍陆柒捌玖拾佰仟万亿元角分零整1234567890`~!@#$%^&*()_+[]{};':<>?,./");
int BindLengPtr = Marshal.ReadInt32(bind.length);
@@ -696,8 +696,8 @@ namespace TDengineDriver.Test
{
int bufferType = 8;
String buffer = "一二两三四五六七八九十廿毛另壹贰叁肆伍陆柒捌玖拾佰仟万亿元角分零整1234567890`~!@#$%^&*()_+[]{};':<>?,./qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM";
- int bufferLength = buffer.Length;
- int length = buffer.Length;
+ int bufferLength = System.Text.Encoding.Default.GetBytes(buffer).Length;
+ int length = System.Text.Encoding.Default.GetBytes(buffer).Length;
TDengineDriver.TAOS_BIND bind = TaosBind.BindBinary("一二两三四五六七八九十廿毛另壹贰叁肆伍陆柒捌玖拾佰仟万亿元角分零整1234567890`~!@#$%^&*()_+[]{};':<>?,./qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM");
int BindLengPtr = Marshal.ReadInt32(bind.length);
@@ -718,8 +718,8 @@ namespace TDengineDriver.Test
{
int bufferType = 10;
String buffer = "qwertyuiopasdghjklzxcvbnm<>?:\"{}+_)(*&^%$#@!~QWERTYUIOP[]\\ASDFGHJKL;'ZXCVBNM,./`1234567890-=";
- int bufferLength = buffer.Length;
- int length = buffer.Length;
+ int bufferLength = System.Text.Encoding.Default.GetBytes(buffer).Length;
+ int length = System.Text.Encoding.Default.GetBytes(buffer).Length;
TDengineDriver.TAOS_BIND bind = TaosBind.BindNchar("qwertyuiopasdghjklzxcvbnm<>?:\"{}+_)(*&^%$#@!~QWERTYUIOP[]\\ASDFGHJKL;'ZXCVBNM,./`1234567890-=");
int BindLengPtr = Marshal.ReadInt32(bind.length);
@@ -739,8 +739,8 @@ namespace TDengineDriver.Test
{
int bufferType = 10;
String buffer = "一二两三四五六七八九十廿毛另壹贰叁肆伍陆柒捌玖拾佰仟万亿元角分零整1234567890`~!@#$%^&*()_+[]{};':<>?,./";
- int bufferLength = buffer.Length;
- int length = buffer.Length;
+ int bufferLength = System.Text.Encoding.Default.GetBytes(buffer).Length;
+ int length = System.Text.Encoding.Default.GetBytes(buffer).Length;
TDengineDriver.TAOS_BIND bind = TaosBind.BindNchar("一二两三四五六七八九十廿毛另壹贰叁肆伍陆柒捌玖拾佰仟万亿元角分零整1234567890`~!@#$%^&*()_+[]{};':<>?,./");
int BindLengPtr = Marshal.ReadInt32(bind.length);
@@ -760,8 +760,8 @@ namespace TDengineDriver.Test
{
int bufferType = 10;
String buffer = "一二两三四五六七八九十廿毛另壹贰叁肆伍陆柒捌玖拾佰仟万亿元角分零整1234567890`~!@#$%^&*()_+[]{};':<>?,./qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM";
- int bufferLength = buffer.Length;
- int length = buffer.Length;
+ int bufferLength = System.Text.Encoding.Default.GetBytes(buffer).Length;
+ int length = System.Text.Encoding.Default.GetBytes(buffer).Length;
TDengineDriver.TAOS_BIND bind = TaosBind.BindNchar("一二两三四五六七八九十廿毛另壹贰叁肆伍陆柒捌玖拾佰仟万亿元角分零整1234567890`~!@#$%^&*()_+[]{};':<>?,./qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM");
int BindLengPtr = Marshal.ReadInt32(bind.length);
diff --git a/src/connector/jdbc/src/test/java/com/taosdata/jdbc/confprops/HttpKeepAliveTest.java b/src/connector/jdbc/src/test/java/com/taosdata/jdbc/confprops/HttpKeepAliveTest.java
index 7f7979eb01154a85b25bbea7b9d3f042b4f1c104..33c6a6062766826ed37fc0fd338446ba9267904c 100644
--- a/src/connector/jdbc/src/test/java/com/taosdata/jdbc/confprops/HttpKeepAliveTest.java
+++ b/src/connector/jdbc/src/test/java/com/taosdata/jdbc/confprops/HttpKeepAliveTest.java
@@ -1,6 +1,7 @@
package com.taosdata.jdbc.confprops;
import org.junit.Assert;
+import org.junit.Ignore;
import org.junit.Test;
import java.sql.Connection;
@@ -13,6 +14,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
+@Ignore
public class HttpKeepAliveTest {
private static final String host = "127.0.0.1";
diff --git a/src/kit/taos-tools b/src/kit/taos-tools
index a3611888d4257a9baa0ce876b04b47c60cc17279..27751ba9ca17407425fb50a52cd68295794dedc3 160000
--- a/src/kit/taos-tools
+++ b/src/kit/taos-tools
@@ -1 +1 @@
-Subproject commit a3611888d4257a9baa0ce876b04b47c60cc17279
+Subproject commit 27751ba9ca17407425fb50a52cd68295794dedc3
diff --git a/src/plugins/CMakeLists.txt b/src/plugins/CMakeLists.txt
index c7221a6d301ae09e47bd68c76a90599fd85dff2a..765c7195cb4ef2fd7e2a87a1a95cff725d8b0c90 100644
--- a/src/plugins/CMakeLists.txt
+++ b/src/plugins/CMakeLists.txt
@@ -8,6 +8,8 @@ IF (TD_BUILD_HTTP)
MESSAGE("${Yellow} use original embedded httpd ${ColourReset}")
MESSAGE("")
ADD_SUBDIRECTORY(http)
+ELSEIF(TD_BUILD_TAOSA_INTERNAL)
+ MESSAGE("${Yellow} use taosa internal as httpd ${ColourReset}")
ELSE ()
MESSAGE("")
MESSAGE("${Green} use taosadapter as httpd ${ColourReset}")
diff --git a/src/plugins/monitor/src/monMain.c b/src/plugins/monitor/src/monMain.c
index b93d85140c230ae5b010a3559fed9488cc6b0b9f..a03bc09036d14045043704e82e22fdd177c243b2 100644
--- a/src/plugins/monitor/src/monMain.c
+++ b/src/plugins/monitor/src/monMain.c
@@ -601,6 +601,10 @@ static void monSaveSystemInfo() {
}
static int32_t monGetRowElemCharLen(TAOS_FIELD field, char *rowElem) {
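+  // only BINARY and NCHAR values carry a var-data length header; return -1 for other types
+  // so callers can detect the invalid case and skip the row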
+ if (field.type != TSDB_DATA_TYPE_BINARY && field.type != TSDB_DATA_TYPE_NCHAR) {
+ return -1;
+ }
+
int32_t charLen = varDataLen(rowElem - VARSTR_HEADER_SIZE);
if (field.type == TSDB_DATA_TYPE_BINARY) {
assert(charLen <= field.bytes && charLen >= 0);
@@ -629,12 +633,14 @@ static int32_t monBuildMasterUptimeSql(char *sql) {
while ((row = taos_fetch_row(result))) {
for (int i = 0; i < num_fields; ++i) {
- int32_t charLen = monGetRowElemCharLen(fields[i], (char *)row[i]);
- if (strcmp(fields[i].name, "role") == 0 && strncmp((char *)row[i], "master", charLen) == 0) {
- if (strcmp(fields[i + 1].name, "role_time") == 0) {
- int64_t now = taosGetTimestamp(TSDB_TIME_PRECISION_MILLI);
- //master uptime in seconds
- masterUptime = (now - *(int64_t *)row[i + 1]) / 1000;
+ if (strcmp(fields[i].name, "role") == 0) {
+ int32_t charLen = monGetRowElemCharLen(fields[i], (char *)row[i]);
+ if (strncmp((char *)row[i], "master", charLen) == 0) {
+ if (strcmp(fields[i + 1].name, "role_time") == 0) {
+ int64_t now = taosGetTimestamp(TSDB_TIME_PRECISION_MILLI);
+ //master uptime in seconds
+ masterUptime = (now - *(int64_t *)row[i + 1]) / 1000;
+ }
}
}
}
@@ -1139,12 +1145,16 @@ static uint32_t monBuildVgroupsInfoSql(char *sql, char *dbName) {
pos += snprintf(sql, SQL_LENGTH, "insert into %s.vgroup_%d values(%" PRId64 ", "SQL_STR_FMT,
tsMonitorDbName, vgId, ts, dbName);
} else {
- return TSDB_CODE_SUCCESS;
+ goto DONE;
}
} else if (strcmp(fields[i].name, "tables") == 0) {
pos += snprintf(sql + pos, SQL_LENGTH, ", %d", *(int32_t *)row[i]);
} else if (strcmp(fields[i].name, "status") == 0) {
charLen = monGetRowElemCharLen(fields[i], (char *)row[i]);
+ if (charLen < 0) {
+ monError("failed to save vgroup_%d info, reason: invalid row %s len, sql:%s", vgId, (char *)row[i], tsMonitor.sql);
+ goto DONE;
+ }
pos += snprintf(sql + pos, strlen(SQL_STR_FMT) + charLen + 1, ", "SQL_STR_FMT, (char *)row[i]);
} else if (strcmp(fields[i].name, "onlines") == 0) {
pos += snprintf(sql + pos, SQL_LENGTH, ", %d", *(int32_t *)row[i]);
@@ -1152,6 +1162,10 @@ static uint32_t monBuildVgroupsInfoSql(char *sql, char *dbName) {
snprintf(v_dnode_ids, sizeof(v_dnode_ids), "%d;", *(int16_t *)row[i]);
} else if (v_dnode_str && strcmp(v_dnode_str, "_status") == 0) {
charLen = monGetRowElemCharLen(fields[i], (char *)row[i]);
+ if (charLen < 0) {
+ monError("failed to save vgroup_%d info, reason: invalid row %s len, sql:%s", vgId, (char *)row[i], tsMonitor.sql);
+ goto DONE;
+ }
snprintf(v_dnode_status, charLen + 1, "%s;", (char *)row[i]);
} else if (strcmp(fields[i].name, "compacting") == 0) {
//flush dnode_ids and dnode_role in to sql
@@ -1169,8 +1183,9 @@ static uint32_t monBuildVgroupsInfoSql(char *sql, char *dbName) {
monDebug("successfully to save vgroup_%d info, sql:%s", vgId, tsMonitor.sql);
}
}
- taos_free_result(result);
+DONE:
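+    // exit paths from the fetch loop jump here so the query result is always freed before returning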
+ taos_free_result(result);
return TSDB_CODE_SUCCESS;
}
@@ -1220,12 +1235,24 @@ static void monSaveSlowQueryInfo() {
if (strcmp(fields[i].name, "query_id") == 0) {
has_slowquery = true;
charLen = monGetRowElemCharLen(fields[i], (char *)row[i]);
+ if (charLen < 0) {
+ monError("failed to save slow_query info, reason: invalid row %s len, sql:%s", (char *)row[i], tsMonitor.sql);
+ goto DONE;
+ }
pos += snprintf(sql + pos, strlen(SQL_STR_FMT) + charLen + 1, ", "SQL_STR_FMT, (char *)row[i]);
} else if (strcmp(fields[i].name, "user") == 0) {
charLen = monGetRowElemCharLen(fields[i], (char *)row[i]);
+ if (charLen < 0) {
+ monError("failed to save slow_query info, reason: invalid row %s len, sql:%s", (char *)row[i], tsMonitor.sql);
+ goto DONE;
+ }
pos += snprintf(sql + pos, strlen(SQL_STR_FMT) + charLen + 1, ", "SQL_STR_FMT, (char *)row[i]);
} else if (strcmp(fields[i].name, "qid") == 0) {
charLen = monGetRowElemCharLen(fields[i], (char *)row[i]);
+ if (charLen < 0) {
+ monError("failed to save slow_query info, reason: invalid row %s len, sql:%s", (char *)row[i], tsMonitor.sql);
+ goto DONE;
+ }
pos += snprintf(sql + pos, strlen(SQL_STR_FMT) + charLen + 1, ", "SQL_STR_FMT, (char *)row[i]);
} else if (strcmp(fields[i].name, "created_time") == 0) {
int64_t create_time = *(int64_t *)row[i];
@@ -1235,18 +1262,25 @@ static void monSaveSlowQueryInfo() {
pos += snprintf(sql + pos, SQL_LENGTH, ", %" PRId64 "", *(int64_t *)row[i]);
} else if (strcmp(fields[i].name, "ep") == 0) {
charLen = monGetRowElemCharLen(fields[i], (char *)row[i]);
+ if (charLen < 0) {
+ monError("failed to save slow_query info, reason: invalid row %s len, sql:%s", (char *)row[i], tsMonitor.sql);
+ goto DONE;
+ }
pos += snprintf(sql + pos, strlen(SQL_STR_FMT) + charLen + 1, ", "SQL_STR_FMT, (char *)row[i]);
} else if (strcmp(fields[i].name, "sql") == 0) {
charLen = monGetRowElemCharLen(fields[i], (char *)row[i]);
+ if (charLen < 0) {
+ monError("failed to save slow_query info, reason: invalid row %s len, sql:%s", (char *)row[i], tsMonitor.sql);
+ goto DONE;
+ }
pos += snprintf(sql + pos, strlen(SQL_STR_FMT) + charLen + 2, ", "SQL_STR_FMT")", (char *)row[i]);
}
}
}
monDebug("save slow query, sql:%s", sql);
- taos_free_result(result);
if (!has_slowquery) {
- return;
+ goto DONE;
}
void *res = taos_query(tsMonitor.conn, tsMonitor.sql);
code = taos_errno(res);
@@ -1259,6 +1293,9 @@ static void monSaveSlowQueryInfo() {
monDebug("successfully to save slowquery info, sql:%s", tsMonitor.sql);
}
+DONE:
+ taos_free_result(result);
+ return;
}
static void monSaveDisksInfo() {
diff --git a/src/plugins/taosadapter b/src/plugins/taosadapter
index 11d1e02255edfeeaa8d5b1f45abfa9637332ce65..273b5219f8bcc604e43beebc6f1f95abed85170a 160000
--- a/src/plugins/taosadapter
+++ b/src/plugins/taosadapter
@@ -1 +1 @@
-Subproject commit 11d1e02255edfeeaa8d5b1f45abfa9637332ce65
+Subproject commit 273b5219f8bcc604e43beebc6f1f95abed85170a
diff --git a/tests/develop-test/0-others/json_tag.py b/tests/develop-test/0-others/json_tag.py
index b6a15ca770ea7e4885973a03bfc8e3bd08c3f54d..1271e8ae4a04e28bbe2d2f701524a3d42b1d0e61 100644
--- a/tests/develop-test/0-others/json_tag.py
+++ b/tests/develop-test/0-others/json_tag.py
@@ -22,9 +22,9 @@ import json
class TDTestCase:
def caseDescription(self):
'''
- Json tag test case, include create table with json tag,
- select json tag and query with json tag in where condition,
- besides, include json tag in group by/order by/join/subquery.
+ Json tag test case, include create table with json tag, select json tag and query with json tag in where condition, besides, include json tag in group by/order by/join/subquery.
+ case1: [TD-12452] fix error if json tag is NULL
+ case2: [TD-12389] describe child table, tag length error if the tag is json tag
'''
return
@@ -515,6 +515,23 @@ class TDTestCase:
tdSql.query("select jtag->'tag3' from jsons1_16")
tdSql.checkData(0, 0, '-2.111000000')
+ # test TD-12452
+ tdSql.execute("ALTER TABLE jsons1_1 SET TAG jtag=NULL")
+ tdSql.query("select jtag from jsons1_1")
+ tdSql.checkData(0, 0, None)
+ tdSql.execute("CREATE TABLE if not exists jsons1_20 using jsons1 tags(NULL)")
+ tdSql.query("select jtag from jsons1_20")
+ tdSql.checkData(0, 0, None)
+ tdSql.execute("insert into jsons1_21 using jsons1 tags(NULL) values(1591061628000, 11, false, '你就会','')")
+ tdSql.query("select jtag from jsons1_21")
+ tdSql.checkData(0, 0, None)
+
+        # test TD-12389: "describe" should report the json tag length as 4096 for both the stable and its child table
+ tdSql.query("describe jsons1")
+ tdSql.checkData(5, 2, 4096)
+ tdSql.query("describe jsons1_1")
+ tdSql.checkData(5, 2, 4096)
+
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
diff --git a/tests/develop-test/3-connectors/java/test.sh b/tests/develop-test/3-connectors/java/test.sh
index 15f7b84955b793e0fb6acaa434fba83c6ff0c710..8b43d1a44391c832735b5649258c1131f1133794 100755
--- a/tests/develop-test/3-connectors/java/test.sh
+++ b/tests/develop-test/3-connectors/java/test.sh
@@ -14,4 +14,32 @@ stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
+nohup taosadapter -c /etc/taos/taosadapter.toml > /dev/null 2>&1 &
sleep 10
+
+cd ../../../../
+WKC=`pwd`
+cd ${WKC}/src/connector/jdbc
+
+mvn clean test > jdbc-out.log 2>&1
+tail -n 20 jdbc-out.log
+
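+# Parse the last "Tests run: X, Failures: Y, Errors: Z" summary line from the Maven output
+# (fields 3/5/7) and strip the trailing comma from each captured value.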
+cases=`grep 'Tests run' jdbc-out.log | awk 'END{print $3}'`
+totalJDBCCases=`echo ${cases/%,}`
+failed=`grep 'Tests run' jdbc-out.log | awk 'END{print $5}'`
+JDBCFailed=`echo ${failed/%,}`
+error=`grep 'Tests run' jdbc-out.log | awk 'END{print $7}'`
+JDBCError=`echo ${error/%,}`
+
+totalJDBCFailed=`expr $JDBCFailed + $JDBCError`
+totalJDBCSuccess=`expr $totalJDBCCases - $totalJDBCFailed`
+
+if [ "$totalJDBCSuccess" -gt "0" ]; then
+ echo -e "\n${GREEN} ### Total $totalJDBCSuccess JDBC case(s) succeed! ### ${NC}"
+fi
+
+if [ "$totalJDBCFailed" -ne "0" ]; then
+ echo -e "\n${RED} ### Total $totalJDBCFailed JDBC case(s) failed! ### ${NC}"
+ exit 8
+fi
+
diff --git a/tests/pytest/insert/openTsdbTelnetLinesInsert.py b/tests/pytest/insert/openTsdbTelnetLinesInsert.py
index c6a84c7def8301fa6ecd1752f9238731ce922338..d30bec55d83bfd5d7b991f59225d2419683532d5 100644
--- a/tests/pytest/insert/openTsdbTelnetLinesInsert.py
+++ b/tests/pytest/insert/openTsdbTelnetLinesInsert.py
@@ -30,7 +30,10 @@ class TDTestCase:
self._conn = conn
self.smlChildTableName_value = tdSql.getVariable("smlChildTableName")[0].upper()
- def createDb(self, name="test", db_update_tag=0):
+ def createDb(self, name="test", db_update_tag=0, protocol=None):
+ if protocol == "telnet-tcp":
+ name = "opentsdb_telnet"
+
if db_update_tag == 0:
tdSql.execute(f"drop database if exists {name}")
tdSql.execute(f"create database if not exists {name} precision 'us'")
@@ -142,10 +145,13 @@ class TDTestCase:
type_num_list.append(14)
return type_num_list
- def inputHandle(self, input_sql, ts_type):
+ def inputHandle(self, input_sql, ts_type, protocol=None):
input_sql_split_list = input_sql.split(" ")
+ if protocol == "telnet-tcp":
+ input_sql_split_list.pop(0)
stb_name = input_sql_split_list[0]
stb_tag_list = input_sql_split_list[3:]
+ stb_tag_list[-1] = stb_tag_list[-1].strip()
stb_col_value = input_sql_split_list[2]
ts_value = self.timeTrans(input_sql_split_list[1], ts_type)
@@ -209,7 +215,7 @@ class TDTestCase:
t8="L\"ncharTagValue\"", ts="1626006833641",
id_noexist_tag=None, id_change_tag=None, id_upper_tag=None, id_mixul_tag=None, id_double_tag=None,
t_add_tag=None, t_mul_tag=None, c_multi_tag=None, c_blank_tag=None, t_blank_tag=None,
- chinese_tag=None, multi_field_tag=None, point_trans_tag=None):
+ chinese_tag=None, multi_field_tag=None, point_trans_tag=None, protocol=None, tcp_keyword_tag=None):
if stb_name == "":
stb_name = tdCom.getLongName(len=6, mode="letters")
if tb_name == "":
@@ -253,6 +259,10 @@ class TDTestCase:
sql_seq = f'{stb_name} {ts} {value} {id}={tb_name} t0={t0} {value}'
if point_trans_tag is not None:
sql_seq = f'.point.trans.test {ts} {value} t0={t0}'
+ if tcp_keyword_tag is not None:
+ sql_seq = f'put {ts} {value} t0={t0}'
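+            # for raw TCP input the line must carry the OpenTSDB "put" keyword and end with a
+            # newline, whereas schemaless_insert() is given the bare telnet line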
+ if protocol == "telnet-tcp":
+ sql_seq = 'put ' + sql_seq + '\n'
return sql_seq, stb_name
def genMulTagColStr(self, genType, count=1):
@@ -280,13 +290,15 @@ class TDTestCase:
long_sql = stb_name + ' ' + ts + ' ' + col_str + ' ' + ' ' + tag_str
return long_sql, stb_name
- def getNoIdTbName(self, stb_name):
+ def getNoIdTbName(self, stb_name, protocol=None):
query_sql = f"select tbname from {stb_name}"
- tb_name = self.resHandle(query_sql, True)[0][0]
+ tb_name = self.resHandle(query_sql, True, protocol)[0][0]
return tb_name
- def resHandle(self, query_sql, query_tag):
+ def resHandle(self, query_sql, query_tag, protocol=None):
tdSql.execute('reset query cache')
+ if protocol == "telnet-tcp":
+ time.sleep(0.5)
row_info = tdSql.query(query_sql, query_tag)
col_info = tdSql.getColNameList(query_sql, query_tag)
res_row_list = []
@@ -299,14 +311,17 @@ class TDTestCase:
res_type_list = col_info[1]
return res_row_list, res_field_list_without_ts, res_type_list
- def resCmp(self, input_sql, stb_name, query_sql="select * from", condition="", ts=None, ts_type=None, id=True, none_check_tag=None, precision=None):
- expect_list = self.inputHandle(input_sql, ts_type)
- if precision == None:
- self._conn.schemaless_insert([input_sql], TDSmlProtocolType.TELNET.value, ts_type)
+ def resCmp(self, input_sql, stb_name, query_sql="select * from", condition="", ts=None, ts_type=None, id=True, none_check_tag=None, precision=None, protocol=None):
+ expect_list = self.inputHandle(input_sql, ts_type, protocol)
+ if protocol == "telnet-tcp":
+ tdCom.tcpClient(input_sql)
else:
- self._conn.schemaless_insert([input_sql], TDSmlProtocolType.TELNET.value, precision)
+ if precision == None:
+ self._conn.schemaless_insert([input_sql], TDSmlProtocolType.TELNET.value, ts_type)
+ else:
+ self._conn.schemaless_insert([input_sql], TDSmlProtocolType.TELNET.value, precision)
query_sql = f"{query_sql} {stb_name} {condition}"
- res_row_list, res_field_list_without_ts, res_type_list = self.resHandle(query_sql, True)
+ res_row_list, res_field_list_without_ts, res_type_list = self.resHandle(query_sql, True, protocol)
if ts == 0:
res_ts = self.dateToTs(res_row_list[0][0])
current_time = time.time()
@@ -327,16 +342,16 @@ class TDTestCase:
for i in range(len(res_type_list)):
tdSql.checkEqual(res_type_list[i], expect_list[2][i])
- def initCheckCase(self):
+ def initCheckCase(self, protocol=None):
"""
normal tags and cols, one for every elm
"""
tdLog.info(f'{sys._getframe().f_code.co_name}() function is running')
tdCom.cleanTb()
- input_sql, stb_name = self.genFullTypeSql()
- self.resCmp(input_sql, stb_name)
+ input_sql, stb_name = self.genFullTypeSql(protocol=protocol)
+ self.resCmp(input_sql, stb_name, protocol=protocol)
- def boolTypeCheckCase(self):
+ def boolTypeCheckCase(self, protocol=None):
"""
check all normal type
"""
@@ -344,10 +359,10 @@ class TDTestCase:
tdCom.cleanTb()
full_type_list = ["f", "F", "false", "False", "t", "T", "true", "True"]
for t_type in full_type_list:
- input_sql, stb_name = self.genFullTypeSql(t0=t_type)
- self.resCmp(input_sql, stb_name)
+ input_sql, stb_name = self.genFullTypeSql(t0=t_type, protocol=protocol)
+ self.resCmp(input_sql, stb_name, protocol=protocol)
- def symbolsCheckCase(self):
+ def symbolsCheckCase(self, protocol=None):
"""
check symbols = `~!@#$%^&*()_-+={[}]\|:;'\",<.>/?
"""
@@ -359,10 +374,10 @@ class TDTestCase:
tdCom.cleanTb()
binary_symbols = '"abcd`~!@#$%^&*()_-{[}]|:;<.>?lfjal"'
nchar_symbols = f'L{binary_symbols}'
- input_sql1, stb_name1 = self.genFullTypeSql(value=binary_symbols, t7=binary_symbols, t8=nchar_symbols)
- input_sql2, stb_name2 = self.genFullTypeSql(value=nchar_symbols, t7=binary_symbols, t8=nchar_symbols)
- self.resCmp(input_sql1, stb_name1)
- self.resCmp(input_sql2, stb_name2)
+ input_sql1, stb_name1 = self.genFullTypeSql(value=binary_symbols, t7=binary_symbols, t8=nchar_symbols, protocol=protocol)
+ input_sql2, stb_name2 = self.genFullTypeSql(value=nchar_symbols, t7=binary_symbols, t8=nchar_symbols, protocol=protocol)
+ self.resCmp(input_sql1, stb_name1, protocol=protocol)
+ self.resCmp(input_sql2, stb_name2, protocol=protocol)
def tsCheckCase(self):
"""
@@ -406,38 +421,38 @@ class TDTestCase:
except SchemalessError as err:
tdSql.checkNotEqual(err.errno, 0)
- def idSeqCheckCase(self):
+ def idSeqCheckCase(self, protocol=None):
"""
check id.index in tags
eg: t0=**,id=**,t1=**
"""
tdLog.info(f'{sys._getframe().f_code.co_name}() function is running')
tdCom.cleanTb()
- input_sql, stb_name = self.genFullTypeSql(id_change_tag=True)
- self.resCmp(input_sql, stb_name)
+ input_sql, stb_name = self.genFullTypeSql(id_change_tag=True, protocol=protocol)
+ self.resCmp(input_sql, stb_name, protocol=protocol)
- def idLetterCheckCase(self):
+ def idLetterCheckCase(self, protocol=None):
"""
check id param
eg: id and ID
"""
tdLog.info(f'{sys._getframe().f_code.co_name}() function is running')
tdCom.cleanTb()
- input_sql, stb_name = self.genFullTypeSql(id_upper_tag=True)
- self.resCmp(input_sql, stb_name)
- input_sql, stb_name = self.genFullTypeSql(id_mixul_tag=True)
- self.resCmp(input_sql, stb_name)
- input_sql, stb_name = self.genFullTypeSql(id_change_tag=True, id_upper_tag=True)
- self.resCmp(input_sql, stb_name)
+ input_sql, stb_name = self.genFullTypeSql(id_upper_tag=True, protocol=protocol)
+ self.resCmp(input_sql, stb_name, protocol=protocol)
+ input_sql, stb_name = self.genFullTypeSql(id_mixul_tag=True, protocol=protocol)
+ self.resCmp(input_sql, stb_name, protocol=protocol)
+ input_sql, stb_name = self.genFullTypeSql(id_change_tag=True, id_upper_tag=True, protocol=protocol)
+ self.resCmp(input_sql, stb_name, protocol=protocol)
- def noIdCheckCase(self):
+ def noIdCheckCase(self, protocol=None):
"""
id not exist
"""
tdLog.info(f'{sys._getframe().f_code.co_name}() function is running')
tdCom.cleanTb()
- input_sql, stb_name = self.genFullTypeSql(id_noexist_tag=True)
- self.resCmp(input_sql, stb_name)
+ input_sql, stb_name = self.genFullTypeSql(id_noexist_tag=True, protocol=protocol)
+ self.resCmp(input_sql, stb_name, protocol=protocol)
query_sql = f"select tbname from {stb_name}"
res_row_list = self.resHandle(query_sql, True)[0]
if len(res_row_list[0][0]) > 0:
@@ -461,7 +476,7 @@ class TDTestCase:
except SchemalessError as err:
tdSql.checkNotEqual(err.errno, 0)
- def stbTbNameCheckCase(self):
+ def stbTbNameCheckCase(self, protocol=None):
"""
test illegal id name
mix "`~!@#$¥%^&*()-+{}|[]、「」【】:;《》<>?"
@@ -470,18 +485,18 @@ class TDTestCase:
tdCom.cleanTb()
rstr = list("~!@#$¥%^&*()-+{}|[]、「」【】:;《》<>?")
for i in rstr:
- input_sql, stb_name = self.genFullTypeSql(tb_name=f"\"aaa{i}bbb\"")
- self.resCmp(input_sql, f'`{stb_name}`')
+ input_sql, stb_name = self.genFullTypeSql(tb_name=f"\"aaa{i}bbb\"", protocol=protocol)
+ self.resCmp(input_sql, f'`{stb_name}`', protocol=protocol)
tdSql.execute(f'drop table if exists `{stb_name}`')
- def idStartWithNumCheckCase(self):
+ def idStartWithNumCheckCase(self, protocol=None):
"""
id is start with num
"""
tdLog.info(f'{sys._getframe().f_code.co_name}() function is running')
tdCom.cleanTb()
- input_sql, stb_name = self.genFullTypeSql(tb_name="1aaabbb")
- self.resCmp(input_sql, stb_name)
+ input_sql, stb_name = self.genFullTypeSql(tb_name="1aaabbb", protocol=protocol)
+ self.resCmp(input_sql, stb_name, protocol=protocol)
def nowTsCheckCase(self):
"""
@@ -1060,15 +1075,18 @@ class TDTestCase:
stb_name = input_sql.split(' ')[0]
self.resCmp(input_sql, stb_name)
- def pointTransCheckCase(self):
+ def pointTransCheckCase(self, protocol=None):
"""
metric value "." trans to "_"
"""
tdLog.info(f'{sys._getframe().f_code.co_name}() function is running')
tdCom.cleanTb()
- input_sql = self.genFullTypeSql(point_trans_tag=True)[0]
- stb_name = f'`{input_sql.split(" ")[0]}`'
- self.resCmp(input_sql, stb_name)
+ input_sql = self.genFullTypeSql(point_trans_tag=True, protocol=protocol)[0]
+ if protocol == 'telnet-tcp':
+ stb_name = f'`{input_sql.split(" ")[1]}`'
+ else:
+ stb_name = f'`{input_sql.split(" ")[0]}`'
+ self.resCmp(input_sql, stb_name, protocol=protocol)
tdSql.execute("drop table `.point.trans.test`")
def defaultTypeCheckCase(self):
@@ -1105,6 +1123,17 @@ class TDTestCase:
col_tag_res = tdSql.getColNameList(query_sql)
tdSql.checkEqual(col_tag_res, ['ts', 'value', '"t$3"', 't!@#$%^&*()_+[];:<>?,9', 't#2', 't%4', 't&6', 't*7', 't^5', 'Tt!0', 'tT@1'])
tdSql.execute('drop table `rFa$sta`')
+
+ def tcpKeywordsCheckCase(self, protocol="telnet-tcp"):
+ """
+ stb = "put"
+ """
+ tdLog.info(f'{sys._getframe().f_code.co_name}() function is running')
+ tdCom.cleanTb()
+ input_sql = self.genFullTypeSql(tcp_keyword_tag=True, protocol=protocol)[0]
+ stb_name = f'`{input_sql.split(" ")[1]}`'
+ self.resCmp(input_sql, stb_name, protocol=protocol)
+
def genSqlList(self, count=5, stb_name="", tb_name=""):
"""
stb --> supertable
@@ -1430,10 +1459,21 @@ class TDTestCase:
def run(self):
print("running {}".format(__file__))
- self.createDb()
+
try:
- # self.blankTagInsertCheckCase()
+ self.createDb()
self.runAll()
+ # self.createDb(protocol="telnet-tcp")
+ # self.initCheckCase('telnet-tcp')
+ # self.boolTypeCheckCase('telnet-tcp')
+ # self.symbolsCheckCase('telnet-tcp')
+ # self.idSeqCheckCase('telnet-tcp')
+ # self.idLetterCheckCase('telnet-tcp')
+ # self.noIdCheckCase('telnet-tcp')
+ # self.stbTbNameCheckCase('telnet-tcp')
+ # self.idStartWithNumCheckCase('telnet-tcp')
+ # self.pointTransCheckCase('telnet-tcp')
+ # self.tcpKeywordsCheckCase()
except Exception as err:
print(''.join(traceback.format_exception(None, err, err.__traceback__)))
raise err
diff --git a/tests/pytest/query/queryGroupTbname.py b/tests/pytest/query/queryGroupTbname.py
index bb67809e60087f94ad7f92ca7515aa8ddfc43151..7beb0832a448780232006bb7c142c5f9fff0bc46 100644
--- a/tests/pytest/query/queryGroupTbname.py
+++ b/tests/pytest/query/queryGroupTbname.py
@@ -32,7 +32,7 @@ class TDTestCase:
tb_str = ""
for tbname in tbname_list:
- globals()[tbname] = tdCom.getLongName(8, "letters_mixed")
+ globals()[tbname] = tdCom.getLongName(8, "letters_mixed").upper()
tdSql.execute(f'CREATE TABLE {table_name} (ts timestamp, {table_name_sub1} tinyint, \
{table_name_sub2} smallint, {table_name_sub3} int, {table_name_sub4} bigint, \
{table_name_sub5} float, {table_name_sub6} double, {table_name_sub7} binary(20),\
@@ -44,7 +44,7 @@ class TDTestCase:
for i in range(10):
for tbname in tbname_list:
- tdSql.execute(f'insert into {globals()[tbname]} values (now, 1, 2, 3, 4, 1.1, 2.2, "{globals()[tbname]}", "{globals()[tbname]}", True)')
+ tdSql.execute(f'insert into {globals()[tbname]} values (now-{i*i}s, 1, 2, 3, 4, 1.1, 2.2, "{globals()[tbname]}", "{globals()[tbname]}", True)')
for i in range(100):
tdSql.query(f'select {table_name_sub1},{table_name_sub2},{table_name_sub3},{table_name_sub4},{table_name_sub5},{table_name_sub6},{table_name_sub7},{table_name_sub8},{table_name_sub9} from {table_name} where tbname in ("{table_name_sub1}","{table_name_sub2}","{table_name_sub3}","{table_name_sub4}","{table_name_sub5}","{table_name_sub6}","{table_name_sub7}","{table_name_sub8}","{table_name_sub9}") and ts >= "1980-01-01 00:00:00.000"')
diff --git a/tests/pytest/util/common.py b/tests/pytest/util/common.py
index adfec12cb2a0aafe19b5d125164b583a7dbd288f..df4c0e8e9ce37fe60b5aaaeed16c034054b17508 100644
--- a/tests/pytest/util/common.py
+++ b/tests/pytest/util/common.py
@@ -17,6 +17,7 @@ from util.sql import tdSql
from util.dnodes import tdDnodes
import requests
import time
+import socket
class TDCom:
def init(self, conn, logSql):
tdSql.init(conn.cursor(), logSql)
@@ -30,6 +31,21 @@ class TDCom:
telnet_url = "http://127.0.0.1:6041/opentsdb/v1/put/telnet"
return header, sql_url, sqlt_url, sqlutc_url, influx_url, telnet_url
+ def genTcpParam(self):
+ MaxBytes = 1024*1024
+ host ='127.0.0.1'
+ port = 6046
+ return MaxBytes, host, port
+
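+    # send one raw schemaless line over TCP; assumes a taosadapter OpenTSDB-telnet listener
+    # is reachable at the host/port returned by genTcpParam() (127.0.0.1:6046 here)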
+ def tcpClient(self, input):
+ MaxBytes = tdCom.genTcpParam()[0]
+ host = tdCom.genTcpParam()[1]
+ port = tdCom.genTcpParam()[2]
+ sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
+ sock.connect((host, port))
+ sock.send(input.encode())
+ sock.close()
+
def restApiPost(self, sql):
requests.post(self.preDefine()[1], sql.encode("utf-8"), headers = self.preDefine()[0])
diff --git a/tests/system-test/1-insert/stmt_error.py b/tests/system-test/1-insert/stmt_error.py
new file mode 100644
index 0000000000000000000000000000000000000000..8961346034827ba3cdb57b1c33614e5413a2e4bf
--- /dev/null
+++ b/tests/system-test/1-insert/stmt_error.py
@@ -0,0 +1,185 @@
+# encoding:UTF-8
+from taos import *
+
+from ctypes import *
+from datetime import datetime
+import taos
+import time
+
+from util.log import *
+from util.cases import *
+from util.sql import *
+from util.dnodes import *
+
+class TDTestCase:
+ def __init__(self):
+ self.err_case = 0
+ self.curret_case = 0
+
+ def caseDescription(self):
+
+ '''
+        case1 : [TD-11899] : this is a test case that checks error handling for improper stmt use.
+ '''
+ return
+
+ def init(self, conn, logSql):
+ tdLog.debug("start to execute %s" % __file__)
+ tdSql.init(conn.cursor(), logSql)
+
+ def conn(self):
+ # type: () -> taos.TaosConnection
+ return connect()
+
+ def test_stmt_insert(self,conn):
+ # type: (TaosConnection) -> None
+
+ dbname = "pytest_taos_stmt"
+ try:
+ conn.execute("drop database if exists %s" % dbname)
+ conn.execute("create database if not exists %s" % dbname)
+ conn.select_db(dbname)
+
+ conn.execute(
+ "create table if not exists log(ts timestamp, bo bool, nil tinyint, ti tinyint, si smallint, ii int,\
+ bi bigint, tu tinyint unsigned, su smallint unsigned, iu int unsigned, bu bigint unsigned, \
+ ff float, dd double, bb binary(100), nn nchar(100), tt timestamp)",
+ )
+ conn.load_table_info("log")
+
+
+ stmt = conn.statement("insert into log values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)")
+ params = new_bind_params(16)
+ params[0].timestamp(1626861392589, PrecisionEnum.Milliseconds)
+ params[1].bool(True)
+ params[2].null()
+ params[3].tinyint(2)
+ params[4].smallint(3)
+ params[5].int(4)
+ params[6].bigint(5)
+ params[7].tinyint_unsigned(6)
+ params[8].smallint_unsigned(7)
+ params[9].int_unsigned(8)
+ params[10].bigint_unsigned(9)
+ params[11].float(10.1)
+ params[12].double(10.11)
+ params[13].binary("hello")
+ params[14].nchar("stmt")
+ params[15].timestamp(1626861392589, PrecisionEnum.Milliseconds)
+
+ stmt.bind_param(params)
+ stmt.execute()
+
+ result = stmt.use_result()
+ assert result.affected_rows == 1
+ result.close()
+ stmt.close()
+
+ stmt = conn.statement("select * from log")
+ stmt.execute()
+ result = stmt.use_result()
+ row = result.next()
+ print(row)
+ assert row[2] == None
+ for i in range(3, 11):
+ assert row[i] == i - 1
+ #float == may not work as expected
+ # assert row[10] == c_float(10.1)
+ assert row[12] == 10.11
+ assert row[13] == "hello"
+ assert row[14] == "stmt"
+
+ conn.execute("drop database if exists %s" % dbname)
+ conn.close()
+
+ except Exception as err:
+ conn.execute("drop database if exists %s" % dbname)
+ conn.close()
+ raise err
+
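+    # Unlike test_stmt_insert above, the prepared statement below mixes a literal value (1000)
+    # with "?" placeholders in the VALUES clause; TDengine rejects this, and run() expects the
+    # resulting "only ? allowed in values" error.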
+ def test_stmt_insert_error(self,conn):
+ # type: (TaosConnection) -> None
+
+ dbname = "pytest_taos_stmt_error"
+ try:
+ conn.execute("drop database if exists %s" % dbname)
+ conn.execute("create database if not exists %s" % dbname)
+ conn.select_db(dbname)
+
+ conn.execute(
+ "create table if not exists log(ts timestamp, bo bool, nil tinyint, ti tinyint, si smallint, ii int,\
+ bi bigint, tu tinyint unsigned, su smallint unsigned, iu int unsigned, bu bigint unsigned, \
+ ff float, dd double, bb binary(100), nn nchar(100), tt timestamp , error_data int )",
+ )
+ conn.load_table_info("log")
+
+
+ stmt = conn.statement("insert into log values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,1000)")
+ params = new_bind_params(16)
+ params[0].timestamp(1626861392589, PrecisionEnum.Milliseconds)
+ params[1].bool(True)
+ params[2].null()
+ params[3].tinyint(2)
+ params[4].smallint(3)
+ params[5].int(4)
+ params[6].bigint(5)
+ params[7].tinyint_unsigned(6)
+ params[8].smallint_unsigned(7)
+ params[9].int_unsigned(8)
+ params[10].bigint_unsigned(9)
+ params[11].float(10.1)
+ params[12].double(10.11)
+ params[13].binary("hello")
+ params[14].nchar("stmt")
+ params[15].timestamp(1626861392589, PrecisionEnum.Milliseconds)
+
+ stmt.bind_param(params)
+ stmt.execute()
+
+ result = stmt.use_result()
+ assert result.affected_rows == 1
+ result.close()
+ stmt.close()
+
+ stmt = conn.statement("select * from log")
+ stmt.execute()
+ result = stmt.use_result()
+ row = result.next()
+ print(row)
+ assert row[2] == None
+ for i in range(3, 11):
+ assert row[i] == i - 1
+ #float == may not work as expected
+ # assert row[10] == c_float(10.1)
+ assert row[12] == 10.11
+ assert row[13] == "hello"
+ assert row[14] == "stmt"
+
+ conn.execute("drop database if exists %s" % dbname)
+ conn.close()
+
+ except Exception as err:
+ conn.execute("drop database if exists %s" % dbname)
+ conn.close()
+ raise err
+
+ def run(self):
+
+ self.test_stmt_insert(self.conn())
+ try:
+ self.test_stmt_insert_error(self.conn())
+ except Exception as error :
+
+ if str(error)=='[0x0200]: invalid operation: only ? allowed in values':
+                tdLog.info('=========stmt error occurred for binding partial columns==============')
+ else:
+            tdLog.exit("expected error did not occur")
+
+ def stop(self):
+ tdSql.close()
+ tdLog.success("%s successfully executed" % __file__)
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
diff --git a/tests/system-test/2-query/TD-12344.py b/tests/system-test/2-query/TD-12191.py
similarity index 52%
rename from tests/system-test/2-query/TD-12344.py
rename to tests/system-test/2-query/TD-12191.py
index 871356d49bc738fc6290e79b13d4ea41013282ef..b77c2eab3d58aad0d481c74a061503ac42dd7bcf 100644
--- a/tests/system-test/2-query/TD-12344.py
+++ b/tests/system-test/2-query/TD-12191.py
@@ -14,6 +14,7 @@
from posixpath import split
import sys
import os
+import psutil
from util.log import *
from util.cases import *
@@ -25,9 +26,9 @@ class TDTestCase:
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
-
self.ts = 1420041600000 # 2015-01-01 00:00:00 this is begin time for first record
self.num = 10
+
def getBuildPath(self):
selfPath = os.path.dirname(os.path.realpath(__file__))
@@ -45,32 +46,6 @@ class TDTestCase:
break
return buildPath
-
- def caseDescription(self):
-
- '''
- case1 : [TD-12344] :
- this test case is an test case for unexpectd crash for session function , it will coredump taoshell ;
-
- '''
- return
-
- def getBuildPath(self):
- selfPath = os.path.dirname(os.path.realpath(__file__))
-
- if ("community" in selfPath):
- projPath = selfPath[:selfPath.find("community")]
- else:
- projPath = selfPath[:selfPath.find("tests")]
-
- for root, dirs, files in os.walk(projPath):
- if ("taosd" in files):
- rootRealPath = os.path.dirname(os.path.realpath(root))
- if ("packaging" not in rootRealPath):
- buildPath = root[:len(root)-len("/build/bin")]
- break
- return buildPath
-
def getcfgPath(self):
selfPath = os.path.dirname(os.path.realpath(__file__))
print(selfPath)
@@ -81,24 +56,51 @@ class TDTestCase:
cfgPath = projPath + "/sim/dnode1/cfg "
return cfgPath
+
+ def caseDescription(self):
+
+ '''
+ case1 : [TD-12191] :
+        this is a test case for an unexpected taosd runtime error; the root cause is still to be confirmed.
+ '''
+ return
def run(self):
tdSql.prepare()
- tdSql.execute("create database if not exists testdb keep 36500;")
- tdSql.execute("use testdb;")
- tdSql.execute("create stable st (ts timestamp , id int , value double) tags(hostname binary(10) ,ind int);")
- for i in range(self.num):
- tdSql.execute("insert into sub_%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+100*i,i*2,i+10.00))
- tdSql.execute("insert into sub_%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+200*i,i*2,i+10.00))
- tdSql.execute("insert into sub_%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+300*i,i*2,i+10.00))
- tdSql.execute("insert into sub_%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+10000*i,i*2,i+10.00))
-
- cfg_path = self.getcfgPath()
- print(cfg_path)
- tdSql.execute('select elapsed(ts,10s) from testdb.st where ts>=\"2015-01-01 00:00:00.000\" and ts < \"2015-01-01 00:10:00.000\" session(ts,1d) group by tbname;') # session not support super table
- taos_cmd1= "taos -c %s -s 'select elapsed(ts,10s) from testdb.st where ts>=\"2015-01-01 00:00:00.000\" and ts < \"2015-01-01 00:10:00.000\" session(ts,1d) group by tbname;' " % (cfg_path)
- _ = subprocess.check_output(taos_cmd1, shell=True).decode("utf-8")
+
+ # prepare data for generate draft
+
+ build_path = self.getBuildPath()+"/build/bin/"
+ taos_cmd1= "%staosBenchmark -f 2-query/td_12191.json " % (build_path)
+ print(taos_cmd1)
+ taos_cmd2 = 'taos -s "create table test_TD11483.elapsed_vol as select elapsed(ts) from test_TD11483.stb interval(1m) sliding(30s)"'
+ taos_cmd3 = 'taos -s "show queries;"'
+ taos_cmd4 = 'taos -s "show streams;"'
+ # only taos -s for shell can generate this issue
+ _ = subprocess.check_output(taos_cmd1, shell=True).decode("utf-8")
+ _ = subprocess.check_output(taos_cmd2, shell=True).decode("utf-8")
+ _ = subprocess.check_output(taos_cmd3, shell=True).decode("utf-8")
+ _ = subprocess.check_output(taos_cmd4, shell=True).decode("utf-8")
+
+ # check data written done
+ tdSql.execute("use test_TD11483")
+ tdSql.query("select count(*) from elapsed_vol;")
+ tdSql.checkRows(0)
+
+
+ taosd_pid = int(subprocess.getstatusoutput('ps aux|grep "taosd" |grep -v "grep"|awk \'{print $2}\'')[1])
+
+ sleep(10)
+ cmd = "top -H -p %d -n 1"%taosd_pid
+ sys_output = subprocess.check_output(cmd, shell=True).decode("utf-8")
+ print(sys_output)
+
+ cmd_insert = "%staosBenchmark -y -n 10 -t 10 -S 10000 > /dev/null 2>&1 & " % (build_path)
+ os.system(cmd_insert)
+ sleep(5)
+ tdSql.query("select count(*) from meters")
+ tdSql.checkData(0,0,10)
def stop(self):
tdSql.close()
diff --git a/tests/system-test/2-query/TD-12276.py b/tests/system-test/2-query/TD-12276.py
new file mode 100644
index 0000000000000000000000000000000000000000..5353ab66176de30766117505e687f9103191f764
--- /dev/null
+++ b/tests/system-test/2-query/TD-12276.py
@@ -0,0 +1,94 @@
+###################################################################
+# Copyright (c) 2020 by TAOS Technologies, Inc.
+# All rights reserved.
+#
+# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted,
+# disclosed or used in any form or by any means other than as
+# expressly provided by the written permission from Jianhui Tao
+#
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+from posixpath import split
+import sys
+import os
+
+from util.log import *
+from util.cases import *
+from util.sql import *
+from util.dnodes import *
+import subprocess
+
+class TDTestCase:
+ def init(self, conn, logSql):
+ tdLog.debug("start to execute %s" % __file__)
+ tdSql.init(conn.cursor(), logSql)
+
+ self.ts = 1420041600000 # 2015-01-01 00:00:00 this is begin time for first record
+ self.num = 10
+
+ def getBuildPath(self):
+ selfPath = os.path.dirname(os.path.realpath(__file__))
+
+ if ("community" in selfPath):
+ projPath = selfPath[:selfPath.find("community")]
+ else:
+ projPath = selfPath[:selfPath.find("tests")]
+
+ for root, dirs, files in os.walk(projPath):
+ if ("taosd" in files):
+ rootRealPath = os.path.dirname(os.path.realpath(root))
+ if ("packaging" not in rootRealPath):
+ buildPath = root[:len(root) - len("/build/bin")]
+ break
+ return buildPath
+
+
+ def caseDescription(self):
+
+ '''
+ case1 :[TD-12276] :
+        this is a test case for the elapsed() result when timestamps are queried in descending order.
+ '''
+ return
+
+
+ def run(self):
+ tdSql.prepare()
+ tdSql.execute("create database if not exists testdb keep 36500;")
+ tdSql.execute("use testdb;")
+ tdSql.execute("create stable st (ts timestamp , id int , value double) tags(hostname binary(10) ,ind int);")
+ for i in range(self.num):
+ tdSql.execute("insert into tb%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+100*i,i*2,i+10.00))
+ tdSql.execute("insert into tb%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+200*i,i*2,i+10.00))
+ tdSql.execute("insert into tb%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+300*i,i*2,i+10.00))
+ tdSql.execute("insert into tb%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+10000*i,i*2,i+10.00))
+
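+        # tb1 (i=1) has rows at ts+100..ts+10000 ms, so elapsed over the csum() subquery is 9900 ms.
+        # diff() returns one row fewer than its input (the first row is dropped), so for tb2 (i=2,
+        # rows at ts+200..ts+20000 ms) the ascending case spans ts+400..ts+20000 = 19600 ms and the
+        # descending case spans ts+200..ts+600 = 400 ms.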
+ tdSql.query('select elapsed(ts) from (select csum(value) from tb1 );')
+ tdSql.checkRows(1)
+ tdSql.checkData(0,0,9900.0)
+
+ tdSql.query('select elapsed(ts) from (select csum(value) from tb1 order by ts desc );')
+ tdSql.checkRows(1)
+ tdSql.checkData(0,0,9900.0)
+
+ tdSql.query('select elapsed(ts) from (select diff(value) from tb2 );')
+ tdSql.checkRows(1)
+ tdSql.checkData(0,0,19600.0)
+
+ tdSql.query('select elapsed(ts) from (select diff(value) from tb2 order by ts desc);')
+ tdSql.checkRows(1)
+ tdSql.checkData(0,0,400.0)
+
+ def stop(self):
+ tdSql.close()
+ tdLog.success("%s successfully executed" % __file__)
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
\ No newline at end of file
diff --git a/tests/system-test/3-connectors/restful/restful_binddbname.py b/tests/system-test/3-connectors/restful/restful_binddbname.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c47629b57b72b26f7e4c772474e6e202cbb1389
--- /dev/null
+++ b/tests/system-test/3-connectors/restful/restful_binddbname.py
@@ -0,0 +1,168 @@
+# #################################################################
+# Copyright (c) 2016 by TAOS Technologies, Inc.
+# All rights reserved.
+
+# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted,
+# disclosed or used in any form or by any means other than as
+# expressly provided by the written permission from Jianhui Tao
+
+# #################################################################
+
+# -*- coding: utf-8 -*-
+
+# TODO: after TD-4518 and TD-4510 is resolved, add the exception test case for these situations
+
+from distutils.log import error
+import sys
+
+from requests.api import head
+from requests.models import Response
+from util.log import *
+from util.cases import *
+from util.sql import *
+import time, datetime
+import requests, json
+import threading
+import string
+import random
+import re
+
+
+null = ''
+true = 'true'
+false = 'false'
+def caseDescription(self):
+ '''
+ case1:dbname binding
+ case2:dbname without binding
+
+ '''
+def check_unbind_db(url, data, header):
+ resp = requests.post(url, data, headers = header )
+ resp.encoding='utf-8'
+ resp = eval(resp.text)
+ status = resp['status']
+ #cod = resp['code']
+ sqls = data
+ if status=="error" :#and cod == 401:
+ print(" %s : check pass" %sqls)
+ else:
+ print("%s error occurred" % sqls)
+ sys.exit(1)
+
+def check_bind_db(url, data, header):
+ resp = requests.post(url, data, headers = header )
+ resp.encoding='utf-8'
+ resp_dict = eval(resp.text)
+ status = resp_dict['status']
+ if status =="succ":
+ print("%s run success!"%data)
+ # print(resp.text)
+ else :
+ print("%s run failed !"%data)
+ print(resp.text)
+ sys.exit(1)
+
+class TDTestCase():
+
+
+ def init(self, conn, logSql):
+ tdLog.debug("start to execute %s" % __file__)
+ tdSql.init(conn.cursor(), logSql)
+
+ def run(self):
+ tdSql.prepare()
+ tdSql.execute('reset query cache')
+ tdSql.execute('drop database if exists test')
+ tdSql.execute('drop database if exists db')
+ tdSql.execute('drop database if exists test01')
+ tdSql.execute('create database test')
+ tdSql.execute('create database test01')
+
+ header = {'Authorization': 'Basic cm9vdDp0YW9zZGF0YQ=='}
+ url = "http://127.0.0.1:6041/rest/sql"
+
+
+ # case 1: test with no bind dbname
+ sqls1 = ["show databases;",
+ "use test;",
+ "show dnodes;",
+ "create database db;",
+ "drop database db;",
+ "select client_version();" ,
+ "ALTER DATABASE test COMP 2;",
+ "show test.tables",
+ "create table test.tb (ts timestamp, id int , data double)",
+ "insert into test.tb values (now , 2, 2.0) ",
+ "select * from test.tb"
+ ]
+ sqls2 = ["show tables;",
+ "show vgroups;",
+ "create table tb (ts timestamp, id int , data double)",
+ "insert into tb values (now , 1, 1.0) ",
+ "select * from tb",
+ "insert into tb values (now , 2, 2.0) ",
+ "select * from tb"
+ ]
+
+ print("==================="*5)
+ print(" check unbind db about restful ")
+ print("==================="*5)
+ for sql in sqls1:
+ print("===================")
+ check_bind_db(url,sql,header)
+
+ for sql in sqls2:
+ print("===================")
+ check_unbind_db(url,sql,header)
+
+ tdSql.execute('drop database if exists test01')
+ tdSql.execute('drop database if exists test')
+ tdSql.execute('create database test')
+ tdSql.execute('create database test01')
+
+ #case 2: test with bind dbname
+ sqls3 = ["show databases;",
+ "use test;",
+ "show tables;",
+ "show dnodes;",
+ "show vgroups;",
+ "create database db;",
+ "drop database db;",
+ "select client_version();" ,
+ "use test",
+ "ALTER DATABASE test COMP 2;",
+ "create table tb (ts timestamp, id int , data double)",
+ "insert into tb values (now , 1, 1.0) ",
+ "select * from tb",
+ "show test.tables",
+ "show tables",
+ "insert into tb values (now , 2, 2.0) ",
+ "create table test.tb1 (ts timestamp, id int , data double)",
+ "insert into test.tb1 values (now , 2, 2.0) ",
+ "select * from tb",
+ "select * from test.tb1"
+ ]
+
+
+ print("==================="*5)
+ print(" check bind db about restful ")
+ print("==================="*5)
+ url = "http://127.0.0.1:6041/rest/sql/test"
+ for sql in sqls3:
+ print("===================")
+
+ check_bind_db(url,sql,header)
+ # check data
+ tdSql.query("select * from test.tb")
+ tdSql.checkRows(2)
+
+ os.system('sudo timedatectl set-ntp on')
+
+ def stop(self):
+ tdSql.close()
+ tdLog.success("%s successfully executed" % __file__)
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
diff --git a/tests/system-test/5-taos-tools/taosdump/TD-12435.py b/tests/system-test/5-taos-tools/taosdump/TD-12435.py
new file mode 100644
index 0000000000000000000000000000000000000000..4aaaba5179807513ea4369122e4fb3497ba1a35f
--- /dev/null
+++ b/tests/system-test/5-taos-tools/taosdump/TD-12435.py
@@ -0,0 +1,829 @@
+###################################################################
+# Copyright (c) 2016 by TAOS Technologies, Inc.
+# All rights reserved.
+#
+# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted,
+# disclosed or used in any form or by any means other than as
+# expressly provided by the written permission from Jianhui Tao
+#
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+import taos
+import time
+import os
+from util.log import tdLog
+from util.cases import tdCases
+from util.sql import tdSql
+
+
+class TDTestCase:
+ def caseDescription(self):
+ '''
+ case1:taosdump: char "`" can be used for both tag name and column name
+ '''
+ return
+
+ def init(self, conn, logSql):
+ tdLog.debug("start to execute %s" % __file__)
+ tdSql.init(conn.cursor(), logSql)
+
+ now = time.time()
+ self.ts = int(round(now * 1000))
+
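+ # table1_checkall verifies all ten data columns of the first returned row; the _N variants below
+ # chain together to check only the first N columns, matching the drop/add column steps in run()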
+ def table1_checkall(self,sql):
+ tdLog.info(sql)
+ tdSql.query(sql)
+ tdSql.checkData(0,1,1)
+ tdSql.checkData(0,2,2)
+ tdSql.checkData(0,3,3)
+ tdSql.checkData(0,4,4)
+ tdSql.checkData(0,5,'True')
+ tdSql.checkData(0,6,6)
+ tdSql.checkData(0,7,7)
+ tdSql.checkData(0,8,8)
+ tdSql.checkData(0,9,9)
+ tdSql.checkData(0,10,'1970-01-01 08:00:00.010')
+
+ def table1_checkall_1(self,sql):
+ tdSql.query(sql)
+ tdSql.checkData(0,1,1)
+
+ def table1_checkall_2(self,sql):
+ self.table1_checkall_1(sql)
+ tdSql.checkData(0,2,2)
+
+ def table1_checkall_3(self,sql):
+ self.table1_checkall_2(sql)
+ tdSql.checkData(0,3,3)
+
+ def table1_checkall_4(self,sql):
+ self.table1_checkall_3(sql)
+ tdSql.checkData(0,4,4)
+
+ def table1_checkall_5(self,sql):
+ self.table1_checkall_4(sql)
+ tdSql.checkData(0,5,'True')
+
+ def table1_checkall_6(self,sql):
+ self.table1_checkall_5(sql)
+ tdSql.checkData(0,6,6)
+
+ def table1_checkall_7(self,sql):
+ self.table1_checkall_6(sql)
+ tdSql.checkData(0,7,7)
+
+ def table1_checkall_8(self,sql):
+ self.table1_checkall_7(sql)
+ tdSql.checkData(0,8,8)
+
+ def table1_checkall_9(self,sql):
+ self.table1_checkall_8(sql)
+ tdSql.checkData(0,9,9)
+
+ def table1_checkall_10(self,sql):
+ self.table1_checkall_9(sql)
+ tdSql.checkData(0,10,'1970-01-01 08:00:00.010')
+
+ def run(self):
+
+ testcaseFilename = os.path.split(__file__)[-1]
+ os.system("rm -rf 5-taos-tools/taosdump/%s.sql" % testcaseFilename )
+ tdSql.prepare()
+
+ print("==============step1")
+ print("prepare data")
+
+ # case for defect: https://jira.taosdata.com:18080/browse/TD-2693
+ tdSql.execute("create database db2")
+ tdSql.execute("use db2")
+
+ print("==============new version [escape character] for stable==============")
+ print("==============step1,#create db.stable,db.table; insert db.table; show db.table; select db.table; drop db.table;")
+ print("prepare data")
+
+ self.stb1 = "stable_1~!@#$%^&*()-_+=[]{}':,<.>/?stST13579"
+ self.tb1 = "table_1~!@#$%^&*()-_+=[]{}':,<.>/?stST13579"
+
+ self.col_base = "123~!@#$%^&*()-_+=[]{}':,<.>/?stST13579"
+
+ self.col_int = "stable_col_int%s" %self.col_base
+ print(self.col_int)
+ self.col_bigint = "stable_col_bigint%s" %self.col_base
+ self.col_smallint = "stable_col_smallint%s" %self.col_base
+ self.col_tinyint = "stable_col_tinyint%s" %self.col_base
+ self.col_bool = "stable_col_bool%s" %self.col_base
+ self.col_binary = "stable_col_binary%s" %self.col_base
+ self.col_nchar = "stable_col_nchar%s" %self.col_base
+ self.col_float = "stable_col_float%s" %self.col_base
+ self.col_double = "stable_col_double%s" %self.col_base
+ self.col_ts = "stable_col_ts%s" %self.col_base
+
+ self.tag_base = "abc~!@#$%^&*()-_+=[]{}':,<.>/?stST13579"
+ self.tag_int = "stable_tag_int%s" %self.tag_base
+ self.tag_bigint = "stable_tag_bigint%s" %self.tag_base
+ self.tag_smallint = "stable_tag_smallint%s" %self.tag_base
+ self.tag_tinyint = "stable_tag_tinyint%s" %self.tag_base
+ self.tag_bool = "stable_tag_bool%s" %self.tag_base
+ self.tag_binary = "stable_tag_binary%s" %self.tag_base
+ self.tag_nchar = "stable_tag_nchar%s" %self.tag_base
+ self.tag_float = "stable_tag_float%s" %self.tag_base
+ self.tag_double = "stable_tag_double%s" %self.tag_base
+ self.tag_ts = "stable_tag_ts%s" %self.tag_base
+
+ tdSql.execute('''create stable db.`%s` (ts timestamp, `%s` int , `%s` bigint , `%s` smallint , `%s` tinyint, `%s` bool ,
+ `%s` binary(20) , `%s` nchar(20) ,`%s` float , `%s` double , `%s` timestamp)
+ tags(loc nchar(20), `%s` int , `%s` bigint , `%s` smallint , `%s` tinyint, `%s` bool ,
+ `%s` binary(20) , `%s` nchar(20) ,`%s` float , `%s` double , `%s` timestamp);'''
+ %(self.stb1, self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool,
+ self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts,
+ self.tag_int, self.tag_bigint, self.tag_smallint, self.tag_tinyint, self.tag_bool,
+ self.tag_binary, self.tag_nchar, self.tag_float, self.tag_double, self.tag_ts))
+ tdSql.query("describe db.`%s` ; " %self.stb1)
+ tdSql.checkRows(22)
+
+ tdSql.query("select _block_dist() from db.`%s` ; " %self.stb1)
+ tdSql.checkRows(0)
+
+ tdSql.query("show create stable db.`%s` ; " %self.stb1)
+ tdSql.checkData(0, 0, self.stb1)
+ tdSql.checkData(0, 1, "create table `%s` (`ts` TIMESTAMP,`%s` INT,`%s` BIGINT,`%s` SMALLINT,`%s` TINYINT,`%s` BOOL,`%s` BINARY(20),`%s` NCHAR(20),`%s` FLOAT,`%s` DOUBLE,`%s` TIMESTAMP)\
+ TAGS (`loc` NCHAR(20),`%s` INT,`%s` BIGINT,`%s` SMALLINT,`%s` TINYINT,`%s` BOOL,`%s` BINARY(20),`%s` NCHAR(20),`%s` FLOAT,`%s` DOUBLE,`%s` TIMESTAMP)"
+ %(self.stb1, self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool,
+ self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts,
+ self.tag_int, self.tag_bigint, self.tag_smallint, self.tag_tinyint, self.tag_bool,
+ self.tag_binary, self.tag_nchar, self.tag_float, self.tag_double, self.tag_ts))
+
+ tdSql.execute("create table db.`table!1` using db.`%s` tags('table_1' , '0' , '0' , '0' , '0' , 0 , '0' , '0' , '0' , '0' ,'0')" %self.stb1)
+ tdSql.query("describe db.`table!1` ; ")
+ tdSql.checkRows(22)
+
+ time.sleep(10)
+ tdSql.query("show create table db.`table!1` ; ")
+ tdSql.checkData(0, 0, "table!1")
+ tdSql.checkData(0, 1, "CREATE TABLE `table!1` USING `%s` TAGS (\"table_1\",0,0,0,0,false,\"0\",\"0\",0.000000,0.000000,\"0\")" %self.stb1)
+
+ tdSql.execute("insert into db.`table!1` values(now, 1 , 2, 3, 4, 5, 6 ,7 ,8 ,9 ,10)")
+ sql = " select * from db.`table!1`; "
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(1)
+ sql = '''select ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s` from db.`table!1`; '''\
+ %(self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts)
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(1)
+
+ time.sleep(1)
+ tdSql.execute('''insert into db.`table!1`(ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`) values(now, 1 , 2, 3, 4, 5, 6 ,7 ,8 ,9 ,10)'''\
+ %(self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts) )
+ sql = " select * from db.`table!1`; "
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(2)
+
+ tdSql.query("select count(*) from db.`table!1`; ")
+ tdSql.checkData(0, 0, 2)
+ tdSql.query("select _block_dist() from db.`%s` ; " %self.stb1)
+ tdSql.checkRows(1)
+
+ tdSql.execute("create table db.`%s` using db.`%s` TAGS (\"table_2\",2,2,2,2,true,\"2\",\"2\",2.000000,2.000000,\"2\")" %(self.tb1,self.stb1))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.query("show create table db.`%s` ; " %self.tb1)
+ tdSql.checkData(0, 0, self.tb1)
+ tdSql.checkData(0, 1, "CREATE TABLE `%s` USING `%s` TAGS (\"table_2\",2,2,2,2,true,\"2\",\"2\",2.000000,2.000000,\"2\")" %(self.tb1,self.stb1))
+
+ tdSql.execute("insert into db.`%s` values(now, 1 , 2, 3, 4, 5, 6 ,7 ,8 ,9 ,10)" %self.tb1)
+ sql = "select * from db.`%s` ; " %self.tb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(1)
+ sql = '''select ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s` from db.`%s` ; '''\
+ %(self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts,\
+ self.tag_int, self.tag_bigint, self.tag_smallint, self.tag_tinyint, self.tag_bool, self.tag_binary, self.tag_nchar, self.tag_float, self.tag_double, self.tag_ts, self.tb1)
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(1)
+
+ time.sleep(1)
+ tdSql.execute('''insert into db.`%s`(ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`) values(now, 1 , 2, 3, 4, 5, 6 ,7 ,8 ,9 ,10)'''\
+ %(self.tb1, self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts) )
+ sql = " select * from db.`%s` ; " %self.tb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(2)
+
+ sql = " select * from db.`%s` where `%s`=1 and `%s`=2 and `%s`=3 and `%s`=4 and `%s`='True' and `%s`=6 and `%s`=7 and `%s`=8 and `%s`=9 and `%s`=10; " \
+ %(self.tb1, self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts)
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(2)
+
+ tdSql.query("select count(*) from db.`%s`; " %self.tb1)
+ tdSql.checkData(0, 0, 2)
+ sql = "select * from db.`%s` ; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(4)
+ tdSql.query("select count(*) from db.`%s`; " %self.stb1)
+ tdSql.checkData(0, 0, 4)
+
+ sql = "select * from (select * from db.`%s`) ; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(4)
+ tdSql.query("select count(*) from (select * from db.`%s`) ; " %self.stb1)
+ tdSql.checkData(0, 0, 4)
+
+ sql = "select * from (select ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s` from db.`%s`) ; " \
+ %(self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts, self.stb1)
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(4)
+
+ sql = "select ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s` from (select ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s` from db.`%s`) ; " \
+ %(self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts,\
+ self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts, self.stb1)
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(4)
+
+ sql = "select ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s` from (select ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s` from db.`%s`\
+ where `%s`=1 and `%s`=2 and `%s`=3 and `%s`=4 and `%s`='True' and `%s`=6 and `%s`=7 and `%s`=8 and `%s`=9 and `%s`=10 ) ; " \
+ %(self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts,\
+ self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts, self.stb1, \
+ self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts)
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(4)
+
+ tdSql.query("show db.stables like 'stable_1%' ")
+ tdSql.checkRows(1)
+ tdSql.query("show db.tables like 'table%' ")
+ tdSql.checkRows(2)
+
+ self.cr_tb1 = "create_table_1~!@#$%^&*()-_+=[]{}':,<.>/?stST13579"
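+ # also cover a table created from a continuous query (interval/sliding) whose name contains special characters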
+ tdSql.execute("create table db.`%s` as select avg(`%s`) from db.`%s` where ts > now interval(1m) sliding(30s);" %(self.cr_tb1,self.col_bigint,self.stb1))
+ tdSql.query("show db.tables like 'create_table_%' ")
+ tdSql.checkRows(1)
+
+ print("==============drop\ add\ change\ modify column or tag")
+ print("==============drop==============")
+ tdSql.execute("ALTER TABLE db.`%s` DROP TAG `%s`; " %(self.stb1, self.tag_ts))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(21)
+ tdSql.execute("ALTER TABLE db.`%s` DROP TAG `%s`; " %(self.stb1, self.tag_double))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(20)
+ tdSql.execute("ALTER TABLE db.`%s` DROP TAG `%s`; " %(self.stb1, self.tag_float))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(19)
+ tdSql.execute("ALTER TABLE db.`%s` DROP TAG `%s`; " %(self.stb1, self.tag_nchar))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(18)
+ tdSql.execute("ALTER TABLE db.`%s` DROP TAG `%s`; " %(self.stb1, self.tag_binary))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(17)
+ tdSql.execute("ALTER TABLE db.`%s` DROP TAG `%s`; " %(self.stb1, self.tag_bool))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(16)
+ tdSql.execute("ALTER TABLE db.`%s` DROP TAG `%s`; " %(self.stb1, self.tag_tinyint))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(15)
+ tdSql.execute("ALTER TABLE db.`%s` DROP TAG `%s`; " %(self.stb1, self.tag_smallint))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(14)
+ tdSql.execute("ALTER TABLE db.`%s` DROP TAG `%s`; " %(self.stb1, self.tag_bigint))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(13)
+ tdSql.execute("ALTER TABLE db.`%s` DROP TAG `%s`; " %(self.stb1, self.tag_int))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(12)
+
+ tdSql.execute("ALTER TABLE db.`%s` DROP COLUMN `%s`; " %(self.stb1, self.col_ts))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall_9(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(11)
+ tdSql.execute("ALTER TABLE db.`%s` DROP COLUMN `%s`; " %(self.stb1, self.col_double))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall_8(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(10)
+ tdSql.execute("ALTER TABLE db.`%s` DROP COLUMN `%s`; " %(self.stb1, self.col_float))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall_7(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(9)
+ tdSql.execute("ALTER TABLE db.`%s` DROP COLUMN `%s`; " %(self.stb1, self.col_nchar))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall_6(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(8)
+ tdSql.execute("ALTER TABLE db.`%s` DROP COLUMN `%s`; " %(self.stb1, self.col_binary))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall_5(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(7)
+ tdSql.execute("ALTER TABLE db.`%s` DROP COLUMN `%s`; " %(self.stb1, self.col_bool))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall_4(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(6)
+ tdSql.execute("ALTER TABLE db.`%s` DROP COLUMN `%s`; " %(self.stb1, self.col_tinyint))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall_3(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(5)
+ tdSql.execute("ALTER TABLE db.`%s` DROP COLUMN `%s`; " %(self.stb1, self.col_smallint))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall_2(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(4)
+ tdSql.execute("ALTER TABLE db.`%s` DROP COLUMN `%s`; " %(self.stb1, self.col_bigint))
+ sql = " select * from db.`%s`; " %self.stb1
+ datacheck = self.table1_checkall_1(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(3)
+ tdSql.error("ALTER TABLE db.`%s` DROP COLUMN `%s`; " %(self.stb1, self.col_int))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(3)
+
+ print("==============add==============")
+ tdSql.execute("ALTER TABLE db.`%s` ADD COLUMN `%s` bigint; " %(self.stb1, self.col_bigint))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(4)
+ tdSql.execute("ALTER TABLE db.`%s` ADD COLUMN `%s` smallint; " %(self.stb1, self.col_smallint))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(5)
+ tdSql.execute("ALTER TABLE db.`%s` ADD COLUMN `%s` tinyint; " %(self.stb1, self.col_tinyint))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(6)
+ tdSql.execute("ALTER TABLE db.`%s` ADD COLUMN `%s` bool; " %(self.stb1, self.col_bool))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(7)
+ tdSql.execute("ALTER TABLE db.`%s` ADD COLUMN `%s` binary(20); " %(self.stb1, self.col_binary))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(8)
+
+ tdSql.execute("insert into db.`%s` values(now, 1 , 2, 3, 4, 5, 6)" %self.tb1)
+ sql = "select * from db.`%s` order by ts desc; " %self.tb1
+ datacheck = self.table1_checkall_5(sql)
+
+ tdSql.execute("ALTER TABLE db.`%s` ADD COLUMN `%s` nchar(20); " %(self.stb1, self.col_nchar))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(9)
+ tdSql.execute("ALTER TABLE db.`%s` ADD COLUMN `%s` float; " %(self.stb1, self.col_float))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(10)
+ tdSql.execute("ALTER TABLE db.`%s` ADD COLUMN `%s` double; " %(self.stb1, self.col_double))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(11)
+ tdSql.execute("ALTER TABLE db.`%s` ADD COLUMN `%s` timestamp; " %(self.stb1, self.col_ts))
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(12)
+
+ tdSql.execute("insert into db.`%s` values(now, 1 , 2, 3, 4, 5, 6 ,7 ,8 ,9 ,10)" %self.tb1)
+ sql = "select * from db.`%s` order by ts desc; " %self.tb1
+ datacheck = self.table1_checkall(sql)
+
+ tdSql.execute("ALTER TABLE db.`%s` ADD TAG `%s` int; " %(self.stb1, self.tag_int))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(13)
+ tdSql.execute("ALTER TABLE db.`%s` ADD TAG `%s` bigint; " %(self.stb1, self.tag_bigint))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(14)
+ tdSql.execute("ALTER TABLE db.`%s` ADD TAG `%s` smallint; " %(self.stb1, self.tag_smallint))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(15)
+ tdSql.execute("ALTER TABLE db.`%s` ADD TAG `%s` tinyint; " %(self.stb1, self.tag_tinyint))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(16)
+ tdSql.execute("ALTER TABLE db.`%s` ADD TAG `%s` bool; " %(self.stb1, self.tag_bool))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(17)
+ tdSql.execute("ALTER TABLE db.`%s` ADD TAG `%s` binary(20); " %(self.stb1, self.tag_binary))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(18)
+ tdSql.execute("ALTER TABLE db.`%s` ADD TAG `%s` nchar(20); " %(self.stb1, self.tag_nchar))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(19)
+ tdSql.execute("ALTER TABLE db.`%s` ADD TAG `%s` float; " %(self.stb1, self.tag_float))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(20)
+ tdSql.execute("ALTER TABLE db.`%s` ADD TAG `%s` double; " %(self.stb1, self.tag_double))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(21)
+ tdSql.execute("ALTER TABLE db.`%s` ADD TAG `%s` timestamp; " %(self.stb1, self.tag_ts))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+
+ print("==============change==============")
+ self.tag_base_change = "abcdas"
+ self.tag_int_change = "stable_tag_int%s" %self.tag_base_change
+ self.tag_bigint_change = "stable_tag_bigint%s" %self.tag_base_change
+ self.tag_smallint_change = "stable_tag_smallint%s" %self.tag_base_change
+ self.tag_tinyint_change = "stable_tag_tinyint%s" %self.tag_base_change
+ self.tag_bool_change = "stable_tag_bool%s" %self.tag_base_change
+ self.tag_binary_change = "stable_tag_binary%s" %self.tag_base_change
+ self.tag_nchar_change = "stable_tag_nchar%s" %self.tag_base_change
+ self.tag_float_change = "stable_tag_float%s" %self.tag_base_change
+ self.tag_double_change = "stable_tag_double%s" %self.tag_base_change
+ self.tag_ts_change = "stable_tag_ts%s" %self.tag_base_change
+
+ tdSql.execute("ALTER TABLE db.`%s` CHANGE TAG `%s` `%s`; " %(self.stb1, self.tag_int, self.tag_int_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER TABLE db.`%s` CHANGE TAG `%s` `%s`; " %(self.stb1, self.tag_bigint, self.tag_bigint_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER TABLE db.`%s` CHANGE TAG `%s` `%s`; " %(self.stb1, self.tag_smallint, self.tag_smallint_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER TABLE db.`%s` CHANGE TAG `%s` `%s`; " %(self.stb1, self.tag_tinyint, self.tag_tinyint_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER TABLE db.`%s` CHANGE TAG `%s` `%s`; " %(self.stb1, self.tag_bool, self.tag_bool_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER TABLE db.`%s` CHANGE TAG `%s` `%s`; " %(self.stb1, self.tag_binary, self.tag_binary_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER TABLE db.`%s` CHANGE TAG `%s` `%s`; " %(self.stb1, self.tag_nchar, self.tag_nchar_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER TABLE db.`%s` CHANGE TAG `%s` `%s`; " %(self.stb1, self.tag_float, self.tag_float_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER TABLE db.`%s` CHANGE TAG `%s` `%s`; " %(self.stb1, self.tag_double, self.tag_double_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER TABLE db.`%s` CHANGE TAG `%s` `%s`; " %(self.stb1, self.tag_ts, self.tag_ts_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+
+ print("==============modify==============")
+ # TD-10810
+ tdSql.execute("ALTER STABLE db.`%s` MODIFY TAG `%s` binary(30); ; " %(self.stb1, self.tag_binary_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER STABLE db.`%s` MODIFY TAG `%s` nchar(30); ; " %(self.stb1, self.tag_nchar_change))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+
+ tdSql.execute("ALTER STABLE db.`%s` MODIFY COLUMN `%s` binary(30); ; " %(self.stb1, self.col_binary))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+ tdSql.execute("ALTER STABLE db.`%s` MODIFY COLUMN `%s` nchar(30); ; " %(self.stb1, self.col_nchar))
+ sql = " select * from db.`%s` order by ts desc; " %self.stb1
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db.`%s` ; " %self.tb1)
+ tdSql.checkRows(22)
+
+ print("==============drop table\stable")
+ try:
+ tdSql.execute("drop table db.`%s` " %self.tb1)
+ except Exception as e:
+ tdLog.exit(e)
+
+ tdSql.error("select * from db.`%s`" %self.tb1)
+ tdSql.query("show db.stables like 'stable_1%' ")
+ tdSql.checkRows(1)
+
+ try:
+ tdSql.execute("drop table db.`%s` " %self.stb1)
+ except Exception as e:
+ tdLog.exit(e)
+
+ tdSql.error("select * from db.`%s`" %self.tb1)
+ tdSql.error("select * from db.`%s`" %self.stb1)
+
+
+ print("==============step2,#create stable,table; insert table; show table; select table; drop table")
+
+ self.stb2 = "stable_2~!@#$%^&*()-_+=[]{}';:,<.>/?stST24680~!@#$%^&*()-_+=[]{}"
+ self.tb2 = "table_2~!@#$%^&*()-_+=[]{}';:,<.>/?stST24680~!@#$%^&*()-_+=[]{}"
+
+ tdSql.execute("create stable `%s` (ts timestamp, i int) tags(j int);" %self.stb2)
+ tdSql.query("describe `%s` ; "%self.stb2)
+ tdSql.checkRows(3)
+
+ tdSql.query("select _block_dist() from `%s` ; " %self.stb2)
+ tdSql.checkRows(0)
+
+ tdSql.query("show create stable `%s` ; " %self.stb2)
+ tdSql.checkData(0, 0, self.stb2)
+ tdSql.checkData(0, 1, "create table `%s` (`ts` TIMESTAMP,`i` INT) TAGS (`j` INT)" %self.stb2)
+
+ tdSql.execute("create table `table!2` using `%s` tags(1)" %self.stb2)
+ tdSql.query("describe `table!2` ; ")
+ tdSql.checkRows(3)
+
+ time.sleep(10)
+
+ tdSql.query("show create table `table!2` ; ")
+ tdSql.checkData(0, 0, "table!2")
+ tdSql.checkData(0, 1, "CREATE TABLE `table!2` USING `%s` TAGS (1)" %self.stb2)
+ tdSql.execute("insert into `table!2` values(now, 1)")
+ tdSql.query("select * from `table!2`; ")
+ tdSql.checkRows(1)
+ tdSql.query("select count(*) from `table!2`; ")
+ tdSql.checkData(0, 0, 1)
+ tdSql.query("select _block_dist() from `%s` ; " %self.stb2)
+ tdSql.checkRows(1)
+
+ tdSql.execute("create table `%s` using `%s` tags(1)" %(self.tb2,self.stb2))
+ tdSql.query("describe `%s` ; " %self.tb2)
+ tdSql.checkRows(3)
+ tdSql.query("show create table `%s` ; " %self.tb2)
+ tdSql.checkData(0, 0, self.tb2)
+ tdSql.checkData(0, 1, "CREATE TABLE `%s` USING `%s` TAGS (1)" %(self.tb2,self.stb2))
+ tdSql.execute("insert into `%s` values(now, 1)" %self.tb2)
+ tdSql.query("select * from `%s` ; " %self.tb2)
+ tdSql.checkRows(1)
+ tdSql.query("select count(*) from `%s`; " %self.tb2)
+ tdSql.checkData(0, 0, 1)
+ tdSql.query("select * from `%s` ; " %self.stb2)
+ tdSql.checkRows(2)
+ tdSql.query("select count(*) from `%s`; " %self.stb2)
+ tdSql.checkData(0, 0, 2)
+
+ tdSql.query("select * from (select * from `%s`) ; " %self.stb2)
+ tdSql.checkRows(2)
+ tdSql.query("select count(*) from (select * from `%s` ); " %self.stb2)
+ tdSql.checkData(0, 0, 2)
+
+ tdSql.query("show stables like 'stable_2%' ")
+ tdSql.checkRows(1)
+ tdSql.query("show tables like 'table%' ")
+ tdSql.checkRows(2)
+
+
+ #TD-10536
+ self.cr_tb2 = "create_table_2~!@#$%^&*()-_+=[]{}';:,<.>/?stST24680~!@#$%^&*()-_+=[]{}"
+ tdSql.execute("create table `%s` as select * from `%s` ;" %(self.cr_tb2,self.stb2))
+ tdSql.query("show db.tables like 'create_table_%' ")
+ tdSql.checkRows(1)
+
+
+ print("==============step3,#create regular_table; insert regular_table; show regular_table; select regular_table; drop regular_table")
+ self.regular_table = "regular_table~!@#$%^&*()-_+=[]{}';:,<.>/?stST24680~!@#$%^&*()-_+=[]{}"
+
+ self.regular_col_base = "123@#$%^&*()-_+=[]{};:,<.>/?~!$%^"
+
+ self.col_int = "regular_table_col_int%s" %self.regular_col_base
+ print(self.col_int)
+ self.col_bigint = "regular_table_col_bigint%s" %self.regular_col_base
+ self.col_smallint = "regular_table_col_smallint%s" %self.regular_col_base
+ self.col_tinyint = "regular_table_col_tinyint%s" %self.regular_col_base
+ self.col_bool = "regular_table_col_bool%s" %self.regular_col_base
+ self.col_binary = "regular_table_col_binary%s" %self.regular_col_base
+ self.col_nchar = "regular_table_col_nchar%s" %self.regular_col_base
+ self.col_float = "regular_table_col_float%s" %self.regular_col_base
+ self.col_double = "regular_table_col_double%s" %self.regular_col_base
+ self.col_ts = "regular_table_col_ts%s" %self.regular_col_base
+
+ tdSql.execute("create table `%s` (ts timestamp,`%s` int , `%s` bigint , `%s` smallint , `%s` tinyint, `%s` bool , \
+ `%s` binary(20) , `%s` nchar(20) ,`%s` float , `%s` double , `%s` timestamp) ;"\
+ %(self.regular_table, self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool,
+ self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts))
+ tdSql.query("describe `%s` ; "%self.regular_table)
+ tdSql.checkRows(11)
+
+ tdSql.query("select _block_dist() from `%s` ; " %self.regular_table)
+ tdSql.checkRows(1)
+
+ tdSql.query("show create table `%s` ; " %self.regular_table)
+ tdSql.checkData(0, 0, self.regular_table)
+ tdSql.checkData(0, 1, "create table `%s` (`ts` TIMESTAMP,`%s` INT,`%s` BIGINT,`%s` SMALLINT,`%s` TINYINT,`%s` BOOL,`%s` BINARY(20),`%s` NCHAR(20),`%s` FLOAT,`%s` DOUBLE,`%s` TIMESTAMP)"
+ %(self.regular_table, self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool,
+ self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts))
+
+ tdSql.execute("insert into `%s` values(now, 1 , 2, 3, 4, 5, 6 ,7 ,8 ,9 ,10)" %self.regular_table)
+ sql = "select * from `%s` ; " %self.regular_table
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(1)
+ sql = '''select ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s` from db2.`%s`; '''\
+ %(self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts, self.regular_table)
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(1)
+
+ time.sleep(1)
+ tdSql.execute('''insert into db2.`%s` (ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`) values(now, 1 , 2, 3, 4, 5, 6 ,7 ,8 ,9 ,10)'''\
+ %(self.regular_table, self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts) )
+ sql = " select * from db2.`%s`; " %self.regular_table
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(2)
+
+ sql = " select * from db2.`%s` where `%s`=1 and `%s`=2 and `%s`=3 and `%s`=4 and `%s`='True' and `%s`=6 and `%s`=7 and `%s`=8 and `%s`=9 and `%s`=10; " \
+ %(self.regular_table, self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts)
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(2)
+
+ tdSql.query("select count(*) from `%s`; " %self.regular_table)
+ tdSql.checkData(0, 0, 2)
+ tdSql.query("select _block_dist() from `%s` ; " %self.regular_table)
+ tdSql.checkRows(1)
+
+ sql = "select * from (select * from `%s`) ; " %self.regular_table
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(2)
+
+ sql = "select ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s` from (select ts ,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s`,`%s` from db2.`%s`\
+ where `%s`=1 and `%s`=2 and `%s`=3 and `%s`=4 and `%s`='True' and `%s`=6 and `%s`=7 and `%s`=8 and `%s`=9 and `%s`=10 ) ; " \
+ %(self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts,\
+ self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts, self.regular_table, \
+ self.col_int, self.col_bigint, self.col_smallint, self.col_tinyint, self.col_bool, self.col_binary, self.col_nchar, self.col_float, self.col_double, self.col_ts)
+ datacheck = self.table1_checkall(sql)
+ tdSql.checkRows(2)
+
+ tdSql.query("select count(*) from (select * from `%s` ); " %self.regular_table)
+ tdSql.checkData(0, 0, 2)
+
+ tdSql.query("show tables like 'regular_table%' ")
+ tdSql.checkRows(1)
+
+ self.crr_tb = "create_r_table~!@#$%^&*()-_+=[]{}';:,<.>/?stST24680~!@#$%^&*()-_+=[]{}"
+ tdSql.execute("create table `%s` as select * from `%s` ;" %(self.crr_tb,self.regular_table))
+ tdSql.query("show db2.tables like 'create_r_table%' ")
+ tdSql.checkRows(1)
+
+
+ print("==============drop\ add\ change\ modify column ")
+ print("==============drop==============")
+ tdSql.execute("ALTER TABLE db2.`%s` DROP COLUMN `%s`; " %(self.regular_table, self.col_ts))
+ sql = " select * from db2.`%s`; " %self.regular_table
+ datacheck = self.table1_checkall_9(sql)
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(10)
+ tdSql.execute("ALTER TABLE `%s` DROP COLUMN `%s`; " %(self.regular_table, self.col_double))
+ sql = " select * from `%s`; " %self.regular_table
+ datacheck = self.table1_checkall_8(sql)
+ tdSql.query("describe `%s` ; " %self.regular_table)
+ tdSql.checkRows(9)
+ tdSql.execute("ALTER TABLE db2.`%s` DROP COLUMN `%s`; " %(self.regular_table, self.col_float))
+ sql = " select * from db2.`%s`; " %self.regular_table
+ datacheck = self.table1_checkall_7(sql)
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(8)
+ tdSql.execute("ALTER TABLE `%s` DROP COLUMN `%s`; " %(self.regular_table, self.col_nchar))
+ sql = " select * from `%s`; " %self.regular_table
+ datacheck = self.table1_checkall_6(sql)
+ tdSql.query("describe `%s` ; " %self.regular_table)
+ tdSql.checkRows(7)
+ tdSql.execute("ALTER TABLE db2.`%s` DROP COLUMN `%s`; " %(self.regular_table, self.col_binary))
+ sql = " select * from db2.`%s`; " %self.regular_table
+ datacheck = self.table1_checkall_5(sql)
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(6)
+ tdSql.execute("ALTER TABLE `%s` DROP COLUMN `%s`; " %(self.regular_table, self.col_bool))
+ sql = " select * from `%s`; " %self.regular_table
+ datacheck = self.table1_checkall_4(sql)
+ tdSql.query("describe `%s` ; " %self.regular_table)
+ tdSql.checkRows(5)
+ tdSql.execute("ALTER TABLE db2.`%s` DROP COLUMN `%s`; " %(self.regular_table, self.col_tinyint))
+ sql = " select * from db2.`%s`; " %self.regular_table
+ datacheck = self.table1_checkall_3(sql)
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(4)
+ tdSql.execute("ALTER TABLE `%s` DROP COLUMN `%s`; " %(self.regular_table, self.col_smallint))
+ sql = " select * from `%s`; " %self.regular_table
+ datacheck = self.table1_checkall_2(sql)
+ tdSql.query("describe `%s` ; " %self.regular_table)
+ tdSql.checkRows(3)
+ tdSql.execute("ALTER TABLE db2.`%s` DROP COLUMN `%s`; " %(self.regular_table, self.col_bigint))
+ sql = " select * from db2.`%s`; " %self.regular_table
+ datacheck = self.table1_checkall_1(sql)
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(2)
+ tdSql.error("ALTER TABLE db2.`%s` DROP COLUMN `%s`; " %(self.regular_table, self.col_int))
+ tdSql.query("describe `%s` ; " %self.regular_table)
+ tdSql.checkRows(2)
+
+ print("==============add==============")
+ tdSql.execute("ALTER TABLE db2.`%s` ADD COLUMN `%s` bigint; " %(self.regular_table, self.col_bigint))
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(3)
+ tdSql.execute("ALTER TABLE db2.`%s` ADD COLUMN `%s` smallint; " %(self.regular_table, self.col_smallint))
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(4)
+ tdSql.execute("ALTER TABLE db2.`%s` ADD COLUMN `%s` tinyint; " %(self.regular_table, self.col_tinyint))
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(5)
+ tdSql.execute("ALTER TABLE db2.`%s` ADD COLUMN `%s` bool; " %(self.regular_table, self.col_bool))
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(6)
+ tdSql.execute("ALTER TABLE db2.`%s` ADD COLUMN `%s` binary(20); " %(self.regular_table, self.col_binary))
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(7)
+
+ tdSql.execute("insert into db2.`%s` values(now, 1 , 2, 3, 4, 5, 6)" %self.regular_table)
+ sql = "select * from db2.`%s` order by ts desc; " %self.regular_table
+ datacheck = self.table1_checkall_5(sql)
+
+ tdSql.execute("ALTER TABLE db2.`%s` ADD COLUMN `%s` nchar(20); " %(self.regular_table, self.col_nchar))
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(8)
+ tdSql.execute("ALTER TABLE db2.`%s` ADD COLUMN `%s` float; " %(self.regular_table, self.col_float))
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(9)
+ tdSql.execute("ALTER TABLE db2.`%s` ADD COLUMN `%s` double; " %(self.regular_table, self.col_double))
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(10)
+ tdSql.execute("ALTER TABLE db2.`%s` ADD COLUMN `%s` timestamp; " %(self.regular_table, self.col_ts))
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(11)
+
+ tdSql.execute("insert into db2.`%s` values(now, 1 , 2, 3, 4, 5, 6 ,7 ,8 ,9 ,10)" %self.regular_table)
+ sql = "select * from db2.`%s` order by ts desc; " %self.regular_table
+ datacheck = self.table1_checkall(sql)
+
+
+ print("==============change, regular not support==============")
+
+
+ print("==============modify==============")
+ # TD-10810
+ tdSql.execute("ALTER TABLE db2.`%s` MODIFY COLUMN `%s` binary(30); ; " %(self.regular_table, self.col_binary))
+ sql = " select * from db2.`%s` order by ts desc; " %self.regular_table
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe db2.`%s` ; " %self.regular_table)
+ tdSql.checkRows(11)
+ tdSql.execute("ALTER TABLE `%s` MODIFY COLUMN `%s` nchar(30); ; " %(self.regular_table, self.col_nchar))
+ sql = " select * from `%s` order by ts desc; " %self.regular_table
+ datacheck = self.table1_checkall(sql)
+ tdSql.query("describe `%s` ; " %self.regular_table)
+ tdSql.checkRows(11)
+
+
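+ # finally exercise taosdump itself: dump db and db2, then run an import pass from the current
+ # directory, asserting a zero exit code for each invocation (flag semantics assumed from taosdump usage)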
+ assert os.system("taosdump -D db") == 0
+ assert os.system("taosdump -D db2") == 0
+
+ assert os.system("taosdump -i . -g") == 0
+
+
+ def stop(self):
+ tdSql.close()
+ tdLog.success("%s successfully executed" % __file__)
+
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
diff --git a/tests/system-test/fulltest-connector.sh b/tests/system-test/fulltest-connector.sh
index dbb77b2ce07d8c34c549a22a3218ebcb6894d2a3..0a02608cb8862b0fb685e363e3197759f773bb6e 100755
--- a/tests/system-test/fulltest-connector.sh
+++ b/tests/system-test/fulltest-connector.sh
@@ -1,3 +1,6 @@
+
+python3 ./test.py -f 3-connectors/restful/restful_binddbname.py
+
bash 3-connectors/c#/test.sh
bash 3-connectors/go/test.sh
bash 3-connectors/java/test.sh
@@ -5,3 +8,4 @@ bash 3-connectors/nodejs/test.sh
bash 3-connectors/python/test.sh
bash 3-connectors/restful/test.sh
bash 3-connectors/rust/test.sh
+
diff --git a/tests/system-test/fulltest-insert.sh b/tests/system-test/fulltest-insert.sh
index 709fab8791b37169a236887d57109a93cb38b585..3f2cc0366910ecfdb80a589e5b90e60638098651 100755
--- a/tests/system-test/fulltest-insert.sh
+++ b/tests/system-test/fulltest-insert.sh
@@ -1,5 +1,3 @@
-
-python3 ./test.py -f 1-insert/batchInsert.py
python3 test.py -f 1-insert/TD-11970.py
-
+python3 test.py -f 1-insert/stmt_error.py
diff --git a/tests/system-test/fulltest-query.sh b/tests/system-test/fulltest-query.sh
index 3488749e15d151800719526f82d8243e78fef79f..262818be32ffd3d0338799e2b861e2aa68ad7966 100755
--- a/tests/system-test/fulltest-query.sh
+++ b/tests/system-test/fulltest-query.sh
@@ -1,5 +1,4 @@
python3 ./test.py -f 2-query/TD-11256.py
-python3 ./test.py -f 2-query/TD-11389.py
+# python3 ./test.py -f 2-query/TD-11389.py
python3 ./test.py -f 2-query/TD-11945_crash.py
python3 ./test.py -f 2-query/TD-12340-12342.py
-python3 ./test.py -f 2-query/TD-12344.py
diff --git a/tests/test-all.sh b/tests/test-all.sh
index bfd2b04f027084d348f65a2d858427c3389c0774..78fcfc681b1c7477301ecf182836eb3a05a3ce92 100755
--- a/tests/test-all.sh
+++ b/tests/test-all.sh
@@ -348,7 +348,18 @@ if [ "$2" != "sim" ] && [ "$2" != "jdbc" ] && [ "$2" != "unit" ] && [ "$2" != "
runPyCaseOneByOne regressiontest.sh
elif [ "$1" == "full" ]; then
echo "### run Python full test ###"
- runPyCaseOneByOne fulltest.sh
+ cd $tests_dir/develop-test
+ for name in *.sh
+ do
+ runPyCaseOneByOne $name
+ done
+ cd $tests_dir/system-test
+ for name in *.sh
+ do
+ runPyCaseOneByOne $name
+ done
+ cd $tests_dir/pytest
+ runPyCaseOneByOne fulltest.sh
elif [ "$1" == "pytest" ]; then
echo "### run Python full test ###"
runPyCaseOneByOne fulltest.sh