Unverified commit e47f03d5 authored by Jason-Jia, committed by GitHub

Merge branch 'develop' into TD-11040

......@@ -4,8 +4,10 @@ import jenkins.model.CauseOfInterruption
node {
}
def skipbuild=0
def win_stop=0
def skipbuild = 0
def win_stop = 0
def scope = []
def mod = [0,1,2,3,4]
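// mod holds the five test-group indices; Collections.shuffle below randomizes
// the order once per build, and each parallel python_N stage runs its scopes
// against one of the shuffled groups (mod[N-1]).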
def abortPreviousBuilds() {
def currentJobName = env.JOB_NAME
......@@ -349,7 +351,7 @@ pipeline {
}
stages {
stage('pre_build'){
agent{label 'master'}
agent{label 'catalina'}
options { skipDefaultCheckout() }
when {
changeRequest()
......@@ -358,44 +360,32 @@ pipeline {
script{
abort_previous()
abortPreviousBuilds()
}
// sh'''
// rm -rf ${WORKSPACE}.tes
// cp -r ${WORKSPACE} ${WORKSPACE}.tes
// cd ${WORKSPACE}.tes
// git fetch
// '''
// script {
// if (env.CHANGE_TARGET == 'master') {
// sh '''
// git checkout master
// '''
// }
// else if(env.CHANGE_TARGET == '2.0'){
// sh '''
// git checkout 2.0
// '''
// }
// else{
// sh '''
// git checkout develop
// '''
// }
// }
// sh'''
// git fetch origin +refs/pull/${CHANGE_ID}/merge
// git checkout -qf FETCH_HEAD
// '''
println env.CHANGE_BRANCH
if(env.CHANGE_FORK){
scope = ['connector','query','insert','other','tools','taosAdapter']
}
else{
sh'''
cd ${WKC}
git fetch
git checkout ${CHANGE_BRANCH}
git pull
'''
dir('/var/lib/jenkins/workspace/TDinternal/community'){
gitlog = sh(script: "git log -1 --pretty=%B ", returnStdout:true)
println gitlog
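// The last commit message is expected to carry a parenthesized scope list,
// e.g. "fix TD-11040 (query,insert)"; the regex below extracts it into
// `scope`, and builds whose message lacks one are aborted.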
if (!(gitlog =~ /\((.*?)\)/)){
autoCancelled = true
error('Aborting the build.')
}
temp = (gitlog =~ /\((.*?)\)/)
temp = temp[0].remove(1)
scope = temp.split(",")
Collections.shuffle mod
}
// script{
// skipbuild='2'
// skipbuild=sh(script: "git log -2 --pretty=%B | fgrep -ie '[skip ci]' -e '[ci skip]' && echo 1 || echo 2", returnStdout:true)
// println skipbuild
// }
// sh'''
// rm -rf ${WORKSPACE}.tes
// '''
// }
}
}
}
}
stage('Parallel test stage') {
......@@ -408,239 +398,90 @@ pipeline {
}
}
parallel {
stage('python_1_s1') {
agent{label " slave1 || slave11 "}
stage('python_1') {
agent{label " slave1 || slave6 || slave11 || slave16 "}
steps {
pre_test()
timeout(time: 55, unit: 'MINUTES'){
sh '''
date
cd ${WKC}/tests
./test-all.sh p1
date'''
}
script{
scope.each {
sh """
date
cd ${WKC}/tests
./test-CI.sh ${it} 5 ${mod[0]}
date"""
}
}
}
}
}
stage('python_2_s5') {
agent{label " slave5 || slave15 "}
stage('python_2') {
agent{label " slave2 || slave7 || slave12 || slave17 "}
steps {
pre_test()
timeout(time: 55, unit: 'MINUTES'){
sh '''
date
cd ${WKC}/tests
./test-all.sh p2
date'''
}
}
}
stage('python_3_s6') {
agent{label " slave6 || slave16 "}
steps {
timeout(time: 55, unit: 'MINUTES'){
pre_test()
sh '''
date
cd ${WKC}/tests
./test-all.sh p3
date'''
script{
scope.each {
sh """
date
cd ${WKC}/tests
./test-CI.sh ${it} 5 ${mod[1]}
date"""
}
}
}
}
}
stage('test_b1_s2') {
agent{label " slave2 || slave12 "}
stage('python_3') {
agent{label " slave3 || slave8 || slave13 ||slave18 "}
steps {
timeout(time: 105, unit: 'MINUTES'){
pre_test()
sh '''
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd >/dev/null &
sleep 10
'''
sh '''
cd ${WKC}/src/connector/python
export PYTHONPATH=$PWD/
export LD_LIBRARY_PATH=${WKC}/debug/build/lib
pip3 install pytest
pytest tests/
python3 examples/bind-multi.py
python3 examples/bind-row.py
python3 examples/demo.py
python3 examples/insert-lines.py
python3 examples/pep-249.py
python3 examples/query-async.py
python3 examples/query-objectively.py
python3 examples/subscribe-sync.py
python3 examples/subscribe-async.py
'''
sh '''
cd ${WKC}/src/connector/nodejs
npm install
npm run test
cd ${WKC}/tests/examples/nodejs
npm install td2.0-connector > /dev/null 2>&1
node nodejsChecker.js host=localhost
node test1970.js
cd ${WKC}/tests/connectorTest/nodejsTest/nanosupport
npm install td2.0-connector > /dev/null 2>&1
node nanosecondTest.js
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WKC}/src/connector/C#
dotnet test
dotnet run --project src/test/Cases/Cases.csproj
cd ${WKC}/tests/examples/C#
dotnet run --project C#checker/C#checker.csproj
dotnet run --project TDengineTest/TDengineTest.csproj
dotnet run --project schemaless/schemaless.csproj
cd ${WKC}/tests/examples/C#/taosdemo
dotnet build -c Release
tree || true
./bin/Release/net5.0/taosdemo -c /etc/taos -y
'''
script{
scope.each {
sh """
date
cd ${WKC}/tests
./test-CI.sh ${it} 5 ${mod[2]}
date"""
}
}
sh '''
cd ${WKC}/tests/gotest
bash batchtest.sh
'''
sh '''
cd ${WKC}/tests
./test-all.sh b1fq
date'''
}
}
}
stage('test_crash_gen_s3') {
agent{label " slave3 || slave13 "}
stage('python_4') {
agent{label " slave4 || slave9 || slave14 || slave19 "}
steps {
pre_test()
timeout(time: 60, unit: 'MINUTES'){
sh '''
cd ${WKC}/tests/pytest
./crash_gen.sh -a -p -t 4 -s 2000
'''
}
timeout(time: 60, unit: 'MINUTES'){
sh '''
cd ${WKC}/tests/pytest
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
./handle_crash_gen_val_log.sh
'''
sh '''
cd ${WKC}/tests/pytest
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
./handle_taosd_val_log.sh
'''
}
timeout(time: 55, unit: 'MINUTES'){
sh '''
date
cd ${WKC}/tests
./test-all.sh b2fq
date
'''
}
}
}
stage('test_valgrind_s4') {
agent{label " slave4 || slave14 "}
steps {
pre_test()
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WKC}/tests/pytest
./valgrind-test.sh 2>&1 > mem-error-out.log
./handle_val_log.sh
'''
}
timeout(time: 55, unit: 'MINUTES'){
sh '''
date
cd ${WKC}/tests
./test-all.sh b3fq
date'''
sh '''
date
cd ${WKC}/tests
./test-all.sh full example
date'''
}
}
}
stage('test_b4_s7') {
agent{label " slave7 || slave17 "}
steps {
timeout(time: 105, unit: 'MINUTES'){
pre_test()
sh '''
date
cd ${WKC}/tests
./test-all.sh b4fq
cd ${WKC}/tests
./test-all.sh p4
'''
// cd ${WKC}/tests
// ./test-all.sh full jdbc
// cd ${WKC}/tests
// ./test-all.sh full unit
}
}
}
stage('test_b5_s8') {
agent{label " slave8 || slave18 "}
steps {
timeout(time: 55, unit: 'MINUTES'){
pre_test()
sh '''
date
cd ${WKC}/tests
./test-all.sh b5fq
date'''
}
}
}
stage('test_b6_s9') {
agent{label " slave9 || slave19 "}
steps {
timeout(time: 55, unit: 'MINUTES'){
pre_test()
sh '''
cd ${WKC}/tests
./test-all.sh develop-test
'''
sh '''
date
cd ${WKC}/tests
./test-all.sh b6fq
date'''
script{
scope.each {
sh """
date
cd ${WKC}/tests
./test-CI.sh ${it} 5 ${mod[3]}
date"""
}
}
}
}
}
stage('test_b7_s10') {
agent{label " slave10 || slave20 "}
stage('python_5') {
agent{label " slave5 || slave10 || slave15 || slave20 "}
steps {
timeout(time: 55, unit: 'MINUTES'){
pre_test()
sh '''
cd ${WKC}/tests
./test-all.sh system-test
'''
sh '''
date
cd ${WKC}/tests
./test-all.sh b7fq
date'''
script{
scope.each {
sh """
date
cd ${WKC}/tests
./test-CI.sh ${it} 5 ${mod[4]}
date"""
}
}
}
}
}
......@@ -813,3 +654,4 @@ pipeline {
}
}
}
......@@ -7,11 +7,21 @@ TDengine 能够与开源数据可视化系统 [Grafana](https://www.grafana.com/
### Install Grafana
TDengine currently supports Grafana 6.2 and above. Users can download the installation package for their operating system from the Grafana website and install it. Download address: https://grafana.com/grafana/download
TDengine currently supports Grafana 7.0 and above. Users can download the installation package for their operating system from the Grafana website and install it. Download address: <https://grafana.com/grafana/download>
### Configure Grafana
The TDengine plugin for Grafana can be downloaded from <https://github.com/taosdata/grafanaplugin/releases/latest>.
The TDengine plugin for Grafana is hosted on GitHub and can be downloaded from <https://github.com/taosdata/grafanaplugin/releases/latest>; the current version is 3.1.3.
It is recommended to install the plugin with the [`grafana-cli` command-line tool](https://grafana.com/docs/grafana/latest/administration/cli/).
```bash
sudo -u grafana grafana-cli \
--pluginUrl https://github.com/taosdata/grafanaplugin/releases/download/v3.1.3/tdengine-datasource-3.1.3.zip \
plugins install tdengine-datasource
```
Alternatively, download the package and unzip it into the Grafana plugins directory.
```bash
GF_VERSION=3.1.3
......@@ -31,11 +41,18 @@ Grafana 7.3+ / 8.x 版本会对插件进行签名检查,因此还需要在 gra
allow_loading_unsigned_plugins = tdengine-datasource
```
In a Docker environment, the following environment variables can be used to install and enable the TDengine plugin automatically:
```bash
GF_INSTALL_PLUGINS=https://github.com/taosdata/grafanaplugin/releases/download/v3.1.3/tdengine-datasource-3.1.3.zip;tdengine-datasource
GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS=tdengine-datasource
```
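Whichever method you use, you can verify that the plugin is registered (a quick check, assuming Grafana runs as the `grafana` user):
```bash
sudo -u grafana grafana-cli plugins ls
```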
### Use Grafana
#### Configure data source
Users can log in to the Grafana server at localhost:3000 (username/password: admin/admin) and add a data source through `Configuration -> Data Sources` in the left panel, as shown below:
Users can log in to the Grafana server at <http://localhost:3000> (username/password: admin/admin) and add a data source through `Configuration -> Data Sources` in the left panel, as shown below:
![img](../images/connections/add_datasource1.jpg)
......
......@@ -48,7 +48,7 @@ TDengine 缺省的时间戳是毫秒精度,但通过在 CREATE DATABASE 时传
| 3 | BIGINT | 8 | Long integer, range [-2^63+1, 2^63-1]; -2^63 is reserved for NULL |
| 4 | FLOAT | 4 | Floating-point number, 6-7 significant digits, range [-3.4E38, 3.4E38] |
| 5 | DOUBLE | 8 | Double-precision floating-point number, 15-16 significant digits, range [-1.7E308, 1.7E308] |
| 6 | BINARY | user-defined | Single-byte string; recommended only for visible ASCII characters, while multi-byte characters such as Chinese require nchar. In theory it can be up to 16374 bytes long, but since each row holds at most 16K bytes the practical limit is usually lower. binary accepts string input only, and strings must be enclosed in single quotes. A size must be specified on use: binary(20) defines a string of at most 20 single-byte characters, each taking 1 byte for a fixed total of 20 bytes of storage; an error is raised if the string exceeds 20 bytes. A single quote inside a string is written with a backslash escape, i.e. `\'`. |
| 6 | BINARY | user-defined | Single-byte string; recommended only for visible ASCII characters, while multi-byte characters such as Chinese require nchar. In theory it can be up to 16374 bytes long. binary accepts string input only, and strings must be enclosed in single quotes. A size must be specified on use: binary(20) defines a string of at most 20 single-byte characters, each taking 1 byte for a fixed total of 20 bytes of storage; an error is raised if the string exceeds 20 bytes. A single quote inside a string is written with a backslash escape, i.e. `\'`. |
| 7 | SMALLINT | 2 | Short integer, range [-32767, 32767]; -32768 is reserved for NULL |
| 8 | TINYINT | 1 | Single-byte integer, range [-127, 127]; -128 is reserved for NULL |
| 9 | BOOL | 1 | Boolean, {true, false} |
......
......@@ -33,8 +33,8 @@ IT 运维监测数据通常都是对时间特性比较敏感的数据,例如
### Download the TDengine plugin to the Grafana plugin directory
```bash
wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.1/tdengine-datasource-3.1.1.zip
sudo unzip tdengine-datasource-3.1.1.zip -d /var/lib/grafana/plugins/
wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.3/tdengine-datasource-3.1.3.zip
sudo unzip tdengine-datasource-3.1.3.zip -d /var/lib/grafana/plugins/
sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
echo -e "[plugins]\nallow_loading_unsigned_plugins = tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
sudo systemctl restart grafana-server.service
......
......@@ -32,8 +32,8 @@ IT 运维监测数据通常都是对时间特性比较敏感的数据,例如
### Copy the TDengine plugin to the Grafana plugin directory
```bash
wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.1/tdengine-datasource-3.1.1.zip
sudo unzip tdengine-datasource-3.1.1.zip -d /var/lib/grafana/plugins/
wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.3/tdengine-datasource-3.1.3.zip
sudo unzip tdengine-datasource-3.1.3.zip -d /var/lib/grafana/plugins/
sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
echo -e "[plugins]\nallow_loading_unsigned_plugins = tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
sudo systemctl restart grafana-server.service
......
......@@ -6,25 +6,47 @@ TDengine can be quickly integrated with [Grafana](https://www.grafana.com/), an
### Install Grafana
TDengine currently supports Grafana 6.2 and above. You can download and install the package from the Grafana website for your operating system. The download address is as follows: <https://grafana.com/grafana/download>.
TDengine currently supports Grafana 7.0 and above. You can download and install the package from the Grafana website for your operating system. The download address is as follows: <https://grafana.com/grafana/download>.
### Configure Grafana
Download the Grafana plugin from <https://github.com/taosdata/grafanaplugin/releases/latest>.
The TDengine data source plugin for Grafana is hosted on GitHub; refer to the latest release page <https://github.com/taosdata/grafanaplugin/releases/latest> to download the latest plugin package. The current version is 3.1.3.
It is recommended to install the plugin with the [`grafana-cli` command-line tool](https://grafana.com/docs/grafana/latest/administration/cli/).
```bash
sudo -u grafana grafana-cli \
--pluginUrl https://github.com/taosdata/grafanaplugin/releases/download/v3.1.3/tdengine-datasource-3.1.3.zip \
plugins install tdengine-datasource
```
Alternatively, you can download the plugin package manually and install it into the Grafana plugins directory.
```bash
GF_VERSION=3.1.3
wget https://github.com/taosdata/grafanaplugin/releases/download/v$GF_VERSION/tdengine-datasource-$GF_VERSION.zip
```
Taking CentOS 7.2 as an example, just copy the grafanaplugin directory to the /var/lib/grafana/plugins directory and restart Grafana.
Taking CentOS 7.2 as an example, just unpack the package into the /var/lib/grafana/plugins directory and restart Grafana.
```bash
sudo unzip tdengine-datasource-$GF_VERSION.zip -d /var/lib/grafana/plugins/
```
Grafana 7.3+ and 8.x verify plugin signatures for security, so additional configuration is needed in the `grafana.ini` file to allow unsigned plugins such as the TDengine data source.
```ini
[plugins]
allow_loading_unsigned_plugins = tdengine-datasource
```
In Docker, Docker Compose, or Kubernetes environments, setting the following two environment variables installs and enables the plugin automatically.
```bash
GF_INSTALL_PLUGINS=https://github.com/taosdata/grafanaplugin/releases/download/v3.1.3/tdengine-datasource-3.1.3.zip;tdengine-datasource
GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS=tdengine-datasource
```
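As an example, a minimal sketch of starting the official Grafana container with the plugin preinstalled (the container name and port mapping are illustrative):
```bash
docker run -d --name grafana -p 3000:3000 \
  -e "GF_INSTALL_PLUGINS=https://github.com/taosdata/grafanaplugin/releases/download/v3.1.3/tdengine-datasource-3.1.3.zip;tdengine-datasource" \
  -e "GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS=tdengine-datasource" \
  grafana/grafana
```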
### Use Grafana
#### Configure data source
......
......@@ -33,8 +33,8 @@ Please download TDengine 2.3.0.0 or the above version from TAOS Data's [official
### Download TDengine plugin to Grafana plugin's directory
```bash
wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.1/tdengine-datasource-3.1.1.zip
sudo unzip tdengine-datasource-3.1.1.zip -d /var/lib/grafana/plugins/
wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.3/tdengine-datasource-3.1.3.zip
sudo unzip tdengine-datasource-3.1.3.zip -d /var/lib/grafana/plugins/
sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
echo -e "[plugins]\nallow_loading_unsigned_plugins = tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
sudo systemctl restart grafana-server.service
......
......@@ -32,8 +32,8 @@ Please download TDengine 2.3.0.0 or the above version from TAOS Data's [official
### Download TDengine plugin to Grafana plugin's directory
```bash
wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.1/tdengine-datasource-3.1.1.zip
sudo unzip tdengine-datasource-3.1.1.zip -d /var/lib/grafana/plugins/
wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.3/tdengine-datasource-3.1.3.zip
sudo unzip tdengine-datasource-3.1.3.zip -d /var/lib/grafana/plugins/
sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
echo -e "[plugins]\nallow_loading_unsigned_plugins = tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
sudo systemctl restart grafana-server.service
......
......@@ -43,9 +43,11 @@ if [ "$osType" != "Darwin" ]; then
if [ "$pagMode" == "lite" ]; then
#strip ${build_dir}/bin/taosd
strip ${build_dir}/bin/taos
bin_files="${build_dir}/bin/taos ${script_dir}/remove_client.sh"
bin_files="${build_dir}/bin/taos \
${script_dir}/remove_client.sh"
else
bin_files="${script_dir}/remove_client.sh \
bin_files="${build_dir}/bin/taos \
${script_dir}/remove_client.sh \
${script_dir}/set_core.sh \
${script_dir}/get_client.sh"
#${script_dir}/get_client.sh ${script_dir}/taosd-dump-cfg.gdb"
......
......@@ -128,12 +128,13 @@ static int32_t tscSetValueToResObj(SSqlObj *pSql, int32_t rowLen) {
// type length
int32_t bytes = pSchema[i].bytes;
pField = tscFieldInfoGetField(&pQueryInfo->fieldsInfo, 2);
if (pSchema[i].type == TSDB_DATA_TYPE_BINARY || pSchema[i].type == TSDB_DATA_TYPE_NCHAR) {
if (pSchema[i].type == TSDB_DATA_TYPE_BINARY){
bytes -= VARSTR_HEADER_SIZE;
if (pSchema[i].type == TSDB_DATA_TYPE_NCHAR) {
bytes = bytes / TSDB_NCHAR_SIZE;
}
}
else if(pSchema[i].type == TSDB_DATA_TYPE_NCHAR || pSchema[i].type == TSDB_DATA_TYPE_JSON) {
bytes -= VARSTR_HEADER_SIZE;
bytes = bytes / TSDB_NCHAR_SIZE;
}
*(int32_t *)(pRes->data + tscFieldInfoGetOffset(pQueryInfo, 2) * totalNumOfRows + pField->bytes * i) = bytes;
......
......@@ -4519,13 +4519,16 @@ static int32_t validateSQLExprItemSQLFunc(SSqlCmd* pCmd, tSqlExpr* pExpr,
if (TSDB_FUNC_IS_SCALAR(functionId)) {
code = validateSQLExprItem(pCmd, pParamElem->pNode, pQueryInfo, pList, childrenTypes + i, uid, childrenHeight+i);
if (code != TSDB_CODE_SUCCESS) {
free(childrenTypes);
tfree(childrenTypes);
tfree(childrenHeight);
return code;
}
}
if (!TSDB_FUNC_IS_SCALAR(functionId) &&
(pParamElem->pNode->type == SQL_NODE_EXPR || pParamElem->pNode->type == SQL_NODE_SQLFUNCTION)) {
tfree(childrenTypes);
tfree(childrenHeight);
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg1);
}
......@@ -4547,6 +4550,8 @@ static int32_t validateSQLExprItemSQLFunc(SSqlCmd* pCmd, tSqlExpr* pExpr,
*height = maxChildrenHeight + 1;
if (anyChildAgg && anyChildScalar) {
tfree(childrenTypes);
tfree(childrenHeight);
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg1);
}
if (anyChildAgg) {
......@@ -4558,7 +4563,8 @@ static int32_t validateSQLExprItemSQLFunc(SSqlCmd* pCmd, tSqlExpr* pExpr,
*type = SQLEXPR_TYPE_AGG;
}
}
free(childrenTypes);
tfree(childrenTypes);
tfree(childrenHeight);
//end if param list is not null
} else {
if (TSDB_FUNC_IS_SCALAR(functionId)) {
......@@ -6869,7 +6875,7 @@ int32_t setAlterTableInfo(SSqlObj* pSql, struct SSqlInfo* pInfo) {
tscError("json type error, should be string");
return invalidOperationMsg(pMsg, msg25);
}
if (pItem->pVar.nType > TSDB_MAX_JSON_TAGS_LEN / TSDB_NCHAR_SIZE) {
if (pItem->pVar.nLen > TSDB_MAX_JSON_TAGS_LEN / TSDB_NCHAR_SIZE) {
tscError("json tag too long");
return invalidOperationMsg(pMsg, msg14);
}
......
......@@ -774,11 +774,12 @@ static void setResRawPtrImpl(SSqlRes* pRes, SInternalField* pInfo, int32_t i, bo
memcpy(dst, p, varDataTLen(p));
} else if (varDataLen(p) > 0) {
int32_t length = taosUcs4ToMbs(varDataVal(p), varDataLen(p), varDataVal(dst));
varDataSetLen(dst, length);
if (length == 0) {
if (length <= 0) {
tscError("charset:%s to %s. val:%s convert failed.", DEFAULT_UNICODE_ENCODEC, tsCharset, (char*)p);
}
if (length >= 0){
varDataSetLen(dst, length);
}
} else {
varDataSetLen(dst, 0);
}
......@@ -809,18 +810,23 @@ static void setResRawPtrImpl(SSqlRes* pRes, SInternalField* pInfo, int32_t i, bo
varDataSetLen(dst, strlen(varDataVal(dst)));
}else if (type == TSDB_DATA_TYPE_JSON) {
int32_t length = taosUcs4ToMbs(varDataVal(realData), varDataLen(realData), varDataVal(dst));
varDataSetLen(dst, length);
if (length == 0) {
if (length <= 0) {
tscError("charset:%s to %s. val:%s convert failed.", DEFAULT_UNICODE_ENCODEC, tsCharset, (char*)p);
}
if (length >= 0){
varDataSetLen(dst, length);
}
}else if (type == TSDB_DATA_TYPE_NCHAR) { // value -> "value"
*(char*)varDataVal(dst) = '\"';
int32_t length = taosUcs4ToMbs(varDataVal(realData), varDataLen(realData), POINTER_SHIFT(varDataVal(dst), CHAR_BYTES));
*(char*)(POINTER_SHIFT(varDataVal(dst), length + CHAR_BYTES)) = '\"';
varDataSetLen(dst, length + CHAR_BYTES*2);
if (length == 0) {
if (length <= 0) {
tscError("charset:%s to %s. val:%s convert failed.", DEFAULT_UNICODE_ENCODEC, tsCharset, (char*)p);
}
if (length >= 0){
varDataSetLen(dst, length + CHAR_BYTES*2);
*(char*)(POINTER_SHIFT(varDataVal(dst), length + CHAR_BYTES)) = '\"';
}
}else if (type == TSDB_DATA_TYPE_DOUBLE) {
double jsonVd = *(double*)(realData);
sprintf(varDataVal(dst), "%.9lf", jsonVd);
......@@ -5515,10 +5521,10 @@ int parseJsontoTagData(char* json, SKVRowBuilder* kvRowBuilder, char* errMsg, in
char *tagVal = calloc(strlen(jsonValue) * TSDB_NCHAR_SIZE + TSDB_NCHAR_SIZE, 1);
*tagVal = jsonType2DbType(0, item->type); // type
char* tagData = POINTER_SHIFT(tagVal,CHAR_BYTES);
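// convert only non-empty values below: an empty JSON string value is legal
// for a tag, but taosMbsToUcs4 would treat a zero-length input as a failure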
if (!taosMbsToUcs4(jsonValue, strlen(jsonValue), varDataVal(tagData),
if (strlen(jsonValue) > 0 && !taosMbsToUcs4(jsonValue, strlen(jsonValue), varDataVal(tagData),
(int32_t)(strlen(jsonValue) * TSDB_NCHAR_SIZE), &outLen)) {
tscError("json string error:%s|%s", strerror(errno), jsonValue);
retCode = tscSQLSyntaxErrMsg(errMsg, "serizelize json error", NULL);
tscError("charset:%s to %s. val:%s, errno:%s, convert failed.", DEFAULT_UNICODE_ENCODEC, tsCharset, jsonValue, strerror(errno));
retCode = tscSQLSyntaxErrMsg(errMsg, "charset convert json error", NULL);
free(tagVal);
goto end;
}
......
......@@ -27,6 +27,7 @@
#include "tskiplist.h"
#include "texpr.h"
#include "tarithoperator.h"
#include "tulog.h"
static int32_t exprValidateMathNode(tExprNode *pExpr);
static int32_t exprValidateStringConcatNode(tExprNode *pExpr);
......@@ -1274,6 +1275,11 @@ void castConvert(int16_t inputType, int16_t inputBytes, char *input, int16_t Out
} else if (inputType == TSDB_DATA_TYPE_NCHAR) {
char *newColData = calloc(1, outputBytes * TSDB_NCHAR_SIZE + 1);
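// taosUcs4ToMbs returns the converted byte count, or a negative value when
// the charset conversion fails; bail out rather than index with a negative len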
int len = taosUcs4ToMbs(varDataVal(input), varDataLen(input), newColData);
if (len < 0){
uError("castConvert taosUcs4ToMbs error 1");
tfree(newColData);
return;
}
newColData[len] = 0;
*(int64_t *)output = strtoll(newColData, NULL, 10);
tfree(newColData);
......@@ -1291,6 +1297,11 @@ void castConvert(int16_t inputType, int16_t inputBytes, char *input, int16_t Out
} else if (inputType == TSDB_DATA_TYPE_NCHAR) {
char *newColData = calloc(1, outputBytes * TSDB_NCHAR_SIZE + 1);
int len = taosUcs4ToMbs(varDataVal(input), varDataLen(input), newColData);
if (len < 0){
uError("castConvert taosUcs4ToMbs error 2");
tfree(newColData);
return;
}
newColData[len] = 0;
*(int64_t *)output = strtoull(newColData, NULL, 10);
tfree(newColData);
......@@ -1332,11 +1343,19 @@ void castConvert(int16_t inputType, int16_t inputBytes, char *input, int16_t Out
if (inputType == TSDB_DATA_TYPE_BOOL) {
char tmp[8] = {0};
int32_t len = sprintf(tmp, "%.*s", ncharSize, *(int8_t*)input ? "true" : "false");
taosMbsToUcs4(tmp, len, varDataVal(output), outputBytes - VARSTR_HEADER_SIZE, &len);
bool ret = taosMbsToUcs4(tmp, len, varDataVal(output), outputBytes - VARSTR_HEADER_SIZE, &len);
if(!ret) {
uError("castConvert1 taosMbsToUcs4 error");
return;
}
varDataSetLen(output, len);
} else if (inputType == TSDB_DATA_TYPE_BINARY) {
int32_t len = ncharSize > varDataLen(input) ? varDataLen(input) : ncharSize;
taosMbsToUcs4(input + VARSTR_HEADER_SIZE, len, varDataVal(output), outputBytes - VARSTR_HEADER_SIZE, &len);
bool ret = taosMbsToUcs4(input + VARSTR_HEADER_SIZE, len, varDataVal(output), outputBytes - VARSTR_HEADER_SIZE, &len);
if(!ret) {
uError("castConvert2 taosMbsToUcs4 error");
return;
}
varDataSetLen(output, len);
} else if (inputType == TSDB_DATA_TYPE_TIMESTAMP) {
assert(0);
......@@ -1348,7 +1367,11 @@ void castConvert(int16_t inputType, int16_t inputBytes, char *input, int16_t Out
char tmp[400] = {0};
NUM_TO_STRING(inputType, input, sizeof(tmp), tmp);
int32_t len = (int32_t)(ncharSize > strlen(tmp) ? strlen(tmp) : ncharSize);
taosMbsToUcs4(tmp, len, varDataVal(output), outputBytes - VARSTR_HEADER_SIZE, &len);
bool ret = taosMbsToUcs4(tmp, len, varDataVal(output), outputBytes - VARSTR_HEADER_SIZE, &len);
if(!ret) {
uError("castConvert3 taosMbsToUcs4 error");
return;
}
varDataSetLen(output, len);
}
break;
......
......@@ -23,6 +23,7 @@
#include "ttype.h"
#include "tutil.h"
#include "tvariant.h"
#include "tulog.h"
#define SET_EXT_INFO(converted, res, minv, maxv, exti) do { \
if (converted == NULL || exti == NULL || *converted == false) { break; } \
......@@ -359,8 +360,12 @@ int32_t tVariantToString(tVariant *pVar, char *dst) {
case TSDB_DATA_TYPE_NCHAR: {
dst[0] = '\'';
taosUcs4ToMbs(pVar->wpz, (twcslen(pVar->wpz) + 1) * TSDB_NCHAR_SIZE, dst + 1);
int32_t len = (int32_t)strlen(dst);
int32_t len = taosUcs4ToMbs(pVar->wpz, (twcslen(pVar->wpz) + 1) * TSDB_NCHAR_SIZE, dst + 1);
if (len < 0){
uError("tVariantToString taosUcs4ToMbs error");
return 0;
}
len = (int32_t)strlen(dst);
dst[len] = '\'';
dst[len + 1] = 0;
return len + 1;
......@@ -428,11 +433,17 @@ static int32_t toBinary(tVariant *pVariant, char **pDest, int32_t *pDestSize) {
pBuf = realloc(pBuf, newSize + 1);
}
taosUcs4ToMbs(pVariant->wpz, (int32_t)newSize, pBuf);
int32_t len = taosUcs4ToMbs(pVariant->wpz, (int32_t)newSize, pBuf);
if (len < 0){
uError("toBinary taosUcs4ToMbs error");
}
free(pVariant->wpz);
pBuf[newSize] = 0;
} else {
taosUcs4ToMbs(pVariant->wpz, (int32_t)newSize, *pDest);
int32_t len = taosUcs4ToMbs(pVariant->wpz, (int32_t)newSize, *pDest);
if (len < 0){
uError("toBinary taosUcs4ToMbs error");
}
}
} else {
......
......@@ -398,5 +398,8 @@ namespace TDengineDriver
IntPtr stmtErrPrt = StmtErrPtr(stmt);
return Marshal.PtrToStringAnsi(stmtErrPrt);
}
[DllImport("taos", EntryPoint = "taos_fetch_lengths", CallingConvention = CallingConvention.Cdecl)]
static extern public IntPtr FetchLengths(IntPtr taos);
}
}
......@@ -9,4 +9,8 @@
<TargetFramework>net5.0</TargetFramework>
</PropertyGroup>
<PropertyGroup>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<DocumentationFile>..\doc\FunctionTest.XML</DocumentationFile>
</PropertyGroup>
</Project>
using System;
using Test.UtilsTools;
using System.Collections.Generic;
namespace Cases
{
public class FetchLengthCase
{
/// <author>xiaolei</author>
/// <Name>TestRetrieveBinary</Name>
/// <describe>TD-12103 C# connector fetch_row with binary data retrieving error</describe>
/// <filename>FetchLength.cs</filename>
/// <result>pass or failed </result>
public void TestRetrieveBinary(IntPtr conn)
{
string sql1 = "create stable stb1 (ts timestamp, name binary(10)) tags(n int);";
string sql2 = "insert into tb1 using stb1 tags(1) values(now, 'log');";
string sql3 = "insert into tb2 using stb1 tags(2) values(now, 'test');";
string sql4 = "insert into tb3 using stb1 tags(3) values(now, 'db02');";
string sql5 = "insert into tb4 using stb1 tags(4) values(now, 'db3');";
string sql6 = "select distinct(name) from stb1;";
UtilsTools.ExecuteQuery(conn, sql1);
UtilsTools.ExecuteQuery(conn, sql2);
UtilsTools.ExecuteQuery(conn, sql3);
UtilsTools.ExecuteQuery(conn, sql4);
UtilsTools.ExecuteQuery(conn, sql5);
IntPtr resPtr = IntPtr.Zero;
resPtr = UtilsTools.ExecuteQuery(conn, sql6);
List<List<string>> result = UtilsTools.GetResultSet(resPtr);
List<string> colname = result[0];
List<string> data = result[1];
UtilsTools.AssertEqual("db3", data[0]);
UtilsTools.AssertEqual("log", data[1]);
UtilsTools.AssertEqual("db02", data[2]);
UtilsTools.AssertEqual("test", data[3]);
}
}
}
using System;
using Test.UtilsTools;
using Cases;
namespace Cases.EntryPoint
{
class Program
{
static void Main(string[] args)
{
IntPtr conn = IntPtr.Zero;
IntPtr stmt = IntPtr.Zero;
IntPtr res = IntPtr.Zero;
conn = UtilsTools.TDConnection("127.0.0.1", "root", "taosdata", "", 0);
UtilsTools.ExecuteQuery(conn, "drop database if exists csharp");
UtilsTools.ExecuteQuery(conn, "create database if not exists csharp keep 3650");
UtilsTools.ExecuteQuery(conn, "use csharp");
Console.WriteLine("====================StableColumnByColumn===================");
StableColumnByColumn columnByColumn = new StableColumnByColumn();
columnByColumn.Test(conn, "stablecolumnbycolumn");
Console.WriteLine("====================StmtStableQuery===================");
StmtStableQuery stmtStableQuery = new StmtStableQuery();
stmtStableQuery.Test(conn, "stablecolumnbycolumn");
Console.WriteLine("====================StableMutipleLine===================");
StableMutipleLine mutipleLine = new StableMutipleLine();
mutipleLine.Test(conn, "stablemutipleline");
//================================================================================
Console.WriteLine("====================NtableSingleLine===================");
NtableSingleLine ntableSingleLine = new NtableSingleLine();
ntableSingleLine.Test(conn, "stablesingleline");
Console.WriteLine("====================NtableMutipleLine===================");
NtableMutipleLine ntableMutipleLine = new NtableMutipleLine();
ntableMutipleLine.Test(conn, "ntablemutipleline");
Console.WriteLine("====================StmtNtableQuery===================");
StmtNtableQuery stmtNtableQuery = new StmtNtableQuery();
stmtNtableQuery.Test(conn, "ntablemutipleline");
Console.WriteLine("====================NtableColumnByColumn===================");
NtableColumnByColumn ntableColumnByColumn = new NtableColumnByColumn();
ntableColumnByColumn.Test(conn, "ntablecolumnbycolumn");
Console.WriteLine("====================fetchfeilds===================");
FetchFields fetchFields = new FetchFields();
fetchFields.Test(conn,"fetchfeilds");
Console.WriteLine("===================JsonTagTest====================");
JsonTagTest jsonTagTest = new JsonTagTest();
jsonTagTest.Test(conn);
// UtilsTools.ExecuteQuery(conn, "drop database if exists csharp");
UtilsTools.CloseConnection(conn);
UtilsTools.ExitProgram();
}
}
}
using System;
using Test.UtilsTools;
using Cases;
namespace Cases.EntryPoint
{
class Program
{
static void Main(string[] args)
{
IntPtr conn = IntPtr.Zero;
IntPtr stmt = IntPtr.Zero;
IntPtr res = IntPtr.Zero;
conn = UtilsTools.TDConnection("127.0.0.1", "root", "taosdata", "", 0);
UtilsTools.ExecuteQuery(conn, "drop database if exists csharp");
UtilsTools.ExecuteQuery(conn, "create database if not exists csharp keep 3650");
UtilsTools.ExecuteQuery(conn, "use csharp");
Console.WriteLine("====================StableColumnByColumn===================");
StableColumnByColumn columnByColumn = new StableColumnByColumn();
columnByColumn.Test(conn, "stablecolumnbycolumn");
Console.WriteLine("====================StmtStableQuery===================");
StmtStableQuery stmtStableQuery = new StmtStableQuery();
stmtStableQuery.Test(conn, "stablecolumnbycolumn");
Console.WriteLine("====================StableMutipleLine===================");
StableMutipleLine mutipleLine = new StableMutipleLine();
mutipleLine.Test(conn, "stablemutipleline");
//================================================================================
Console.WriteLine("====================NtableSingleLine===================");
NtableSingleLine ntableSingleLine = new NtableSingleLine();
ntableSingleLine.Test(conn, "stablesingleline");
Console.WriteLine("====================NtableMutipleLine===================");
NtableMutipleLine ntableMutipleLine = new NtableMutipleLine();
ntableMutipleLine.Test(conn, "ntablemutipleline");
Console.WriteLine("====================StmtNtableQuery===================");
StmtNtableQuery stmtNtableQuery = new StmtNtableQuery();
stmtNtableQuery.Test(conn, "ntablemutipleline");
Console.WriteLine("====================NtableColumnByColumn===================");
NtableColumnByColumn ntableColumnByColumn = new NtableColumnByColumn();
ntableColumnByColumn.Test(conn, "ntablecolumnbycolumn");
Console.WriteLine("====================fetchfeilds===================");
FetchFields fetchFields = new FetchFields();
fetchFields.Test(conn, "fetchfeilds");
Console.WriteLine("===================JsonTagTest====================");
JsonTagTest jsonTagTest = new JsonTagTest();
jsonTagTest.Test(conn);
Console.WriteLine("====================fetchLengthCase===================");
FetchLengthCase fetchLengthCase = new FetchLengthCase();
fetchLengthCase.TestRetrieveBinary(conn);
UtilsTools.ExecuteQuery(conn, "drop database if exists csharp");
UtilsTools.CloseConnection(conn);
UtilsTools.ExitProgram();
}
}
}
......@@ -35,7 +35,6 @@ namespace Test.UtilsTools
else
{
Console.WriteLine(sql.ToString() + " success");
}
return res;
}
......@@ -83,9 +82,13 @@ namespace Test.UtilsTools
IntPtr rowdata;
StringBuilder builder = new StringBuilder();
while ((rowdata = TDengine.FetchRows(res)) != IntPtr.Zero)
{
queryRows++;
IntPtr colLengthPtr = TDengine.FetchLengths(res);
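// colLengthPtr points at a native int array holding the byte length of each
// column in the current row; copy it out so BINARY/NCHAR values below can be
// read with their exact length instead of stopping at the first NUL byte.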
int[] colLengthArr = new int[fieldCount];
Marshal.Copy(colLengthPtr, colLengthArr, 0, fieldCount);
for (int fields = 0; fields < fieldCount; ++fields)
{
TDengineMeta meta = metas[fields];
......@@ -131,7 +134,7 @@ namespace Test.UtilsTools
builder.Append(v7);
break;
case TDengineDataType.TSDB_DATA_TYPE_BINARY:
string v8 = Marshal.PtrToStringAnsi(data);
string v8 = Marshal.PtrToStringAnsi(data, colLengthArr[fields]);
builder.Append(v8);
break;
case TDengineDataType.TSDB_DATA_TYPE_TIMESTAMP:
......@@ -139,7 +142,7 @@ namespace Test.UtilsTools
builder.Append(v9);
break;
case TDengineDataType.TSDB_DATA_TYPE_NCHAR:
string v10 = Marshal.PtrToStringAnsi(data);
string v10 = Marshal.PtrToStringAnsi(data, colLengthArr[fields]);
builder.Append(v10);
break;
case TDengineDataType.TSDB_DATA_TYPE_JSONTAG:
......@@ -164,6 +167,117 @@ namespace Test.UtilsTools
TDengine.FreeResult(res); Console.WriteLine("");
}
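/// <summary>
/// Collect a whole result set into memory: element 0 of the returned list
/// holds the column names, element 1 holds all row values flattened in
/// row-major order.
/// </summary>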
public static List<List<string>> GetResultSet(IntPtr res)
{
List<List<string>> result = new List<List<string>>();
List<string> colName = new List<string>();
List<string> dataRaw = new List<string>();
long queryRows = 0;
if (!IsValidResult(res))
{
ExitProgram();
}
int fieldCount = TDengine.FieldCount(res);
List<TDengineMeta> metas = TDengine.FetchFields(res);
for (int j = 0; j < metas.Count; j++)
{
TDengineMeta meta = (TDengineMeta)metas[j];
colName.Add(meta.name);
}
result.Add(colName);
IntPtr rowdata;
while ((rowdata = TDengine.FetchRows(res)) != IntPtr.Zero)
{
queryRows++;
IntPtr colLengthPtr = TDengine.FetchLengths(res);
int[] colLengthArr = new int[fieldCount];
Marshal.Copy(colLengthPtr, colLengthArr, 0, fieldCount);
for (int fields = 0; fields < fieldCount; ++fields)
{
TDengineMeta meta = metas[fields];
int offset = IntPtr.Size * fields;
IntPtr data = Marshal.ReadIntPtr(rowdata, offset);
if (data == IntPtr.Zero)
{
dataRaw.Add("NULL");
continue;
}
switch ((TDengineDataType)meta.type)
{
case TDengineDataType.TSDB_DATA_TYPE_BOOL:
bool v1 = Marshal.ReadByte(data) == 0 ? false : true;
dataRaw.Add(v1.ToString());
break;
case TDengineDataType.TSDB_DATA_TYPE_TINYINT:
byte v2 = Marshal.ReadByte(data);
dataRaw.Add(v2.ToString());
break;
case TDengineDataType.TSDB_DATA_TYPE_SMALLINT:
short v3 = Marshal.ReadInt16(data);
dataRaw.Add(v3.ToString());
break;
case TDengineDataType.TSDB_DATA_TYPE_INT:
int v4 = Marshal.ReadInt32(data);
dataRaw.Add(v4.ToString());
break;
case TDengineDataType.TSDB_DATA_TYPE_BIGINT:
long v5 = Marshal.ReadInt64(data);
dataRaw.Add(v5.ToString());
break;
case TDengineDataType.TSDB_DATA_TYPE_FLOAT:
float v6 = (float)Marshal.PtrToStructure(data, typeof(float));
dataRaw.Add(v6.ToString());
break;
case TDengineDataType.TSDB_DATA_TYPE_DOUBLE:
double v7 = (double)Marshal.PtrToStructure(data, typeof(double));
dataRaw.Add(v7.ToString());
break;
case TDengineDataType.TSDB_DATA_TYPE_BINARY:
string v8 = Marshal.PtrToStringAnsi(data, colLengthArr[fields]);
dataRaw.Add(v8);
break;
case TDengineDataType.TSDB_DATA_TYPE_TIMESTAMP:
long v9 = Marshal.ReadInt64(data);
dataRaw.Add(v9.ToString());
break;
case TDengineDataType.TSDB_DATA_TYPE_NCHAR:
string v10 = Marshal.PtrToStringAnsi(data, colLengthArr[fields]);
dataRaw.Add(v10);
break;
}
}
}
result.Add(dataRaw);
if (TDengine.ErrorNo(res) != 0)
{
Console.Write("Query is not complete, Error {0:G} {1}", TDengine.ErrorNo(res), TDengine.Error(res));
}
TDengine.FreeResult(res); Console.WriteLine("");
return result;
}
public static bool IsValidResult(IntPtr res)
{
if ((res == IntPtr.Zero) || (TDengine.ErrorNo(res) != 0))
{
if (res != IntPtr.Zero)
{
Console.Write("reason: " + TDengine.Error(res));
return false;
}
Console.WriteLine("");
return false;
}
return true;
}
public static void CloseConnection(IntPtr conn)
{
if (conn != IntPtr.Zero)
......@@ -183,6 +297,18 @@ namespace Test.UtilsTools
List<TDengineMeta> metas = TDengine.FetchFields(res);
return metas;
}
public static void AssertEqual(string expectVal, string actualVal)
{
if (expectVal == actualVal)
{
Console.WriteLine("{0}=={1} pass", expectVal, actualVal);
}
else
{
Console.WriteLine("{0}=={1} failed", expectVal, actualVal);
ExitProgram();
}
}
public static void ExitProgram()
{
TDengine.Cleanup();
......
......@@ -2,10 +2,14 @@
<PropertyGroup>
<TargetFramework>net5.0</TargetFramework>
<IsPackable>false</IsPackable>
</PropertyGroup>
<PropertyGroup>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<DocumentationFile>..\doc\UnitTest.XML</DocumentationFile>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="coverlet.msbuild" Version="3.1.0">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
......
......@@ -443,16 +443,29 @@ public class TSDBResultSetRowData {
case 0: {
milliseconds = ts;
fracNanoseconds = (int) (ts * 1_000_000 % 1_000_000_000);
fracNanoseconds = fracNanoseconds < 0 ? 1_000_000_000 + fracNanoseconds : fracNanoseconds;
break;
}
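// For pre-1970 timestamps ts is negative, so the remainder operations yield
// negative fractions; each case normalizes the fraction into
// [0, 1_000_000_000) and, when the millisecond part is zero, borrows one
// millisecond so milliseconds and fracNanoseconds still encode the instant.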
case 1: {
milliseconds = ts / 1_000;
fracNanoseconds = (int) (ts * 1_000 % 1_000_000_000);
if (fracNanoseconds < 0) {
if (milliseconds == 0 ){
milliseconds = -1;
}
fracNanoseconds += 1_000_000_000;
}
break;
}
case 2: {
milliseconds = ts / 1_000_000;
fracNanoseconds = (int) (ts % 1_000_000_000);
if (fracNanoseconds < 0) {
if (milliseconds == 0 ){
milliseconds = -1;
}
fracNanoseconds += 1_000_000_000;
}
break;
}
default: {
......
package com.taosdata.jdbc.cases;
import com.taosdata.jdbc.annotation.CatalogRunner;
import com.taosdata.jdbc.annotation.Description;
import com.taosdata.jdbc.annotation.TestTarget;
import com.taosdata.jdbc.utils.TimestampUtil;
import org.junit.*;
import org.junit.runner.RunWith;
import java.sql.*;
@RunWith(CatalogRunner.class)
@TestTarget(alias = "negative value convert to timestamp", author = "huolibo", version = "2.0.37")
public class DatetimeBefore1970Test {
private static final String host = "127.0.0.1";
private Connection conn;
@Test
public void test() throws SQLException {
@Description("millisecond")
public void msTest() throws SQLException {
conn = createEnvironment("ms");
long now = System.currentTimeMillis();
try (Statement stmt = conn.createStatement()) {
// given
// before
stmt.executeUpdate("insert into weather(ts) values('1969-12-31 00:00:00.001')");
stmt.executeUpdate("insert into weather(ts) values('1969-12-31 23:59:59.999')");
// zero
stmt.executeUpdate("insert into weather(ts) values('1970-01-01 00:00:00.000')");
stmt.executeUpdate("insert into weather(ts) values('1970-01-01 08:00:00.000')");
stmt.executeUpdate("insert into weather(ts) values('1970-01-01 07:59:59.999')");
//after
stmt.executeUpdate("insert into weather(ts) values('1970-01-01 00:00:00.001')");
stmt.executeUpdate("insert into weather(ts) values(" + now + ")");
ResultSet rs = stmt.executeQuery("select * from weather order by ts asc");
ResultSetMetaData metaData = rs.getMetaData();
Assert.assertEquals(2, metaData.getColumnCount());
......@@ -26,44 +39,221 @@ public class DatetimeBefore1970Test {
rs.next();
// then
Timestamp ts = rs.getTimestamp("ts");
Assert.assertEquals("1969-12-31 23:59:59.999", TimestampUtil.longToDatetime(ts.getTime()));
Assert.assertEquals(-24 * 60 * 60 * 1000 + 1, ts.getTime());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals("1970-01-01 00:00:00.000", TimestampUtil.longToDatetime(ts.getTime()));
Assert.assertEquals(-1, ts.getTime());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals("1970-01-01 08:00:00.000", TimestampUtil.longToDatetime(ts.getTime()));
Assert.assertEquals(0, ts.getTime());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals(1, ts.getTime());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals(now, ts.getTime());
}
}
@Test
@Description("microsecond")
public void usTest() throws SQLException {
conn = createEnvironment("us");
long now = System.currentTimeMillis();
try (Statement stmt = conn.createStatement()) {
// given
stmt.executeUpdate("insert into weather(ts) values('1969-12-31 00:00:00.000001')");
stmt.executeUpdate("insert into weather(ts) values('1969-12-31 23:59:59.999999')");
stmt.executeUpdate("insert into weather(ts) values('1970-01-01 00:00:00.000000')");
stmt.executeUpdate("insert into weather(ts) values('1970-01-01 00:00:00.000001')");
stmt.executeUpdate("insert into weather(ts) values(" + now + ")");
ResultSet rs = stmt.executeQuery("select * from weather order by ts asc");
ResultSetMetaData metaData = rs.getMetaData();
Assert.assertEquals(2, metaData.getColumnCount());
// when
rs.next();
// then
Timestamp ts = rs.getTimestamp("ts");
Assert.assertEquals(-24 * 60 * 60 * 1000, ts.getTime());
Assert.assertEquals(1_000, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals(-1, ts.getTime());
Assert.assertEquals(999_999_000, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals(0, ts.getTime());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals(0, ts.getTime());
Assert.assertEquals(1_000, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
String s = String.valueOf(now);
Assert.assertEquals(Long.parseLong(s.substring(0, s.length() - 3)), ts.getTime());
Assert.assertEquals(Long.parseLong(s.substring(s.length() - 6) + "000"), ts.getNanos());
}
}
@Test
@Description("nanosecond")
public void nanoTest() throws SQLException {
conn = createEnvironment("ns");
long now = System.currentTimeMillis() * 1000_000L + System.nanoTime() % 1000_000L;
try (Statement stmt = conn.createStatement()) {
// given
stmt.executeUpdate("insert into weather(ts) values('1969-12-31 00:00:00.000000123')");
stmt.executeUpdate("insert into weather(ts) values('1969-12-31 23:59:59.999999999')");
stmt.executeUpdate("insert into weather(ts) values('1970-01-01 00:00:00.000')");
stmt.executeUpdate("insert into weather(ts) values('1970-01-01 00:00:00.000000001')");
stmt.executeUpdate("insert into weather(ts) values(" + now + ")");
ResultSet rs = stmt.executeQuery("select * from weather order by ts asc");
ResultSetMetaData metaData = rs.getMetaData();
Assert.assertEquals(2, metaData.getColumnCount());
// when
rs.next();
// then
Timestamp ts = rs.getTimestamp("ts");
Assert.assertEquals(-24 * 60 * 60 * 1_000, ts.getTime());
Assert.assertEquals(123, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals(-1, ts.getTime());
Assert.assertEquals(999999999, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals(0, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals(1, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
String s = String.valueOf(now);
Assert.assertEquals(Long.parseLong(s.substring(0, s.length() - 6)), ts.getTime());
Assert.assertEquals(Long.parseLong(s.substring(s.length() - 9)), ts.getNanos());
}
}
@Test
@Ignore
@Description("nanosecond convert timestamp when timezone is asia shanghai")
public void asiaShanghaiTest() throws SQLException {
conn = createEnvironment("ns");
long now = System.currentTimeMillis() * 1000_000L + System.nanoTime() % 1000_000L;
try (Statement stmt = conn.createStatement()) {
// given
stmt.executeUpdate("insert into weather(ts) values('1969-12-31 00:00:00.000000123')");
stmt.executeUpdate("insert into weather(ts) values('1969-12-31 23:59:59.999999999')");
stmt.executeUpdate("insert into weather(ts) values('1970-01-01 00:00:00.000')");
stmt.executeUpdate("insert into weather(ts) values('1970-01-01 00:00:00.000000001')");
stmt.executeUpdate("insert into weather(ts) values(" + now + ")");
ResultSet rs = stmt.executeQuery("select * from weather order by ts asc");
ResultSetMetaData metaData = rs.getMetaData();
Assert.assertEquals(2, metaData.getColumnCount());
// when
rs.next();
// then
Timestamp ts = rs.getTimestamp("ts");
Assert.assertEquals("1969-12-31 08:00:00.000", TimestampUtil.longToDatetime(ts.getTime()));
Assert.assertEquals(123, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals("1970-01-01 07:59:59.999", TimestampUtil.longToDatetime(ts.getTime()));
Assert.assertEquals(999999999, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals("1970-01-01 08:00:00.000", TimestampUtil.longToDatetime(ts.getTime()));
Assert.assertEquals(0, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
Assert.assertEquals("1970-01-01 08:00:00.000", TimestampUtil.longToDatetime(ts.getTime()));
Assert.assertEquals(1, ts.getNanos());
// when
rs.next();
// then
ts = rs.getTimestamp("ts");
String s = String.valueOf(now);
Assert.assertEquals(Long.parseLong(s.substring(0, s.length() - 6)), ts.getTime());
Assert.assertEquals(Long.parseLong(s.substring(s.length() - 9)), ts.getNanos());
}
}
@Before
public void before() throws SQLException {
conn = DriverManager.getConnection("jdbc:TAOS://" + host + ":6030/?user=root&password=taosdata");
private Connection createEnvironment(String precision) throws SQLException {
String url = "jdbc:TAOS://" + host + ":6030/?user=root&password=taosdata&timezone=UTC";
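// timezone=UTC pins the connection so the epoch-offset assertions in the
// tests above do not depend on the timezone of the build machine.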
String createSql = "create database if not exists test_timestamp keep 36500";
if (!isEmpty(precision)) {
createSql += " precision '" + precision + "'";
}
conn = DriverManager.getConnection(url);
Statement stmt = conn.createStatement();
stmt.execute("drop database if exists test_timestamp");
stmt.execute("create database if not exists test_timestamp keep 36500");
stmt.execute(createSql);
stmt.execute("use test_timestamp");
stmt.execute("create table weather(ts timestamp,f1 float)");
stmt.close();
return conn;
}
private boolean isEmpty(String string) {
return null == string || string.trim().equals("");
}
@After
public void after() throws SQLException {
Statement stmt = conn.createStatement();
stmt.execute("drop database if exists test_timestamp");
if (conn != null)
if (conn != null) {
Statement stmt = conn.createStatement();
stmt.execute("drop database if exists test_timestamp");
stmt.close();
conn.close();
}
}
}
......@@ -274,7 +274,6 @@ int32_t* taosGetErrno();
#define TSDB_CODE_TDB_IVLD_TAG_VAL TAOS_DEF_ERROR_CODE(0, 0x0615) //"TSDB invalid tag value")
#define TSDB_CODE_TDB_NO_CACHE_LAST_ROW TAOS_DEF_ERROR_CODE(0, 0x0616) //"TSDB no cache last row data")
#define TSDB_CODE_TDB_INCOMPLETE_DFILESET TAOS_DEF_ERROR_CODE(0, 0x0617) //"TSDB incomplete DFileSet")
#define TSDB_CODE_TDB_NO_JSON_TAG_KEY TAOS_DEF_ERROR_CODE(0, 0x0618) //"TSDB no tag json key")
// query
#define TSDB_CODE_QRY_INVALID_QHANDLE TAOS_DEF_ERROR_CODE(0, 0x0700) //"Invalid handle")
......
......@@ -1942,7 +1942,7 @@ static SQLFunctionCtx* createSQLFunctionCtx(SQueryRuntimeEnv* pRuntimeEnv, SExpr
for (int32_t j = 0; j < pCtx->numOfParams; ++j) {
int16_t type = pSqlExpr->param[j].nType;
int16_t bytes = pSqlExpr->param[j].nLen;
if (pSqlExpr->functionId == TSDB_FUNC_STDDEV_DST) {
if (pSqlExpr->functionId == TSDB_FUNC_STDDEV_DST || pSqlExpr->functionId == TSDB_FUNC_TS_COMP) {
continue;
}
......
......@@ -1040,7 +1040,10 @@ void tColModelDisplay(SColumnModel *pModel, void *pData, int32_t numOfRows, int3
break;
case TSDB_DATA_TYPE_NCHAR: {
char buf[4096] = {0};
taosUcs4ToMbs(val, pModel->pFields[j].field.bytes, buf);
int32_t len = taosUcs4ToMbs(val, pModel->pFields[j].field.bytes, buf);
if (len < 0){
qError("tColModelDisplay taosUcs4ToMbs error");
}
printf("%s\t", buf);
break;
}
......@@ -1092,7 +1095,10 @@ void tColModelDisplayEx(SColumnModel *pModel, void *pData, int32_t numOfRows, in
break;
case TSDB_DATA_TYPE_NCHAR: {
char buf[128] = {0};
taosUcs4ToMbs(val, pModel->pFields[j].field.bytes, buf);
int32_t len = taosUcs4ToMbs(val, pModel->pFields[j].field.bytes, buf);
if (len < 0){
qError("tColModelDisplayEx taosUcs4ToMbs error");
}
printf("%s\t", buf);
break;
}
......
......@@ -1899,12 +1899,20 @@ int32_t filterInitValFieldData(SFilterInfo *info) {
(unit->compare.optr == TSDB_RELATION_MATCH || unit->compare.optr == TSDB_RELATION_NMATCH)){
char newValData[TSDB_REGEX_STRING_DEFAULT_LEN * TSDB_NCHAR_SIZE + VARSTR_HEADER_SIZE] = {0};
int32_t len = taosUcs4ToMbs(varDataVal(fi->data), varDataLen(fi->data), varDataVal(newValData));
if (len < 0){
qError("filterInitValFieldData taosUcs4ToMbs error 1");
return TSDB_CODE_FAILED;
}
varDataSetLen(newValData, len);
varDataCopy(fi->data, newValData);
}else if(type == TSDB_DATA_TYPE_JSON &&
(unit->compare.optr == TSDB_RELATION_MATCH || unit->compare.optr == TSDB_RELATION_NMATCH)){
char newValData[TSDB_REGEX_STRING_DEFAULT_LEN * TSDB_NCHAR_SIZE + VARSTR_HEADER_SIZE] = {0};
int32_t len = taosUcs4ToMbs(((tVariant*)(fi->desc))->pz, ((tVariant*)(fi->desc))->nLen, newValData);
if (len < 0){
qError("filterInitValFieldData taosUcs4ToMbs error 2");
return TSDB_CODE_FAILED;
}
memcpy(((tVariant*)(fi->desc))->pz, newValData, len);
((tVariant*)(fi->desc))->nLen = len;
}
......@@ -3025,6 +3033,11 @@ static void doJsonCompare(SFilterComUnit *cunit, int8_t *result, void* colData){
}else{
char *newColData = calloc(cunit->dataSize * TSDB_NCHAR_SIZE + VARSTR_HEADER_SIZE, 1);
int len = taosUcs4ToMbs(varDataVal(realData), varDataLen(realData), varDataVal(newColData));
if (len < 0){
qError("doJsonCompare taosUcs4ToMbs error");
tfree(newColData);
return;
}
varDataSetLen(newColData, len);
tVariant* val = cunit->valData;
char newValData[TSDB_REGEX_STRING_DEFAULT_LEN * TSDB_NCHAR_SIZE + VARSTR_HEADER_SIZE] = {0};
......@@ -3113,9 +3126,13 @@ bool filterExecuteImplMisc(void *pinfo, int32_t numOfRows, int8_t** p, SDataStat
if(info->cunits[uidx].dataType == TSDB_DATA_TYPE_NCHAR && (info->cunits[uidx].optr == TSDB_RELATION_MATCH || info->cunits[uidx].optr == TSDB_RELATION_NMATCH)){
char *newColData = calloc(info->cunits[uidx].dataSize * TSDB_NCHAR_SIZE + VARSTR_HEADER_SIZE, 1);
int len = taosUcs4ToMbs(varDataVal(colData), varDataLen(colData), varDataVal(newColData));
varDataSetLen(newColData, len);
(*p)[i] = filterDoCompare(gDataCompare[info->cunits[uidx].func], info->cunits[uidx].optr, newColData, info->cunits[uidx].valData);
int32_t len = taosUcs4ToMbs(varDataVal(colData), varDataLen(colData), varDataVal(newColData));
if (len < 0){
qError("filterExecuteImplMisc taosUcs4ToMbs error");
}else{
varDataSetLen(newColData, len);
(*p)[i] = filterDoCompare(gDataCompare[info->cunits[uidx].func], info->cunits[uidx].optr, newColData, info->cunits[uidx].valData);
}
tfree(newColData);
}else if(info->cunits[uidx].dataType == TSDB_DATA_TYPE_JSON){
doJsonCompare(&(info->cunits[uidx]), &(*p)[i], colData);
......@@ -3170,9 +3187,13 @@ bool filterExecuteImpl(void *pinfo, int32_t numOfRows, int8_t** p, SDataStatis *
} else {
if(cunit->dataType == TSDB_DATA_TYPE_NCHAR && (cunit->optr == TSDB_RELATION_MATCH || cunit->optr == TSDB_RELATION_NMATCH)){
char *newColData = calloc(cunit->dataSize * TSDB_NCHAR_SIZE + VARSTR_HEADER_SIZE, 1);
int len = taosUcs4ToMbs(varDataVal(colData), varDataLen(colData), varDataVal(newColData));
varDataSetLen(newColData, len);
(*p)[i] = filterDoCompare(gDataCompare[cunit->func], cunit->optr, newColData, cunit->valData);
int32_t len = taosUcs4ToMbs(varDataVal(colData), varDataLen(colData), varDataVal(newColData));
if (len < 0){
qError("filterExecuteImpl taosUcs4ToMbs error");
}else{
varDataSetLen(newColData, len);
(*p)[i] = filterDoCompare(gDataCompare[cunit->func], cunit->optr, newColData, cunit->valData);
}
tfree(newColData);
}else if(cunit->dataType == TSDB_DATA_TYPE_JSON){
doJsonCompare(cunit, &(*p)[i], colData);
......@@ -3577,7 +3598,11 @@ int32_t filterConverNcharColumns(SFilterInfo* info, int32_t rows, bool *gotNchar
char *src = FILTER_GET_COL_FIELD_DATA(fi, j);
char *dst = FILTER_GET_COL_FIELD_DATA(&nfi, j);
int32_t len = 0;
taosMbsToUcs4(varDataVal(src), varDataLen(src), varDataVal(dst), bufSize, &len);
bool ret = taosMbsToUcs4(varDataVal(src), varDataLen(src), varDataVal(dst), bufSize, &len);
if(!ret) {
qError("filterConverNcharColumns taosMbsToUcs4 error");
return TSDB_CODE_FAILED;
}
varDataLen(dst) = len;
}
......
......@@ -1463,6 +1463,7 @@ static void *tsdbDecodeTable(void *buf, STable **pRTable) {
tsdbFreeTable(pTable);
return NULL;
}
taosHashSetFreeFp(pTable->jsonKeyMap, taosArrayDestroyForHash);
}else{
pTable->pIndex = tSkipListCreate(TSDB_SUPER_TABLE_SL_LEVEL, colType(pCol), (uint8_t)(colBytes(pCol)), NULL,
SL_ALLOW_DUP_KEY, getTagIndexKey);
......
......@@ -4243,20 +4243,28 @@ char* parseTagDatatoJson(void *p){
}
cJSON_AddItemToObject(json, tagJsonKey, value);
}else if(type == TSDB_DATA_TYPE_NCHAR) {
char *tagJsonValue = calloc(varDataLen(realData), 1);
int32_t length = taosUcs4ToMbs(varDataVal(realData), varDataLen(realData), tagJsonValue);
if (length < 0) {
tsdbError("charset:%s to %s. val:%s convert json value failed.", DEFAULT_UNICODE_ENCODEC, tsCharset,
(char*)val);
cJSON* value = NULL;
if (varDataLen(realData) > 0){
char *tagJsonValue = calloc(varDataLen(realData), 1);
int32_t length = taosUcs4ToMbs(varDataVal(realData), varDataLen(realData), tagJsonValue);
if (length < 0) {
tsdbError("charset:%s to %s. val:%s convert json value failed.", DEFAULT_UNICODE_ENCODEC, tsCharset,
(char*)val);
free(tagJsonValue);
goto end;
}
value = cJSON_CreateString(tagJsonValue);
free(tagJsonValue);
goto end;
}
cJSON* value = cJSON_CreateString(tagJsonValue);
free(tagJsonValue);
if (value == NULL)
{
goto end;
if (value == NULL)
{
goto end;
}
}else if(varDataLen(realData) == 0){
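// an empty nchar tag value maps to an empty JSON string; no charset
// conversion is needed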
value = cJSON_CreateString("");
}else{
assert(0);
}
cJSON_AddItemToObject(json, tagJsonKey, value);
}else if(type == TSDB_DATA_TYPE_DOUBLE){
double jsonVd = *(double*)(realData);
......
......@@ -282,7 +282,6 @@ TAOS_DEFINE_ERROR(TSDB_CODE_TDB_MESSED_MSG, "TSDB messed message")
TAOS_DEFINE_ERROR(TSDB_CODE_TDB_IVLD_TAG_VAL, "TSDB invalid tag value")
TAOS_DEFINE_ERROR(TSDB_CODE_TDB_NO_CACHE_LAST_ROW, "TSDB no cache last row data")
TAOS_DEFINE_ERROR(TSDB_CODE_TDB_INCOMPLETE_DFILESET, "Incomplete DFileSet")
TAOS_DEFINE_ERROR(TSDB_CODE_TDB_NO_JSON_TAG_KEY, "TSDB no tag json key")
// query
TAOS_DEFINE_ERROR(TSDB_CODE_QRY_INVALID_QHANDLE, "Invalid handle")
......
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../
WKC=`pwd`
cd ${WKC}/src/connector/C#
dotnet test
dotnet run --project src/test/Cases/Cases.csproj
cd ${WKC}/tests/examples/C#
dotnet run --project C#checker/C#checker.csproj
dotnet run --project TDengineTest/TDengineTest.csproj
dotnet run --project schemaless/schemaless.csproj
cd ${WKC}/tests/examples/C#/taosdemo
dotnet build -c Release
tree || true
./bin/Release/net5.0/taosdemo -c /etc/taos -y
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../
WKC=`pwd`
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../
WKC=`pwd`
cd ${WKC}/src/connector/nodejs
npm install
npm run test
cd ${WKC}/tests/examples/nodejs
npm install td2.0-connector > /dev/null 2>&1
node nodejsChecker.js host=localhost
node test1970.js
cd ${WKC}/tests/connectorTest/nodejsTest/nanosupport
npm install td2.0-connector > /dev/null 2>&1
node nanosecondTest.js
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../src/connector/python
pip3 install pytest
pytest tests/
python3 examples/bind-multi.py
python3 examples/bind-row.py
python3 examples/demo.py
python3 examples/insert-lines.py
python3 examples/pep-249.py
python3 examples/query-async.py
python3 examples/query-objectively.py
python3 examples/subscribe-sync.py
python3 examples/subscribe-async.py
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../
WKC=`pwd`
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../
WKC=`pwd`
bash 3-connectors/c#/test.sh
bash 3-connectors/go/test.sh
bash 3-connectors/java/test.sh
bash 3-connectors/nodejs/test.sh
bash 3-connectors/python/test.sh
bash 3-connectors/restful/test.sh
bash 3-connectors/rust/test.sh
python3 ./test.py -f 1-insert/batchInsert.py
python3 ./test.py -f 1-insert/batchInsert.py
python3 ./test.py -f 0-others/json_tag.py
python3 ./test.py -f 0-others/json_tag.py
python3 ./test.py -f 2-query/ts_hidden_column.py
python3 ./test.py -f 2-query/union-order.py
python3 ./test.py -f 2-query/session_two_stage.py
python3 ./test.py -f 2-query/ts_hidden_column.py
python3 ./test.py -f 2-query/union-order.py
python3 ./test.py -f 2-query/session_two_stage.py
python3 ./test.py -f 5-taos-tools/taosdump/taosdumpTestTypeJson.py
python3 ./test.py -f 5-taos-tools/taosdump/taosdumpTestTypeJson.py
#!/bin/bash
ulimit -c unlimited
#======================p1-start===============
# restful test for python
# python3 test.py -f restful/restful_bind_db1.py
# python3 test.py -f restful/restful_bind_db2.py
python3 ./test.py -f client/nettest.py
python3 ./test.py -f ../system-test/4-taosAdapter/taosAdapter_query.py
python3 ./test.py -f ../system-test/4-taosAdapter/taosAdapter_insert.py
#======================p1-end===============
ulimit -c unlimited
......@@ -4,7 +4,6 @@ ulimit -c unlimited
python3 testCompress.py
python3 testNoCompress.py
python3 ./test.py -f import_merge/importBlock1HO.py
python3 ./test.py -f import_merge/importBlock1HPO.py
python3 ./test.py -f import_merge/importBlock1H.py
......@@ -23,10 +22,6 @@ python3 ./test.py -f import_merge/importBlock2TPO.py
python3 ./test.py -f import_merge/importBlock2T.py
python3 ./test.py -f import_merge/importBlockbetween.py
python3 ./test.py -f import_merge/importCacheFileHO.py
#======================p1-end===============
#======================p2-start===============
python3 ./test.py -f import_merge/importCacheFileHPO.py
python3 ./test.py -f import_merge/importCacheFileH.py
python3 ./test.py -f import_merge/importCacheFileS.py
......@@ -48,10 +43,6 @@ python3 ./test.py -f import_merge/importDataLastTPO.py
python3 ./test.py -f import_merge/importDataLastT.py
python3 ./test.py -f import_merge/importDataS.py
python3 ./test.py -f import_merge/importDataSub.py
#======================p2-end===============
#======================p3-start===============
python3 ./test.py -f import_merge/importDataTO.py
python3 ./test.py -f import_merge/importDataTPO.py
python3 ./test.py -f import_merge/importDataT.py
......@@ -73,10 +64,6 @@ python3 ./test.py -f import_merge/importSpan.py
python3 ./test.py -f import_merge/importSRestart.py
python3 ./test.py -f import_merge/importSubRestart.py
python3 ./test.py -f import_merge/importTailOverlap.py
#======================p3-end===============
#======================p4-start===============
python3 ./test.py -f import_merge/importTailPartOverlap.py
python3 ./test.py -f import_merge/importTail.py
python3 ./test.py -f import_merge/importToCommit.py
......@@ -88,7 +75,6 @@ python3 ./test.py -f import_merge/importCSV.py
python3 ./test.py -f import_merge/import_update_0.py
python3 ./test.py -f import_merge/import_update_1.py
python3 ./test.py -f import_merge/import_update_2.py
python3 ./test.py -f insert/basic.py
python3 ./test.py -f insert/int.py
python3 ./test.py -f insert/float.py
......@@ -98,8 +84,6 @@ python3 ./test.py -f insert/double.py
python3 ./test.py -f insert/smallint.py
python3 ./test.py -f insert/tinyint.py
python3 ./test.py -f insert/date.py
python3 ./test.py -f insert/binary.py
python3 ./test.py -f insert/nchar.py
#python3 ./test.py -f insert/nchar-boundary.py
......@@ -133,41 +117,21 @@ python3 ./test.py -f insert/verifyMemToDiskCrash.py
#python3 ./test.py -f insert/schemalessInsert.py
#python3 ./test.py -f insert/openTsdbJsonInsert.py
python3 ./test.py -f insert/openTsdbTelnetLinesInsert.py
# update
python3 ./test.py -f update/merge_commit_data.py
python3 ./test.py -f update/allow_update.py
python3 ./test.py -f update/allow_update-0.py
python3 ./test.py -f update/append_commit_data.py
python3 ./test.py -f update/append_commit_last-0.py
python3 ./test.py -f update/append_commit_last.py
python3 ./test.py -f update/merge_commit_data2.py
python3 ./test.py -f update/merge_commit_data2_update0.py
python3 ./test.py -f update/merge_commit_last-0.py
python3 ./test.py -f update/merge_commit_last.py
python3 ./test.py -f update/update_options.py
python3 ./test.py -f update/merge_commit_data-0.py
# wal
python3 ./test.py -f wal/addOldWalTest.py
python3 ./test.py -f wal/sdbComp.py
#======================p4-end===============
#======================p5-start===============
python3 ./test.py -f ../system-test/1-insert/0-sql/basic.py
python3 ./test.py -f ../develop-test/1-insert/0-sql/basic.py
python3 ./test.py -f ../develop-test/1-insert/0-sql/batchInsert.py
#======================p5-end===============
......
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys
import os
import taos
from util.log import tdLog
from util.cases import tdCases
from util.sql import tdSql
import time
import random
import datetime
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
# def assertCheck(self, filename, queryResult, expectResult):
# self.filename = filename
# self.queryResult = queryResult
# self.expectResult = expectResult
# args0 = (filename, queryResult, expectResult)
# assert queryResult == expectResult, "Queryfile:%s ,result is %s != expect: %s" % args0
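# assertfileDataExport walks an exported CSV file line by line and extracts the
# first column (the taosc query result); the comparison itself is currently commented out.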
def assertfileDataExport(self, filename, expectResult):
self.filename = filename
self.expectResult = expectResult
with open("%s" % filename, 'r+') as f1:
for line in f1.readlines():
queryResultTaosc = line.strip().split(',')[0]
# self.assertCheck(filename, queryResultTaosc, expectResult)
def run(self):
starttime = 1537146000000
tdSql.prepare()
tdSql.execute("drop database if exists db_json;")
print("==============step1 tag format =======")
tdLog.info("create database db_jsonB ")
tdSql.execute("create database db_jsonB")
tdSql.execute("use db_jsonB")
# benchmark: create 500 child tables, each tagged with a large multi-key JSON value, then time two range queries
tdSql.execute("create table if not exists jsons1(ts timestamp,dataFloat float) tags(jtag json)")
for numTables in range(500):
json = "{\"loc1%d\":\"beijingandshanghaiandchangzhouandshijiazhuanganda%d\",\"loc2%d\":\"beijingandshanghaiandchangzhouandshijiazhuangandb%d\" ,\"loc3%d\":\"beijingandshanghaiandchangzhouandshijiazhuangandc%d\",\
\"loc4%d\":\"beijingandshanghaiandchangzhouandshijiazhuangandd%d\",\"loc5%d\":\"beijingandshanghaiandchangzhouandshijiazhuangande%d\",\"loc6%d\":\"beijingandshanghaiandchangzhouandshijiazhuangandf%d\",\
\"loc7%d\":\"beijingandshanghaiandchangzhouandshijiazhuangandg%d\"}"% (numTables,numTables,numTables,numTables,numTables,numTables,numTables,numTables,numTables,numTables,numTables,numTables,numTables,numTables)
print(json)
createTableSqls = "create table if not exists jsons1_%d using jsons1 tags('%s')" %(numTables,json)
print(createTableSqls)
tdLog.info(createTableSqls)
tdSql.execute(createTableSqls)
for numRecords in range(1,101):
dataFloatSql=numRecords*10+numRecords*0.01
insertDataSqls = "insert into jsons1_%d values(%d+%ds, %f) " %(numTables,starttime,numRecords,dataFloatSql)
tdLog.info(insertDataSqls)
tdSql.execute(insertDataSqls)
tdSql.execute("use db_jsonB")
now_time1 = datetime.datetime.now()
tdSql.query("select * from jsons1 where ts>1537145900000 and ts<1537156000000;")
spendTimes1 = datetime.datetime.now() - now_time1
print(spendTimes1)
now_time2 = datetime.datetime.now()
tdSql.query("select * from jsons1 where ts>1537156000000;")
spendTimes2 = datetime.datetime.now() - now_time2
print(spendTimes2)
tdSql.execute("drop database db_jsonB")
testcaseFilename = os.path.split(__file__)[-1]
os.system("rm -rf ./insert_res.txt")
os.system("rm -rf tools/taosdemoAllTest/%s.sql" % testcaseFilename )
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
......@@ -12,6 +12,7 @@
# -*- coding: utf-8 -*-
import sys
import os
import taos
from util.log import tdLog
from util.cases import tdCases
......@@ -25,6 +26,21 @@ class TDTestCase:
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
# def assertCheck(self, filename, queryResult, expectResult):
# self.filename = filename
# self.queryResult = queryResult
# self.expectResult = expectResult
# args0 = (filename, queryResult, expectResult)
# assert queryResult == expectResult, "Queryfile:%s ,result is %s != expect: %s" % args0
def assertfileDataExport(self, filename, expectResult):
self.filename = filename
self.expectResult = expectResult
with open("%s" % filename, 'r+') as f1:
for line in f1.readlines():
queryResultTaosc = line.strip().split(',')[0]
# self.assertCheck(filename, queryResultTaosc, expectResult)
def run(self):
tdSql.prepare()
tdSql.execute("drop database if exists db_json;")
......@@ -62,8 +78,18 @@ class TDTestCase:
tdSql.error("CREATE TABLE if not exists jsons4_1 using jsons4 tags('{\"%s1\":5}')" % char2) # len(key)=257
tdSql.execute("CREATE TABLE if not exists jsons4_2 using jsons4 tags('{\"T\":\"%s\"}')" % char3) # len(object)=4096
tdSql.error("CREATE TABLE if not exists jsons4_2 using jsons4 tags('{\"TS\":\"%s\"}')" % char3) # len(object)=4097
# test the min/max range of the double type; int64 is not covered here
tdSql.error("CREATE TABLE if not exists jsons4_3 using jsons4 tags('{\"doublength\":-1.8e308}')")
tdSql.error("CREATE TABLE if not exists jsons4_3 using jsons4 tags('{\"doublength\":1.8e308}')")
tdSql.execute("CREATE TABLE if not exists jsons4_4 using jsons4 tags('{\"doublength\":-1.7e308}')")
tdSql.execute("CREATE TABLE if not exists jsons4_5 using jsons4 tags('{\"doublength\":1.71e308}')")
tdSql.query("select jtag from jsons4 where jtag->'doublength'<-1.69e+308;")
tdSql.checkRows(1)
tdSql.query("select jtag from jsons4 where jtag->'doublength'>1.7e+308;")
tdSql.checkRows(1)
tdSql.execute("insert into jsons1_1 values(now, 1, 'json1')")
tdSql.execute("insert into jsons1_1 values(now+2s, 1, 'json1')")
tdSql.execute("insert into jsons1_1 values(now+1s, 1, 'json1')")
tdSql.execute("insert into jsons1_2 using jsons1 tags('{\"num\":5,\"location\":\"beijing\"}') values (now, 1, 'json2')")
tdSql.execute("insert into jsons1_3 using jsons1 tags('{\"num\":34,\"location\":\"beijing\",\"level\":\"l1\"}') values (now, 1, 'json3')")
......@@ -194,15 +220,12 @@ class TDTestCase:
tdSql.checkRows(0)
# # test where condition in
# tdSql.query("select * from jsons1 where jtag->'location' in ('beijing')")
tdSql.error("select * from jsons1 where jtag->'location' in ('beijing')")
# tdSql.checkRows(3)
# tdSql.query("select * from jsons1 where jtag->'num' in (5,34)")
tdSql.error("select * from jsons1 where jtag->'num' in (5,34)")
# tdSql.checkRows(2)
# tdSql.error("select * from jsons1 where jtag->'num' in ('5',34)")
# tdSql.query("select * from jsons1 where jtag->'location' in ('beijing') and jtag->'class'=55")
tdSql.error("select * from jsons1 where jtag->'num' in ('5',34)")
tdSql.error("select * from jsons1 where jtag->'location' in ('beijing') and jtag->'class'=55")
# tdSql.checkRows(1)
# test where condition match
......@@ -412,8 +435,25 @@ class TDTestCase:
tdSql.query(" select stddev(dataint) from jsons1 where jtag->'location'='beijing';")
tdSql.checkRows(1)
tdSql.error(" select LEASTSQUARES(dataint,1,2) from jsons1_1 where jtag->'location' ='beijing' ;")
tdSql.query("select count(jtag) from jsons1 ;")
tdSql.checkData(0, 0, 15)
tdSql.error("select count( jtag->'location'='beijing') from jsons1 ;")
tdSql.error("select count( jtag contains 'age') from jsons1 ;")
functionName = ['avg','twa','irate','stddev','leastsquares']
print(functionName)
for fn in functionName:
tdSql.error("select %s( jtag) from jsons1 ;"%fn)
tdSql.error("select %s( jtag->'location'='beijing') from jsons1 ;"%fn)
tdSql.error("select %s( jtag contains 'age') from jsons1 ;"%fn)
# tdSql.error("select avg( jtag) from jsons1 ;")
# tdSql.error("select avg( jtag->'location'='beijing') from jsons1 ;")
# tdSql.error("select avg( jtag contains 'age') from jsons1 ;")
# Select_exprs: SQL selection functions
......@@ -467,6 +507,13 @@ class TDTestCase:
tdSql.checkRows(4)
tdSql.checkData(0,1,2)
tdSql.checkData(2,1,4)
# error cases
functionName = ['min','max','last','TOP','last_row','bottom','apercentile','interp']
print(functionName)
for fn in functionName:
tdSql.error("select %s( jtag) from jsons1 ;"%fn)
tdSql.error("select %s( jtag->'location'='beijing') from jsons1 ;"%fn)
tdSql.error("select %s( jtag contains 'age') from jsons1 ;"%fn)
# Select_exprs: SQL calculation functions
tdSql.error(" select diff(dataint) from jsons1 where jtag->'location'= 'beijing' or jtag->'location'= 'tianjing'or jtag contains 'num' or jtag->'age'=35 ;")
......@@ -500,13 +547,21 @@ class TDTestCase:
tdSql.query("select ts,round(dataint),round(datafloat),round(datadouble) from jsons7 where jtag contains 'tea';")
tdSql.query("select round(dataint),round(datafloat),round(datadouble) from jsons7 where jtag contains 'tea';")
functionName = ['diff','Derivative','SPREAD','ceil','round','floor']
print(functionName)
for fn in functionName:
tdSql.error("select %s( jtag) from jsons1 ;"%fn)
tdSql.error("select %s( jtag->'location'='beijing') from jsons1 ;"%fn)
tdSql.error("select %s( jtag contains 'age') from jsons1 ;"%fn)
# modify the same key with different data types, including a negative double value
tdSql.execute("insert into jsons7_4 using jsons7 tags('{\"nv\":null,\"tea\":123,\"tag\":123,\"tea\":false}') values (now+1s,5,'true',4.01,2.2,'abc'); ")
tdSql.execute("insert into jsons7_5 using jsons7 tags('{\"nv\":null,\"tea\":\"app\",\"tag\":123,\"tea\":false}') values (now+2s,5,'true',4.01,2.2,'abc'); ")
tdSql.error("insert into jsons7_6 using jsons7 tags('{\"nv\":null,\"tea\":-1.111111111111111111111111111111111111111111111111111111111111111111111,\"tag\":123,\"tea\":false}') values (now+3s,5,'true',4.01,2.2,'123'); ")
tdSql.execute("insert into jsons7_6 using jsons7 tags('{\"nv\":null,\"tea\":-1.111111111,\"tag\":123,\"tea\":false}') values (now,5,'false',4.01,2.2,'t123'); ")
tdSql.query("select jtag from jsons7 where jtag->'tea'>-1.01;")
# tdSql.checkRows(2)
tdSql.query("select jtag from jsons7 where jtag->'tea'<-1.01;")
tdSql.checkRows(1)
# test join
tdSql.execute("create table if not exists jsons6(ts timestamp, dataInt int, dataBool bool, dataStr nchar(50)) tags(jtag json)")
......@@ -572,6 +627,36 @@ class TDTestCase:
tdSql.query(" select stddev(dataint) from jsons8 group by datatime;")
tdSql.error(" select stddev(datatime) from jsons8 group by datadouble;")
# # verify the tag length of the super table and the child table
# TD-12389
# tdSql.query("describe jsons1;")
# jtagLengthSup=tdSql.queryResult[3][2]
# tdSql.query("describe jsons1_1;")
# tdSql.checkData(3, 2, jtagLengthSup)
# #test import and export
# tdSql.execute("select * from jsons1 >> jsons1_data.csv;")
# tdSql.query("select * from jsons1 ")
# with open("./jsons1_data.csv", 'r+') as f1:
# # count=len(open("./jsons1_data.csv",'rU').readlines())
# # print(count)
# rows=0
# for line in f1.readlines():
# # for columns in range(4):  # will be replaced with the column length later; for now it is set to a fixed value
# queryResultInt = line.strip().split(',')[1]
# # queryResultTag = line.strip().split(',')[3]
# # for rows in range(9):
# # print(rows,1,queryResultInt,queryResultTag)
# tdSql.checkData(rows, 1, "%s" %queryResultInt)
# # tdSql.checkData(rows, 3, "%s" %queryResultTag)
# rows +=1
# # test taos -f
# os.system("taos -f stable/json_tag_extra.py.sql ")
# tdSql.execute("use db_json")
# tdSql.query("select * from jsons1")
# tdSql.checkRows(9)
# # test drop tables and databases
# tdSql.execute("drop table jsons1_1")
......@@ -579,7 +664,10 @@ class TDTestCase:
# tdSql.execute("drop stable jsons3")
# tdSql.execute("drop stable jsons2")
# tdSql.execute("drop database db_json")
testcaseFilename = os.path.split(__file__)[-1]
os.system("rm -rf ./insert_res.txt")
os.system("rm -rf tools/taosdemoAllTest/%s.sql" % testcaseFilename )
def stop(self):
......
......@@ -42,7 +42,7 @@
"batch_create_tbl_num": 20,
"data_source": "rand",
"insert_mode": "taosc",
"insert_rows": 150,
"insert_rows": 100,
"childtable_limit": -1,
"childtable_offset":0,
"multi_thread_write_one_tbl": "no",
......@@ -58,6 +58,138 @@
"tags_file": "",
"columns": [{"type": "INT"}, {"type": "DOUBLE", "count":1}, {"type": "BINARY", "len": 16, "count":2}, {"type": "nchar", "len": 32, "count":2}],
"tags": [{"type": "TINYINT", "count":2}, {"type": "BINARY","count":1}, {"type": "nchar", "count":2}]
},
{
"name": "stb1",
"child_table_exists":"no",
"childtable_count": 20,
"childtable_prefix": "stb00_",
"auto_create_table": "no",
"batch_create_tbl_num": 20,
"data_source": "rand",
"insert_mode": "rest",
"insert_rows": 100,
"childtable_limit": -1,
"childtable_offset":0,
"multi_thread_write_one_tbl": "no",
"interlace_rows": 0,
"insert_interval":0,
"max_sql_len": 1024000,
"disorder_ratio": 0,
"disorder_range": 1000,
"timestamp_step": 1,
"start_timestamp": "2020-10-01 00:00:00.000",
"sample_format": "csv",
"sample_file": "./sample.csv",
"tags_file": "",
"columns": [{"type": "INT"}, {"type": "DOUBLE", "count":1}, {"type": "BINARY", "len": 16, "count":2}, {"type": "nchar", "len": 32, "count":2}],
"tags": [{"type": "TINYINT", "count":2}, {"type": "BINARY","count":1}, {"type": "nchar", "count":2}]
},
{
"name": "stb2",
"child_table_exists":"no",
"childtable_count": 30,
"childtable_prefix": "stb00_",
"auto_create_table": "no",
"batch_create_tbl_num": 20,
"data_source": "rand",
"insert_mode": "stmt",
"insert_rows": 100,
"childtable_limit": -1,
"childtable_offset":0,
"multi_thread_write_one_tbl": "no",
"interlace_rows": 0,
"insert_interval":0,
"max_sql_len": 1024000,
"disorder_ratio": 0,
"disorder_range": 1000,
"timestamp_step": 1,
"start_timestamp": "2020-10-01 00:00:00.000",
"sample_format": "csv",
"sample_file": "./sample.csv",
"tags_file": "",
"columns": [{"type": "INT"}, {"type": "DOUBLE", "count":1}, {"type": "BINARY", "len": 16, "count":2}, {"type": "nchar", "len": 32, "count":2}],
"tags": [{"type": "TINYINT", "count":2}, {"type": "BINARY","count":1}, {"type": "nchar", "count":2}]
},
{
"name": "stb3",
"child_table_exists":"no",
"childtable_count": 40,
"childtable_prefix": "stb00_",
"auto_create_table": "no",
"batch_create_tbl_num": 20,
"data_source": "rand",
"insert_mode": "sml",
"insert_rows": 100,
"childtable_limit": -1,
"childtable_offset":0,
"multi_thread_write_one_tbl": "no",
"interlace_rows": 0,
"insert_interval":0,
"max_sql_len": 1024000,
"disorder_ratio": 0,
"disorder_range": 1000,
"timestamp_step": 1,
"start_timestamp": "2020-10-01 00:00:00.000",
"sample_format": "csv",
"sample_file": "./sample.csv",
"tags_file": "",
"columns": [{"type": "INT"}, {"type": "DOUBLE", "count":1}, {"type": "BINARY", "len": 16, "count":2}, {"type": "nchar", "len": 32, "count":2}],
"tags": [{"type": "TINYINT", "count":2}, {"type": "BINARY","count":1}, {"type": "nchar", "count":2}]
},
{
"name": "stb4",
"child_table_exists":"no",
"childtable_count": 50,
"childtable_prefix": "stb00_",
"auto_create_table": "no",
"batch_create_tbl_num": 20,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "telnet",
"insert_rows": 100,
"childtable_limit": -1,
"childtable_offset":0,
"multi_thread_write_one_tbl": "no",
"interlace_rows": 0,
"insert_interval":0,
"max_sql_len": 1024000,
"disorder_ratio": 0,
"disorder_range": 1000,
"timestamp_step": 1,
"start_timestamp": "2020-10-01 00:00:00.000",
"sample_format": "csv",
"sample_file": "./sample.csv",
"tags_file": "",
"columns": [{"type": "INT"}],
"tags": [{"type": "TINYINT", "count":2}]
},
{
"name": "stb5",
"child_table_exists":"no",
"childtable_count": 60,
"childtable_prefix": "stb00_",
"auto_create_table": "no",
"batch_create_tbl_num": 20,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "json",
"insert_rows": 100,
"childtable_limit": -1,
"childtable_offset":0,
"multi_thread_write_one_tbl": "no",
"interlace_rows": 0,
"insert_interval":0,
"max_sql_len": 1024000,
"disorder_ratio": 0,
"disorder_range": 1000,
"timestamp_step": 1,
"start_timestamp": "2020-10-01 00:00:00.000",
"sample_format": "csv",
"sample_file": "./sample.csv",
"tags_file": "",
"columns": [{"type": "INT"}],
"tags": [{"type": "TINYINT"}]
}]
}]
}
......@@ -85,16 +85,16 @@
"tags": [{"type": "TINYINT", "count":1}]
},
{
"name": "stb3",
"name": "stb2",
"child_table_exists":"no",
"childtable_count": 5,
"childtable_count": 3,
"childtable_prefix": "stb03_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "json" ,
"insert_rows":50,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
......@@ -111,16 +111,16 @@
"tags": [{"type": "BIGINT", "count":1}]
},
{
"name": "stb4",
"name": "stb3",
"child_table_exists":"no",
"childtable_count": 20,
"childtable_count": 4,
"childtable_prefix": "stb04_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "json" ,
"insert_rows":50,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
......@@ -137,16 +137,16 @@
"tags": [{"type": "SMALLINT", "count":1}]
},
{
"name": "stb5",
"name": "stb4",
"child_table_exists":"no",
"childtable_count": 40,
"childtable_count": 5,
"childtable_prefix": "stb05_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "json" ,
"insert_rows":50,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
......@@ -163,16 +163,16 @@
"tags": [{"type": "FLOAT", "count":1}]
},
{
"name": "stb6",
"name": "stb5",
"child_table_exists":"no",
"childtable_count": 15,
"childtable_count": 6,
"childtable_prefix": "stb06_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "json" ,
"insert_rows":50,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
......@@ -188,17 +188,43 @@
"columns": [{"type": "DOUBLE"}],
"tags": [{"type": "DOUBLE", "count":1}]
},
{
"name": "stb6",
"child_table_exists":"no",
"childtable_count": 7,
"childtable_prefix": "stb01_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "telnet" ,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
"insert_interval":0,
"max_sql_len": 1025000,
"disorder_ratio": 0,
"disorder_range": 1000,
"timestamp_step": 1,
"start_timestamp": "2012-10-01 00:00:00.000",
"sample_format": "csv",
"sample_file": "./sample.csv",
"tags_file": "",
"columns": [{"type": "UINT"}],
"tags": [{"type": "UINT", "count":1}]
},
{
"name": "stb7",
"child_table_exists":"no",
"childtable_count": 10,
"childtable_count": 8,
"childtable_prefix": "stb07_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "json" ,
"insert_rows":50,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
......@@ -217,14 +243,14 @@
{
"name": "stb8",
"child_table_exists":"no",
"childtable_count": 20,
"childtable_count": 9,
"childtable_prefix": "stb08_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "json" ,
"insert_rows":50,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
......@@ -243,14 +269,14 @@
{
"name": "stb9",
"child_table_exists":"no",
"childtable_count": 3,
"childtable_count": 10,
"childtable_prefix": "stb09_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "json" ,
"insert_rows":50,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
......@@ -269,14 +295,14 @@
{
"name": "stb10",
"child_table_exists":"no",
"childtable_count": 3,
"childtable_count": 11,
"childtable_prefix": "stb10_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "telnet" ,
"insert_rows":50,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
......@@ -295,14 +321,14 @@
{
"name": "stb11",
"child_table_exists":"no",
"childtable_count": 3,
"childtable_count": 12,
"childtable_prefix": "stb11_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "telnet" ,
"insert_rows":50,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
......@@ -321,14 +347,14 @@
{
"name": "stb12",
"child_table_exists":"no",
"childtable_count": 3,
"childtable_count": 13,
"childtable_prefix": "stb12_",
"auto_create_table": "no",
"batch_create_tbl_num": 100,
"data_source": "rand",
"insert_mode": "sml",
"line_protocol": "telnet" ,
"insert_rows":50,
"insert_rows":10,
"childtable_limit": -1,
"childtable_offset":0,
"interlace_rows": 32767,
......
......@@ -97,15 +97,15 @@ class TDTestCase:
tdSql.checkData(0, 0, 4000)
# # insert-interface: sml-json
# os.system("%staosBenchmark -f tools/taosdemoAllTest/sml/insert-sml-json-alltype.json -y " % binPath)
# tdSql.execute("use db")
# tdSql.query("show stables")
# for i in range(13):
# for j in range(13):
# if tdSql.queryResult[i][0] == 'stb%d'%j:
# # print(i,"stb%d"%j)
# tdSql.checkData(i, 4, j+1)
# insert-interface: sml-json
os.system("%staosBenchmark -f tools/taosdemoAllTest/sml/insert-sml-json-alltype.json -y " % binPath)
tdSql.execute("use db")
tdSql.query("show stables")
for i in range(13):
for j in range(13):
if tdSql.queryResult[i][0] == 'stb%d'%j:
# print(i,"stb%d"%j)
tdSql.checkData(i, 4, j+1)
# insert-interface: sml-telnet
......
......@@ -95,18 +95,18 @@ class TDTestCase:
tdSql.query("select count(*) from `test.0`")
tdSql.checkData(0, 0, 100)
# tdLog.info("use diffrent interface rest")
# tdSql.execute("drop database db1;")
# # use diffrent interface -rest
# os.system("%staosBenchmark -u root -c %s -h localhost -P 6030 -d db1 -a 1 -l 10 -b float,int,NCHAR\(15\) -w 4097 \
# -T 8 -i 10 -S 1000 -r 1000000 -t 10 -n 100 -M -x -y -O 10 -R 100 -E -m test. -I rest " % (binPath,cfgPath))
# tdSql.execute("use db1")
# tdSql.query("select count(*) from meters")
# tdSql.checkData(0, 0, 1000)
# tdSql.query("select count(tbname) from meters")
# tdSql.checkData(0, 0, 10)
# tdSql.query("select count(*) from `test.0`")
# tdSql.checkData(0, 0, 100)
tdLog.info("use diffrent interface rest")
tdSql.execute("drop database db1;")
# use diffrent interface -rest
os.system("%staosBenchmark -u root -c %s -h localhost -P 6030 -d db1 -a 1 -l 10 -b float,int,NCHAR\(15\) -w 4097 \
-T 8 -i 10 -S 1000 -r 1000000 -t 10 -n 100 -M -x -y -O 10 -R 100 -E -m test. -I rest " % (binPath,cfgPath))
tdSql.execute("use db1")
tdSql.query("select count(*) from meters")
tdSql.checkData(0, 0, 1000)
tdSql.query("select count(tbname) from meters")
tdSql.checkData(0, 0, 10)
tdSql.query("select count(*) from `test.0`")
tdSql.checkData(0, 0, 100)
tdLog.info("use diffrent interface sml")
tdSql.execute("drop database db1;")
......@@ -121,7 +121,7 @@ class TDTestCase:
tdLog.info("all data type")
tdSql.execute("drop database db1;")
# all data type
# all data type-taosc
os.system("%staosBenchmark -u root -c %s -h localhost -P 6030 -d db1 -a 1 -l 10 \
-b INT,TIMESTAMP,BIGINT,FLOAT,DOUBLE,SMALLINT,TINYINT,BOOL,UINT,UBIGINT,UTINYINT,USMALLINT,BINARY\(15\),NCHAR\(15\) -w 4096 \
-T 8 -i 10 -S 1000 -r 1000000 -t 10 -n 100 -M -x -y -O 10 -R 100 -E -m test. " % (binPath,cfgPath))
......@@ -132,6 +132,43 @@ class TDTestCase:
tdSql.checkData(0, 0, 10)
tdSql.query("select count(*) from `test.0`")
tdSql.checkData(0, 0, 100)
tdLog.info("all data type")
tdSql.execute("drop database db1;")
# all data type-stmt
os.system("%staosBenchmark -u root -c %s -h localhost -P 6030 -d db1 -a 1 -l 10 \
-b INT,TIMESTAMP,BIGINT,FLOAT,DOUBLE,SMALLINT,TINYINT,BOOL,UINT,UBIGINT,UTINYINT,USMALLINT,BINARY\(15\),NCHAR\(15\) -w 4096 \
-T 8 -i 10 -S 1000 -r 1000000 -t 10 -n 100 -M -x -y -O 10 -R 100 -E -m test. -I stmt " % (binPath,cfgPath))
tdSql.execute("use db1")
tdSql.query("select count(*) from meters")
tdSql.checkData(0, 0, 1000)
tdSql.query("select count(tbname) from meters")
tdSql.checkData(0, 0, 10)
tdSql.query("select count(*) from `test.0`")
tdSql.checkData(0, 0, 100)
# all data type-rest
os.system("%staosBenchmark -u root -c %s -h localhost -P 6030 -d db1 -a 1 -l 10 \
-b INT,TIMESTAMP,BIGINT,FLOAT,DOUBLE,SMALLINT,TINYINT,BOOL,UINT,UBIGINT,UTINYINT,USMALLINT,BINARY\(15\),NCHAR\(15\) -w 4096 \
-T 8 -i 10 -S 1000 -r 1000000 -t 10 -n 100 -M -x -y -O 10 -R 100 -E -m test. -I rest " % (binPath,cfgPath))
tdSql.execute("use db1")
tdSql.query("select count(*) from meters")
tdSql.checkData(0, 0, 1000)
tdSql.query("select count(tbname) from meters")
tdSql.checkData(0, 0, 10)
tdSql.query("select count(*) from `test.0`")
tdSql.checkData(0, 0, 100)
# # all data type-rest
# os.system("%staosBenchmark -u root -c %s -h localhost -P 6030 -d db1 -a 1 -l 10 \
# -b INT,BIGINT,FLOAT,DOUBLE,SMALLINT,TINYINT,BOOL,UINT,UBIGINT,UTINYINT,USMALLINT,BINARY\(15\),NCHAR\(15\) -w 4096 \
# -T 8 -i 10 -S 1000 -r 1000000 -t 10 -n 100 -M -x -y -O 10 -R 100 -E -m test. -I sml " % (binPath,cfgPath))
# tdSql.execute("use db1")
# tdSql.query("select count(*) from meters")
# tdSql.checkData(0, 0, 1000)
# tdSql.query("select count(tbname) from meters")
# tdSql.checkData(0, 0, 10)
# # tdSql.query("select count(*) from `test.0`")
# # tdSql.checkData(0, 0, 100)
tdLog.info("all data type and interlace rows")
tdSql.execute("drop database db1;")
......@@ -164,11 +201,18 @@ class TDTestCase:
tdSql.query("select count(*) from `test.0`")
tdSql.checkData(0, 0, 100)
# tdLog.info("%staosBenchmark -u root -c %s -h localhost -P 6030 -d db1 -a 1 -l 10 -b float,int,NCHAR\(4096\) \
# -w 40 -T 8 -i 10 -S 1000 -r 1000000 -t 10 -n 100 -M -x -y -O 10 -R 100 -E -m test. -I stmt" % (binPath,cfgPath))
# # taosdemo error-exceeds max length
# assert os.system("%staosBenchmark -u root -c %s -h localhost -P 6030 -d db1 -a 1 -l 10 -b float,int,NCHAR\(4096\) \
# -w 40 -T 8 -i 10 -S 1000 -r 1000000 -t 10 -n 100 -M -x -y -O 10 -R 100 -E -m test. -I taosc" % (binPath,cfgPath)) != 0
# taosdemo error: exceeds max length, so the command must fail
sql = "%staosBenchmark -u root -c %s -h localhost -P 6030 -d db1 -a 1 -l 10 -b float,int,NCHAR\(4096\) \
-w 40 -T 8 -i 10 -S 1000 -r 1000000 -t 10 -n 100 -M -x -y -O 10 -R 100 -E -m test. -I taosc" % (binPath,cfgPath)
tdLog.info("%s" % sql )
assert os.system("%s" % sql ) != 0
# error password
sql = "%staosBenchmark -u root -c %s -h localhost -P 6030 -p123 -d db1 -a 1 -l 10 -b float,int,NCHAR\(40\) \
-w 40 -T 8 -i 10 -S 1000 -r 1000000 -t 10 -n 100 -M -x -y -O 10 -R 100 -E -m test. -I stmt" % (binPath,cfgPath)
tdLog.info("%s" % sql )
assert os.system("%s" % sql ) != 0
testcaseFilename = os.path.split(__file__)[-1]
os.system("rm -rf ./insert_res*.txt*")
......
......@@ -74,22 +74,23 @@ class TDTestCase:
tdSql.query("select count(*) from stb1")
tdSql.checkData(0, 0, 3000)
# # insert: use the "insert_interval" parameter to control the insert speed.
# # but we still need accurate ways to verify the speed, such as reading the speed value and checking row counts.
# os.system("%staosBenchmark -f tools/taosdemoAllTest/sml/insert-interval-speed-sml.json -y" % binPath)
# tdSql.execute("use db")
# tdSql.query("select tbname from db.stb0")
# tdSql.checkRows(100 )
# # tdSql.query("select count(*) from stb00_0")
# # tdSql.checkData(0, 0, 20)
# tdSql.query("select count(*) from stb0")
# tdSql.checkData(0, 0, 2000)
# tdSql.query("show stables")
# tdSql.checkData(1, 4, 20)
# # tdSql.query("select count(*) from stb01_0")
# # tdSql.checkData(0, 0, 35)
# tdSql.query("select count(*) from stb1")
# tdSql.checkData(0, 0, 700)
# insert: use the "insert_interval" parameter to control the insert speed.
# but we still need accurate ways to verify the speed, such as reading the speed value and checking row counts.
os.system("%staosBenchmark -f tools/taosdemoAllTest/sml/insert-interval-speed-sml.json -y" % binPath)
tdSql.execute("use db")
# tdSql.query("select count (tbname) from stb0")
tdSql.query("select tbname from db.stb0")
tdSql.checkRows(100 )
# tdSql.query("select count(*) from stb00_0")
# tdSql.checkData(0, 0, 20)
tdSql.query("select count(*) from stb0")
tdSql.checkData(0, 0, 2000)
tdSql.query("show stables")
tdSql.checkData(1, 4, 20)
# tdSql.query("select count(*) from stb01_0")
# tdSql.checkData(0, 0, 35)
tdSql.query("select count(*) from stb1")
tdSql.checkData(0, 0, 700)
# these 3 test cases take about 2min30s.
# insert: drop and child_table_exists combination test
......@@ -134,10 +135,13 @@ class TDTestCase:
# os.system("%staosBenchmark -f tools/taosdemoAllTest/sml/insertColumnsAndTagNum4096-sml.json -y " % binPath)
# tdSql.query("select count(*) from db.stb0")
# tdSql.checkData(0, 0, 10000)
# there is no longer a 4096-column limit, so this case is cancelled
# tdSql.execute("drop database if exists db")
# os.system("%staosBenchmark -f tools/taosdemoAllTest/sml/insertInterlaceRowsLarge1M-sml.json -y " % binPath)
# tdSql.query("select count(*) from db.stb0")
# tdSql.checkRows(0)
tdSql.execute("drop database if exists db")
os.system("%staosBenchmark -f tools/taosdemoAllTest/sml/insertColumnsNum0-sml.json -y " % binPath)
tdSql.execute("use db")
......
###################################################################
# Copyright (c) 2020 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import taos
import time
from util.log import *
from util.cases import *
from util.sql import *
from util.dnodes import *
class TDTestCase:
def __init__(self):
self.err_case = 0
self.curret_case = 0
def caseDescription(self):
'''
case1 <cpwu>: [TD-11970] : no error should be returned when creating a table using now±Ntime.
'''
return
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
def check_td11970(self):
# this case expects every create table statement using now±Ntime to succeed.
tdSql.prepare()
tdSql.execute(f"create stable stb1(ts timestamp, c1 int) tags (tag1 int, tag2 timestamp)")
try:
tdSql.execute(f"create table t1 using stb1 tags(1, now-100b)")
tdSql.execute(f"create table t2 using stb1 tags(2, now-100u)")
tdSql.execute(f"create table t3 using stb1 tags(3, now-100a)")
tdSql.execute(f"create table t4 using stb1 tags(4, now-100s)")
tdSql.execute(f"create table t5 using stb1 tags(5, now-100m)")
tdSql.execute(f"create table t6 using stb1 tags(6, now-100h)")
tdSql.execute(f"create table t7 using stb1 tags(7, now-100d)")
tdSql.execute(f"create table t8 using stb1 tags(8, now-100w)")
tdSql.execute(f"create table t9 using stb1 tags(9, now+10b)")
tdSql.execute(f"create table t10 using stb1 tags(10, now+10u)")
tdSql.execute(f"create table t11 using stb1 tags(11, now+10a)")
tdSql.execute(f"create table t12 using stb1 tags(12, now+10s)")
tdSql.execute(f"create table t13 using stb1 tags(13, now+10m)")
tdSql.execute(f"create table t14 using stb1 tags(14, now+10h)")
tdSql.execute(f"create table t15 using stb1 tags(15, now+10d)")
tdSql.execute(f"create table t16 using stb1 tags(16, now+10w)")
self.curret_case += 1
tdLog.printNoPrefix("the case for td-11970 run passed")
except:
self.err_case += 1
tdLog.printNoPrefix("the case for td-11970 run failed")
pass
def run(self):
self.check_td11970()
if self.err_case > 0:
tdLog.exit(f"{self.err_case} case run failed")
else:
tdLog.success("all case run passed")
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
###################################################################
# Copyright (c) 2020 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import taos
import time
from util.log import *
from util.cases import *
from util.sql import *
from util.dnodes import *
class TDTestCase:
def __init__(self):
self.err_case = 0
self.curret_case = 0
def caseDescription(self):
'''
case1 <cpwu>: [TD-11256] query the super table with a mix of expressions + tbname while using group by tbname
'''
return
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
def create_stb(self):
basetime = int(round(time.time() * 1000))
tdSql.prepare()
tdSql.execute(f"create stable stb1(ts timestamp, c1 int) tags (tag1 int)")
for i in range(10):
tdSql.execute(f"create table t{i} using stb1 tags({i})")
tdSql.execute(f"insert into t{i} values ({basetime}, {i})")
pass
def check_td11256(self):
# this case expects the connection to stay alive after running the group by sql
tdSql.query("select count(*) from stb1 group by ts")
try:
tdSql.error("select c1/2, tbname from stb1 group by tbname")
tdSql.query("show databases")
self.curret_case += 1
tdLog.printNoPrefix("the case1: td-11256 run passed")
except:
self.err_case += 1
tdLog.printNoPrefix("the case1: td-11256 run failed")
pass
def run(self):
self.create_stb()
self.check_td11256()
if self.err_case > 0:
tdLog.exit(f"{self.err_case} case for TD-11256 run failed")
else:
tdLog.success("case for TD-11256 run passed")
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
\ No newline at end of file
......@@ -19,6 +19,7 @@ from util.log import *
from util.cases import *
from util.sql import *
from util.dnodes import *
import subprocess
class TDTestCase:
def init(self, conn, logSql):
......@@ -48,10 +49,9 @@ class TDTestCase:
def caseDescription(self):
'''
case1 <wenzhouwww>: [TD-11389] :
this test case covers a cache error: cached data held by a client already connected to taosd becomes incorrect.
root cause : the table schema is changed and the hostname tag size is increased through schemaless insertion, but the schema cache of the taos client is not refreshed.
case1 <wenzhouwww>: [TD-12344] :
this test case covers an unexpected crash of the session function that coredumps the taos shell ;
'''
return
......@@ -81,9 +81,6 @@ class TDTestCase:
cfgPath = projPath + "/sim/dnode1/cfg "
return cfgPath
def run(self):
tdSql.prepare()
......@@ -91,17 +88,17 @@ class TDTestCase:
tdSql.execute("use testdb;")
tdSql.execute("create stable st (ts timestamp , id int , value double) tags(hostname binary(10) ,ind int);")
for i in range(self.num):
tdSql.execute("insert into sub_%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+100*i,i*2,i+10.00))
tdSql.execute("insert into sub_%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+200*i,i*2,i+10.00))
tdSql.execute("insert into sub_%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+300*i,i*2,i+10.00))
tdSql.execute("insert into sub_%s using st tags('host_%s' , %d) values (%d , %d , %f );"%(str(i),str(i),i*10,self.ts+10000*i,i*2,i+10.00))
tdSql.query('select elapsed(ts,10s) from sub_1 where ts>="2015-01-01 00:00:00.000" and ts < "2015-01-01 00:10:00.000" session(ts,1d) ;')
cfg_path = self.getcfgPath()
print(cfg_path)
# tdSql.execute('select elapsed(ts,10s) from st where ts>="2015-01-01 00:00:00.000" and ts < "2015-01-01 00:10:00.000" session(ts,1d) group by tbname;') # session not support super table
os.system("taos -c %s -s 'select elapsed(ts,10s) from testdb.st where ts>=\"2015-01-01 00:00:00.000\" and ts < \"2015-01-01 00:10:00.000\" session(ts,1d) group by tbname;' " % (cfg_path))
tdSql.execute('select elapsed(ts,10s) from testdb.st where ts>=\"2015-01-01 00:00:00.000\" and ts < \"2015-01-01 00:10:00.000\" session(ts,1d) group by tbname;') # session with group by tbname on a super table is expected to work now
taos_cmd1= "taos -c %s -s 'select elapsed(ts,10s) from testdb.st where ts>=\"2015-01-01 00:00:00.000\" and ts < \"2015-01-01 00:10:00.000\" session(ts,1d) group by tbname;' " % (cfg_path)
_ = subprocess.check_output(taos_cmd1, shell=True).decode("utf-8")
def stop(self):
tdSql.close()
......
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../
WKC=`pwd`
cd ${WKC}/src/connector/C#
dotnet test
dotnet run --project src/test/Cases/Cases.csproj
cd ${WKC}/tests/examples/C#
dotnet run --project C#checker/C#checker.csproj
dotnet run --project TDengineTest/TDengineTest.csproj
dotnet run --project schemaless/schemaless.csproj
cd ${WKC}/tests/examples/C#/taosdemo
dotnet build -c Release
tree || true
./bin/Release/net5.0/taosdemo -c /etc/taos -y
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../
WKC=`pwd`
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../
WKC=`pwd`
cd ${WKC}/src/connector/nodejs
npm install
npm run test
cd ${WKC}/tests/examples/nodejs
npm install td2.0-connector > /dev/null 2>&1
node nodejsChecker.js host=localhost
node test1970.js
cd ${WKC}/tests/connectorTest/nodejsTest/nanosupport
npm install td2.0-connector > /dev/null 2>&1
node nanosecondTest.js
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../src/connector/python
pip3 install pytest
pytest tests/
python3 examples/bind-multi.py
python3 examples/bind-row.py
python3 examples/demo.py
python3 examples/insert-lines.py
python3 examples/pep-249.py
python3 examples/query-async.py
python3 examples/query-objectively.py
python3 examples/subscribe-sync.py
python3 examples/subscribe-async.py
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../
WKC=`pwd`
#!/bin/bash
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
stopTaosd
rm -rf /var/lib/taos/*
rm -rf /var/log/taos/*
nohup taosd -c /etc/taos/ > /dev/null 2>&1 &
sleep 10
cd ../../
WKC=`pwd`
###################################################################
# Copyright (c) 2020 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from taosdata
#
###################################################################
# -*- coding: utf-8 -*-
import taos
import time
import requests
from util.log import *
from util.cases import *
from util.sql import *
from util.dnodes import *
class TDTestCase:
def __init__(self):
self.err_case = 0
self.curret_case = 0
self.url = "http://127.0.0.1:6041/rest/sql"
self.header = {'Authorization': 'Basic cm9vdDp0YW9zZGF0YQ=='}
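# the Basic token is base64("root:taosdata"), the default TDengine credentials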
def caseDescription(self):
'''
case1 <cpwu>: [TD-12163] alter table-schema using restful interface
'''
return
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
def check_td12163(self):
# basetime = int(round(time.time() * 1000))
tdSql.prepare()
tdSql.execute(f"create stable stb1(ts timestamp, c1 int) tags (tag1 int)")
tdSql.execute(f"create table nt1 (nts timestamp, nc1 int)")
add_column_stb = "alter table db.stb1 add column c2 float"
drop_column_stb = "alter table db.stb1 drop column c2 "
add_column_ntb = "alter table db.nt1 add column nc2 float"
drop_column_ntb = "alter table db.nt1 drop column nc2 "
conn_add_stb = requests.post(url=self.url, headers=self.header, data=add_column_stb)
resp_code_stb_add = conn_add_stb.status_code
resp_add_stb = conn_add_stb.json()
try:
assert resp_code_stb_add//200 == 1
assert resp_add_stb["status"] == "succ"
self.curret_case += 1
tdLog.printNoPrefix("the case add column to stable successful")
except:
self.err_case += 1
tdLog.printNoPrefix("the case add column to stable failed")
conn_add_ntb = requests.post(url=self.url, headers=self.header, data=add_column_ntb)
resp_code_ntb_add = conn_add_ntb.status_code
resp_add_ntb = conn_add_ntb.json()
try:
assert resp_code_ntb_add//200 == 1
assert resp_add_ntb["status"] == "succ"
self.curret_case += 1
tdLog.printNoPrefix("the case add column to normal table successful")
except:
self.err_case += 1
tdLog.printNoPrefix("the case add column to normal table failed")
conn_drop_stb = requests.post(url=self.url, headers=self.header, data=drop_column_stb)
resp_code_stb_drop = conn_drop_stb.status_code
resp_drop_stb = conn_drop_stb.json()
try:
assert resp_code_stb_drop // 200 == 1
assert resp_drop_stb["status"] == "succ"
self.curret_case += 1
tdLog.printNoPrefix("the case drop column to stable successful")
except:
self.err_case += 1
tdLog.printNoPrefix("the case add column to stable failed")
conn_drop_ntb = requests.post(url=self.url, headers=self.header, data=drop_column_ntb)
resp_code_ntb_drop = conn_drop_ntb.status_code
resp_drop_ntb = conn_drop_ntb.json()
try:
assert resp_code_ntb_drop // 200 == 1
assert resp_drop_ntb["status"] == "succ"
self.curret_case += 1
tdLog.printNoPrefix("the case drop column to stable successful")
except:
self.err_case += 1
tdLog.printNoPrefix("the case add column to stable failed")
pass
def run(self):
self.check_td12163()
if self.err_case > 0:
tdLog.exit(f"{self.err_case} case for TD-12163 run failed")
else:
tdLog.success("case for TD-12163 run passed")
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
python3 ./test.py -f 1-insert/batchInsert.py
python3 ./test.py -f 1-insert/batchInsert.py
bash 3-connectors/c#/test.sh
bash 3-connectors/go/test.sh
bash 3-connectors/java/test.sh
bash 3-connectors/nodejs/test.sh
bash 3-connectors/python/test.sh
bash 3-connectors/restful/test.sh
bash 3-connectors/rust/test.sh
#python3 ./test.py -f 1-insert/batchInsert.py
python3 ./test.py -f 1-insert/batchInsert.py
python3 test.py -f 1-insert/TD-11970.py
python3 ./test.py -f 0-others/create_col_tag.py
python3 ./test.py -f 0-others/create_col_tag.py
python3 ./test.py -f 2-query/TD-11256.py
python3 ./test.py -f 2-query/TD-11389.py
python3 ./test.py -f 2-query/TD-11945_crash.py
python3 ./test.py -f 2-query/TD-12340-12342.py
......
python3 test.py -f 4-taosAdapter/TD-12163.py
python3 ./test.py -f 4-taosAdapter/taosAdapter_insert.py
python3 ./test.py -f 4-taosAdapter/taosAdapter_query.py
python3 ./test.py -f 4-taosAdapter/taosAdapter_query.py
#!/bin/bash
# Color setting
RED='\033[0;31m'
GREEN='\033[1;32m'
GREEN_DARK='\033[0;32m'
GREEN_UNDERLINE='\033[4;32m'
NC='\033[0m'
tests_dir=`pwd`
IN_TDINTERNAL="community"
function stopTaosd {
echo "Stop taosd"
sudo systemctl stop taosd || echo 'no sudo or systemctl or stop fail'
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
while [ -n "$PID" ]
do
pkill -TERM -x taosd
sleep 1
PID=`ps -ef|grep -w taosd | grep -v grep | awk '{print $2}'`
done
}
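# dohavecore: look for a core file generated within the last minute under $corepath;
# if one is found, archive the taosd/tsim binaries and libtaos, save the sim directory,
# and when $1 == 1 print a 10-frame gdb backtrace and exit 8.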
function dohavecore(){
corefile=`find $corepath -mmin 1`
if [ -n "$corefile" ];then
core_file=`echo $corefile|cut -d " " -f2`
proc=`file $core_file|awk -F "execfn:" '/execfn:/{print $2}'|tr -d \' |awk '{print $1}'|tr -d \,`
echo 'taosd or taos has generated core'
rm case.log
if [[ "$tests_dir" == *"$IN_TDINTERNAL"* ]] && [[ $1 == 1 ]]; then
cd ../../../
tar -zcPf $corepath'taos_'`date "+%Y_%m_%d_%H_%M_%S"`.tar.gz debug/build/bin/taosd debug/build/bin/tsim debug/build/lib/libtaos*so*
if [[ $2 == 1 ]];then
cp -r sim ~/sim_`date "+%Y_%m_%d_%H:%M:%S"`
else
cd community
cp -r sim ~/sim_`date "+%Y_%m_%d_%H:%M:%S" `
fi
else
cd ../../
if [[ $1 == 1 ]];then
tar -zcPf $corepath'taos_'`date "+%Y_%m_%d_%H_%M_%S"`.tar.gz debug/build/bin/taosd debug/build/bin/tsim debug/build/lib/libtaos*so*
cp -r sim ~/sim_`date "+%Y_%m_%d_%H:%M:%S" `
fi
fi
if [[ $1 == 1 ]];then
echo '\n'|gdb $proc $core_file -ex "bt 10" -ex quit
exit 8
fi
fi
}
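# runPyCaseOneByOne: run every python line of the case file $1 sequentially,
# logging success/failure and the execution time of each case to pytest-out.log.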
function runPyCaseOneByOne {
while read -r line; do
if [[ $line =~ ^python.* ]]; then
if [[ $line != *sleep* ]]; then
if [[ $line =~ '-r' ]];then
case=`echo $line|awk '{print $4}'`
else
case=`echo $line|awk '{print $NF}'`
fi
start_time=`date +%s`
date +%F\ %T | tee -a pytest-out.log
echo -n $case
$line > /dev/null 2>&1 && \
echo -e "${GREEN} success${NC}" | tee -a pytest-out.log || \
echo -e "${RED} failed${NC}" | tee -a pytest-out.log
end_time=`date +%s`
out_log=`tail -1 pytest-out.log `
# if [[ $out_log =~ 'failed' ]];then
# exit 8
# fi
echo execution time of $case was `expr $end_time - $start_time`s. | tee -a pytest-out.log
else
$line > /dev/null 2>&1
fi
fi
done < $1
}
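# runPyCaseOneByOnefq: sharded variant. $1 = case file, $2 = total number of shards,
# $3 = exit-on-failure/core flag, $4 = shard index; a line runs only when its line
# number modulo $2 equals $4, so several workers can split one case file.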
function runPyCaseOneByOnefq() {
end=`sed -n '$=' $1`
for ((i=1;i<=$end;i++)) ; do
if [[ $(($i%$2)) -eq $4 ]];then
line=`sed -n "$i"p $1`
if [[ $line =~ ^python.* ]]; then
if [[ $line != *sleep* ]]; then
if [[ $line =~ '-r' ]];then
case=`echo $line|awk '{print $4}'`
else
case=`echo $line|awk '{print $NF}'`
fi
start_time=`date +%s`
date +%F\ %T | tee -a pytest-out.log
echo -n $case
if [[ $1 =~ full ]] ; then
line=$line" -s"
fi
$line > case.log 2>&1 && \
echo -e "${GREEN} success${NC}" | tee -a pytest-out.log || \
echo -e "${RED} failed${NC}" | tee -a pytest-out.log
end_time=`date +%s`
out_log=`tail -1 pytest-out.log `
if [[ $out_log =~ 'failed' ]];then
cp -r ../../sim ~/sim_`date "+%Y_%m_%d_%H:%M:%S" `
echo '=====================log===================== '
cat case.log
rm -rf case.log
dohavecore $3 2
if [[ $3 == 1 ]];then
exit 8
fi
fi
echo execution time of $case was `expr $end_time - $start_time`s. | tee -a pytest-out.log
else
$line > /dev/null 2>&1
fi
dohavecore $3 2
else
echo $line
if [[ $line =~ ^bash.* ]]; then
# $line > case.log 2>&1 || cat case.log && exit 8
# cat case.log
$line > case.log 2>&1
if [ $? -ne 0 ];then
cat case.log
exit 8
fi
fi
fi
fi
done
rm -rf ../../sim/case.log
}
######################
# main entry
######################
unameOut="$(uname -s)"
case "${unameOut}" in
Linux*) OS=Linux;;
Darwin*) OS=Darwin;;
CYGWIN*) OS=Windows;;
*) OS=Unknown;;
esac
case "${OS}" in
Linux*) TAOSLIB=libtaos.so;;
Darwin*) TAOSLIB=libtaos.dylib;;
Windows*) TAOSLIB=taos.dll;;
Unknown) TAOSLIB="UNKNOWN:${unameOut}";;
esac
echo TAOSLIB is ${TAOSLIB}
totalFailed=0
totalPyFailed=0
totalJDBCFailed=0
totalUnitFailed=0
totalExampleFailed=0
totalApiFailed=0
if [ "${OS}" == "Linux" ]; then
corepath=`grep -oP '.*(?=core_)' /proc/sys/kernel/core_pattern||grep -oP '.*(?=core-)' /proc/sys/kernel/core_pattern`
if [ -z "$corepath" ];then
echo "/coredump/core_%e_%p_%t" > /proc/sys/kernel/core_pattern || echo "Permission denied"
corepath="/coredump/"
fi
fi
echo "### run Python test case ###"
cd $tests_dir
if [[ "$tests_dir" == *"$IN_TDINTERNAL"* ]]; then
cd ../..
else
cd ../
fi
TOP_DIR=`pwd`
TAOSLIB_DIR=`find . -name "${TAOSLIB}"|grep -w lib|head -n1`
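# keep the path up to the lib directory; TDinternal builds nest one directory deeper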
if [[ "$TAOSLIB_DIR" == *"$IN_TDINTERNAL"* ]]; then
LIB_DIR=`find . -name "${TAOSLIB}"|grep -w lib|head -n1|cut -d '/' -f 2,3,4,5`
else
LIB_DIR=`find . -name "${TAOSLIB}"|grep -w lib|head -n1|cut -d '/' -f 2,3,4`
fi
export LD_LIBRARY_PATH=$TOP_DIR/$LIB_DIR:$LD_LIBRARY_PATH
cd $tests_dir/pytest
[ -f pytest-out.log ] && rm -f pytest-out.log
if [ "$1" == "full" ]; then
echo "### run Python full test ###"
runPyCaseOneByOne fulltest-tools.sh
runPyCaseOneByOne fulltest-query.sh
runPyCaseOneByOne fulltest-other.sh
runPyCaseOneByOne fulltest-insert.sh
runPyCaseOneByOne fulltest-connector.sh
else
echo "### run $1 $2 test ###"
if [ "$1" != "query" ] && [ "$1" != "taosAdapter" ] && [ "$1" != "other" ] && [ "$1" != "tools" ] && [ "$1" != "insert" ] && [ "$1" != "connector" ] ;then
echo " wrong option:$1 must one of [query,other,tools,insert,connector,taosAdapter]"
exit 8
fi
cd $tests_dir/pytest
runPyCaseOneByOnefq fulltest-$1.sh $2 1 $3
cd $tests_dir/develop-test
runPyCaseOneByOnefq fulltest-$1.sh $2 1 $3
cd $tests_dir/system-test
runPyCaseOneByOnefq fulltest-$1.sh $2 1 $3
fi
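# usage sketch (values are illustrative): "bash test-all.sh full" runs every fulltest-*.sh list;
# "bash test-all.sh query 5 2" runs shard 2 of 5 for the query scope across the
# pytest, develop-test and system-test directories.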