taosdata / TDengine
Commit 25bddc0c
Authored on Oct 10, 2021 by sangshuduo

    merge with develop branch.

Parents: 48dae824, 11e28e3d
Showing 113 changed files with 1243 additions and 730 deletions (+1243, -730)
Jenkinsfile  +6  -3
deps/cJson/inc/cJSON.h  +1  -1
deps/cJson/src/cJSON.c  +1  -1
src/client/inc/tscParseLine.h  +12  -1
src/client/src/TSDBJNIConnector.c  +3  -2
src/client/src/tscParseLineProtocol.c  +52  -7
src/client/src/tscParseOpenTSDB.c  +73  -39
src/connector/python/README.md  +2  -2
src/connector/python/examples/insert-lines.py  +2  -2
src/connector/python/taos/__init__.py  +2  -2
src/connector/python/taos/cinterface.py  +6  -23
src/connector/python/taos/connection.py  +25  -24
src/connector/python/taos/error.py  +2  -12
src/connector/python/tests/test_lines.py  +5  -5
src/inc/taos.h  +1  -5
src/inc/taoserror.h  +1  -0
src/kit/taosdump/taosdump.c  +32  -20
src/os/src/detail/osTime.c  +3  -1
src/util/src/terror.c  +1  -0
tests/examples/c/apitest.c  +8  -8
tests/examples/c/schemaless.c  +2  -2
tests/pytest/crash_gen/valgrind_taos.supp  +17  -0
tests/pytest/fulltest.sh  +3  -2
tests/pytest/insert/insertJSONPayload.py  +105  -128
tests/pytest/insert/insertTelnetLines.py  +26  -26
tests/pytest/insert/line_insert.py  +10  -10
tests/pytest/insert/openTsdbTelnetLinesInsert.py  +88  -88
tests/pytest/insert/schemalessInsert.py  +96  -96
tests/pytest/tools/insert-interlace.json  +1  -1
tests/pytest/tools/insert-tblimit-tboffset-createdb.json  +1  -1
tests/pytest/tools/insert-tblimit-tboffset-insertrec.json  +1  -1
tests/pytest/tools/insert-tblimit-tboffset.json  +1  -1
tests/pytest/tools/insert-tblimit-tboffset0.json  +1  -1
tests/pytest/tools/insert-tblimit1-tboffset.json  +1  -1
tests/pytest/tools/schemalessInsertPerformance.py  +10  -10
tests/pytest/tools/taosdemoAllTest/insert-1s1tnt1r.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-1s1tntmr.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-allDataType.json  +88  -0
tests/pytest/tools/taosdemoAllTest/insert-disorder.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-drop-exist-auto-N00.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-drop-exist-auto-Y00.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-illegal.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-interlace-row.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-interval-speed.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-newdb.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-newtable.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-nodbnodrop.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-offset.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-renewdb.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-sample.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insert-timestep.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertBinaryLenLarge16374AllcolLar49151.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertChildTab0.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertChildTabLess0.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertColumnsAndTagNum4096.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertColumnsAndTagNumLarge4096.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertColumnsNum0.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertInterlaceRowsLarge1M.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertMaxNumPerReq.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertNumOfrecordPerReq0.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertNumOfrecordPerReqless0.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertRestful.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertSigcolumnsNum4096.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertTagsNumLarge128.json  +1  -1
tests/pytest/tools/taosdemoAllTest/insertTimestepMulRowsLargeint16.json  +2  -1
tests/pytest/tools/taosdemoAllTest/insert_5M_rows.json  +1  -1
tests/pytest/tools/taosdemoAllTest/manual_block1_comp.json  +1  -1
tests/pytest/tools/taosdemoAllTest/manual_block2.json  +1  -1
tests/pytest/tools/taosdemoAllTest/moredemo-offset-limit1.json  +1  -1
tests/pytest/tools/taosdemoAllTest/moredemo-offset-limit5.json  +1  -1
tests/pytest/tools/taosdemoAllTest/moredemo-offset-limit94.json  +1  -1
tests/pytest/tools/taosdemoAllTest/moredemo-offset-newdb.json  +1  -1
tests/pytest/tools/taosdemoAllTest/query-interrupt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/queryInsertdata.json  +1  -1
tests/pytest/tools/taosdemoAllTest/queryInsertrestdata.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/1174-large-stmt.json  +2  -1
tests/pytest/tools/taosdemoAllTest/stmt/1174-large-taosc.json  +2  -1
tests/pytest/tools/taosdemoAllTest/stmt/1174-small-stmt-random.json  +2  -1
tests/pytest/tools/taosdemoAllTest/stmt/1174-small-stmt.json  +2  -1
tests/pytest/tools/taosdemoAllTest/stmt/1174-small-taosc.json  +2  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-1s1tnt1r-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-1s1tntmr-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-allDataType-stmt.json  +88  -0
tests/pytest/tools/taosdemoAllTest/stmt/insert-disorder-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-drop-exist-auto-N00-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-drop-exist-auto-Y00-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-interlace-row-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-interval-speed-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-newdb-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-newtable-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-nodbnodrop-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-offset-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-renewdb-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-sample-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insert-timestep-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertBinaryLenLarge16374AllcolLar49151-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertChildTab0-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertChildTabLess0-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertColumnsAndTagNum4096-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertColumnsNum0-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertInterlaceRowsLarge1M-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertMaxNumPerReq-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertNumOfrecordPerReq0-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertNumOfrecordPerReqless0-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertSigcolumnsNum4096-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/insertTagsNumLarge128-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/stmt/nsertColumnsAndTagNumLarge4096-stmt.json  +1  -1
tests/pytest/tools/taosdemoAllTest/subInsertdata.json  +1  -1
tests/pytest/tools/taosdemoAllTest/subInsertdataMaxsql100.json  +1  -1
tests/pytest/tools/taosdemoAllTest/taosdemoTestInsertAllType.py  +106  -0
tests/pytest/tools/taosdumpTest3.py  +200  -0
tests/script/api/openTSDBTest.c  +80  -130
tests/tsim/src/simExe.c  +2  -2
Jenkinsfile

@@ -114,6 +114,7 @@ def pre_test(){
 }
 def pre_test_win(){
     bat '''
+    taskkill /f /t /im python.exe
     cd C:\\
     rd /s /Q C:\\TDengine
     cd C:\\workspace\\TDinternal

@@ -180,9 +181,9 @@ def pre_test_win(){
     cd debug
     call "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\vcvarsall.bat" amd64
     cmake ../ -G "NMake Makefiles"
-    nmake
-    nmake install
-    xcopy /e/y/i/f C:\\workspace\\TDinternal\\debug\\build\\lib\\taos.dll C:\\Windows\\System32
+    nmake || exit 8
+    nmake install || exit 8
+    xcopy /e/y/i/f C:\\workspace\\TDinternal\\debug\\build\\lib\\taos.dll C:\\Windows\\System32 || exit 8
     cd C:\\workspace\\TDinternal\\community\\src\\connector\\python
     python -m pip install .

@@ -484,11 +485,13 @@ pipeline {
         catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
             pre_test_win()
+            timeout(time: 20, unit: 'MINUTES'){
             bat '''
             cd C:\\workspace\\TDinternal\\community\\tests\\pytest
             .\\test-all.bat Wintest
             '''
             }
+            }
         script {
             win_stop=1
         }
deps/cJson/inc/cJSON.h

@@ -73,7 +73,7 @@ typedef struct cJSON
     char *string;

     //Keep the original string of number
-    char numberstring[13];
+    char numberstring[64];
 } cJSON;

 typedef struct cJSON_Hooks
deps/cJson/src/cJSON.c

@@ -290,7 +290,7 @@ loop_end:
     input_buffer->offset += (size_t)(after_end - number_c_string);
-    strncpy(item->numberstring, (const char *)number_c_string, 12);
+    strncpy(item->numberstring, (const char *)number_c_string, strlen((const char *)number_c_string));
     return true;
 }
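The two cJSON changes above widen numberstring and stop truncating it at 12 characters, so the original text of a numeric JSON value survives parsing. A minimal standalone sketch (not part of the commit, assumptions: plain C, no TDengine headers) of why keeping the text matters for 64-bit values such as nanosecond timestamps, which the later hunks in tscParseOpenTSDB.c convert with strtoll() on numberstring instead of reading valueint/valuedouble:

#include <stdio.h>
#include <stdlib.h>
#include <inttypes.h>

int main(void) {
    /* 19 digits plus NUL needs 20 bytes: it fits in numberstring[64] but not in [13]. */
    const char *numberstring = "1626006833610123456";
    /* Going through a double (53-bit mantissa) rounds the value. */
    int64_t viaDouble = (int64_t)strtod(numberstring, NULL);
    /* Converting the preserved text directly is exact. */
    int64_t viaString = strtoll(numberstring, NULL, 10);
    printf("via double: %" PRId64 "\nvia string: %" PRId64 "\n", viaDouble, viaString);
    return 0;
}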
src/client/inc/tscParseLine.h

@@ -47,6 +47,12 @@ typedef enum {
   SML_TIME_STAMP_NANO_SECONDS
 } SMLTimeStampType;

+typedef enum {
+  SML_LINE_PROTOCOL = 0,
+  SML_TELNET_PROTOCOL = 1,
+  SML_JSON_PROTOCOL = 2,
+} SMLProtocolType;
+
 typedef struct {
   uint64_t id;
   SHashObj* smlDataToSchema;

@@ -57,7 +63,7 @@ bool checkDuplicateKey(char *key, SHashObj *pHash, SSmlLinesInfo* info);
 bool isValidInteger(char *str);
 bool isValidFloat(char *str);

-int32_t isValidChildTableName(const char *pTbName, int16_t len);
+int32_t isValidChildTableName(const char *pTbName, int16_t len, SSmlLinesInfo* info);

 bool convertSmlValueType(TAOS_SML_KV *pVal, char *value,
                          uint16_t len, SSmlLinesInfo* info);

@@ -66,6 +72,11 @@ int32_t convertSmlTimeStamp(TAOS_SML_KV *pVal, char *value,
 void destroySmlDataPoint(TAOS_SML_DATA_POINT* point);

+int taos_insert_lines(TAOS* taos, char* lines[], int numLines);
+int taos_insert_telnet_lines(TAOS* taos, char* lines[], int numLines);
+int taos_insert_json_payload(TAOS* taos, char* payload);
+
 #ifdef __cplusplus
 }
 #endif
src/client/src/TSDBJNIConnector.c

@@ -17,6 +17,7 @@
 #include "taos.h"
 #include "tlog.h"
 #include "tscUtil.h"
+#include "tscParseLine.h"

 #include "com_taosdata_jdbc_TSDBJNIConnector.h"

@@ -1070,7 +1071,7 @@ JNIEXPORT jlong JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_insertLinesImp(J
     c_lines[i] = (char*)(*env)->GetStringUTFChars(env, line, 0);
   }

-  int code = taos_insert_lines(taos, c_lines, numLines);
+  int code = taos_schemaless_insert(taos, c_lines, numLines, SML_LINE_PROTOCOL);

   for (int i = 0; i < numLines; ++i) {
     jstring line = (jstring)((*env)->GetObjectArrayElement(env, lines, i));
src/client/src/tscParseLineProtocol.c

@@ -1811,8 +1811,8 @@ static int32_t parseSmlKey(TAOS_SML_KV *pKV, const char **index, SHashObj *pHash
     return TSDB_CODE_TSC_LINE_SYNTAX_ERROR;
   }

   while (*cur != '\0') {
-    if (len > TSDB_COL_NAME_LEN) {
-      tscError("SML:0x%"PRIx64" Key field cannot exceeds 65 characters", info->id);
+    if (len >= TSDB_COL_NAME_LEN - 1) {
+      tscError("SML:0x%"PRIx64" Key field cannot exceeds %d characters", info->id, TSDB_COL_NAME_LEN - 1);
       return TSDB_CODE_TSC_INVALID_COLUMN_LENGTH;
     }
     //unescaped '=' identifies a tag key

@@ -1898,8 +1898,8 @@ static int32_t parseSmlMeasurement(TAOS_SML_DATA_POINT *pSml, const char **index
   }

   while (*cur != '\0') {
-    if (len > TSDB_TABLE_NAME_LEN) {
-      tscError("SML:0x%"PRIx64" Measurement field cannot exceeds 193 characters", info->id);
+    if (len >= TSDB_TABLE_NAME_LEN - 1) {
+      tscError("SML:0x%"PRIx64" Measurement field cannot exceeds %d characters", info->id, TSDB_TABLE_NAME_LEN - 1);
       free(pSml->stableName);
       pSml->stableName = NULL;
       return TSDB_CODE_TSC_INVALID_TABLE_ID_LENGTH;

@@ -1917,7 +1917,7 @@ static int32_t parseSmlMeasurement(TAOS_SML_DATA_POINT *pSml, const char **index
     if (*cur == '\\') {
       escapeSpecialCharacter(1, &cur);
     }
-    pSml->stableName[len] = *cur;
+    pSml->stableName[len] = tolower(*cur);
     cur++;
     len++;
   }

@@ -1929,7 +1929,11 @@ static int32_t parseSmlMeasurement(TAOS_SML_DATA_POINT *pSml, const char **index
 }

 //Table name can only contain digits(0-9),alphebet(a-z),underscore(_)
-int32_t isValidChildTableName(const char *pTbName, int16_t len) {
+int32_t isValidChildTableName(const char *pTbName, int16_t len, SSmlLinesInfo* info) {
+  if (len > TSDB_TABLE_NAME_LEN - 1) {
+    tscError("SML:0x%"PRIx64" child table name cannot exceeds %d characters", info->id, TSDB_TABLE_NAME_LEN - 1);
+    return TSDB_CODE_TSC_INVALID_TABLE_ID_LENGTH;
+  }
   const char *cur = pTbName;
   for (int i = 0; i < len; ++i) {
     if (!isdigit(cur[i]) && !isalpha(cur[i]) && (cur[i] != '_')) {

@@ -1975,12 +1979,13 @@ static int32_t parseSmlKvPairs(TAOS_SML_KV **pKVs, int *num_kvs,
     }

     if (!isField &&
         (strcasecmp(pkv->key, "ID") == 0) && pkv->type == TSDB_DATA_TYPE_BINARY) {
-      ret = isValidChildTableName(pkv->value, pkv->length);
+      ret = isValidChildTableName(pkv->value, pkv->length, info);
       if (ret) {
         goto error;
       }
       smlData->childTableName = malloc(pkv->length + 1);
       memcpy(smlData->childTableName, pkv->value, pkv->length);
+      strntolower_s(smlData->childTableName, smlData->childTableName, (int32_t)pkv->length);
       smlData->childTableName[pkv->length] = '\0';
       free(pkv->key);
       free(pkv->value);

@@ -2184,3 +2189,43 @@ cleanup:
   return code;
 }
+
+/**
+ * taos_schemaless_insert() parse and insert data points into database according to
+ * different protocol.
+ *
+ * @param $lines input array may contain multiple lines, each line indicates a data point.
+ *               If protocol=2 is used input array should contain single JSON
+ *               string(e.g. char *lines[] = {"$JSON_string"}). If need to insert
+ *               multiple data points in JSON format, should include them in $JSON_string
+ *               as a JSON array.
+ * @param $numLines indicates how many data points in $lines.
+ *                  If protocol = 2 is used this param will be ignored as $lines should
+ *                  contain single JSON string.
+ * @param $protocol indicates which protocol to use for parsing:
+ *                  0 - influxDB line protocol
+ *                  1 - OpenTSDB telnet line protocol
+ *                  2 - OpenTSDB JSON format protocol
+ * @return return zero for successful insertion. Otherwise return none-zero error code of
+ *         failure reason.
+ *
+ */
+int taos_schemaless_insert(TAOS* taos, char* lines[], int numLines, int protocol) {
+  int code;
+
+  switch (protocol) {
+    case SML_LINE_PROTOCOL:
+      code = taos_insert_lines(taos, lines, numLines);
+      break;
+    case SML_TELNET_PROTOCOL:
+      code = taos_insert_telnet_lines(taos, lines, numLines);
+      break;
+    case SML_JSON_PROTOCOL:
+      code = taos_insert_json_payload(taos, *lines);
+      break;
+    default:
+      code = TSDB_CODE_TSC_INVALID_PROTOCOL_TYPE;
+      break;
+  }
+
+  return code;
+}
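The new entry point simply dispatches on the protocol argument, mirroring the SMLProtocolType enum added in tscParseLine.h. A minimal caller sketch (not from this commit; the connection parameters and the pre-existing "test" database are placeholders, and only taos_connect, taos_schemaless_insert, and taos_close from taos.h are assumed):

#include <stdio.h>
#include "taos.h"  /* assumes the TDengine client headers and libtaos are installed */

int main(void) {
  /* Placeholder connection parameters; the target database is assumed to exist. */
  TAOS *taos = taos_connect("localhost", "root", "taosdata", "test", 0);
  if (taos == NULL) {
    printf("failed to connect to TDengine\n");
    return 1;
  }

  char *lines[] = {
      "st,t1=3i64,t2=4f64,t3=\"t3\" c1=3i64,c3=L\"pass\",c2=false,c4=4f64 1626006833639000000ns"
  };

  /* protocol 0 = influxDB line protocol (SML_LINE_PROTOCOL); 1 = telnet; 2 = JSON */
  int code = taos_schemaless_insert(taos, lines, 1, 0);
  if (code != 0) {
    printf("schemaless insert failed, error code 0x%x\n", code);
  }

  taos_close(taos);
  return 0;
}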
src/client/src/tscParseOpenTSDB.c

@@ -37,7 +37,7 @@ static int32_t parseTelnetMetric(TAOS_SML_DATA_POINT *pSml, const char **index,
   const char* cur = *index;
   uint16_t len = 0;

-  pSml->stableName = tcalloc(TSDB_TABLE_NAME_LEN + 1, 1);    // +1 to avoid 1772 line over write
+  pSml->stableName = tcalloc(TSDB_TABLE_NAME_LEN, 1);
   if (pSml->stableName == NULL) {
     return TSDB_CODE_TSC_OUT_OF_MEMORY;
   }

@@ -48,8 +48,8 @@ static int32_t parseTelnetMetric(TAOS_SML_DATA_POINT *pSml, const char **index,
   }

   while (*cur != '\0') {
-    if (len > TSDB_TABLE_NAME_LEN) {
-      tscError("OTD:0x%"PRIx64" Metric cannot exceeds 193 characters", info->id);
+    if (len >= TSDB_TABLE_NAME_LEN - 1) {
+      tscError("OTD:0x%"PRIx64" Metric cannot exceeds %d characters", info->id, TSDB_TABLE_NAME_LEN - 1);
       tfree(pSml->stableName);
       return TSDB_CODE_TSC_INVALID_TABLE_ID_LENGTH;
     }

@@ -62,7 +62,7 @@ static int32_t parseTelnetMetric(TAOS_SML_DATA_POINT *pSml, const char **index,
     if (*cur == '.') {
       pSml->stableName[len] = '_';
     } else {
-      pSml->stableName[len] = *cur;
+      pSml->stableName[len] = tolower(*cur);
     }
     cur++;

@@ -171,7 +171,7 @@ static int32_t parseTelnetMetricValue(TAOS_SML_KV **pKVs, int *num_kvs, const ch
 static int32_t parseTelnetTagKey(TAOS_SML_KV *pKV, const char **index, SHashObj *pHash, SSmlLinesInfo* info) {
   const char *cur = *index;
-  char key[TSDB_COL_NAME_LEN + 1];    // +1 to avoid key[len] over write
+  char key[TSDB_COL_NAME_LEN];
   uint16_t len = 0;

   //key field cannot start with digit

@@ -180,8 +180,8 @@ static int32_t parseTelnetTagKey(TAOS_SML_KV *pKV, const char **index, SHashObj
     return TSDB_CODE_TSC_LINE_SYNTAX_ERROR;
   }
   while (*cur != '\0') {
-    if (len > TSDB_COL_NAME_LEN) {
-      tscError("OTD:0x%"PRIx64" Tag key cannot exceeds 65 characters", info->id);
+    if (len >= TSDB_COL_NAME_LEN - 1) {
+      tscError("OTD:0x%"PRIx64" Tag key cannot exceeds %d characters", info->id, TSDB_COL_NAME_LEN - 1);
       return TSDB_CODE_TSC_INVALID_COLUMN_LENGTH;
     }
     if (*cur == ' ') {

@@ -276,13 +276,14 @@ static int32_t parseTelnetTagKvs(TAOS_SML_KV **pKVs, int *num_kvs,
       return ret;
     }

     if ((strcasecmp(pkv->key, "ID") == 0) && pkv->type == TSDB_DATA_TYPE_BINARY) {
-      ret = isValidChildTableName(pkv->value, pkv->length);
+      ret = isValidChildTableName(pkv->value, pkv->length, info);
       if (ret) {
         return ret;
       }
       *childTableName = malloc(pkv->length + 1);
       memcpy(*childTableName, pkv->value, pkv->length);
       (*childTableName)[pkv->length] = '\0';
+      strntolower_s(*childTableName, *childTableName, (int32_t)pkv->length);
       tfree(pkv->key);
       tfree(pkv->value);
     } else {

@@ -311,7 +312,7 @@ static int32_t parseTelnetTagKvs(TAOS_SML_KV **pKVs, int *num_kvs,
   return ret;
 }

-int32_t tscParseTelnetLine(const char* line, TAOS_SML_DATA_POINT* smlData, SSmlLinesInfo* info) {
+static int32_t tscParseTelnetLine(const char* line, TAOS_SML_DATA_POINT* smlData, SSmlLinesInfo* info) {
   const char* index = line;
   int32_t ret = TSDB_CODE_SUCCESS;

@@ -354,7 +355,7 @@ int32_t tscParseTelnetLine(const char* line, TAOS_SML_DATA_POINT* smlData, SSmlL
   return TSDB_CODE_SUCCESS;
 }

-int32_t tscParseTelnetLines(char* lines[], int numLines, SArray* points, SArray* failedLines, SSmlLinesInfo* info) {
+static int32_t tscParseTelnetLines(char* lines[], int numLines, SArray* points, SArray* failedLines, SSmlLinesInfo* info) {
   for (int32_t i = 0; i < numLines; ++i) {
     TAOS_SML_DATA_POINT point = {0};
     int32_t code = tscParseTelnetLine(lines[i], &point, info);

@@ -438,15 +439,15 @@ int taos_telnet_insert(TAOS* taos, TAOS_SML_DATA_POINT* points, int numPoint) {
 /* telnet style API parser */
-int32_t parseMetricFromJSON(cJSON *root, TAOS_SML_DATA_POINT* pSml, SSmlLinesInfo* info) {
+static int32_t parseMetricFromJSON(cJSON *root, TAOS_SML_DATA_POINT* pSml, SSmlLinesInfo* info) {
   cJSON *metric = cJSON_GetObjectItem(root, "metric");
   if (!cJSON_IsString(metric)) {
     return TSDB_CODE_TSC_INVALID_JSON;
   }

   size_t stableLen = strlen(metric->valuestring);
-  if (stableLen > TSDB_TABLE_NAME_LEN) {
-    tscError("OTD:0x%"PRIx64" Metric cannot exceeds 193 characters in JSON", info->id);
+  if (stableLen > TSDB_TABLE_NAME_LEN - 1) {
+    tscError("OTD:0x%"PRIx64" Metric cannot exceeds %d characters in JSON", info->id, TSDB_TABLE_NAME_LEN - 1);
     return TSDB_CODE_TSC_INVALID_TABLE_ID_LENGTH;
   }

@@ -462,19 +463,20 @@ int32_t parseMetricFromJSON(cJSON *root, TAOS_SML_DATA_POINT* pSml, SSmlLinesInf
   }

   //convert dot to underscore for now, will be removed once dot is allowed in tbname.
-  for (int i = 0; i < strlen(metric->valuestring); ++i) {
+  for (int i = 0; i < stableLen; ++i) {
     if (metric->valuestring[i] == '.') {
       metric->valuestring[i] = '_';
     }
   }
   tstrncpy(pSml->stableName, metric->valuestring, stableLen + 1);
+  strntolower_s(pSml->stableName, pSml->stableName, (int32_t)stableLen);

   return TSDB_CODE_SUCCESS;
 }

-int32_t parseTimestampFromJSONObj(cJSON *root, int64_t *tsVal, SSmlLinesInfo* info) {
+static int32_t parseTimestampFromJSONObj(cJSON *root, int64_t *tsVal, SSmlLinesInfo* info) {
   int32_t size = cJSON_GetArraySize(root);
   if (size != OTD_JSON_SUB_FIELDS_NUM) {
     return TSDB_CODE_TSC_INVALID_JSON;

@@ -490,7 +492,7 @@ int32_t parseTimestampFromJSONObj(cJSON *root, int64_t *tsVal, SSmlLinesInfo* in
     return TSDB_CODE_TSC_INVALID_JSON;
   }

-  *tsVal = value->valueint;
+  *tsVal = strtoll(value->numberstring, NULL, 10);
   //if timestamp value is 0 use current system time
   if (*tsVal == 0) {
     *tsVal = taosGetTimestampNs();

@@ -526,7 +528,7 @@ int32_t parseTimestampFromJSONObj(cJSON *root, int64_t *tsVal, SSmlLinesInfo* in
   return TSDB_CODE_SUCCESS;
 }

-int32_t parseTimestampFromJSON(cJSON *root, TAOS_SML_KV **pTS, int *num_kvs, SSmlLinesInfo* info) {
+static int32_t parseTimestampFromJSON(cJSON *root, TAOS_SML_KV **pTS, int *num_kvs, SSmlLinesInfo* info) {
   //Timestamp must be the first KV to parse
   assert(*num_kvs == 0);
   int64_t tsVal;

@@ -538,7 +540,8 @@ int32_t parseTimestampFromJSON(cJSON *root, TAOS_SML_KV **pTS, int *num_kvs, SSm
     if (timestamp->valueint == 0) {
       tsVal = taosGetTimestampNs();
     } else {
-      tsVal = convertTimePrecision(timestamp->valueint, TSDB_TIME_PRECISION_MICRO, TSDB_TIME_PRECISION_NANO);
+      tsVal = strtoll(timestamp->numberstring, NULL, 10);
+      tsVal = convertTimePrecision(tsVal, TSDB_TIME_PRECISION_MICRO, TSDB_TIME_PRECISION_NANO);
     }
   } else if (cJSON_IsObject(timestamp)) {
     int32_t ret = parseTimestampFromJSONObj(timestamp, &tsVal, info);

@@ -567,7 +570,7 @@ int32_t parseTimestampFromJSON(cJSON *root, TAOS_SML_KV **pTS, int *num_kvs, SSm
 }

-int32_t convertJSONBool(TAOS_SML_KV *pVal, char* typeStr, int64_t valueInt, SSmlLinesInfo* info) {
+static int32_t convertJSONBool(TAOS_SML_KV *pVal, char* typeStr, int64_t valueInt, SSmlLinesInfo* info) {
   if (strcasecmp(typeStr, "bool") != 0) {
     tscError("OTD:0x%"PRIx64" invalid type(%s) for JSON Bool", info->id, typeStr);
     return TSDB_CODE_TSC_INVALID_JSON_TYPE;

@@ -580,7 +583,7 @@ int32_t convertJSONBool(TAOS_SML_KV *pVal, char* typeStr, int64_t valueInt, SSml
   return TSDB_CODE_SUCCESS;
 }

-int32_t convertJSONNumber(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLinesInfo* info) {
+static int32_t convertJSONNumber(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLinesInfo* info) {
   //tinyint
   if (strcasecmp(typeStr, "i8") == 0 ||
       strcasecmp(typeStr, "tinyint") == 0) {

@@ -623,14 +626,19 @@ int32_t convertJSONNumber(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLi
   //bigint
   if (strcasecmp(typeStr, "i64") == 0 ||
       strcasecmp(typeStr, "bigint") == 0) {
-    if (!IS_VALID_BIGINT(value->valueint)) {
-      tscError("OTD:0x%"PRIx64" JSON value(%"PRId64") cannot fit in type(bigint)", info->id, value->valueint);
-      return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
-    }
     pVal->type = TSDB_DATA_TYPE_BIGINT;
     pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
     pVal->value = tcalloc(pVal->length, 1);
-    *(int64_t *)(pVal->value) = (int64_t)(value->valueint);
+    /* cJSON conversion of legit BIGINT may overflow,
+     * use original string to do the conversion.
+     */
+    errno = 0;
+    int64_t val = (int64_t)strtoll(value->numberstring, NULL, 10);
+    if (errno == ERANGE || !IS_VALID_BIGINT(val)) {
+      tscError("OTD:0x%"PRIx64" JSON value(%s) cannot fit in type(bigint)", info->id, value->numberstring);
+      return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
+    }
+    *(int64_t *)(pVal->value) = val;
     return TSDB_CODE_SUCCESS;
   }
   //float

@@ -665,7 +673,7 @@ int32_t convertJSONNumber(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLi
   return TSDB_CODE_TSC_INVALID_JSON_TYPE;
 }

-int32_t convertJSONString(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLinesInfo* info) {
+static int32_t convertJSONString(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLinesInfo* info) {
   if (strcasecmp(typeStr, "binary") == 0) {
     pVal->type = TSDB_DATA_TYPE_BINARY;
   } else if (strcasecmp(typeStr, "nchar") == 0) {

@@ -680,7 +688,7 @@ int32_t convertJSONString(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLi
   return TSDB_CODE_SUCCESS;
 }

-int32_t parseValueFromJSONObj(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* info) {
+static int32_t parseValueFromJSONObj(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* info) {
   int32_t ret = TSDB_CODE_SUCCESS;
   int32_t size = cJSON_GetArraySize(root);

@@ -728,7 +736,7 @@ int32_t parseValueFromJSONObj(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* inf
   return TSDB_CODE_SUCCESS;
 }

-int32_t parseValueFromJSON(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* info) {
+static int32_t parseValueFromJSON(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* info) {
   int type = root->type;
   switch (type) {

@@ -746,7 +754,16 @@ int32_t parseValueFromJSON(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* info)
       pVal->type = TSDB_DATA_TYPE_BIGINT;
       pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
       pVal->value = tcalloc(pVal->length, 1);
-      *(int64_t *)(pVal->value) = (int64_t)(root->valuedouble);
+      /* cJSON conversion of legit BIGINT may overflow,
+       * use original string to do the conversion.
+       */
+      errno = 0;
+      int64_t val = (int64_t)strtoll(root->numberstring, NULL, 10);
+      if (errno == ERANGE || !IS_VALID_BIGINT(val)) {
+        tscError("OTD:0x%"PRIx64" JSON value(%s) cannot fit in type(bigint)", info->id, root->numberstring);
+        return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
+      }
+      *(int64_t *)(pVal->value) = val;
     } else if (isValidFloat(root->numberstring)) {
       pVal->type = TSDB_DATA_TYPE_DOUBLE;
       pVal->length = (int16_t)tDataTypes[pVal->type].bytes;

@@ -790,7 +807,7 @@ int32_t parseValueFromJSON(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* info)
   return TSDB_CODE_SUCCESS;
 }

-int32_t parseMetricValueFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, SSmlLinesInfo* info) {
+static int32_t parseMetricValueFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, SSmlLinesInfo* info) {
   //skip timestamp
   TAOS_SML_KV *pVal = *pKVs + 1;
   char key[] = OTD_METRIC_VALUE_COLUMN_NAME;

@@ -813,7 +830,9 @@ int32_t parseMetricValueFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs,
 }

-int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, char **childTableName, SSmlLinesInfo* info) {
+static int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, char **childTableName,
+                                 SHashObj *pHash, SSmlLinesInfo* info) {
   int32_t ret = TSDB_CODE_SUCCESS;

   cJSON *tags = cJSON_GetObjectItem(root, "tags");

@@ -825,16 +844,19 @@ int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, char **
   cJSON *id = cJSON_GetObjectItem(tags, "ID");
   if (id != NULL) {
     size_t idLen = strlen(id->valuestring);
-    ret = isValidChildTableName(id->valuestring, (int16_t)idLen);
+    ret = isValidChildTableName(id->valuestring, (int16_t)idLen, info);
     if (ret != TSDB_CODE_SUCCESS) {
       return ret;
     }
     *childTableName = tcalloc(idLen + 1, sizeof(char));
     memcpy(*childTableName, id->valuestring, idLen);
-    //remove all ID fields from tags list no case sensitive
-    while (id != NULL) {
-      cJSON_DeleteItemFromObject(tags, "ID");
-      id = cJSON_GetObjectItem(tags, "ID");
-    }
+    strntolower_s(*childTableName, *childTableName, (int32_t)idLen);
+
+    //check duplicate IDs
+    cJSON_DeleteItemFromObject(tags, "ID");
+    id = cJSON_GetObjectItem(tags, "ID");
+    if (id != NULL) {
+      return TSDB_CODE_TSC_DUP_TAG_NAMES;
+    }
   }

@@ -853,8 +875,16 @@ int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, char **
     if (tag == NULL) {
       return TSDB_CODE_TSC_INVALID_JSON;
     }
+    //check duplicate keys
+    if (checkDuplicateKey(tag->string, pHash, info)) {
+      return TSDB_CODE_TSC_DUP_TAG_NAMES;
+    }
     //key
     size_t keyLen = strlen(tag->string);
+    if (keyLen > TSDB_COL_NAME_LEN - 1) {
+      tscError("OTD:0x%"PRIx64" Tag key cannot exceeds %d characters in JSON", info->id, TSDB_COL_NAME_LEN - 1);
+      return TSDB_CODE_TSC_INVALID_COLUMN_LENGTH;
+    }
     pkv->key = tcalloc(keyLen + 1, sizeof(char));
     strncpy(pkv->key, tag->string, keyLen);
     //value

@@ -864,13 +894,14 @@ int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, char **
     }
     *num_kvs += 1;
     pkv++;
+
   }

   return ret;
 }

-int32_t tscParseJSONPayload(cJSON *root, TAOS_SML_DATA_POINT* pSml, SSmlLinesInfo* info) {
+static int32_t tscParseJSONPayload(cJSON *root, TAOS_SML_DATA_POINT* pSml, SSmlLinesInfo* info) {
   int32_t ret = TSDB_CODE_SUCCESS;

   if (!cJSON_IsObject(root)) {

@@ -910,17 +941,20 @@ int32_t tscParseJSONPayload(cJSON *root, TAOS_SML_DATA_POINT* pSml, SSmlLinesInf
   tscDebug("OTD:0x%"PRIx64" Parse metric value from JSON payload finished", info->id);

   //Parse tags
-  ret = parseTagsFromJSON(root, &pSml->tags, &pSml->tagNum, &pSml->childTableName, info);
+  SHashObj *keyHashTable = taosHashInit(128, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, false);
+  ret = parseTagsFromJSON(root, &pSml->tags, &pSml->tagNum, &pSml->childTableName, keyHashTable, info);
   if (ret) {
     tscError("OTD:0x%"PRIx64" Unable to parse tags from JSON payload", info->id);
+    taosHashCleanup(keyHashTable);
     return ret;
   }
   tscDebug("OTD:0x%"PRIx64" Parse tags from JSON payload finished", info->id);
+  taosHashCleanup(keyHashTable);

   return TSDB_CODE_SUCCESS;
 }

-int32_t tscParseMultiJSONPayload(char* payload, SArray* points, SSmlLinesInfo* info) {
+static int32_t tscParseMultiJSONPayload(char* payload, SArray* points, SSmlLinesInfo* info) {
   int32_t payloadNum, ret;
   ret = TSDB_CODE_SUCCESS;
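For protocol 2 the whole batch travels as one JSON string and numLines is ignored, as the doc comment for taos_schemaless_insert() states. A sketch under the same assumptions as the earlier example (placeholder connection and database, hypothetical payload shaped like the docstring example in the Python connector below):

#include <stdio.h>
#include "taos.h"  /* assumes the TDengine client headers and libtaos are installed */

int main(void) {
  /* Placeholder connection parameters; the target database is assumed to exist. */
  TAOS *taos = taos_connect("localhost", "root", "taosdata", "test", 0);
  if (taos == NULL) {
    return 1;
  }

  /* The array holds a single JSON document when protocol is 2. */
  char *payload[] = {
      "{"
      "  \"metric\": \"cpu_load_0\","
      "  \"timestamp\": 1626006833610123,"
      "  \"value\": 55.5,"
      "  \"tags\": {\"host\": \"ubuntu\", \"interface\": \"eth0\", \"id\": \"tb0\"}"
      "}"
  };

  int code = taos_schemaless_insert(taos, payload, 1, 2);  /* 2 = OpenTSDB JSON protocol */
  if (code != 0) {
    printf("JSON payload insert failed, error code 0x%x\n", code);
  }

  taos_close(taos);
  return 0;
}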
src/connector/python/README.md

@@ -404,13 +404,13 @@ lines = [
     'st,t1=4i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"pass it again",c2=true,c4=5f64,c5=5f64,c6=7u64 1626006933640000000ns',
     'stf,t1=4i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"pass it again_stf",c2=false,c5=5f64,c6=7u64 1626006933641000000ns',
 ]
-conn.insert_lines(lines)
+conn.schemaless_insert(lines, 0)
 print("inserted")

 lines = [
     'stf,t1=5i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"pass it again_stf",c2=false,c5=5f64,c6=7u64 1626006933641000000ns',
 ]
-conn.insert_lines(lines)
+conn.schemaless_insert(lines, 0)

 result = conn.query("show tables")
 for row in result:
src/connector/python/examples/insert-lines.py

@@ -9,10 +9,10 @@ conn.select_db(dbname)
 lines = [
     'st,t1=3i64,t2=4f64,t3="t3" c1=3i64,c3=L"pass",c2=false,c4=4f64 1626006833639000000ns',
 ]
-conn.insert_lines(lines)
+conn.schemaless_insert(lines, 0)
 print("inserted")

-conn.insert_lines(lines)
+conn.schemaless_insert(lines, 0)

 result = conn.query("show tables")
 for row in result:
src/connector/python/taos/__init__.py

@@ -406,13 +406,13 @@ lines = [
 'st,t1=4i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"passitagin",c2=true,c4=5f64,c5=5f64,c6=7u64 1626006933640000000ns',
 'stf,t1=4i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"passitagin_stf",c2=false,c5=5f64,c6=7u64 1626006933641000000ns',
 ]
-conn.insert_lines(lines)
+conn.schemaless_insert(lines, 0)
 print("inserted")
 lines = [
 'stf,t1=5i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"passitagin_stf",c2=false,c5=5f64,c6=7u64 1626006933641000000ns',
 ]
-conn.insert_lines(lines)
+conn.schemaless_insert(lines, 0)
 result = conn.query("show tables")
 for row in result:
src/connector/python/taos/cinterface.py

@@ -809,40 +809,23 @@ def taos_stmt_use_result(stmt):
     return result

 try:
-    _libtaos.taos_insert_lines.restype = c_int
-    _libtaos.taos_insert_lines.argstype = c_void_p, c_void_p, c_int
+    _libtaos.taos_schemaless_insert.restype = c_int
+    _libtaos.taos_schemaless_insert.argstype = c_void_p, c_void_p, c_int
 except AttributeError:
-    print("WARNING: libtaos(%s) does not support insert_lines" % taos_get_client_info())
+    print("WARNING: libtaos(%s) does not support schemaless_insert" % taos_get_client_info())


-def taos_insert_lines(connection, lines):
+def taos_schemaless_insert(connection, lines, protocol):
     # type: (c_void_p, list[str] | tuple(str)) -> None
     num_of_lines = len(lines)
     lines = (c_char_p(line.encode("utf-8")) for line in lines)
     lines_type = ctypes.c_char_p * num_of_lines
     p_lines = lines_type(*lines)
-    errno = _libtaos.taos_insert_lines(connection, p_lines, num_of_lines)
+    errno = _libtaos.taos_schemaless_insert(connection, p_lines, num_of_lines, protocol)
     if errno != 0:
-        raise LinesError("insert lines error", errno)
-
-
-def taos_insert_telnet_lines(connection, lines):
-    # type: (c_void_p, list[str] | tuple(str)) -> None
-    num_of_lines = len(lines)
-    lines = (c_char_p(line.encode("utf-8")) for line in lines)
-    lines_type = ctypes.c_char_p * num_of_lines
-    p_lines = lines_type(*lines)
-    errno = _libtaos.taos_insert_telnet_lines(connection, p_lines, num_of_lines)
-    if errno != 0:
-        raise TelnetLinesError("insert telnet lines error", errno)
-
-
-def taos_insert_json_payload(connection, payload):
-    # type: (c_void_p, list[str] | tuple(str)) -> None
-    payload = payload.encode("utf-8")
-    errno = _libtaos.taos_insert_json_payload(connection, payload)
-    if errno != 0:
-        raise JsonPayloadError("insert json payload error", errno)
+        raise SchemalessError("schemaless insert error", errno)


 class CTaosInterface(object):
     def __init__(self, config=None):
浏览文件 @
25bddc0c
...
@@ -117,9 +117,10 @@ class TaosConnection(object):
...
@@ -117,9 +117,10 @@ class TaosConnection(object):
stream
=
taos_open_stream
(
self
.
_conn
,
sql
,
callback
,
stime
,
param
,
callback2
)
stream
=
taos_open_stream
(
self
.
_conn
,
sql
,
callback
,
stime
,
param
,
callback2
)
return
TaosStream
(
stream
)
return
TaosStream
(
stream
)
def
insert_lines
(
self
,
lines
):
def
schemaless_insert
(
self
,
lines
,
protocol
):
# type: (list[str]) -> None
# type: (list[str]) -> None
"""Line protocol and schemaless support
"""
1.Line protocol and schemaless support
## Example
## Example
...
@@ -131,34 +132,31 @@ class TaosConnection(object):
...
@@ -131,34 +132,31 @@ class TaosConnection(object):
lines = [
lines = [
'ste,t2=5,t3=L"ste" c1=true,c2=4,c3="string" 1626056811855516532',
'ste,t2=5,t3=L"ste" c1=true,c2=4,c3="string" 1626056811855516532',
]
]
conn.insert_lines(lines)
conn.schemaless_insert(lines, 0)
```
## Exception
```python
try:
conn.insert_lines(lines)
except SchemalessError as err:
print(err)
```
```
"""
return
taos_insert_lines
(
self
.
_conn
,
lines
)
def
insert_telnet_lines
(
self
,
lines
):
2.OpenTSDB telnet style API format support
"""OpenTSDB telnet style API format support
## Example
## Example
cpu_load 1626056811855516532ns 2.0f32 id="tb1",host="host0",interface="eth0"
import taos
conn = taos.connect()
conn.exec("drop database if exists test")
conn.select_db("test")
lines = [
'cpu_load 1626056811855516532ns 2.0f32 id="tb1",host="host0",interface="eth0"',
]
conn.schemaless_insert(lines, 1)
"""
return
taos_insert_telnet_lines
(
self
.
_conn
,
lines
)
def
insert_json_payload
(
self
,
payload
):
3.OpenTSDB HTTP JSON format support
"""OpenTSDB HTTP JSON format support
## Example
## Example
"{
import taos
conn = taos.connect()
conn.exec("drop database if exists test")
conn.select_db("test")
payload = ['''
{
"metric": "cpu_load_0",
"metric": "cpu_load_0",
"timestamp": 1626006833610123,
"timestamp": 1626006833610123,
"value": 55.5,
"value": 55.5,
...
@@ -168,10 +166,13 @@ class TaosConnection(object):
...
@@ -168,10 +166,13 @@ class TaosConnection(object):
"interface": "eth0",
"interface": "eth0",
"Id": "tb0"
"Id": "tb0"
}
}
}"
}
''']
conn.schemaless_insert(lines, 2)
"""
"""
return
taos_insert_json_payload
(
self
.
_conn
,
payload
)
return
taos_schemaless_insert
(
self
.
_conn
,
lines
,
protocol
)
def
cursor
(
self
):
def
cursor
(
self
):
# type: () -> TaosCursor
# type: () -> TaosCursor
...
...
src/connector/python/taos/error.py
浏览文件 @
25bddc0c
...
@@ -80,17 +80,7 @@ class ResultError(DatabaseError):
...
@@ -80,17 +80,7 @@ class ResultError(DatabaseError):
pass
pass
class
LinesError
(
DatabaseError
):
class
SchemalessError
(
DatabaseError
):
"""taos_insert_lines errors."""
"""taos_schemaless_insert errors."""
pass
class
TelnetLinesError
(
DatabaseError
):
"""taos_insert_telnet_lines errors."""
pass
class
JsonPayloadError
(
DatabaseError
):
"""taos_insert_json_payload errors."""
pass
pass
src/connector/python/tests/test_lines.py
浏览文件 @
25bddc0c
...
@@ -13,10 +13,10 @@ def conn():
...
@@ -13,10 +13,10 @@ def conn():
return
connect
()
return
connect
()
def
test_
insert_lines
(
conn
):
def
test_
schemaless_insert
(
conn
):
# type: (TaosConnection) -> None
# type: (TaosConnection) -> None
dbname
=
"pytest_taos_
insert_lines
"
dbname
=
"pytest_taos_
schemaless_insert
"
try
:
try
:
conn
.
execute
(
"drop database if exists %s"
%
dbname
)
conn
.
execute
(
"drop database if exists %s"
%
dbname
)
conn
.
execute
(
"create database if not exists %s precision 'us'"
%
dbname
)
conn
.
execute
(
"create database if not exists %s precision 'us'"
%
dbname
)
...
@@ -27,13 +27,13 @@ def test_insert_lines(conn):
...
@@ -27,13 +27,13 @@ def test_insert_lines(conn):
'st,t1=4i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"passitagin",c2=true,c4=5f64,c5=5f64,c6=7u64 1626006933640000000ns'
,
'st,t1=4i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"passitagin",c2=true,c4=5f64,c5=5f64,c6=7u64 1626006933640000000ns'
,
'stf,t1=4i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"passitagin_stf",c2=false,c5=5f64,c6=7u64 1626006933641000000ns'
,
'stf,t1=4i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"passitagin_stf",c2=false,c5=5f64,c6=7u64 1626006933641000000ns'
,
]
]
conn
.
insert_lines
(
lines
)
conn
.
schemaless_insert
(
lines
,
0
)
print
(
"inserted"
)
print
(
"inserted"
)
lines
=
[
lines
=
[
'stf,t1=5i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"passitagin_stf",c2=false,c5=5f64,c6=7u64 1626006933641000000ns'
,
'stf,t1=5i64,t3="t4",t2=5f64,t4=5f64 c1=3i64,c3=L"passitagin_stf",c2=false,c5=5f64,c6=7u64 1626006933641000000ns'
,
]
]
conn
.
insert_lines
(
lines
)
conn
.
schemaless_insert
(
lines
,
0
)
print
(
"inserted"
)
print
(
"inserted"
)
result
=
conn
.
query
(
"select * from st"
)
result
=
conn
.
query
(
"select * from st"
)
print
(
*
result
.
fields
)
print
(
*
result
.
fields
)
...
@@ -54,4 +54,4 @@ def test_insert_lines(conn):
...
@@ -54,4 +54,4 @@ def test_insert_lines(conn):
if
__name__
==
"__main__"
:
if
__name__
==
"__main__"
:
test_
insert_lines
(
connect
())
test_
schemaless_insert
(
connect
())
src/inc/taos.h

@@ -187,11 +187,7 @@ DLL_EXPORT void taos_close_stream(TAOS_STREAM *tstr);
 DLL_EXPORT int taos_load_table_info(TAOS *taos, const char* tableNameList);

-DLL_EXPORT int taos_insert_lines(TAOS* taos, char* lines[], int numLines);
+DLL_EXPORT int taos_schemaless_insert(TAOS* taos, char* lines[], int numLines, int protocol);
-
-DLL_EXPORT int taos_insert_telnet_lines(TAOS* taos, char* lines[], int numLines);
-
-DLL_EXPORT int taos_insert_json_payload(TAOS* taos, char* payload);

 #ifdef __cplusplus
 }
...
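The Python method shown earlier is a thin wrapper over this single C entry point: numLines is derived from the length of the list and protocol is passed through unchanged. A hedged sketch of the JSON path (protocol 2), reusing the payload shape from the connector docstring; the LINE/TELNET/JSON constants are my own shorthand, not part of the header:

from taos import connect

LINE, TELNET, JSON = 0, 1, 2   # assumed mapping, taken from the tests in this commit

conn = connect()
conn.select_db("test")   # assumes an existing database
payload = ['''
{
    "metric": "cpu_load_0",
    "timestamp": 1626006833610123,
    "value": 55.5,
    "tags": {
        "host": "host0",
        "interface": "eth0",
        "Id": "tb0"
    }
}
''']
# corresponds to taos_schemaless_insert(taos, lines, numLines=1, protocol=2) on the C side
conn.schemaless_insert(payload, JSON)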
src/inc/taoserror.h

@@ -112,6 +112,7 @@ int32_t* taosGetErrno();
 #define TSDB_CODE_TSC_INVALID_JSON_TYPE         TAOS_DEF_ERROR_CODE(0, 0x0222)  //"Invalid JSON data type")
 #define TSDB_CODE_TSC_INVALID_JSON_CONFIG       TAOS_DEF_ERROR_CODE(0, 0x0223)  //"Invalid JSON configuration")
 #define TSDB_CODE_TSC_VALUE_OUT_OF_RANGE        TAOS_DEF_ERROR_CODE(0, 0x0224)  //"Value out of range")
+#define TSDB_CODE_TSC_INVALID_PROTOCOL_TYPE     TAOS_DEF_ERROR_CODE(0, 0x0225)  //"Invalid line protocol type")

 // mnode
 #define TSDB_CODE_MND_MSG_NOT_PROCESSED         TAOS_DEF_ERROR_CODE(0, 0x0300)  //"Message not processed")
...
src/kit/taosdump/taosdump.c

@@ -1020,25 +1020,25 @@ static void dumpCreateMTableClause(
                     strcasecmp(tableDes->cols[counter].type, "nchar") == 0) {
                 //pstr += sprintf(pstr, ", \'%s\'", tableDes->cols[counter].note);
                 if (tableDes->cols[counter].var_value) {
-                    pstr += sprintf(pstr, ", %s",
+                    pstr += sprintf(pstr, ", \'%s\'",
                             tableDes->cols[counter].var_value);
                 } else {
-                    pstr += sprintf(pstr, ", %s", tableDes->cols[counter].value);
+                    pstr += sprintf(pstr, ", \'%s\'", tableDes->cols[counter].value);
                 }
             } else {
-                pstr += sprintf(pstr, ", %s", tableDes->cols[counter].value);
+                pstr += sprintf(pstr, ", \'%s\'", tableDes->cols[counter].value);
             }
         } else {
            if (strcasecmp(tableDes->cols[counter].type, "binary") == 0 ||
                    strcasecmp(tableDes->cols[counter].type, "nchar") == 0) {
                //pstr += sprintf(pstr, "\'%s\'", tableDes->cols[counter].note);
                if (tableDes->cols[counter].var_value) {
-                   pstr += sprintf(pstr, "%s", tableDes->cols[counter].var_value);
+                   pstr += sprintf(pstr, "\'%s\'", tableDes->cols[counter].var_value);
                } else {
-                   pstr += sprintf(pstr, "%s", tableDes->cols[counter].value);
+                   pstr += sprintf(pstr, "\'%s\'", tableDes->cols[counter].value);
                }
            } else {
-               pstr += sprintf(pstr, "%s", tableDes->cols[counter].value);
+               pstr += sprintf(pstr, "\'%s\'", tableDes->cols[counter].value);
            }
            /* pstr += sprintf(pstr, "%s", tableDes->cols[counter].note); */
        }
@@ -1149,6 +1149,10 @@ static int64_t dumpNormalTable(
         colCount = getTableDes(dbName, tbName, tableDes, false);

         if (colCount < 0) {
+            errorPrint("%s() LN%d, failed to get table[%s] schema\n",
+                    __func__, __LINE__, tbName);
             free(tableDes);
             return -1;
         }
@@ -1160,6 +1164,10 @@ static int64_t dumpNormalTable(
         colCount = getTableDes(dbName, tbName, tableDes, false);

         if (colCount < 0) {
+            errorPrint("%s() LN%d, failed to get table[%s] schema\n",
+                    __func__, __LINE__, tbName);
             free(tableDes);
             return -1;
         }
@@ -1172,20 +1180,21 @@ static int64_t dumpNormalTable(
     if (g_args.avro) {
         if (0 != convertTbDesToAvroSchema(
                     dbName, tbName, tableDes, colCount, &jsonAvroSchema)) {
+            errorPrint("%s() LN%d, convertTbDesToAvroSchema failed\n",
+                    __func__, __LINE__);
             freeTbDes(tableDes);
             return -1;
         }
     }

-    tfree(tableDes);

     int64_t ret = 0;
     if (!g_args.schemaonly) {
         ret = dumpTableData(fp, tbName, dbName, precision,
                 jsonAvroSchema);
     }

+    tfree(jsonAvroSchema);
+    freeTbDes(tableDes);
     return ret;
 }
@@ -1282,20 +1291,23 @@ static void *dumpNtbOfDb(void *arg) {
         return NULL;
     }

+    int64_t count;
     for (int64_t i = 0; i < pThreadInfo->tablesOfDumpOut; i++) {
         debugPrint("[%d] No.\t%" PRId64 " table name: %s\n",
                 pThreadInfo->threadIndex, i,
                 ((TableInfo *)(g_tablesList + pThreadInfo->tableFrom + i))->name);
-        dumpNormalTable(
+        count = dumpNormalTable(
                 pThreadInfo->dbName,
                 ((TableInfo *)(g_tablesList + pThreadInfo->tableFrom + i))->stable,
                 ((TableInfo *)(g_tablesList + pThreadInfo->tableFrom + i))->name,
                 pThreadInfo->precision,
                 fp);
+        if (count < 0) {
+            break;
+        }
     }

     fclose(fp);
     return NULL;
 }
@@ -1341,16 +1353,20 @@ static void *dumpNormalTablesOfStb(void *arg) {
     TAOS_ROW row = NULL;
     int64_t i = 0;
+    int64_t count;
     while ((row = taos_fetch_row(res)) != NULL) {
         debugPrint("[%d] sub table %" PRId64 ": name: %s\n",
                 pThreadInfo->threadIndex, i++, (char *)row[TSDB_SHOW_TABLES_NAME_INDEX]);

-        dumpNormalTable(
+        count = dumpNormalTable(
                 pThreadInfo->dbName,
                 pThreadInfo->stbName,
                 (char *)row[TSDB_SHOW_TABLES_NAME_INDEX],
                 pThreadInfo->precision,
                 fp);
+        if (count < 0) {
+            break;
+        }
     }

     fclose(fp);
@@ -2007,9 +2023,9 @@ static int getTableDes(
         if (row[TSDB_SHOW_TABLES_NAME_INDEX] == NULL) {
             sprintf(tableDes->cols[i].note, "%s", "NUL");
+            sprintf(tableDes->cols[i].value, "%s", "NULL");
             taos_free_result(res);
             res = NULL;
-            taos_close(taos);
             continue;
         }
@@ -2051,26 +2067,22 @@ static int getTableDes(
             int len = strlen((char *)row[0]);
             // FIXME for long value
             if (len < (COL_VALUEBUF_LEN - 2)) {
-                tableDes->cols[i].value[0] = '\'';
                 converStringToReadable(
                         (char *)row[0],
                         length[0],
-                        tableDes->cols[i].value + 1,
+                        tableDes->cols[i].value,
                         len);
-                tableDes->cols[i].value[len + 1] = '\'';
             } else {
-                tableDes->cols[i].var_value = calloc(1, len + 2);
+                tableDes->cols[i].var_value = calloc(1, len * 2);
                 if (tableDes->cols[i].var_value == NULL) {
                     errorPrint("%s() LN%d, memory alalocation failed!\n",
                             __func__, __LINE__);
                     taos_free_result(res);
                     return -1;
                 }
-                tableDes->cols[i].var_value[0] = '\'';
                 converStringToReadable((char *)row[0],
                         length[0],
-                        (char *)(tableDes->cols[i].var_value + 1), len);
-                tableDes->cols[i].var_value[len + 1] = '\'';
+                        (char *)(tableDes->cols[i].var_value), len);
             }
             break;
...
src/os/src/detail/osTime.c

@@ -411,8 +411,10 @@ int64_t convertTimePrecision(int64_t time, int32_t fromPrecision, int32_t toPrec
                     return time;
             }
         } //end from nano
-        default:
+        default: {
             assert(0);
+            return time;  // only to pass windows compilation
+        }
     } //end switch fromPrecision
 }
...
src/util/src/terror.c

@@ -120,6 +120,7 @@ TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_JSON,       "Invalid JSON format")
 TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_JSON_TYPE,      "Invalid JSON data type")
 TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_JSON_CONFIG,    "Invalid JSON configuration")
 TAOS_DEFINE_ERROR(TSDB_CODE_TSC_VALUE_OUT_OF_RANGE,     "Value out of range")
+TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_PROTOCOL_TYPE,  "Invalid line protocol type")

 // mnode
 TAOS_DEFINE_ERROR(TSDB_CODE_MND_MSG_NOT_PROCESSED,      "Message not processed")
...
tests/examples/c/apitest.c

@@ -980,40 +980,40 @@ int32_t verify_schema_less(TAOS* taos) {
       "stf,t1=4i64,t3=\"t4\",t2=5f64,t4=5f64 c1=3i64,c3=L\"passitagin_stf\",c2=false,c5=5f64,c6=7u64 1626006933641000000ns"
   };

-  code = taos_insert_lines(taos, lines, sizeof(lines)/sizeof(char*));
+  code = taos_schemaless_insert(taos, lines, sizeof(lines)/sizeof(char*), 0);

   char* lines2[] = {
       "stg,t1=3i64,t2=4f64,t3=\"t3\" c1=3i64,c3=L\"passit\",c2=false,c4=4f64 1626006833639000000ns",
       "stg,t1=4i64,t3=\"t4\",t2=5f64,t4=5f64 c1=3i64,c3=L\"passitagin\",c2=true,c4=5f64,c5=5f64 1626006833640000000ns"
   };
-  code = taos_insert_lines(taos, &lines2[0], 1);
-  code = taos_insert_lines(taos, &lines2[1], 1);
+  code = taos_schemaless_insert(taos, &lines2[0], 1, 0);
+  code = taos_schemaless_insert(taos, &lines2[1], 1, 0);

   char* lines3[] = {
       "sth,t1=4i64,t2=5f64,t4=5f64,ID=\"childtable\" c1=3i64,c3=L\"passitagin_stf\",c2=false,c5=5f64,c6=7u64 1626006933641ms",
       "sth,t1=4i64,t2=5f64,t4=5f64 c1=3i64,c3=L\"passitagin_stf\",c2=false,c5=5f64,c6=7u64 1626006933654ms"
   };
-  code = taos_insert_lines(taos, lines3, 2);
+  code = taos_schemaless_insert(taos, lines3, 2, 0);

   char* lines4[] = {
       "st123456,t1=3i64,t2=4f64,t3=\"t3\" c1=3i64,c3=L\"passit\",c2=false,c4=4f64 1626006833639000000ns",
       "dgtyqodr,t2=5f64,t3=L\"ste\" c1=tRue,c2=4i64,c3=\"iam\" 1626056811823316532ns"
   };
-  code = taos_insert_lines(taos, lines4, 2);
+  code = taos_schemaless_insert(taos, lines4, 2, 0);

   char* lines5[] = {
       "zqlbgs,id=\"zqlbgs_39302_21680\",t0=f,t1=127i8,t2=32767i16,t3=2147483647i32,t4=9223372036854775807i64,t5=11.12345f32,t6=22.123456789f64,t7=\"binaryTagValue\",t8=L\"ncharTagValue\" c0=f,c1=127i8,c2=32767i16,c3=2147483647i32,c4=9223372036854775807i64,c5=11.12345f32,c6=22.123456789f64,c7=\"binaryColValue\",c8=L\"ncharColValue\",c9=7u64 1626006833639000000ns",
       "zqlbgs,t9=f,id=\"zqlbgs_39302_21680\",t0=f,t1=127i8,t11=127i8,t2=32767i16,t3=2147483647i32,t4=9223372036854775807i64,t5=11.12345f32,t6=22.123456789f64,t7=\"binaryTagValue\",t8=L\"ncharTagValue\",t10=L\"ncharTagValue\" c10=f,c0=f,c1=127i8,c12=127i8,c2=32767i16,c3=2147483647i32,c4=9223372036854775807i64,c5=11.12345f32,c6=22.123456789f64,c7=\"binaryColValue\",c8=L\"ncharColValue\",c9=7u64,c11=L\"ncharColValue\" 1626006833639000000ns"
   };
-  code = taos_insert_lines(taos, &lines5[0], 1);
-  code = taos_insert_lines(taos, &lines5[1], 1);
+  code = taos_schemaless_insert(taos, &lines5[0], 1, 0);
+  code = taos_schemaless_insert(taos, &lines5[1], 1, 0);

   char* lines6[] = {
       "st123456,t1=3i64,t2=4f64,t3=\"t3\" c1=3i64,c3=L\"passit\",c2=false,c4=4f64 1626006833639000000ns",
       "dgtyqodr,t2=5f64,t3=L\"ste\" c1=tRue,c2=4i64,c3=\"iam\" 1626056811823316532ns"
   };
-  code = taos_insert_lines(taos, lines6, 2);
+  code = taos_schemaless_insert(taos, lines6, 2, 0);
   return (code);
}
...
tests/examples/c/schemaless.c

@@ -77,9 +77,9 @@ int main(int argc, char* argv[]) {
    }
    //shuffle(lines, numSuperTables * numChildTables * numRowsPerChildTable);

-   printf("%s\n", "begin taos_insert_lines");
+   printf("%s\n", "begin taos_schemaless_insert");
    int64_t begin = getTimeInUs();
-   int32_t code = taos_insert_lines(taos, lines, numSuperTables * numChildTables * numRowsPerChildTable);
+   int32_t code = taos_schemaless_insert(taos, lines, numSuperTables * numChildTables * numRowsPerChildTable, 0);
    int64_t end = getTimeInUs();
    printf("code: %d, %s. time used: %" PRId64 "\n", code, tstrerror(code), end - begin);
...
tests/pytest/crash_gen/valgrind_taos.supp

@@ -18231,3 +18231,20 @@
    obj:/usr/bin/python3.8
    obj:/usr/bin/python3.8
 }
+{
+   <insert_a_suppression_name_here>
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   obj:/usr/bin/python3.8
+   fun:_PyObject_MakeTpCall
+   fun:_PyEval_EvalFrameDefault
+   obj:/usr/bin/python3.8
+   fun:_PyObject_MakeTpCall
+   fun:_PyEval_EvalFrameDefault
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+   fun:_PyEval_EvalCodeWithName
+}
\ No newline at end of file
tests/pytest/fulltest.sh

@@ -48,7 +48,7 @@ python3 ./test.py -f table/del_stable.py

 #stable
 python3 ./test.py -f stable/insert.py
-#python3 test.py -f tools/taosdemoAllTest/taosdemoTestInsertWithJsonStmt.py
+python3 test.py -f tools/taosdemoAllTest/taosdemoTestInsertWithJsonStmt.py

 # tag
 python3 ./test.py -f tag_lite/filter.py
@@ -217,8 +217,9 @@ python3 ./test.py -f perfbenchmark/bug3433.py
 python3 ./test.py -f perfbenchmark/taosdemoInsert.py

 #taosdemo
-#python3 test.py -f tools/taosdemoAllTest/taosdemoTestInsertWithJson.py
+python3 test.py -f tools/taosdemoAllTest/taosdemoTestInsertWithJson.py
 python3 test.py -f tools/taosdemoAllTest/taosdemoTestQueryWithJson.py
+python3 test.py -f tools/taosdemoAllTest/taosdemoTestInsertAllType.py

 #query
 python3 test.py -f query/distinctOneColTb.py
...
tests/pytest/insert/insertJSONPayload.py

The change is mechanical throughout this test: every `payload = '''...'''` string literal becomes a one-element list (`payload = ['''...''']`), every call `code = self._conn.insert_json_payload(payload)` becomes `code = self._conn.schemaless_insert(payload, 2)`, and every `print("insert_json_payload result {}".format(code))` becomes `print("schemaless_insert result {}".format(code))`. The surrounding assertions are untouched: describe/checkRows(6) on _stb_0_ (step0); the metric value type checks on stb0_0 (BIGINT), stb0_1 (BOOL), stb0_2 (BOOL), stb0_3 (BINARY), stb0_4 (DOUBLE), stb0_5 (DOUBLE) in step1; the timestamp-0 case stb0_6 in step2; the default tag numeric types stb0_8 (BIGINT), stb0_9 = 123.00 (DOUBLE), stb0_10 = 123E-1 (DOUBLE) in step3; and in step4 the nested timestamps stb1_0 (2021-07-11 20:33:53.000000), stb1_1 (.610000), stb1_2 (.610123), stb1_3 (.610123), stb1_4 ("now"), the nested metric value types stb2_0 (BOOL), stb2_1 (TINYINT), stb2_2 (SMALLINT), stb2_3 (INT), stb2_4 (BIGINT), stb2_5 (FLOAT), stb2_6 (DOUBLE), stb2_7 (BINARY), stb2_8 (NCHAR), and the tag value check on stb3_0 (checkData(2, 1, "BOOL")). Representative and distinctive hunks:

@@ -33,7 +33,7 @@ class TDTestCase:
         ### Default format ###
         ### metric ###
         print("============= step0 : test metric ================")
-        payload = '''
+        payload = ['''
         {
             "metric": ".stb.0.",
             "timestamp": 1626006833610123,
@@ -45,16 +45,16 @@ class TDTestCase:
                 "t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
             }
         }
-        '''
-        code = self._conn.insert_json_payload(payload)
-        print("insert_json_payload result {}".format(code))
+        ''']
+        code = self._conn.schemaless_insert(payload, 2)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("describe _stb_0_")
         tdSql.checkRows(6)

The hunk at @@ -183,37 +183,14 @@ additionally removes the duplicated-ID tag test (supertable stb0_7):

@@ -183,37 +183,14 @@ class TDTestCase:
                 "t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
             }
         }
-        '''
-        code = self._conn.insert_json_payload(payload)
-        print("insert_json_payload result {}".format(code))
+        ''']
+        code = self._conn.schemaless_insert(payload, 2)
+        print("schemaless_insert result {}".format(code))

         print("============= step3 : test tags ================")
-        ### ID ###
-        payload = '''
-        {
-            "metric": "stb0_7",
-            "timestamp": 0,
-            "value": 123,
-            "tags": {
-                "ID": "tb0_7",
-                "t1": true,
-                "iD": "tb000",
-                "t2": false,
-                "t3": 10,
-                "t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>",
-                "id": "tb555"
-            }
-        }
-        '''
-        code = self._conn.insert_json_payload(payload)
-        print("insert_json_payload result {}".format(code))
-        tdSql.query("select tbname from stb0_7")
-        tdSql.checkData(0, 0, "tb0_7")
-
         ### Default tag numeric types ###
-        payload = '''
+        payload = ['''
         {
             "metric": "stb0_8",
             "timestamp": 0,

The hunk at @@ -326,19 +303,19 @@ also changes the nanosecond timestamp from a float literal to an integer:

@@ -326,19 +303,19 @@ class TDTestCase:
                 "t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
             }
         }
-        '''
-        code = self._conn.insert_json_payload(payload)
-        print("insert_json_payload result {}".format(code))
+        ''']
+        code = self._conn.schemaless_insert(payload, 2)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select ts from stb1_2")
         tdSql.checkData(0, 0, "2021-07-11 20:33:53.610123")

         #nanoseconds
-        payload = '''
+        payload = ['''
         {
             "metric": "stb1_3",
             "timestamp": {
-                "value": 1.6260068336101233e+18,
+                "value": 1626006833610123321,
                 "type": "ns"
             },
             "value": 10,

The remaining hunks (@@ -66 through @@ -171, @@ -222 through @@ -265, @@ -280 through @@ -655) apply exactly the same three-line substitution around the payload blocks listed above.
tests/pytest/insert/insertTelnetLines.py

@@ -39,8 +39,8 @@ class TDTestCase:
             ".stb0.3. 1626006833639000000ns 4i8 host=\"host0\" interface=\"eth0\"",
         ]
-        code = self._conn.insert_telnet_lines(lines0)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines0, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("show stables")
         tdSql.checkRows(4)
@@ -68,8 +68,8 @@ class TDTestCase:
             "stb1 0 6i8 host=\"host0\"",
         ]
-        code = self._conn.insert_telnet_lines(lines1)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines1, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb1")
         tdSql.checkRows(6)
@@ -82,8 +82,8 @@ class TDTestCase:
             "stb2_0 1626006833651ms -127i8 host=\"host0\"",
             "stb2_0 1626006833652ms 127i8 host=\"host0\""
         ]
-        code = self._conn.insert_telnet_lines(lines2_0)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines2_0, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb2_0")
         tdSql.checkRows(2)
@@ -97,8 +97,8 @@ class TDTestCase:
             "stb2_1 1626006833651ms -32767i16 host=\"host0\"",
             "stb2_1 1626006833652ms 32767i16 host=\"host0\""
         ]
-        code = self._conn.insert_telnet_lines(lines2_1)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines2_1, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb2_1")
         tdSql.checkRows(2)
@@ -113,8 +113,8 @@ class TDTestCase:
             "stb2_2 1626006833652ms 2147483647i32 host=\"host0\""
         ]
-        code = self._conn.insert_telnet_lines(lines2_2)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines2_2, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb2_2")
         tdSql.checkRows(2)
@@ -130,8 +130,8 @@ class TDTestCase:
             "stb2_3 1626006833662ms 9223372036854775807 host=\"host0\""
         ]
-        code = self._conn.insert_telnet_lines(lines2_3)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines2_3, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb2_3")
         tdSql.checkRows(3)
@@ -154,8 +154,8 @@ class TDTestCase:
             "stb2_4 1626006833710ms -3.4E38f32 host=\"host0\""
         ]
-        code = self._conn.insert_telnet_lines(lines2_4)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines2_4, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb2_4")
         tdSql.checkRows(10)
@@ -179,8 +179,8 @@ class TDTestCase:
             "stb2_5 1626006833710ms 3.15 host=\"host0\""
         ]
-        code = self._conn.insert_telnet_lines(lines2_5)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines2_5, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb2_5")
         tdSql.checkRows(11)
@@ -203,8 +203,8 @@ class TDTestCase:
             "stb2_6 1626006833700ms FALSE host=\"host0\""
         ]
-        code = self._conn.insert_telnet_lines(lines2_6)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines2_6, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb2_6")
         tdSql.checkRows(10)
@@ -220,8 +220,8 @@ class TDTestCase:
             "stb2_7 1626006833630ms \"binary_val.()[]{}<>\" host=\"host0\""
         ]
-        code = self._conn.insert_telnet_lines(lines2_7)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines2_7, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb2_7")
         tdSql.checkRows(3)
@@ -236,8 +236,8 @@ class TDTestCase:
             "stb2_8 1626006833620ms L\"nchar_val数值二\" host=\"host0\""
         ]
-        code = self._conn.insert_telnet_lines(lines2_8)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines2_8, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb2_8")
         tdSql.checkRows(2)
@@ -254,8 +254,8 @@ class TDTestCase:
             "stb3_0 1626006833610ms 2 t1=-127i8 t2=-32767i16 t3=-2147483647i32 t4=-9223372036854775807i64 t5=-3.4E38f32 t6=-1.7E308f64 t7=false t8=\"binary_val_2\" t9=L\"标签值2\""
         ]
-        code = self._conn.insert_telnet_lines(lines3_0)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines3_0, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb3_0")
         tdSql.checkRows(2)
@@ -298,8 +298,8 @@ class TDTestCase:
             "stb3_1 1626006833610ms 3 ID=\"child_table3\" host=\"host3\""
         ]
-        code = self._conn.insert_telnet_lines(lines3_1)
-        print("insert_telnet_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines3_1, 1)
+        print("schemaless_insert result {}".format(code))
         tdSql.query("select * from stb3_1")
         tdSql.checkRows(3)
...
tests/pytest/insert/line_insert.py

@@ -42,18 +42,18 @@ class TDTestCase:
                  "stf,t1=4i64,t3=\"t4\",t2=5f64,t4=5f64 c1=3i64,c3=L\"passitagin_stf\",c2=false,c5=5f64,c6=7u64 1626006933641000000ns"
                 ]
-        code = self._conn.insert_lines(lines)
-        print("insert_lines result {}".format(code))
+        code = self._conn.schemaless_insert(lines, 0)
+        print("schemaless_insert result {}".format(code))

         lines2 = [ "stg,t1=3i64,t2=4f64,t3=\"t3\" c1=3i64,c3=L\"passit\",c2=false,c4=4f64 1626006833639000000ns",
                    "stg,t1=4i64,t3=\"t4\",t2=5f64,t4=5f64 c1=3i64,c3=L\"passitagin\",c2=true,c4=5f64,c5=5f64 1626006833640000000ns"
                  ]
-        code = self._conn.insert_lines([lines2[0]])
-        print("insert_lines result {}".format(code))
+        code = self._conn.schemaless_insert([lines2[0]], 0)
+        print("schemaless_insert result {}".format(code))

-        self._conn.insert_lines([lines2[1]])
-        print("insert_lines result {}".format(code))
+        self._conn.schemaless_insert([lines2[1]], 0)
+        print("schemaless_insert result {}".format(code))

         tdSql.query("select * from st")
         tdSql.checkRows(4)
@@ -73,10 +73,10 @@ class TDTestCase:
         tdSql.query("describe stf")
         tdSql.checkData(2, 2, 14)

-        self._conn.insert_lines([
+        self._conn.schemaless_insert([
             "sth,t1=4i64,t2=5f64,t4=5f64,ID=\"childtable\" c1=3i64,c3=L\"passitagin_stf\",c2=false,c5=5f64,c6=7u64 1626006933641ms",
             "sth,t1=4i64,t2=5f64,t4=5f64 c1=3i64,c3=L\"passitagin_stf\",c2=false,c5=5f64,c6=7u64 1626006933654ms"
-            ])
+            ], 0)
         tdSql.execute('reset query cache')
         tdSql.query('select tbname, * from sth')
...
tests/pytest/insert/openTsdbTelnetLinesInsert.py
浏览文件 @
25bddc0c
...
@@ -13,7 +13,7 @@
...
@@ -13,7 +13,7 @@
import
traceback
import
traceback
import
random
import
random
from
taos.error
import
TelnetLine
sError
from
taos.error
import
Schemales
sError
import
time
import
time
import
numpy
as
np
import
numpy
as
np
from
util.log
import
*
from
util.log
import
*
...
@@ -282,7 +282,7 @@ class TDTestCase:
...
@@ -282,7 +282,7 @@ class TDTestCase:
def
resCmp
(
self
,
input_sql
,
stb_name
,
query_sql
=
"select * from"
,
condition
=
""
,
ts
=
None
,
id
=
True
,
none_check_tag
=
None
):
def
resCmp
(
self
,
input_sql
,
stb_name
,
query_sql
=
"select * from"
,
condition
=
""
,
ts
=
None
,
id
=
True
,
none_check_tag
=
None
):
expect_list
=
self
.
inputHandle
(
input_sql
)
expect_list
=
self
.
inputHandle
(
input_sql
)
self
.
_conn
.
insert_telnet_lines
([
input_sql
]
)
self
.
_conn
.
schemaless_insert
([
input_sql
],
1
)
query_sql
=
f
"
{
query_sql
}
{
stb_name
}
{
condition
}
"
query_sql
=
f
"
{
query_sql
}
{
stb_name
}
{
condition
}
"
res_row_list
,
res_field_list_without_ts
,
res_type_list
=
self
.
resHandle
(
query_sql
,
True
)
res_row_list
,
res_field_list_without_ts
,
res_type_list
=
self
.
resHandle
(
query_sql
,
True
)
if
ts
==
0
:
if
ts
==
0
:
...
@@ -389,13 +389,13 @@ class TDTestCase:
...
@@ -389,13 +389,13 @@ class TDTestCase:
"""
"""
for
input_sql
in
[
self
.
genLongSql
(
128
)[
0
]]:
for
input_sql
in
[
self
.
genLongSql
(
128
)[
0
]]:
tdCom
.
cleanTb
()
tdCom
.
cleanTb
()
self
.
_conn
.
insert_telnet_lines
([
input_sql
]
)
self
.
_conn
.
schemaless_insert
([
input_sql
],
1
)
for
input_sql
in
[
self
.
genLongSql
(
129
)[
0
]]:
for
input_sql
in
[
self
.
genLongSql
(
129
)[
0
]]:
tdCom
.
cleanTb
()
tdCom
.
cleanTb
()
try
:
try
:
self
.
_conn
.
insert_telnet_lines
([
input_sql
]
)
self
.
_conn
.
schemaless_insert
([
input_sql
],
1
)
raise
Exception
(
"should not reach here"
)
raise
Exception
(
"should not reach here"
)
except
TelnetLine
sError
as
err
:
except
Schemales
sError
as
err
:
tdSql
.
checkNotEqual
(
err
.
errno
,
0
)
tdSql
.
checkNotEqual
(
err
.
errno
,
0
)
def
idIllegalNameCheckCase
(
self
):
def
idIllegalNameCheckCase
(
self
):
...
@@ -408,9 +408,9 @@ class TDTestCase:
...
@@ -408,9 +408,9 @@ class TDTestCase:
for
i
in
rstr
:
for
i
in
rstr
:
input_sql
=
self
.
genFullTypeSql
(
tb_name
=
f
"
\"
aaa
{
i
}
bbb
\"
"
)[
0
]
input_sql
=
self
.
genFullTypeSql
(
tb_name
=
f
"
\"
aaa
{
i
}
bbb
\"
"
)[
0
]
try
:
try
:
self
.
_conn
.
insert_telnet_lines
([
input_sql
]
)
self
.
_conn
.
schemaless_insert
([
input_sql
],
1
)
raise
Exception
(
"should not reach here"
)
raise
Exception
(
"should not reach here"
)
except
TelnetLine
sError
as
err
:
except
Schemales
sError
as
err
:
tdSql
.
checkNotEqual
(
err
.
errno
,
0
)
tdSql
.
checkNotEqual
(
err
.
errno
,
0
)
def
idStartWithNumCheckCase
(
self
):
def
idStartWithNumCheckCase
(
self
):
...
@@ -420,9 +420,9 @@ class TDTestCase:
...
@@ -420,9 +420,9 @@ class TDTestCase:
tdCom
.
cleanTb
()
tdCom
.
cleanTb
()
input_sql
=
self
.
genFullTypeSql
(
tb_name
=
f
"
\"
1aaabbb
\"
"
)[
0
]
input_sql
=
self
.
genFullTypeSql
(
tb_name
=
f
"
\"
1aaabbb
\"
"
)[
0
]
try
:
try
:
self
.
_conn
.
insert_telnet_lines
([
input_sql
]
)
self
.
_conn
.
schemaless_insert
([
input_sql
],
1
)
raise
Exception
(
"should not reach here"
)
raise
Exception
(
"should not reach here"
)
except
TelnetLine
sError
as
err
:
except
Schemales
sError
as
err
:
tdSql
.
checkNotEqual
(
err
.
errno
,
0
)
tdSql
.
checkNotEqual
(
err
.
errno
,
0
)
def
nowTsCheckCase
(
self
):
def
nowTsCheckCase
(
self
):
...
@@ -432,9 +432,9 @@ class TDTestCase:
...
@@ -432,9 +432,9 @@ class TDTestCase:
         tdCom.cleanTb()
         input_sql = self.genFullTypeSql(ts="now")[0]
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def dateFormatTsCheckCase(self):
@@ -444,9 +444,9 @@ class TDTestCase:
         tdCom.cleanTb()
         input_sql = self.genFullTypeSql(ts="2021-07-21\ 19:01:46.920")[0]
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def illegalTsCheckCase(self):
@@ -456,9 +456,9 @@ class TDTestCase:
         tdCom.cleanTb()
         input_sql = self.genFullTypeSql(ts="16260068336390us19")[0]
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def tagValueLengthCheckCase(self):
@@ -473,9 +473,9 @@ class TDTestCase:
         for t1 in ["-128i8", "128i8"]:
             input_sql = self.genFullTypeSql(t1=t1)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         #i16
@@ -485,9 +485,9 @@ class TDTestCase:
         for t2 in ["-32768i16", "32768i16"]:
             input_sql = self.genFullTypeSql(t2=t2)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         #i32
@@ -497,9 +497,9 @@ class TDTestCase:
         for t3 in ["-2147483648i32", "2147483648i32"]:
             input_sql = self.genFullTypeSql(t3=t3)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         #i64
@@ -509,9 +509,9 @@ class TDTestCase:
         for t4 in ["-9223372036854775808i64", "9223372036854775808i64"]:
             input_sql = self.genFullTypeSql(t4=t4)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # f32
@@ -522,9 +522,9 @@ class TDTestCase:
         for t5 in [f"{-3.4028234664*(10**38)}f32", f"{3.4028234664*(10**38)}f32"]:
             input_sql = self.genFullTypeSql(t5=t5)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)
@@ -536,33 +536,33 @@ class TDTestCase:
         for t6 in [f'{-1.797693134862316*(10**308)}f64', f'{-1.797693134862316*(10**308)}f64']:
             input_sql = self.genFullTypeSql(t6=t6)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # binary
         stb_name = tdCom.getLongName(7, "letters")
         input_sql = f'{stb_name} 1626006833639000000ns t t0=t t1="{tdCom.getLongName(16374, "letters")}"'
-        self._conn.insert_telnet_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 1)

         input_sql = f'{stb_name} 1626006833639000000ns t t0=t t1="{tdCom.getLongName(16375, "letters")}"'
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

         # nchar
         # * legal nchar could not be larger than 16374/4
         stb_name = tdCom.getLongName(7, "letters")
         input_sql = f'{stb_name} 1626006833639000000ns t t0=t t1=L"{tdCom.getLongName(4093, "letters")}"'
-        self._conn.insert_telnet_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 1)

         input_sql = f'{stb_name} 1626006833639000000ns t t0=t t1=L"{tdCom.getLongName(4094, "letters")}"'
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def colValueLengthCheckCase(self):
@@ -578,9 +578,9 @@ class TDTestCase:
         for value in ["-128i8", "128i8"]:
             input_sql = self.genFullTypeSql(value=value)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # i16
         tdCom.cleanTb()
@@ -591,9 +591,9 @@ class TDTestCase:
         for value in ["-32768i16", "32768i16"]:
             input_sql = self.genFullTypeSql(value=value)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # i32
@@ -605,9 +605,9 @@ class TDTestCase:
         for value in ["-2147483648i32", "2147483648i32"]:
             input_sql = self.genFullTypeSql(value=value)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # i64
@@ -619,9 +619,9 @@ class TDTestCase:
         for value in ["-9223372036854775808i64", "9223372036854775808i64"]:
             input_sql = self.genFullTypeSql(value=value)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # f32
@@ -634,9 +634,9 @@ class TDTestCase:
         for value in [f"{-3.4028234664*(10**38)}f32", f"{3.4028234664*(10**38)}f32"]:
             input_sql = self.genFullTypeSql(value=value)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # f64
@@ -649,23 +649,23 @@ class TDTestCase:
         for value in [f'{-1.797693134862316*(10**308)}f64', f'{-1.797693134862316*(10**308)}f64']:
             input_sql = self.genFullTypeSql(value=value)[0]
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # # binary
         tdCom.cleanTb()
         stb_name = tdCom.getLongName(7, "letters")
         input_sql = f'{stb_name} 1626006833639000000ns "{tdCom.getLongName(16374, "letters")}" t0=t'
-        self._conn.insert_telnet_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 1)

         tdCom.cleanTb()
         input_sql = f'{stb_name} 1626006833639000000ns "{tdCom.getLongName(16375, "letters")}" t0=t'
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

         # nchar
@@ -673,14 +673,14 @@ class TDTestCase:
         tdCom.cleanTb()
         stb_name = tdCom.getLongName(7, "letters")
         input_sql = f'{stb_name} 1626006833639000000ns L"{tdCom.getLongName(4093, "letters")}" t0=t'
-        self._conn.insert_telnet_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 1)

         tdCom.cleanTb()
         input_sql = f'{stb_name} 1626006833639000000ns L"{tdCom.getLongName(4094, "letters")}" t0=t'
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def tagColIllegalValueCheckCase(self):
@@ -706,9 +706,9 @@ class TDTestCase:
                           self.genFullTypeSql(t6="11.1s45f64")[0],
                           ]:
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # check binary and nchar blank
@@ -718,9 +718,9 @@ class TDTestCase:
         input_sql4 = f'{tdCom.getLongName(7, "letters")} 1626006833639000000ns t t0=L"abc aaa"'
         for input_sql in [input_sql1, input_sql2, input_sql3, input_sql4]:
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # check accepted binary and nchar symbols
@@ -728,8 +728,8 @@ class TDTestCase:
         for symbol in list('~!@#$¥%^&*()-+={}|[]、「」:;'):
             input_sql1 = f'{tdCom.getLongName(7, "letters")} 1626006833639000000ns "abc{symbol}aaa" t0=t'
             input_sql2 = f'{tdCom.getLongName(7, "letters")} 1626006833639000000ns t t0=t t1="abc{symbol}aaa"'
-            self._conn.insert_telnet_lines([input_sql1])
+            self._conn.schemaless_insert([input_sql1], 1)
-            self._conn.insert_telnet_lines([input_sql2])
+            self._conn.schemaless_insert([input_sql2], 1)

     def blankCheckCase(self):
        '''
@@ -748,9 +748,9 @@ class TDTestCase:
                           f'{tdCom.getLongName(7, "letters")} 1626006833639000000ns L"abaaa" t0=L"abcaaa3"']
         for input_sql in input_sql_list:
             try:
-                self._conn.insert_telnet_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 1)
                 raise Exception("should not reach here")
-            except TelnetLinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

     def duplicateIdTagColInsertCheckCase(self):
@@ -760,17 +760,17 @@ class TDTestCase:
         tdCom.cleanTb()
         input_sql_id = self.genFullTypeSql(id_double_tag=True)[0]
         try:
-            self._conn.insert_telnet_lines([input_sql_id])
+            self._conn.schemaless_insert([input_sql_id], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

         input_sql = self.genFullTypeSql()[0]
         input_sql_tag = input_sql.replace("t5", "t6")
         try:
-            self._conn.insert_telnet_lines([input_sql_tag])
+            self._conn.schemaless_insert([input_sql_tag], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

         ##### stb exist #####
@@ -794,7 +794,7 @@ class TDTestCase:
         tdCom.cleanTb()
         input_sql, stb_name = self.genFullTypeSql()
         self.resCmp(input_sql, stb_name)
-        self._conn.insert_telnet_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 1)
         self.resCmp(input_sql, stb_name)

     def tagColBinaryNcharLengthCheckCase(self):
@@ -865,7 +865,7 @@ class TDTestCase:
         tdSql.checkRows(1)
         tdSql.checkEqual(tb_name1, tb_name2)
         input_sql, stb_name = self.genFullTypeSql(stb_name=stb_name, t0="f", value="f", id_noexist_tag=True, t_add_tag=True)
-        self._conn.insert_telnet_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 1)
         tb_name3 = self.getNoIdTbName(stb_name)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(2)
@@ -881,19 +881,19 @@ class TDTestCase:
         tb_name = f'{stb_name}_1'
         input_sql = f'{stb_name} 1626006833639000000ns f id="{tb_name}" t0=t'
-        self._conn.insert_telnet_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 1)
         # * every binary and nchar must be length+2, so here is two tag, max length could not larger than 16384-2*2
         input_sql = f'{stb_name} 1626006833639000000ns f t0=t t1="{tdCom.getLongName(16374, "letters")}" t2="{tdCom.getLongName(5, "letters")}"'
-        self._conn.insert_telnet_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 1)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(2)
         input_sql = f'{stb_name} 1626006833639000000ns f t0=t t1="{tdCom.getLongName(16374, "letters")}" t2="{tdCom.getLongName(6, "letters")}"'
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(2)
@@ -907,18 +907,18 @@ class TDTestCase:
         stb_name = tdCom.getLongName(7, "letters")
         tb_name = f'{stb_name}_1'
         input_sql = f'{stb_name} 1626006833639000000ns f id="{tb_name}" t0=t'
-        self._conn.insert_telnet_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 1)
         # * legal nchar could not be larger than 16374/4
         input_sql = f'{stb_name} 1626006833639000000ns f t0=t t1=L"{tdCom.getLongName(4093, "letters")}" t2=L"{tdCom.getLongName(1, "letters")}"'
-        self._conn.insert_telnet_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 1)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(2)
         input_sql = f'{stb_name} 1626006833639000000ns f t0=t t1=L"{tdCom.getLongName(4093, "letters")}" t2=L"{tdCom.getLongName(2, "letters")}"'
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(2)
@@ -941,7 +941,7 @@ class TDTestCase:
                  "st123456 1626006933640000000ns 8i64 t1=4i64 t3=\"t4\" t2=5f64 t4=5f64",
                  "st123456 1626006933641000000ns 9i64 t1=4i64 t3=\"t4\" t2=5f64 t4=5f64"
                  ]
-        self._conn.insert_telnet_lines(lines)
+        self._conn.schemaless_insert(lines, 1)
         tdSql.query('show stables')
         tdSql.checkRows(3)
         tdSql.query('show tables')
@@ -960,7 +960,7 @@ class TDTestCase:
         for i in range(count):
             input_sql = self.genFullTypeSql(stb_name=stb_name, t7=f'"{tdCom.getLongName(8, "letters")}"', value=f'"{tdCom.getLongName(8, "letters")}"', id_noexist_tag=True)[0]
             sql_list.append(input_sql)
-        self._conn.insert_telnet_lines(sql_list)
+        self._conn.schemaless_insert(sql_list, 1)
         tdSql.query('show tables')
         tdSql.checkRows(count)
@@ -973,9 +973,9 @@ class TDTestCase:
         lines = ["st123456 1626006833639000000ns 3i 64 t1=3i64 t2=4f64 t3=\"t3\"",
                  f"{stb_name} 1626056811823316532ns tRue t2=5f64 t3=L\"ste\""]
         try:
-            self._conn.insert_telnet_lines(lines)
+            self._conn.schemaless_insert(lines, 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def multiColsInsertCheckCase(self):
@@ -985,9 +985,9 @@ class TDTestCase:
         tdCom.cleanTb()
         input_sql = self.genFullTypeSql(t_multi_tag=True)[0]
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def blankColInsertCheckCase(self):
@@ -997,9 +997,9 @@ class TDTestCase:
         tdCom.cleanTb()
         input_sql = self.genFullTypeSql(c_blank_tag=True)[0]
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def blankTagInsertCheckCase(self):
@@ -1009,9 +1009,9 @@ class TDTestCase:
         tdCom.cleanTb()
         input_sql = self.genFullTypeSql(t_blank_tag=True)[0]
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def chineseCheckCase(self):
@@ -1029,9 +1029,9 @@ class TDTestCase:
         tdCom.cleanTb()
         input_sql = self.genFullTypeSql(multi_field_tag=True)[0]
         try:
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
             raise Exception("should not reach here")
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def errorTypeCheckCase(self):
@@ -1120,7 +1120,7 @@ class TDTestCase:
     def genMultiThreadSeq(self, sql_list):
         tlist = list()
         for insert_sql in sql_list:
-            t = threading.Thread(target=self._conn.insert_telnet_lines, args=([insert_sql[0]],))
+            t = threading.Thread(target=self._conn.schemaless_insert, args=([insert_sql[0]], 1))
             tlist.append(t)
         return tlist
@@ -1320,8 +1320,8 @@ class TDTestCase:
     def test(self):
         try:
             input_sql = f'test_nchar 0 L"涛思数据" t0=f t1=L"涛思数据" t2=32767i16 t3=2147483647i32 t4=9223372036854775807i64 t5=11.12345f32 t6=22.123456789f64'
-            self._conn.insert_telnet_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 1)
-        except TelnetLinesError as err:
+        except SchemalessError as err:
             print(err.errno)

     def runAll(self):
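Note: the change above is mechanical throughout the OpenTSDB telnet test file (openTsdbTelnetLinesInsert.py): every call to insert_telnet_lines(lines) becomes schemaless_insert(lines, 1), and TelnetLinesError becomes SchemalessError. A minimal sketch of the updated call shape, assuming a taos connection object exposing the schemaless_insert(lines, protocol) signature used in this diff (the protocol value 1 for telnet lines is taken from the diff itself, not from separate documentation):

# Sketch only: conn is assumed to be a taos connection exposing the new
# schemaless_insert(lines, protocol) API that this diff migrates to.
from taos.error import SchemalessError

def insert_telnet(conn, lines):
    # lines is a list of OpenTSDB telnet strings, e.g.
    # 'st123456 1626006933640000000ns 8i64 t1=4i64 t2=5f64'
    try:
        conn.schemaless_insert(lines, 1)  # 1 selects the telnet protocol, per this diff
    except SchemalessError as err:
        # the tests above assert err.errno != 0 for rejected input
        print("schemaless insert failed, errno:", err.errno)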
tests/pytest/insert/schemalessInsert.py  View file @ 25bddc0c
@@ -14,7 +14,7 @@
 import traceback
 import random
 import string
-from taos.error import LinesError
+from taos.error import SchemalessError
 import time
 from copy import deepcopy
 import numpy as np
@@ -294,7 +294,7 @@ class TDTestCase:
     def resCmp(self, input_sql, stb_name, query_sql="select * from", condition="", ts=None, id=True, none_check_tag=None):
         expect_list = self.inputHandle(input_sql)
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         query_sql = f"{query_sql} {stb_name} {condition}"
         res_row_list, res_field_list_without_ts, res_type_list = self.resHandle(query_sql, True)
         if ts == 0:
@@ -409,12 +409,12 @@ class TDTestCase:
        """
         for input_sql in [self.genLongSql(128, 1)[0], self.genLongSql(1, 4094)[0]]:
             self.cleanStb()
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
         for input_sql in [self.genLongSql(129, 1)[0], self.genLongSql(1, 4095)[0]]:
             self.cleanStb()
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
-            except LinesError:
+            except SchemalessError:
                 pass

     def idIllegalNameCheckCase(self):
@@ -427,8 +427,8 @@ class TDTestCase:
         for i in rstr:
             input_sql = self.genFullTypeSql(tb_name=f"\"aaa{i}bbb\"")[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
-            except LinesError:
+            except SchemalessError:
                 pass

     def idStartWithNumCheckCase(self):
@@ -438,8 +438,8 @@ class TDTestCase:
         self.cleanStb()
         input_sql = self.genFullTypeSql(tb_name=f"\"1aaabbb\"")[0]
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
-        except LinesError:
+        except SchemalessError:
             pass

     def nowTsCheckCase(self):
@@ -449,8 +449,8 @@ class TDTestCase:
         self.cleanStb()
         input_sql = self.genFullTypeSql(ts="now")[0]
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
-        except LinesError:
+        except SchemalessError:
             pass

     def dateFormatTsCheckCase(self):
@@ -460,8 +460,8 @@ class TDTestCase:
         self.cleanStb()
         input_sql = self.genFullTypeSql(ts="2021-07-21\ 19:01:46.920")[0]
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
-        except LinesError:
+        except SchemalessError:
             pass

     def illegalTsCheckCase(self):
@@ -471,8 +471,8 @@ class TDTestCase:
         self.cleanStb()
         input_sql = self.genFullTypeSql(ts="16260068336390us19")[0]
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
-        except LinesError:
+        except SchemalessError:
             pass

     def tagValueLengthCheckCase(self):
@@ -487,8 +487,8 @@ class TDTestCase:
         for t1 in ["-128i8", "128i8"]:
             input_sql = self.genFullTypeSql(t1=t1)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
-            except LinesError:
+            except SchemalessError:
                 pass

         #i16
@@ -498,8 +498,8 @@ class TDTestCase:
         for t2 in ["-32768i16", "32768i16"]:
             input_sql = self.genFullTypeSql(t2=t2)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
-            except LinesError:
+            except SchemalessError:
                 pass

         #i32
@@ -509,8 +509,8 @@ class TDTestCase:
         for t3 in ["-2147483648i32", "2147483648i32"]:
             input_sql = self.genFullTypeSql(t3=t3)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
-            except LinesError:
+            except SchemalessError:
                 pass

         #i64
@@ -520,8 +520,8 @@ class TDTestCase:
         for t4 in ["-9223372036854775808i64", "9223372036854775808i64"]:
             input_sql = self.genFullTypeSql(t4=t4)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
-            except LinesError:
+            except SchemalessError:
                 pass

         # f32
@@ -532,9 +532,9 @@ class TDTestCase:
         for t5 in [f"{-3.4028234664*(10**38)}f32", f"{3.4028234664*(10**38)}f32"]:
             input_sql = self.genFullTypeSql(t5=t5)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)
@@ -546,34 +546,34 @@ class TDTestCase:
         for c6 in [f'{-1.797693134862316*(10**308)}f64', f'{-1.797693134862316*(10**308)}f64']:
             input_sql = self.genFullTypeSql(c6=c6)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # binary
         stb_name = self.getLongName(7, "letters")
         input_sql = f'{stb_name},t0=t,t1="{self.getLongName(16374, "letters")}" c0=f 1626006833639000000ns'
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         input_sql = f'{stb_name},t0=t,t1="{self.getLongName(16375, "letters")}" c0=f 1626006833639000000ns'
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             pass

         # nchar
         # * legal nchar could not be larger than 16374/4
         stb_name = self.getLongName(7, "letters")
         input_sql = f'{stb_name},t0=t,t1=L"{self.getLongName(4093, "letters")}" c0=f 1626006833639000000ns'
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         input_sql = f'{stb_name},t0=t,t1=L"{self.getLongName(4094, "letters")}" c0=f 1626006833639000000ns'
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def colValueLengthCheckCase(self):
@@ -589,9 +589,9 @@ class TDTestCase:
         for c1 in ["-128i8", "128i8"]:
             input_sql = self.genFullTypeSql(c1=c1)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # i16
         for c2 in ["-32767i16"]:
@@ -600,9 +600,9 @@ class TDTestCase:
         for c2 in ["-32768i16", "32768i16"]:
             input_sql = self.genFullTypeSql(c2=c2)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # i32
@@ -612,9 +612,9 @@ class TDTestCase:
         for c3 in ["-2147483648i32", "2147483648i32"]:
             input_sql = self.genFullTypeSql(c3=c3)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # i64
@@ -624,9 +624,9 @@ class TDTestCase:
         for c4 in ["-9223372036854775808i64", "9223372036854775808i64"]:
             input_sql = self.genFullTypeSql(c4=c4)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # f32
@@ -637,9 +637,9 @@ class TDTestCase:
         for c5 in [f"{-3.4028234664*(10**38)}f32", f"{3.4028234664*(10**38)}f32"]:
             input_sql = self.genFullTypeSql(c5=c5)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # f64
@@ -650,34 +650,34 @@ class TDTestCase:
         for c6 in [f'{-1.797693134862316*(10**308)}f64', f'{-1.797693134862316*(10**308)}f64']:
             input_sql = self.genFullTypeSql(c6=c6)[0]
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # # binary
         stb_name = self.getLongName(7, "letters")
         input_sql = f'{stb_name},t0=t c0=f,c1="{self.getLongName(16374, "letters")}" 1626006833639000000ns'
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         input_sql = f'{stb_name},t0=t c0=f,c1="{self.getLongName(16375, "letters")}" 1626006833639000000ns'
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

         # nchar
         # * legal nchar could not be larger than 16374/4
         stb_name = self.getLongName(7, "letters")
         input_sql = f'{stb_name},t0=t c0=f,c1=L"{self.getLongName(4093, "letters")}" 1626006833639000000ns'
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         input_sql = f'{stb_name},t0=t c0=f,c1=L"{self.getLongName(4094, "letters")}" 1626006833639000000ns'
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

     def tagColIllegalValueCheckCase(self):
@@ -690,15 +690,15 @@ class TDTestCase:
         for i in ["TrUe", "tRue", "trUe", "truE", "FalsE", "fAlse", "faLse", "falSe", "falsE"]:
             input_sql1 = self.genFullTypeSql(t0=i)[0]
             try:
-                self._conn.insert_lines([input_sql1])
+                self._conn.schemaless_insert([input_sql1], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)
             input_sql2 = self.genFullTypeSql(c0=i)[0]
             try:
-                self._conn.insert_lines([input_sql2])
+                self._conn.schemaless_insert([input_sql2], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # i8 i16 i32 i64 f32 f64
@@ -718,9 +718,9 @@ class TDTestCase:
             self.genFullTypeSql(c9="1s1u64")[0]
         ]:
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # check binary and nchar blank
@@ -731,9 +731,9 @@ class TDTestCase:
         input_sql4 = f'{stb_name},t0=t,t1=L"abc aaa" c0=f 1626006833639000000ns'
         for input_sql in [input_sql1, input_sql2, input_sql3, input_sql4]:
             try:
-                self._conn.insert_lines([input_sql])
+                self._conn.schemaless_insert([input_sql], 0)
                 raise Exception("should not reach here")
-            except LinesError as err:
+            except SchemalessError as err:
                 tdSql.checkNotEqual(err.errno, 0)

         # check accepted binary and nchar symbols
@@ -741,8 +741,8 @@ class TDTestCase:
         for symbol in list('~!@#$¥%^&*()-+={}|[]、「」:;'):
             input_sql1 = f'{stb_name},t0=t c0=f,c1="abc{symbol}aaa" 1626006833639000000ns'
             input_sql2 = f'{stb_name},t0=t,t1="abc{symbol}aaa" c0=f 1626006833639000000ns'
-            self._conn.insert_lines([input_sql1])
+            self._conn.schemaless_insert([input_sql1], 0)
-            self._conn.insert_lines([input_sql2])
+            self._conn.schemaless_insert([input_sql2], 0)

     def duplicateIdTagColInsertCheckCase(self):
@@ -752,33 +752,33 @@ class TDTestCase:
         self.cleanStb()
         input_sql_id = self.genFullTypeSql(id_double_tag=True)[0]
         try:
-            self._conn.insert_lines([input_sql_id])
+            self._conn.schemaless_insert([input_sql_id], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

         input_sql = self.genFullTypeSql()[0]
         input_sql_tag = input_sql.replace("t5", "t6")
         try:
-            self._conn.insert_lines([input_sql_tag])
+            self._conn.schemaless_insert([input_sql_tag], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

         input_sql = self.genFullTypeSql()[0]
         input_sql_col = input_sql.replace("c5", "c6")
         try:
-            self._conn.insert_lines([input_sql_col])
+            self._conn.schemaless_insert([input_sql_col], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

         input_sql = self.genFullTypeSql()[0]
         input_sql_col = input_sql.replace("c5", "C6")
         try:
-            self._conn.insert_lines([input_sql_col])
+            self._conn.schemaless_insert([input_sql_col], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)

         ##### stb exist #####
@@ -802,7 +802,7 @@ class TDTestCase:
         self.cleanStb()
         input_sql, stb_name = self.genFullTypeSql()
         self.resCmp(input_sql, stb_name)
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         self.resCmp(input_sql, stb_name)

     def tagColBinaryNcharLengthCheckCase(self):
@@ -869,7 +869,7 @@ class TDTestCase:
         tdSql.checkRows(1)
         tdSql.checkEqual(tb_name1, tb_name2)
         input_sql, stb_name = self.genFullTypeSql(stb_name=stb_name, t0="f", c0="f", id_noexist_tag=True, ct_add_tag=True)
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         tb_name3 = self.getNoIdTbName(stb_name)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(2)
@@ -884,34 +884,34 @@ class TDTestCase:
         stb_name = self.getLongName(7, "letters")
         tb_name = f'{stb_name}_1'
         input_sql = f'{stb_name},id="{tb_name}",t0=t c0=f 1626006833639000000ns'
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         # * every binary and nchar must be length+2, so here is two tag, max length could not larger than 16384-2*2
         input_sql = f'{stb_name},t0=t,t1="{self.getLongName(16374, "letters")}",t2="{self.getLongName(5, "letters")}" c0=f 1626006833639000000ns'
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(2)
         input_sql = f'{stb_name},t0=t,t1="{self.getLongName(16374, "letters")}",t2="{self.getLongName(6, "letters")}" c0=f 1626006833639000000ns'
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
             raise Exception("should not reach here")
-        except LinesError:
+        except SchemalessError:
             pass
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(2)

         # # * check col,col+ts max in describe ---> 16143
         input_sql = f'{stb_name},t0=t c0=f,c1="{self.getLongName(16374, "letters")}",c2="{self.getLongName(16374, "letters")}",c3="{self.getLongName(16374, "letters")}",c4="{self.getLongName(12, "letters")}" 1626006833639000000ns'
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(3)
         input_sql = f'{stb_name},t0=t c0=f,c1="{self.getLongName(16374, "letters")}",c2="{self.getLongName(16374, "letters")}",c3="{self.getLongName(16374, "letters")}",c4="{self.getLongName(13, "letters")}" 1626006833639000000ns'
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(3)
@@ -925,31 +925,31 @@ class TDTestCase:
         stb_name = self.getLongName(7, "letters")
         tb_name = f'{stb_name}_1'
         input_sql = f'{stb_name},id="{tb_name}",t0=t c0=f 1626006833639000000ns'
-        code = self._conn.insert_lines([input_sql])
+        code = self._conn.schemaless_insert([input_sql], 0)
         # * legal nchar could not be larger than 16374/4
         input_sql = f'{stb_name},t0=t,t1=L"{self.getLongName(4093, "letters")}",t2=L"{self.getLongName(1, "letters")}" c0=f 1626006833639000000ns'
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(2)
         input_sql = f'{stb_name},t0=t,t1=L"{self.getLongName(4093, "letters")}",t2=L"{self.getLongName(2, "letters")}" c0=f 1626006833639000000ns'
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(2)
         input_sql = f'{stb_name},t0=t c0=f,c1=L"{self.getLongName(4093, "letters")}",c2=L"{self.getLongName(4093, "letters")}",c3=L"{self.getLongName(4093, "letters")}",c4=L"{self.getLongName(4, "letters")}" 1626006833639000000ns'
-        self._conn.insert_lines([input_sql])
+        self._conn.schemaless_insert([input_sql], 0)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(3)
         input_sql = f'{stb_name},t0=t c0=f,c1=L"{self.getLongName(4093, "letters")}",c2=L"{self.getLongName(4093, "letters")}",c3=L"{self.getLongName(4093, "letters")}",c4=L"{self.getLongName(5, "letters")}" 1626006833639000000ns'
         try:
-            self._conn.insert_lines([input_sql])
+            self._conn.schemaless_insert([input_sql], 0)
             raise Exception("should not reach here")
-        except LinesError as err:
+        except SchemalessError as err:
             tdSql.checkNotEqual(err.errno, 0)
         tdSql.query(f"select * from {stb_name}")
         tdSql.checkRows(3)
@@ -971,7 +971,7 @@ class TDTestCase:
                  "st123456,t1=4i64,t3=\"t4\",t2=5f64,t4=5f64 c1=3i64,c3=L\"passitagin\",c2=true,c4=5f64,c5=5f64,c6=7u64 1626006933640000000ns",
                  "st123456,t1=4i64,t3=\"t4\",t2=5f64,t4=5f64 c1=3i64,c3=L\"passitagin_stf\",c2=false,c5=5f64,c6=7u64 1626006933641000000ns"
                  ]
-        self._conn.insert_lines(lines)
+        self._conn.schemaless_insert(lines, 0)

     def multiInsertCheckCase(self, count):
        """
@@ -984,7 +984,7 @@ class TDTestCase:
         for i in range(count):
             input_sql = self.genFullTypeSql(stb_name=stb_name, t7=f'"{self.getLongName(8, "letters")}"', c7=f'"{self.getLongName(8, "letters")}"', id_noexist_tag=True)[0]
             sql_list.append(input_sql)
-        self._conn.insert_lines(sql_list)
+        self._conn.schemaless_insert(sql_list, 0)

     def batchErrorInsertCheckCase(self):
        """
@@ -995,9 +995,9 @@ class TDTestCase:
         lines = ["st123456,t1=3i64,t2=4f64,t3=\"t3\" c1=3i64,c3=L\"passit
\"
,c2=false,c4=4f64 1626006833639000000ns"
,
lines
=
[
"st123456,t1=3i64,t2=4f64,t3=
\"
t3
\"
c1=3i64,c3=L
\"
passit
\"
,c2=false,c4=4f64 1626006833639000000ns"
,
f
"
{
stb_name
}
,t2=5f64,t3=L
\"
ste
\"
c1=tRue,c2=4i64,c3=
\"
iam
\"
1626056811823316532ns"
]
f
"
{
stb_name
}
,t2=5f64,t3=L
\"
ste
\"
c1=tRue,c2=4i64,c3=
\"
iam
\"
1626056811823316532ns"
]
try
:
try
:
self
.
_conn
.
insert_lines
(
lines
)
self
.
_conn
.
schemaless_insert
(
lines
,
0
)
raise
Exception
(
"should not reach here"
)
raise
Exception
(
"should not reach here"
)
except
Line
sError
as
err
:
except
Schemales
sError
as
err
:
tdSql
.
checkNotEqual
(
err
.
errno
,
0
)
tdSql
.
checkNotEqual
(
err
.
errno
,
0
)
def
genSqlList
(
self
,
count
=
5
,
stb_name
=
""
,
tb_name
=
""
):
def
genSqlList
(
self
,
count
=
5
,
stb_name
=
""
,
tb_name
=
""
):
...
@@ -1049,7 +1049,7 @@ class TDTestCase:
...
@@ -1049,7 +1049,7 @@ class TDTestCase:
def
genMultiThreadSeq
(
self
,
sql_list
):
def
genMultiThreadSeq
(
self
,
sql_list
):
tlist
=
list
()
tlist
=
list
()
for
insert_sql
in
sql_list
:
for
insert_sql
in
sql_list
:
t
=
threading
.
Thread
(
target
=
self
.
_conn
.
insert_lines
,
args
=
([
insert_sql
[
0
]],
))
t
=
threading
.
Thread
(
target
=
self
.
_conn
.
schemaless_insert
,
args
=
([
insert_sql
[
0
]],
0
))
tlist
.
append
(
t
)
tlist
.
append
(
t
)
return
tlist
return
tlist
...
@@ -1248,17 +1248,17 @@ class TDTestCase:
...
@@ -1248,17 +1248,17 @@ class TDTestCase:
input_sql1
=
"rfasta,id=
\"
rfasta_1
\"
,t0=true,t1=127i8,t2=32767i16,t3=2147483647i32,t4=9223372036854775807i64,t5=11.12345f32,t6=22.123456789f64,t7=
\"
ddzhiksj
\"
,t8=L
\"
ncharTagValue
\"
c0=True,c1=127i8,c2=32767i16,c3=2147483647i32,c4=9223372036854775807i64,c5=11.12345f32,c6=22.123456789f64,c7=
\"
bnhwlgvj
\"
,c8=L
\"
ncharTagValue
\"
,c9=7u64 1626006933640000000ns"
input_sql1
=
"rfasta,id=
\"
rfasta_1
\"
,t0=true,t1=127i8,t2=32767i16,t3=2147483647i32,t4=9223372036854775807i64,t5=11.12345f32,t6=22.123456789f64,t7=
\"
ddzhiksj
\"
,t8=L
\"
ncharTagValue
\"
c0=True,c1=127i8,c2=32767i16,c3=2147483647i32,c4=9223372036854775807i64,c5=11.12345f32,c6=22.123456789f64,c7=
\"
bnhwlgvj
\"
,c8=L
\"
ncharTagValue
\"
,c9=7u64 1626006933640000000ns"
input_sql2
=
"rfasta,id=
\"
rfasta_1
\"
,t0=true,t1=127i8,t2=32767i16,t3=2147483647i32,t4=9223372036854775807i64,t5=11.12345f32,t6=22.123456789f64 c0=True,c1=127i8,c2=32767i16,c3=2147483647i32,c4=9223372036854775807i64,c5=11.12345f32,c6=22.123456789f64 1626006933640000000ns"
input_sql2
=
"rfasta,id=
\"
rfasta_1
\"
,t0=true,t1=127i8,t2=32767i16,t3=2147483647i32,t4=9223372036854775807i64,t5=11.12345f32,t6=22.123456789f64 c0=True,c1=127i8,c2=32767i16,c3=2147483647i32,c4=9223372036854775807i64,c5=11.12345f32,c6=22.123456789f64 1626006933640000000ns"
try
:
try
:
self
.
_conn
.
insert_lines
([
input_sql1
]
)
self
.
_conn
.
schemaless_insert
([
input_sql1
],
0
)
self
.
_conn
.
insert_lines
([
input_sql2
]
)
self
.
_conn
.
schemaless_insert
([
input_sql2
],
0
)
except
Line
sError
as
err
:
except
Schemales
sError
as
err
:
print
(
err
.
errno
)
print
(
err
.
errno
)
# self._conn.
insert_lines([input_sql2]
)
# self._conn.
schemaless_insert([input_sql2], 0
)
# input_sql3 = f'abcd,id="cc¥Ec",t0=True,t1=127i8,t2=32767i16,t3=2147483647i32,t4=9223372036854775807i64,t5=11.12345f32,t6=22.123456789f64,t7="ndsfdrum",t8=L"ncharTagValue" c0=f,c1=127i8,c2=32767i16,c3=2147483647i32,c4=9223372036854775807i64,c5=11.12345f32,c6=22.123456789f64,c7="igwoehkm",c8=L"ncharColValue",c9=7u64 0'
# input_sql3 = f'abcd,id="cc¥Ec",t0=True,t1=127i8,t2=32767i16,t3=2147483647i32,t4=9223372036854775807i64,t5=11.12345f32,t6=22.123456789f64,t7="ndsfdrum",t8=L"ncharTagValue" c0=f,c1=127i8,c2=32767i16,c3=2147483647i32,c4=9223372036854775807i64,c5=11.12345f32,c6=22.123456789f64,c7="igwoehkm",c8=L"ncharColValue",c9=7u64 0'
# print(input_sql3)
# print(input_sql3)
# input_sql4 = 'hmemeb,id="kilrcrldgf",t0=F,t1=127i8,t2=32767i16,t3=2147483647i32,t4=9223372036854775807i64,t5=11.12345f32,t6=22.123456789f64,t7="fysodjql",t8=L"ncharTagValue" c0=True,c1=127i8,c2=32767i16,c3=2147483647i32,c4=9223372036854775807i64,c5=11.12345f32,c6=22.123456789f64,c7="waszbfvc",c8=L"ncharColValue",c9=7u64 0'
# input_sql4 = 'hmemeb,id="kilrcrldgf",t0=F,t1=127i8,t2=32767i16,t3=2147483647i32,t4=9223372036854775807i64,t5=11.12345f32,t6=22.123456789f64,t7="fysodjql",t8=L"ncharTagValue" c0=True,c1=127i8,c2=32767i16,c3=2147483647i32,c4=9223372036854775807i64,c5=11.12345f32,c6=22.123456789f64,c7="waszbfvc",c8=L"ncharColValue",c9=7u64 0'
# code = self._conn.
insert_lines([input_sql3]
)
# code = self._conn.
schemaless_insert([input_sql3], 0
)
# print(code)
# print(code)
# self._conn.
insert_lines([input_sql4]
)
# self._conn.
schemaless_insert([input_sql4], 0
)
def
runAll
(
self
):
def
runAll
(
self
):
self
.
initCheckCase
()
self
.
initCheckCase
()
...
...
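For reference, the call pattern these updated tests rely on is roughly the following (a minimal sketch, not part of the diff; it assumes a client library built from this branch, where the Python entry point is now schemaless_insert(lines, protocol), the tests pass 0 as the protocol argument, and failures raise SchemalessError):

    import taos
    from taos.error import SchemalessError

    conn = taos.connect()                       # host/user/password taken from taos.cfg
    cursor = conn.cursor()
    cursor.execute("create database if not exists test_sml")
    cursor.execute("use test_sml")

    lines = ['st,t1=3i64,t2=4f64,t3="t3" c1=3i64,c3=L"passit",c2=false,c4=4f64 1626006833639000000ns']
    try:
        # 0 selects the same protocol variant the tests above use (line protocol).
        conn.schemaless_insert(lines, 0)
    except SchemalessError as err:
        print("schemaless insert failed, errno:", err.errno)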
tests/pytest/tools/insert-interlace.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
      "cache": 16,
      "blocks": 8,
      "precision": "ms",
-     "keep": 365,
+     "keep": 36500,
      "minRows": 100,
      "maxRows": 4096,
      "comp": 2,

tests/pytest/tools/insert-tblimit-tboffset-createdb.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 365,
+     "keep": 36500,

tests/pytest/tools/insert-tblimit-tboffset-insertrec.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 365,
+     "keep": 36500,

tests/pytest/tools/insert-tblimit-tboffset.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 365,
+     "keep": 36500,

tests/pytest/tools/insert-tblimit-tboffset0.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 365,
+     "keep": 36500,

tests/pytest/tools/insert-tblimit1-tboffset.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 365,
+     "keep": 36500,
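The keep bump in all of these configs is retention headroom rather than a functional change: keep is the database retention window in days, the demo data uses fixed start timestamps such as 2020-10-01, and 365 days is not always enough for those rows to still be accepted, while 36500 days is roughly a hundred years. A quick sanity check over the edited configs could look like this (hypothetical snippet, not part of the commit; the glob path is an assumption):

    import glob
    import json

    # Hypothetical check: every taosdemo config used by the suite should now keep
    # data long enough for the fixed 2020/2021 timestamps the tests insert.
    for path in glob.glob("tests/pytest/tools/**/*.json", recursive=True):
        with open(path) as f:
            cfg = json.load(f)
        for db in cfg.get("databases", []):
            keep = db.get("dbinfo", {}).get("keep")
            if keep is not None:
                assert keep >= 36500, f"{path}: keep={keep} days is too short"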
tests/pytest/tools/schemalessInsertPerformance.py  View file @ 25bddc0c
@@ -14,7 +14,7 @@
 import traceback
 import random
 import string
-from taos.error import LinesError
+from taos.error import SchemalessError
 import datetime
 import time
 from copy import deepcopy
...
@@ -172,28 +172,28 @@ class TDTestCase:
    def perfTableInsert(self):
        table_generator = self.tableGenerator()
        for input_sql in table_generator:
-           self._conn.insert_lines([input_sql])
+           self._conn.schemaless_insert([input_sql], 0)
            # for i in range(10):
-           #     self._conn.insert_lines([input_sql])
+           #     self._conn.schemaless_insert([input_sql], 0)

    def perfDataInsert(self, count=4):
        table_generator = self.tableGenerator(count=count)
        ts = int(time.time())
        for input_sql in table_generator:
            print("input_sql-----------", input_sql)
-           self._conn.insert_lines([input_sql])
+           self._conn.schemaless_insert([input_sql], 0)
            for i in range(100000):
                ts -= 1
                input_sql_new = self.replaceLastStr(input_sql, str(ts)) + 's'
                print("input_sql_new---------", input_sql_new)
-               self._conn.insert_lines([input_sql_new])
+               self._conn.schemaless_insert([input_sql_new], 0)

    def batchInsertTable(self, batch_list):
        for insert_list in batch_list:
            print(threading.current_thread().name, "length=", len(insert_list))
            print(threading.current_thread().name, 'firstline', insert_list[0])
            print(threading.current_thread().name, 'lastline:', insert_list[-1])
-           self._conn.insert_lines(insert_list)
+           self._conn.schemaless_insert(insert_list, 0)
            print(threading.current_thread().name, 'end')

    def genTableThread(self, thread_count=10):
...
@@ -218,7 +218,7 @@ class TDTestCase:
    def createStb(self, count=4):
        input_sql = self.getPerfSql(count=count, init=True)
-       self._conn.insert_lines([input_sql])
+       self._conn.schemaless_insert([input_sql], 0)

    def threadInsertTable(self, end_list, thread_count=10):
        threads = list()
...
@@ -238,7 +238,7 @@ class TDTestCase:
    # def createTb(self, count=4):
    #     input_sql = self.getPerfSql(count=count)
    #     for i in range(10000):
-   #         self._conn.insert_lines([input_sql])
+   #         self._conn.schemaless_insert([input_sql], 0)

    # def createTb1(self, count=4):
    #     start_time = time.time()
...
@@ -273,8 +273,8 @@ class TDTestCase:
    # def test(self):
    #     sql1 = 'stb,id="init",t0=14865i32,t1="tvnqbjuqck" c0=37i32,c1=217i32,c2=3i32,c3=88i32 1626006833640ms'
    #     sql2 = 'stb,id="init",t0=14865i32,t1="tvnqbjuqck" c0=38i32,c1=217i32,c2=3i32,c3=88i32 1626006833641ms'
-   #     self._conn.insert_lines([sql1])
-   #     self._conn.insert_lines([sql2])
+   #     self._conn.schemaless_insert([sql1], 0)
+   #     self._conn.schemaless_insert([sql2], 0)

    def run(self):
        print("running {}".format(__file__))
...
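As a rough illustration of how the renamed call gets timed in a performance script like this one (a sketch only; conn is assumed to be the same connector object the test keeps in self._conn, and lines are ordinary line-protocol strings):

    import time

    def timed_schemaless_insert(conn, lines, protocol=0):
        # Time one batched schemaless insert and report throughput.
        start = time.time()
        conn.schemaless_insert(lines, protocol)
        elapsed = time.time() - start
        print("inserted %d lines in %.3f s" % (len(lines), elapsed))
        return elapsed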
tests/pytest/tools/taosdemoAllTest/insert-1s1tnt1r.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
      "cache": 50,
      "blocks": 8,
      "precision": "ms",
-     "keep": 365,
+     "keep": 36500,
      "minRows": 100,
      "maxRows": 4096,
      "comp": 2,

tests/pytest/tools/taosdemoAllTest/insert-1s1tntmr.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-allDataType.json  0 → 100644  View file @ 25bddc0c

{
    "filetype": "insert",
    "cfgdir": "/etc/taos",
    "host": "127.0.0.1",
    "port": 6030,
    "user": "root",
    "password": "taosdata",
    "thread_count": 4,
    "thread_count_create_tbl": 4,
    "result_file": "./insert_res.txt",
    "confirm_parameter_prompt": "no",
    "insert_interval": 0,
    "interlace_rows": 10,
    "num_of_records_per_req": 1000,
    "max_sql_len": 1024000,
    "databases": [{
        "dbinfo": {
            "name": "db", "drop": "yes", "replica": 1, "days": 10,
            "cache": 50, "blocks": 8, "precision": "ms", "keep": 36500,
            "minRows": 100, "maxRows": 4096, "comp": 2, "walLevel": 1,
            "cachelast": 0, "quorum": 1, "fsync": 3000, "update": 0
        },
        "super_tables": [{
            "name": "stb0", "child_table_exists": "no", "childtable_count": 1000,
            "childtable_prefix": "stb00_", "auto_create_table": "no", "batch_create_tbl_num": 1,
            "data_source": "rand", "insert_mode": "taosc", "insert_rows": 100,
            "childtable_limit": 0, "childtable_offset": 0, "multi_thread_write_one_tbl": "no",
            "interlace_rows": 0, "insert_interval": 0, "max_sql_len": 1024000,
            "disorder_ratio": 0, "disorder_range": 1000, "timestamp_step": 1,
            "start_timestamp": "2020-10-01 00:00:00.000",
            "sample_format": "csv", "sample_file": "./sample.csv", "tags_file": "",
            "columns": [{"type": "INT"}, {"type": "TIMESTAMP"}, {"type": "BIGINT"}, {"type": "FLOAT"}, {"type": "DOUBLE"}, {"type": "SMALLINT"}, {"type": "TINYINT"}, {"type": "BOOL"}, {"type": "NCHAR", "len": 16, "count": 1}, {"type": "UINT"}, {"type": "UBIGINT"}, {"type": "UTINYINT"}, {"type": "USMALLINT"}, {"type": "BINARY", "len": 16, "count": 1}],
            "tags": [{"type": "INT"}, {"type": "BIGINT"}, {"type": "FLOAT"}, {"type": "DOUBLE"}, {"type": "SMALLINT"}, {"type": "TINYINT"}, {"type": "BOOL"}, {"type": "NCHAR", "len": 16, "count": 1}, {"type": "UINT"}, {"type": "UBIGINT"}, {"type": "UTINYINT"}, {"type": "USMALLINT"}, {"type": "BINARY", "len": 16, "count": 1}]
        },
        {
            "name": "stb1", "child_table_exists": "no", "childtable_count": 1000,
            "childtable_prefix": "stb01_", "auto_create_table": "no", "batch_create_tbl_num": 10,
            "data_source": "rand", "insert_mode": "taosc", "insert_rows": 200,
            "childtable_limit": 0, "childtable_offset": 0, "multi_thread_write_one_tbl": "no",
            "interlace_rows": 0, "insert_interval": 0, "max_sql_len": 1024000,
            "disorder_ratio": 0, "disorder_range": 1000, "timestamp_step": 1,
            "start_timestamp": "2020-10-01 00:00:00.000",
            "sample_format": "csv", "sample_file": "./sample.csv", "tags_file": "",
            "columns": [{"type": "INT"}, {"type": "DOUBLE", "count": 1}, {"type": "BINARY", "len": 16, "count": 1}, {"type": "BINARY", "len": 32, "count": 1}],
            "tags": [{"type": "TINYINT", "count": 2}, {"type": "BINARY", "len": 16, "count": 1}]
        }]
    }]
}
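This new config is driven by the taosdemoTestInsertAllType.py case added further down in this commit; the essential pattern is just an os.system call plus row-count checks (binPath below is a placeholder for the build's bin directory, which the test resolves via getBuildPath()):

    import os

    binPath = "/path/to/TDengine/build/bin/"   # placeholder; the real test computes this
    os.system("%staosdemo -f tools/taosdemoAllTest/insert-allDataType.json -y " % binPath)
    # The test then checks the row counts taosdemo should have produced:
    # 1000 child tables per super table, 100 rows each for stb0, 200 each for stb1.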
tests/pytest/tools/taosdemoAllTest/insert-disorder.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
      "cache": 50,
      "blocks": 8,
      "precision": "ms",
-     "keep": 365,
+     "keep": 36500,
      "minRows": 100,
      "maxRows": 4096,
      "comp": 2,
tests/pytest/tools/taosdemoAllTest/insert-drop-exist-auto-N00.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 3650,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-drop-exist-auto-Y00.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 3650,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-illegal.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-interlace-row.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-interval-speed.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-newdb.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-newtable.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-nodbnodrop.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-offset.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-renewdb.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-sample.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insert-timestep.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertBinaryLenLarge16374AllcolLar49151.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertChildTab0.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertChildTabLess0.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertColumnsAndTagNum4096.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertColumnsAndTagNumLarge4096.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertColumnsNum0.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertInterlaceRowsLarge1M.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertMaxNumPerReq.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 3650,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertNumOfrecordPerReq0.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertNumOfrecordPerReqless0.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertRestful.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertSigcolumnsNum4096.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertTagsNumLarge128.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/insertTimestepMulRowsLargeint16.json  View file @ 25bddc0c
@@ -14,7 +14,8 @@
      "dbinfo": {
            "name": "blf",
-           "drop": "yes"
+           "drop": "yes",
+           "keep": 36500
      },
tests/pytest/tools/taosdemoAllTest/insert_5M_rows.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 3650,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/manual_block1_comp.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 3650,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/manual_block2.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 3650,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/moredemo-offset-limit1.json  View file @ 25bddc0c
@@ -23,7 +23,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/moredemo-offset-limit5.json  View file @ 25bddc0c
@@ -23,7 +23,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/moredemo-offset-limit94.json  View file @ 25bddc0c
@@ -23,7 +23,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/moredemo-offset-newdb.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/query-interrupt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/queryInsertdata.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/queryInsertrestdata.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/1174-large-stmt.json  View file @ 25bddc0c
@@ -14,7 +14,8 @@
      "dbinfo": {
            "name": "gdse",
-           "drop": "yes"
+           "drop": "yes",
+           "keep": 36500
      },
tests/pytest/tools/taosdemoAllTest/stmt/1174-large-taosc.json  View file @ 25bddc0c
@@ -14,7 +14,8 @@
-           "drop": "yes"
+           "drop": "yes",
+           "keep": 36500
tests/pytest/tools/taosdemoAllTest/stmt/1174-small-stmt-random.json  View file @ 25bddc0c
@@ -14,7 +14,8 @@
-           "drop": "yes"
+           "drop": "yes",
+           "keep": 36500
tests/pytest/tools/taosdemoAllTest/stmt/1174-small-stmt.json  View file @ 25bddc0c
@@ -14,7 +14,8 @@
-           "drop": "yes"
+           "drop": "yes",
+           "keep": 36500
tests/pytest/tools/taosdemoAllTest/stmt/1174-small-taosc.json  View file @ 25bddc0c
@@ -14,7 +14,8 @@
-           "drop": "yes"
+           "drop": "yes",
+           "keep": 36500
tests/pytest/tools/taosdemoAllTest/stmt/insert-1s1tnt1r-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-1s1tntmr-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-allDataType-stmt.json  0 → 100644  View file @ 25bddc0c

{
    "filetype": "insert",
    "cfgdir": "/etc/taos",
    "host": "127.0.0.1",
    "port": 6030,
    "user": "root",
    "password": "taosdata",
    "thread_count": 4,
    "thread_count_create_tbl": 4,
    "result_file": "./insert_res.txt",
    "confirm_parameter_prompt": "no",
    "insert_interval": 0,
    "interlace_rows": 10,
    "num_of_records_per_req": 1000,
    "max_sql_len": 1024000,
    "databases": [{
        "dbinfo": {
            "name": "db", "drop": "yes", "replica": 1, "days": 10,
            "cache": 50, "blocks": 8, "precision": "ms", "keep": 36500,
            "minRows": 100, "maxRows": 4096, "comp": 2, "walLevel": 1,
            "cachelast": 0, "quorum": 1, "fsync": 3000, "update": 0
        },
        "super_tables": [{
            "name": "stb0", "child_table_exists": "no", "childtable_count": 1000,
            "childtable_prefix": "stb00_", "auto_create_table": "no", "batch_create_tbl_num": 1,
            "data_source": "rand", "insert_mode": "stmt", "insert_rows": 100,
            "childtable_limit": 0, "childtable_offset": 0, "multi_thread_write_one_tbl": "no",
            "interlace_rows": 0, "insert_interval": 0, "max_sql_len": 1024000,
            "disorder_ratio": 0, "disorder_range": 1000, "timestamp_step": 1,
            "start_timestamp": "2020-10-01 00:00:00.000",
            "sample_format": "csv", "sample_file": "./sample.csv", "tags_file": "",
            "columns": [{"type": "INT"}, {"type": "TIMESTAMP"}, {"type": "BIGINT"}, {"type": "FLOAT"}, {"type": "DOUBLE"}, {"type": "SMALLINT"}, {"type": "TINYINT"}, {"type": "BOOL"}, {"type": "NCHAR", "len": 16, "count": 1}, {"type": "UINT"}, {"type": "UBIGINT"}, {"type": "UTINYINT"}, {"type": "USMALLINT"}, {"type": "BINARY", "len": 16, "count": 1}],
            "tags": [{"type": "INT"}, {"type": "BIGINT"}, {"type": "FLOAT"}, {"type": "DOUBLE"}, {"type": "SMALLINT"}, {"type": "TINYINT"}, {"type": "BOOL"}, {"type": "NCHAR", "len": 16, "count": 1}, {"type": "UINT"}, {"type": "UBIGINT"}, {"type": "UTINYINT"}, {"type": "USMALLINT"}, {"type": "BINARY", "len": 16, "count": 1}]
        },
        {
            "name": "stb1", "child_table_exists": "no", "childtable_count": 1000,
            "childtable_prefix": "stb01_", "auto_create_table": "no", "batch_create_tbl_num": 10,
            "data_source": "rand", "insert_mode": "stmt", "insert_rows": 200,
            "childtable_limit": 0, "childtable_offset": 0, "multi_thread_write_one_tbl": "no",
            "interlace_rows": 0, "insert_interval": 0, "max_sql_len": 1024000,
            "disorder_ratio": 0, "disorder_range": 1000, "timestamp_step": 1,
            "start_timestamp": "2020-10-01 00:00:00.000",
            "sample_format": "csv", "sample_file": "./sample.csv", "tags_file": "",
            "columns": [{"type": "INT"}, {"type": "DOUBLE", "count": 1}, {"type": "BINARY", "len": 16, "count": 1}, {"type": "BINARY", "len": 32, "count": 1}],
            "tags": [{"type": "TINYINT", "count": 2}, {"type": "BINARY", "len": 16, "count": 1}]
        }]
    }]
}
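Apart from its location, this stmt-mode config appears to differ from tools/taosdemoAllTest/insert-allDataType.json only in insert_mode ("stmt" instead of "taosc"). A quick way to confirm that (hypothetical snippet, not part of the commit):

    import json

    with open("tests/pytest/tools/taosdemoAllTest/insert-allDataType.json") as f:
        taosc_cfg = json.load(f)
    with open("tests/pytest/tools/taosdemoAllTest/stmt/insert-allDataType-stmt.json") as f:
        stmt_cfg = json.load(f)

    for a, b in zip(taosc_cfg["databases"][0]["super_tables"],
                    stmt_cfg["databases"][0]["super_tables"]):
        diff = {k: (a[k], b[k]) for k in a if a.get(k) != b.get(k)}
        print(diff)   # expected: {'insert_mode': ('taosc', 'stmt')} for each super table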
tests/pytest/tools/taosdemoAllTest/stmt/insert-disorder-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
      "cache": 50,
      "blocks": 8,
      "precision": "ms",
-     "keep": 365,
+     "keep": 36500,
      "minRows": 100,
      "maxRows": 4096,
      "comp": 2,
tests/pytest/tools/taosdemoAllTest/stmt/insert-drop-exist-auto-N00-stmt.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 3650,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-drop-exist-auto-Y00-stmt.json  View file @ 25bddc0c
@@ -21,7 +21,7 @@
-     "keep": 3650,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-interlace-row-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-interval-speed-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-newdb-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-newtable-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-nodbnodrop-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-offset-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-renewdb-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-sample-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insert-timestep-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertBinaryLenLarge16374AllcolLar49151-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertChildTab0-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertChildTabLess0-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertColumnsAndTagNum4096-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertColumnsNum0-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertInterlaceRowsLarge1M-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertMaxNumPerReq-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 3650,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertNumOfrecordPerReq0-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertNumOfrecordPerReqless0-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertSigcolumnsNum4096-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/insertTagsNumLarge128-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/stmt/nsertColumnsAndTagNumLarge4096-stmt.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/subInsertdata.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/subInsertdataMaxsql100.json  View file @ 25bddc0c
@@ -22,7 +22,7 @@
-     "keep": 365,
+     "keep": 36500,
tests/pytest/tools/taosdemoAllTest/taosdemoTestInsertAllType.py  0 → 100644  View file @ 25bddc0c

###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################

# -*- coding: utf-8 -*-

import sys
import os
from util.log import *
from util.cases import *
from util.sql import *
from util.dnodes import *


class TDTestCase:
    def init(self, conn, logSql):
        tdLog.debug("start to execute %s" % __file__)
        tdSql.init(conn.cursor(), logSql)

    def getBuildPath(self):
        selfPath = os.path.dirname(os.path.realpath(__file__))

        if ("community" in selfPath):
            projPath = selfPath[:selfPath.find("community")]
        else:
            projPath = selfPath[:selfPath.find("tests")]

        for root, dirs, files in os.walk(projPath):
            if ("taosd" in files):
                rootRealPath = os.path.dirname(os.path.realpath(root))
                if ("packaging" not in rootRealPath):
                    buildPath = root[:len(root) - len("/build/bin")]
                    break
        return buildPath

    def run(self):
        buildPath = self.getBuildPath()
        if (buildPath == ""):
            tdLog.exit("taosd not found!")
        else:
            tdLog.info("taosd found in %s" % buildPath)
        binPath = buildPath + "/build/bin/"

        # insert: create one or mutiple tables per sql and insert multiple rows per sql
        os.system("%staosdemo -f tools/taosdemoAllTest/insert-allDataType.json -y " % binPath)
        tdSql.execute("use db")
        tdSql.query("select count (tbname) from stb0")
        tdSql.checkData(0, 0, 1000)
        tdSql.query("select count (tbname) from stb1")
        tdSql.checkData(0, 0, 1000)
        tdSql.query("select count(*) from stb00_0")
        tdSql.checkData(0, 0, 100)
        tdSql.query("select count(*) from stb0")
        tdSql.checkData(0, 0, 100000)
        tdSql.query("select count(*) from stb01_1")
        tdSql.checkData(0, 0, 200)
        tdSql.query("select count(*) from stb1")
        tdSql.checkData(0, 0, 200000)

        # stmt interface
        os.system("%staosdemo -f tools/taosdemoAllTest/stmt/insert-allDataType-stmt.json -y " % binPath)
        tdSql.execute("use db")
        tdSql.query("select count (tbname) from stb0")
        tdSql.checkData(0, 0, 1000)
        tdSql.query("select count (tbname) from stb1")
        tdSql.checkData(0, 0, 1000)
        tdSql.query("select count(*) from stb00_0")
        tdSql.checkData(0, 0, 100)
        tdSql.query("select count(*) from stb0")
        tdSql.checkData(0, 0, 100000)
        tdSql.query("select count(*) from stb01_1")
        tdSql.checkData(0, 0, 200)
        tdSql.query("select count(*) from stb1")
        tdSql.checkData(0, 0, 200000)

        # taosdemo command line
        os.system("%staosdemo -t 1000 -n 100 -T 10 -b INT,TIMESTAMP,BIGINT,FLOAT,DOUBLE,SMALLINT,TINYINT,BOOL,NCHAR,UINT,UBIGINT,UTINYINT,USMALLINT,BINARY -y " % binPath)
        tdSql.execute("use test")
        tdSql.query("select count (tbname) from meters")
        tdSql.checkData(0, 0, 1000)
        tdSql.query("select count(*) from meters")
        tdSql.checkData(0, 0, 100000)
        tdSql.query("select count(*) from d100")
        tdSql.checkData(0, 0, 100)

        testcaseFilename = os.path.split(__file__)[-1]
        os.system("rm -rf ./insert_res.txt")
        os.system("rm -rf tools/taosdemoAllTest/%s.sql" % testcaseFilename)

    def stop(self):
        tdSql.close()
        tdLog.success("%s successfully executed" % __file__)


tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
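The checkData expectations in this test follow directly from the two configs above: 1000 child tables with 100 rows each for stb0 and 1000 child tables with 200 rows each for stb1 (and 1000 tables x 100 rows for the command-line run). In short:

    # Expected totals asserted by the test above.
    stb0_tables, stb0_rows = 1000, 100
    stb1_tables, stb1_rows = 1000, 200

    assert stb0_tables * stb0_rows == 100000    # select count(*) from stb0
    assert stb1_tables * stb1_rows == 200000    # select count(*) from stb1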
tests/pytest/tools/taosdumpTest3.py  0 → 100644  View file @ 25bddc0c

###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################

# -*- coding: utf-8 -*-

import sys
import os
from util.log import *
from util.cases import *
from util.sql import *
from util.dnodes import *


class TDTestCase:
    def init(self, conn, logSql):
        tdLog.debug("start to execute %s" % __file__)
        tdSql.init(conn.cursor(), logSql)

        self.ts = 1538548685000
        self.numberOfTables = 10000
        self.numberOfRecords = 100

    def checkCommunity(self):
        selfPath = os.path.dirname(os.path.realpath(__file__))
        if ("community" in selfPath):
            return False
        else:
            return True

    def getBuildPath(self):
        selfPath = os.path.dirname(os.path.realpath(__file__))

        if ("community" in selfPath):
            projPath = selfPath[:selfPath.find("community")]
        else:
            projPath = selfPath[:selfPath.find("tests")]

        for root, dirs, files in os.walk(projPath):
            if ("taosdump" in files):
                rootRealPath = os.path.dirname(os.path.realpath(root))
                if ("packaging" not in rootRealPath):
                    buildPath = root[:len(root) - len("/build/bin")]
                    break
        return buildPath

    def run(self):
        if not os.path.exists("./taosdumptest"):
            os.makedirs("./taosdumptest")
        if not os.path.exists("./taosdumptest/tmp1"):
            os.makedirs("./taosdumptest/tmp1")
        if not os.path.exists("./taosdumptest/tmp2"):
            os.makedirs("./taosdumptest/tmp2")
        if not os.path.exists("./taosdumptest/tmp3"):
            os.makedirs("./taosdumptest/tmp3")
        if not os.path.exists("./taosdumptest/tmp4"):
            os.makedirs("./taosdumptest/tmp4")
        buildPath = self.getBuildPath()
        if (buildPath == ""):
            tdLog.exit("taosdump not found!")
        else:
            tdLog.info("taosdump found in %s" % buildPath)
        binPath = buildPath + "/build/bin/"

        # create db1 , one stables and one table ; create general tables
        tdSql.execute("create database if not exists dp1")
        tdSql.execute("use dp1")
        tdSql.execute("create stable st0(ts timestamp, c1 int, c2 nchar(10)) tags(t1 int)")
        tdSql.execute("create table st0_0 using st0 tags(0) st0_1 using st0 tags (1) ")
        tdSql.execute("insert into st0_0 values(1614218412000,8537,'R')(1614218422000,8538,'E')")
        tdSql.execute("insert into st0_1 values(1614218413000,1537,'A')(1614218423000,1538,'D')")
        tdSql.execute("create table if not exists gt0 (ts timestamp, c0 int, c1 float) ")
        tdSql.execute("create table if not exists gt1 (ts timestamp, c0 int, c1 double) ")
        tdSql.execute("insert into gt0 values(1614218412000,637,8.861)")
        tdSql.execute("insert into gt1 values(1614218413000,638,8.862)")

        # create db1 , three stables:stb0,include ctables stb0_0 \ stb0_1,stb1 include ctables stb1_0 and stb1_1
        # \stb3,include ctables stb3_0 and stb3_1
        # ; create general three tables gt0 gt1 gt2
        tdSql.execute("create database if not exists dp2")
        tdSql.execute("use dp2")
        tdSql.execute("create stable st0(ts timestamp, c01 int, c02 nchar(10)) tags(t1 int)")
        tdSql.execute("create table st0_0 using st0 tags(0) st0_1 using st0 tags(1) ")
        tdSql.execute("insert into st0_0 values(1614218412000,8600,'R')(1614218422000,8600,'E')")
        tdSql.execute("insert into st0_1 values(1614218413000,8601,'A')(1614218423000,8601,'D')")
        tdSql.execute("create stable st1(ts timestamp, c11 float, c12 nchar(10)) tags(t1 int)")
        tdSql.execute("create table st1_0 using st1 tags(0) st1_1 using st1 tags(1) ")
        tdSql.execute("insert into st1_0 values(1614218412000,8610.1,'R')(1614218422000,8610.1,'E')")
        tdSql.execute("insert into st1_1 values(1614218413000,8611.2,'A')(1614218423000,8611.1,'D')")
        tdSql.execute("create stable st2(ts timestamp, c21 float, c22 nchar(10)) tags(t1 int)")
        tdSql.execute("create table st2_0 using st2 tags(0) st2_1 using st2 tags(1) ")
        tdSql.execute("insert into st2_0 values(1614218412000,8620.3,'R')(1614218422000,8620.3,'E')")
        tdSql.execute("insert into st2_1 values(1614218413000,8621.4,'A')(1614218423000,8621.4,'D')")
        tdSql.execute("create table if not exists gt0 (ts timestamp, c00 int, c01 float) ")
        tdSql.execute("create table if not exists gt1 (ts timestamp, c10 int, c11 double) ")
        tdSql.execute("create table if not exists gt2 (ts timestamp, c20 int, c21 float) ")
        tdSql.execute("insert into gt0 values(1614218412000,8637,78.86155)")
        tdSql.execute("insert into gt1 values(1614218413000,8638,78.862020199)")
        tdSql.execute("insert into gt2 values(1614218413000,8639,78.863)")
        # tdSql.execute("insert into t0 values(1614218422000,8638,'R')")

        os.system("rm -rf ./taosdumptest/tmp1/*")
        os.system("rm -rf ./taosdumptest/tmp2/*")
        os.system("rm -rf ./taosdumptest/tmp3/*")
        os.system("rm -rf ./taosdumptest/tmp4/*")
# # taosdump stable and general table
# os.system("%staosdump -o ./taosdumptest/tmp1 -D dp1 dp2 " % binPath)
# os.system("%staosdump -o ./taosdumptest/tmp2 dp1 st0 gt0 " % binPath)
# os.system("%staosdump -o ./taosdumptest/tmp3 dp2 st0 st1_0 gt0" % binPath)
# os.system("%staosdump -o ./taosdumptest/tmp4 dp2 st0 st2 gt0 gt2" % binPath)、
# verify -D:--database
# os.system("%staosdump --databases dp1 -o ./taosdumptest/tmp3 dp2 st0 st1_0 gt0" % binPath)
# os.system("%staosdump --databases dp1,dp2 -o ./taosdumptest/tmp3 " % binPath)
# #check taosdumptest/tmp1
# tdSql.execute("drop database dp1")
# tdSql.execute("drop database dp2")
# os.system("%staosdump -i ./taosdumptest/tmp1 -T 2 " % binPath)
# tdSql.execute("use dp1")
# tdSql.query("show stables")
# tdSql.checkRows(1)
# tdSql.query("show tables")
# tdSql.checkRows(4)
# tdSql.execute("use dp2")
# tdSql.query("show stables")
# tdSql.checkRows(3)
# tdSql.query("show tables")
# tdSql.checkRows(9)
# tdSql.query("select c01 from gt0")
# tdSql.checkData(0,0,78.86155)
# tdSql.query("select c11 from gt1")
# tdSql.checkData(0, 0, 78.862020199)
# tdSql.query("select c21 from gt2")
# tdSql.checkData(0, 0, 78.86300)
# #check taosdumptest/tmp2
# tdSql.execute("drop database dp1")
# tdSql.execute("drop database dp2")
# os.system("%staosdump -i ./taosdumptest/tmp2 -T 2 " % binPath)
# tdSql.execute("use dp1")
# tdSql.query("show stables")
# tdSql.checkRows(1)
# tdSql.query("show tables")
# tdSql.checkRows(3)
# tdSql.error("use dp2")
# tdSql.query("select c01 from gt0")
# tdSql.checkData(0,0,78.86155)
# #check taosdumptest/tmp3
# tdSql.execute("drop database dp1")
# os.system("%staosdump -i ./taosdumptest/tmp3 -T 2 " % binPath)
# tdSql.execute("use dp2")
# tdSql.query("show stables")
# tdSql.checkRows(2)
# tdSql.query("show tables")
# tdSql.checkRows(4)
# tdSql.query("select count(*) from st1_0")
# tdSql.query("select c01 from gt0")
# tdSql.checkData(0,0,78.86155)
# tdSql.error("use dp1")
# tdSql.error("select count(*) from st2_0")
# tdSql.error("select count(*) from gt2")
# #check taosdumptest/tmp4
# tdSql.execute("drop database dp2")
# os.system("%staosdump -i ./taosdumptest/tmp4 -T 2 " % binPath)
# tdSql.execute("use dp2")
# tdSql.query("show stables")
# tdSql.checkRows(2)
# tdSql.query("show tables")
# tdSql.checkRows(6)
# tdSql.query("select c21 from gt2")
# tdSql.checkData(0, 0, 78.86300)
# tdSql.query("select count(*) from st2_0")
# tdSql.error("use dp1")
# tdSql.error("select count(*) from st1_0")
# tdSql.error("select count(*) from gt3")
# tdSql.execute("drop database dp2")
# os.system("rm -rf ./taosdumptest/tmp1")
# os.system("rm -rf ./taosdumptest/tmp2")
# os.system("rm -rf ./dump_result.txt")
# os.system("rm -rf ./db.csv")
    def stop(self):
        tdSql.close()
        tdLog.success("%s successfully executed" % __file__)


tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
tests/script/api/openTSDBTest.c
@@ -26,7 +26,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb0_1 1626006833639000000ns 4i8 host=\"host0\" interface=\"eth0\"",
       "stb0_2 1626006833639000000ns 4i8 host=\"host0\" interface=\"eth0\"",
   };
-  code = taos_insert_telnet_lines(taos, lines0, 3);
+  code = taos_schemaless_insert(taos, lines0, 3, 1);
   if (code) {
     printf("lines0 code: %d, %s.\n", code, tstrerror(code));
   }
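Every hunk in this file makes the same substitution: the protocol-specific entry points (taos_insert_telnet_lines, taos_insert_json_payload) are replaced by the single taos_schemaless_insert() call, whose extra trailing argument selects the protocol. Judging only from the call sites in this diff, 0 is used for InfluxDB line protocol, 1 for OpenTSDB telnet lines, and 2 for OpenTSDB JSON payloads. A minimal sketch of the telnet case, assuming the usual taos_connect() setup, a pre-created database named db, and placeholder connection parameters:

#include <stdio.h>
#include "taos.h"

int main(void) {
  // Placeholder connection parameters; adjust host/user/password for a real setup.
  TAOS *taos = taos_connect("localhost", "root", "taosdata", "db", 0);
  if (taos == NULL) {
    printf("failed to connect to TDengine\n");
    return 1;
  }

  // Two OpenTSDB telnet lines: metric, timestamp, value, then tag=value pairs.
  char *lines[] = {
      "sys_cpu 1626006833639000000ns 11i8 host=\"web01\"",
      "sys_cpu 1626006833640000000ns 12i8 host=\"web02\"",
  };

  // Protocol 1 is what the telnet test cases above pass; as in those tests,
  // the return value is checked as an error code.
  int code = taos_schemaless_insert(taos, lines, 2, 1);
  if (code != 0) {
    printf("schemaless insert failed, code: %d\n", code);
  }

  taos_close(taos);
  return 0;
}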
@@ -40,7 +40,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb1 1626006833651ms 5i8 host=\"host0\"",
       "stb1 0 6i8 host=\"host0\"",
   };
-  code = taos_insert_telnet_lines(taos, lines1, 6);
+  code = taos_schemaless_insert(taos, lines1, 6, 1);
   if (code) {
     printf("lines1 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -51,7 +51,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb2_0 1626006833651ms -127i8 host=\"host0\"",
       "stb2_0 1626006833652ms 127i8 host=\"host0\""
   };
-  code = taos_insert_telnet_lines(taos, lines2_0, 2);
+  code = taos_schemaless_insert(taos, lines2_0, 2, 1);
   if (code) {
     printf("lines2_0 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -61,7 +61,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb2_1 1626006833651ms -32767i16 host=\"host0\"",
       "stb2_1 1626006833652ms 32767i16 host=\"host0\""
   };
-  code = taos_insert_telnet_lines(taos, lines2_1, 2);
+  code = taos_schemaless_insert(taos, lines2_1, 2, 1);
   if (code) {
     printf("lines2_1 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -71,7 +71,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb2_2 1626006833651ms -2147483647i32 host=\"host0\"",
       "stb2_2 1626006833652ms 2147483647i32 host=\"host0\""
   };
-  code = taos_insert_telnet_lines(taos, lines2_2, 2);
+  code = taos_schemaless_insert(taos, lines2_2, 2, 1);
   if (code) {
     printf("lines2_2 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -82,7 +82,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb2_3 1626006833652ms 9223372036854775807i64 host=\"host0\"",
       "stb2_3 1626006833662ms 9223372036854775807 host=\"host0\""
   };
-  code = taos_insert_telnet_lines(taos, lines2_3, 3);
+  code = taos_schemaless_insert(taos, lines2_3, 3, 1);
   if (code) {
     printf("lines2_3 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -100,7 +100,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb2_4 1626006833700ms 3.4E38f32 host=\"host0\"",
       "stb2_4 1626006833710ms -3.4E38f32 host=\"host0\""
   };
-  code = taos_insert_telnet_lines(taos, lines2_4, 10);
+  code = taos_schemaless_insert(taos, lines2_4, 10, 1);
   if (code) {
     printf("lines2_4 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -119,7 +119,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb2_5 1626006833700ms -1.7E308f64 host=\"host0\"",
       "stb2_5 1626006833710ms 3.15 host=\"host0\""
   };
-  code = taos_insert_telnet_lines(taos, lines2_5, 11);
+  code = taos_schemaless_insert(taos, lines2_5, 11, 1);
   if (code) {
     printf("lines2_5 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -137,7 +137,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb2_6 1626006833690ms False host=\"host0\"",
       "stb2_6 1626006833700ms FALSE host=\"host0\""
   };
-  code = taos_insert_telnet_lines(taos, lines2_6, 10);
+  code = taos_schemaless_insert(taos, lines2_6, 10, 1);
   if (code) {
     printf("lines2_6 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -148,7 +148,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb2_7 1626006833620ms \"binary_val.:;,./?|+-=\" host=\"host0\"",
       "stb2_7 1626006833630ms \"binary_val.()[]{}<>\" host=\"host0\""
   };
-  code = taos_insert_telnet_lines(taos, lines2_7, 3);
+  code = taos_schemaless_insert(taos, lines2_7, 3, 1);
   if (code) {
     printf("lines2_7 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -158,7 +158,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb2_8 1626006833610ms L\"nchar_val数值一\" host=\"host0\"",
       "stb2_8 1626006833620ms L\"nchar_val数值二\" host=\"host0\""
   };
-  code = taos_insert_telnet_lines(taos, lines2_8, 2);
+  code = taos_schemaless_insert(taos, lines2_8, 2, 1);
   if (code) {
     printf("lines2_8 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -169,7 +169,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb3_0 1626006833610ms 1 t1=127i8 t2=32767i16 t3=2147483647i32 t4=9223372036854775807i64 t5=3.4E38f32 t6=1.7E308f64 t7=true t8=\"binary_val_1\" t9=L\"标签值1\"",
       "stb3_0 1626006833610ms 2 t1=-127i8 t2=-32767i16 t3=-2147483647i32 t4=-9223372036854775807i64 t5=-3.4E38f32 t6=-1.7E308f64 t7=false t8=\"binary_val_2\" t9=L\"标签值2\""
   };
-  code = taos_insert_telnet_lines(taos, lines3_0, 2);
+  code = taos_schemaless_insert(taos, lines3_0, 2, 1);
   if (code) {
     printf("lines3_0 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -180,7 +180,7 @@ void verify_telnet_insert(TAOS* taos) {
       "stb3_1 1626006833610ms 2 host=\"host2\" iD=\"child_table2\"",
       "stb3_1 1626006833610ms 3 ID=\"child_table3\" host=\"host3\""
   };
-  code = taos_insert_telnet_lines(taos, lines3_1, 3);
+  code = taos_schemaless_insert(taos, lines3_1, 3, 1);
   if (code) {
     printf("lines3_1 code: %d, %s.\n", code, tstrerror(code));
   }
@@ -201,7 +201,7 @@ void verify_json_insert(TAOS* taos) {
   (void)taos_select_db(taos, "db");
   int32_t code = 0;
-  char* message =
+  char* message[] = {
   "{ \
       \"metric\": \"cpu_load_0\", \
       \"timestamp\": 1626006833610123, \
@@ -212,14 +212,14 @@ void verify_json_insert(TAOS* taos) {
       \"interface1\": \"eth0\", \
       \"Id\": \"tb0\" \
     } \
-  }";
+  }"};

-  code = taos_insert_json_payload(taos, message);
+  code = taos_schemaless_insert(taos, message, 0, 2);
   if (code) {
     printf("payload_0 code: %d, %s.\n", code, tstrerror(code));
   }

-  char* message1 =
+  char* message1[] = {
   "[ \
    { \
       \"metric\": \"cpu_load_1\", \
@@ -243,14 +243,14 @@ void verify_json_insert(TAOS* taos) {
       \"Id\": \"tb2\" \
     } \
    } \
-  ]";
+  ]"};

-  code = taos_insert_json_payload(taos, message1);
+  code = taos_schemaless_insert(taos, message1, 0, 2);
   if (code) {
     printf("payload_1 code: %d, %s.\n", code, tstrerror(code));
   }

-  char* message2 =
+  char* message2[] = {
   "[ \
    { \
       \"metric\": \"cpu_load_3\", \
@@ -295,15 +295,15 @@ void verify_json_insert(TAOS* taos) {
       \"Id\": \"tb4\" \
     } \
    } \
-  ]";
+  ]"};

-  code = taos_insert_json_payload(taos, message2);
+  code = taos_schemaless_insert(taos, message2, 0, 2);
   if (code) {
     printf("payload_2 code: %d, %s.\n", code, tstrerror(code));
   }

   cJSON *payload, *tags;
-  char *payload_str;
+  char *payload_str[1];

   /* Default format */
   //number
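For the JSON path the payload is still built with cJSON, but because taos_schemaless_insert() takes an array of strings, the printed document is passed through a one-element char* array (hence char *payload_str[1] above, with *payload_str = cJSON_Print(payload) and free(*payload_str)); at every JSON call site in this diff the length argument is 0 and the protocol argument is 2. A minimal sketch of that pattern, assuming an open connection and using hypothetical metric and tag names:

#include <stdio.h>
#include <stdlib.h>
#include "taos.h"
#include "cJSON.h"

// Build one OpenTSDB-style JSON document with cJSON and insert it.
// Assumes `taos` is an open connection with a database already selected.
static int insert_one_json(TAOS *taos) {
  cJSON *payload = cJSON_CreateObject();
  cJSON_AddStringToObject(payload, "metric", "cpu_load_demo");   // hypothetical metric name
  cJSON_AddNumberToObject(payload, "timestamp", 1626006833610);  // ms timestamp
  cJSON_AddNumberToObject(payload, "value", 10);

  cJSON *tags = cJSON_CreateObject();
  cJSON_AddStringToObject(tags, "host", "host0");
  cJSON_AddItemToObject(payload, "tags", tags);

  // taos_schemaless_insert() expects char*[], so wrap the printed JSON
  // in a one-element array, mirroring payload_str[1] in the test above.
  char *payload_str[1];
  *payload_str = cJSON_Print(payload);

  // numLines is 0 and protocol is 2 for JSON payloads in this diff.
  int code = taos_schemaless_insert(taos, payload_str, 0, 2);
  if (code) {
    printf("json insert failed: %d\n", code);
  }

  free(*payload_str);
  cJSON_Delete(payload);
  return code;
}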
@@ -317,14 +317,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload0_0 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //true
@@ -338,14 +338,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload0_1 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //false
@@ -359,14 +359,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload0_2 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //string
@@ -380,14 +380,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload0_3 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //timestamp 0 -> current time
@@ -401,38 +401,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload0_4 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
-  cJSON_Delete(payload);
-
-  //ID
-  payload = cJSON_CreateObject();
-  cJSON_AddStringToObject(payload, "metric", "stb0_5");
-  cJSON_AddNumberToObject(payload, "timestamp", 0);
-  cJSON_AddNumberToObject(payload, "value", 123);
-  tags = cJSON_CreateObject();
-  cJSON_AddStringToObject(tags, "ID", "tb0_5");
-  cJSON_AddTrueToObject(tags, "t1");
-  cJSON_AddStringToObject(tags, "iD", "tb000");
-  cJSON_AddFalseToObject(tags, "t2");
-  cJSON_AddNumberToObject(tags, "t3", 10);
-  cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
-  cJSON_AddStringToObject(tags, "id", "tb555");
-  cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
-  //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
-  if (code) {
-    printf("payload0_5 code: %d, %s.\n", code, tstrerror(code));
-  }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   /* Nested format */
@@ -454,14 +430,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload1_0 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //milleseconds
@@ -480,14 +456,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload1_1 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //microseconds
@@ -506,40 +482,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload1_2 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
-  cJSON_Delete(payload);
-
-  //nanoseconds
-  payload = cJSON_CreateObject();
-  cJSON_AddStringToObject(payload, "metric", "stb1_3");
-  timestamp = cJSON_CreateObject();
-  cJSON_AddNumberToObject(timestamp, "value", (double)1626006833610123321);
-  cJSON_AddStringToObject(timestamp, "type", "ns");
-  cJSON_AddItemToObject(payload, "timestamp", timestamp);
-  cJSON_AddNumberToObject(payload, "value", 10);
-  tags = cJSON_CreateObject();
-  cJSON_AddTrueToObject(tags, "t1");
-  cJSON_AddFalseToObject(tags, "t2");
-  cJSON_AddNumberToObject(tags, "t3", 10);
-  cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
-  cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
-  //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
-  if (code) {
-    printf("payload1_3 code: %d, %s.\n", code, tstrerror(code));
-  }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //now
@@ -558,14 +508,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload1_4 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //metric value
@@ -590,14 +540,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload2_0 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //tinyint
@@ -620,14 +570,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload2_1 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //smallint
@@ -650,14 +600,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload2_2 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //int
@@ -680,14 +630,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload2_3 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //bigint
@@ -710,14 +660,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload2_4 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //float
@@ -740,14 +690,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload2_5 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //double
@@ -770,14 +720,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload2_6 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //binary
@@ -800,14 +750,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload2_7 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //nchar
@@ -830,14 +780,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddNumberToObject(tags, "t3", 10);
   cJSON_AddStringToObject(tags, "t4", "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>");
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload2_8 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);

   //tag value
@@ -910,14 +860,14 @@ void verify_json_insert(TAOS* taos) {
   cJSON_AddItemToObject(payload, "tags", tags);
-  payload_str = cJSON_Print(payload);
+  *payload_str = cJSON_Print(payload);
   //printf("%s\n", payload_str);
-  code = taos_insert_json_payload(taos, payload_str);
+  code = taos_schemaless_insert(taos, payload_str, 0, 2);
   if (code) {
     printf("payload3_0 code: %d, %s.\n", code, tstrerror(code));
   }
-  free(payload_str);
+  free(*payload_str);
   cJSON_Delete(payload);
 }
tests/tsim/src/simExe.c
@@ -1084,7 +1084,7 @@ bool simExecuteLineInsertCmd(SScript *script, char *rest) {
   simInfo("script:%s, %s", script->fileName, rest);
   simLogSql(buf, true);
   char *lines[] = {rest};
-  int32_t ret = taos_insert_lines(script->taos, lines, 1);
+  int32_t ret = taos_schemaless_insert(script->taos, lines, 1, 0);
   if (ret == TSDB_CODE_SUCCESS) {
     simDebug("script:%s, taos:%p, %s executed. success.", script->fileName, script->taos, rest);
     script->linePos++;
@@ -1107,7 +1107,7 @@ bool simExecuteLineInsertErrorCmd(SScript *script, char *rest) {
   simInfo("script:%s, %s", script->fileName, rest);
   simLogSql(buf, true);
   char *lines[] = {rest};
-  int32_t ret = taos_insert_lines(script->taos, lines, 1);
+  int32_t ret = taos_schemaless_insert(script->taos, lines, 1, 0);
   if (ret == TSDB_CODE_SUCCESS) {
     sprintf(script->error, "script:%s, taos:%p, %s executed. expect failed, but success.", script->fileName, script->taos, rest);
     script->linePos++;
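simExecuteLineInsertCmd() and simExecuteLineInsertErrorCmd() now route through the same unified entry point with protocol 0, which these call sites use for InfluxDB line-protocol records. A minimal sketch under that assumption (the sample lines below are illustrative, not taken from the test scripts):

#include <stdio.h>
#include "taos.h"

// Insert two InfluxDB line-protocol records through the unified entry point.
// Assumes `taos` is already connected and a database is selected.
static int insert_line_protocol(TAOS *taos) {
  char *lines[] = {
      "st,t1=3i64,t2=4f64 c1=2i64,c2=false,c3=\"passit\" 1626006833639000000ns",
      "st,t1=4i64,t2=5f64 c1=3i64,c2=true,c3=\"passitagain\" 1626006833640000000ns",
  };

  // Protocol 0 is what simExecuteLineInsertCmd() passes for line-protocol data.
  int ret = taos_schemaless_insert(taos, lines, 2, 0);
  if (ret != 0) {
    printf("line protocol insert failed: %d\n", ret);
  }
  return ret;
}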