taosdata/TDengine, commit b1b09f65 (unverified)
Authored on Sep 13, 2021 by dapan1121; committed via GitHub on Sep 13, 2021.
Merge pull request #7825 from taosdata/feature/TD-6443
[TD-6443]<feature>: Support OpenTSDB HTTP JSON data import format
Parents: 10db0c2b, 10b6eafc
Showing 12 changed files with 1,935 additions and 37 deletions:
src/client/CMakeLists.txt                  +8    -8
src/client/src/tscParseLineProtocol.c      +5    -4
src/client/src/tscParseOpenTSDB.c          +556  -7
src/connector/python/taos/cinterface.py    +7    -1
src/connector/python/taos/connection.py    +19   -0
src/connector/python/taos/error.py         +11   -1
src/inc/taos.h                             +2    -0
src/inc/taoserror.h                        +3    -0
src/util/src/terror.c                      +3    -0
tests/examples/c/apitest.c                 +751  -14
tests/examples/c/makefile                  +2    -2
tests/pytest/insert/insertJSONPayload.py   +568  -0
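
Before the per-file diffs, here is a minimal, hypothetical C sketch of how a client program could call the new taos_insert_json_payload() API once this change is built. The host, credentials, and database name are placeholders (the "test" database is assumed to exist), and the payload mirrors the example in the new connection.py docstring; the full C coverage lives in the (collapsed) apitest.c diff further down.

#include <stdio.h>
#include <taos.h>

int main(void) {
  // Placeholder connection parameters; adjust for your environment.
  TAOS *taos = taos_connect("localhost", "root", "taosdata", "test", 0);
  if (taos == NULL) {
    printf("failed to connect to TDengine\n");
    return 1;
  }

  // One OpenTSDB HTTP JSON data point; a JSON array of such objects is also accepted.
  char *payload =
      "{"
      "  \"metric\": \"cpu_load_0\","
      "  \"timestamp\": 1626006833610123,"
      "  \"value\": 55.5,"
      "  \"tags\": {\"host\": \"ubuntu\", \"interface\": \"eth0\", \"ID\": \"tb0\"}"
      "}";

  int code = taos_insert_json_payload(taos, payload);
  printf("taos_insert_json_payload returned %d\n", code);

  taos_close(taos);
  return 0;
}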
src/client/CMakeLists.txt
@@ -13,13 +13,13 @@ IF (TD_LINUX)
   # set the static lib name
   ADD_LIBRARY(taos_static STATIC ${SRC})
-  TARGET_LINK_LIBRARIES(taos_static common query trpc tutil pthread m rt ${VAR_TSZ})
+  TARGET_LINK_LIBRARIES(taos_static common query trpc tutil pthread m rt cJson ${VAR_TSZ})
   SET_TARGET_PROPERTIES(taos_static PROPERTIES OUTPUT_NAME "taos_static")
   SET_TARGET_PROPERTIES(taos_static PROPERTIES CLEAN_DIRECT_OUTPUT 1)

   # generate dynamic library (*.so)
   ADD_LIBRARY(taos SHARED ${SRC})
-  TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m rt)
+  TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m rt cJson)

   IF (TD_LINUX_64)
     TARGET_LINK_LIBRARIES(taos lua)
   ENDIF ()

@@ -39,13 +39,13 @@ ELSEIF (TD_DARWIN)
   # set the static lib name
   ADD_LIBRARY(taos_static STATIC ${SRC})
-  TARGET_LINK_LIBRARIES(taos_static common query trpc tutil pthread m lua)
+  TARGET_LINK_LIBRARIES(taos_static common query trpc tutil pthread m lua cJson)
   SET_TARGET_PROPERTIES(taos_static PROPERTIES OUTPUT_NAME "taos_static")
   SET_TARGET_PROPERTIES(taos_static PROPERTIES CLEAN_DIRECT_OUTPUT 1)

   # generate dynamic library (*.dylib)
   ADD_LIBRARY(taos SHARED ${SRC})
-  TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m lua)
+  TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m lua cJson)
   SET_TARGET_PROPERTIES(taos PROPERTIES CLEAN_DIRECT_OUTPUT 1)

   #set version of .dylib

@@ -63,26 +63,26 @@ ELSEIF (TD_WINDOWS)
   CONFIGURE_FILE("${TD_COMMUNITY_DIR}/src/client/src/taos.rc.in" "${TD_COMMUNITY_DIR}/src/client/src/taos.rc")
   ADD_LIBRARY(taos_static STATIC ${SRC})
-  TARGET_LINK_LIBRARIES(taos_static trpc tutil query)
+  TARGET_LINK_LIBRARIES(taos_static trpc tutil query cJson)

   # generate dynamic library (*.dll)
   ADD_LIBRARY(taos SHARED ${SRC} ${TD_COMMUNITY_DIR}/src/client/src/taos.rc)
   IF (NOT TD_GODLL)
     SET_TARGET_PROPERTIES(taos PROPERTIES LINK_FLAGS /DEF:${TD_COMMUNITY_DIR}/src/client/src/taos.def)
   ENDIF ()
-  TARGET_LINK_LIBRARIES(taos trpc tutil query lua)
+  TARGET_LINK_LIBRARIES(taos trpc tutil query lua cJson)

 ELSEIF (TD_DARWIN)
   SET(CMAKE_MACOSX_RPATH 1)
   INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/deps/jni/linux)
   ADD_LIBRARY(taos_static STATIC ${SRC})
-  TARGET_LINK_LIBRARIES(taos_static query trpc tutil pthread m lua)
+  TARGET_LINK_LIBRARIES(taos_static query trpc tutil pthread m lua cJson)
   SET_TARGET_PROPERTIES(taos_static PROPERTIES OUTPUT_NAME "taos_static")

   # generate dynamic library (*.dylib)
   ADD_LIBRARY(taos SHARED ${SRC})
-  TARGET_LINK_LIBRARIES(taos query trpc tutil pthread m lua)
+  TARGET_LINK_LIBRARIES(taos query trpc tutil pthread m lua cJson)
   SET_TARGET_PROPERTIES(taos PROPERTIES CLEAN_DIRECT_OUTPUT 1)
src/client/src/tscParseLineProtocol.c
@@ -2128,11 +2128,12 @@ int32_t tscParseLines(char* lines[], int numLines, SArray* points, SArray* faile
 int taos_insert_lines(TAOS* taos, char* lines[], int numLines) {
   int32_t code = 0;

-  SSmlLinesInfo* info = calloc(1, sizeof(SSmlLinesInfo));
+  SSmlLinesInfo* info = tcalloc(1, sizeof(SSmlLinesInfo));
   info->id = genLinesSmlId();

   if (numLines <= 0 || numLines > 65536) {
     tscError("SML:0x%"PRIx64" taos_insert_lines numLines should be between 1 and 65536. numLines: %d", info->id, numLines);
+    tfree(info);
     code = TSDB_CODE_TSC_APP_ERROR;
     return code;
   }

@@ -2140,7 +2141,7 @@ int taos_insert_lines(TAOS* taos, char* lines[], int numLines) {
   for (int i = 0; i < numLines; ++i) {
     if (lines[i] == NULL) {
       tscError("SML:0x%"PRIx64" taos_insert_lines line %d is NULL", info->id, i);
-      free(info);
+      tfree(info);
       code = TSDB_CODE_TSC_APP_ERROR;
       return code;
     }

@@ -2149,7 +2150,7 @@ int taos_insert_lines(TAOS* taos, char* lines[], int numLines) {
   SArray* lpPoints = taosArrayInit(numLines, sizeof(TAOS_SML_DATA_POINT));
   if (lpPoints == NULL) {
     tscError("SML:0x%"PRIx64" taos_insert_lines failed to allocate memory", info->id);
-    free(info);
+    tfree(info);
     return TSDB_CODE_TSC_OUT_OF_MEMORY;
   }

@@ -2177,7 +2178,7 @@ cleanup:
   taosArrayDestroy(lpPoints);
-  free(info);
+  tfree(info);
   return code;
 }
src/client/src/tscParseOpenTSDB.c
@@ -3,6 +3,7 @@
 #include <stdlib.h>
 #include <string.h>
+#include "cJSON.h"
 #include "hash.h"
 #include "taos.h"

@@ -12,9 +13,12 @@
 #include "tscParseLine.h"

-#define MAX_TELNET_FILEDS_NUM 2
-#define OTS_TIMESTAMP_COLUMN_NAME "ts"
-#define OTS_METRIC_VALUE_COLUMN_NAME "value"
+#define OTD_MAX_FIELDS_NUM 2
+#define OTD_JSON_SUB_FIELDS_NUM 2
+#define OTD_JSON_FIELDS_NUM 4
+#define OTD_TIMESTAMP_COLUMN_NAME "ts"
+#define OTD_METRIC_VALUE_COLUMN_NAME "value"

 /* telnet style API parser */
 static uint64_t HandleId = 0;

@@ -77,12 +81,12 @@ static int32_t parseTelnetTimeStamp(TAOS_SML_KV **pTS, int *num_kvs, const char
   const char *start, *cur;
   int32_t ret = TSDB_CODE_SUCCESS;
   int len = 0;
-  char key[] = OTS_TIMESTAMP_COLUMN_NAME;
+  char key[] = OTD_TIMESTAMP_COLUMN_NAME;
   char *value = NULL;

   start = cur = *index;
   //allocate fields for timestamp and value
-  *pTS = tcalloc(MAX_TELNET_FILEDS_NUM, sizeof(TAOS_SML_KV));
+  *pTS = tcalloc(OTD_MAX_FIELDS_NUM, sizeof(TAOS_SML_KV));

   while(*cur != '\0') {
     if (*cur == ' ') {

@@ -123,7 +127,7 @@ static int32_t parseTelnetMetricValue(TAOS_SML_KV **pKVs, int *num_kvs, const ch
   const char *start, *cur;
   int32_t ret = TSDB_CODE_SUCCESS;
   int len = 0;
-  char key[] = OTS_METRIC_VALUE_COLUMN_NAME;
+  char key[] = OTD_METRIC_VALUE_COLUMN_NAME;
   char *value = NULL;

   start = cur = *index;

@@ -405,7 +409,7 @@ cleanup:
   tscDebug("OTD:0x%"PRIx64" taos_insert_telnet_lines finish inserting %d lines. code: %d", info->id, numLines, code);
   points = TARRAY_GET_START(lpPoints);
   numPoints = taosArrayGetSize(lpPoints);
-  for (int i=0; i<numPoints; ++i) {
+  for (int i = 0; i < numPoints; ++i) {
     destroySmlDataPoint(points+i);
   }

@@ -422,3 +426,548 @@ int taos_telnet_insert(TAOS* taos, TAOS_SML_DATA_POINT* points, int numPoint) {
   tfree(info);
   return code;
 }

All of the code below, through the end of taos_insert_json_payload(), is newly added by this commit:
/* telnet style API parser */
int32_t parseMetricFromJSON(cJSON *root, TAOS_SML_DATA_POINT* pSml, SSmlLinesInfo* info) {
  cJSON *metric = cJSON_GetObjectItem(root, "metric");
  if (!cJSON_IsString(metric)) {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  size_t stableLen = strlen(metric->valuestring);
  if (stableLen > TSDB_TABLE_NAME_LEN) {
    tscError("OTD:0x%"PRIx64" Metric cannot exceeds 193 characters in JSON", info->id);
    return TSDB_CODE_TSC_INVALID_TABLE_ID_LENGTH;
  }

  pSml->stableName = tcalloc(stableLen + 1, sizeof(char));
  if (pSml->stableName == NULL){
    return TSDB_CODE_TSC_OUT_OF_MEMORY;
  }
  if (isdigit(metric->valuestring[0])) {
    tscError("OTD:0x%"PRIx64" Metric cannnot start with digit in JSON", info->id);
    tfree(pSml->stableName);
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  tstrncpy(pSml->stableName, metric->valuestring, stableLen + 1);

  return TSDB_CODE_SUCCESS;
}
int32_t parseTimestampFromJSONObj(cJSON *root, int64_t *tsVal, SSmlLinesInfo* info) {
  int32_t size = cJSON_GetArraySize(root);
  if (size != OTD_JSON_SUB_FIELDS_NUM) {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  cJSON *value = cJSON_GetObjectItem(root, "value");
  if (!cJSON_IsNumber(value)) {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  cJSON *type = cJSON_GetObjectItem(root, "type");
  if (!cJSON_IsString(type)) {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  *tsVal = value->valueint;
  //if timestamp value is 0 use current system time
  if (*tsVal == 0) {
    *tsVal = taosGetTimestampNs();
    return TSDB_CODE_SUCCESS;
  }

  size_t typeLen = strlen(type->valuestring);
  if (typeLen == 1 && type->valuestring[0] == 's') {
    //seconds
    *tsVal = (int64_t)(*tsVal * 1e9);
  } else if (typeLen == 2 && type->valuestring[1] == 's') {
    switch (type->valuestring[0]) {
      case 'm':
        //milliseconds
        *tsVal = convertTimePrecision(*tsVal, TSDB_TIME_PRECISION_MILLI, TSDB_TIME_PRECISION_NANO);
        break;
      case 'u':
        //microseconds
        *tsVal = convertTimePrecision(*tsVal, TSDB_TIME_PRECISION_MICRO, TSDB_TIME_PRECISION_NANO);
        break;
      case 'n':
        //nanoseconds
        *tsVal = *tsVal * 1;
        break;
      default:
        return TSDB_CODE_TSC_INVALID_JSON;
    }
  }

  return TSDB_CODE_SUCCESS;
}
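
For reference, the nested timestamp object handled above accepts four unit suffixes. The sketch below is illustrative only (not part of the diff; the concrete numbers are made up apart from the 1626006833 base used in the tests) and lists how each form is normalized to nanoseconds.

/* Illustrative only: nested-timestamp forms accepted by parseTimestampFromJSONObj().
   Every value is normalized to nanoseconds; a value of 0 means "use the current time". */
static const char *kTimestampObjExamples[] = {
    "{\"value\": 1626006833,          \"type\": \"s\"}",   /* seconds      -> value * 1e9 ns         */
    "{\"value\": 1626006833610,       \"type\": \"ms\"}",  /* milliseconds -> convertTimePrecision() */
    "{\"value\": 1626006833610123,    \"type\": \"us\"}",  /* microseconds -> convertTimePrecision() */
    "{\"value\": 1626006833610123456, \"type\": \"ns\"}",  /* nanoseconds  -> stored as-is           */
    "{\"value\": 0,                   \"type\": \"ns\"}",  /* zero         -> taosGetTimestampNs()   */
};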
int32_t parseTimestampFromJSON(cJSON *root, TAOS_SML_KV **pTS, int *num_kvs, SSmlLinesInfo* info) {
  //Timestamp must be the first KV to parse
  assert(*num_kvs == 0);
  int64_t tsVal;
  char key[] = OTD_TIMESTAMP_COLUMN_NAME;

  cJSON *timestamp = cJSON_GetObjectItem(root, "timestamp");
  if (cJSON_IsNumber(timestamp)) {
    //timestamp value 0 indicates current system time
    if (timestamp->valueint == 0) {
      tsVal = taosGetTimestampNs();
    } else {
      tsVal = convertTimePrecision(timestamp->valueint, TSDB_TIME_PRECISION_MICRO, TSDB_TIME_PRECISION_NANO);
    }
  } else if (cJSON_IsObject(timestamp)) {
    int32_t ret = parseTimestampFromJSONObj(timestamp, &tsVal, info);
    if (ret != TSDB_CODE_SUCCESS) {
      tscError("OTD:0x%"PRIx64" Failed to parse timestamp from JSON Obj", info->id);
      return ret;
    }
  } else {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  //allocate fields for timestamp and value
  *pTS = tcalloc(OTD_MAX_FIELDS_NUM, sizeof(TAOS_SML_KV));
  (*pTS)->key = tcalloc(sizeof(key), 1);
  memcpy((*pTS)->key, key, sizeof(key));

  (*pTS)->type = TSDB_DATA_TYPE_TIMESTAMP;
  (*pTS)->length = (int16_t)tDataTypes[(*pTS)->type].bytes;
  (*pTS)->value = tcalloc((*pTS)->length, 1);
  memcpy((*pTS)->value, &tsVal, (*pTS)->length);

  *num_kvs += 1;
  return TSDB_CODE_SUCCESS;
}
int32_t convertJSONBool(TAOS_SML_KV *pVal, char* typeStr, int64_t valueInt, SSmlLinesInfo* info) {
  if (strcasecmp(typeStr, "bool") != 0) {
    tscError("OTD:0x%"PRIx64" invalid type(%s) for JSON Bool", info->id, typeStr);
    return TSDB_CODE_TSC_INVALID_JSON_TYPE;
  }
  pVal->type = TSDB_DATA_TYPE_BOOL;
  pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
  pVal->value = tcalloc(pVal->length, 1);
  *(bool *)(pVal->value) = valueInt ? true : false;

  return TSDB_CODE_SUCCESS;
}
int32_t convertJSONNumber(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLinesInfo* info) {
  //tinyint
  if (strcasecmp(typeStr, "i8") == 0 ||
      strcasecmp(typeStr, "tinyint") == 0) {
    if (!IS_VALID_TINYINT(value->valueint)) {
      tscError("OTD:0x%"PRIx64" JSON value(%"PRId64") cannot fit in type(tinyint)", info->id, value->valueint);
      return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
    }
    pVal->type = TSDB_DATA_TYPE_TINYINT;
    pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
    pVal->value = tcalloc(pVal->length, 1);
    *(int8_t *)(pVal->value) = (int8_t)(value->valueint);
    return TSDB_CODE_SUCCESS;
  }
  //smallint
  if (strcasecmp(typeStr, "i16") == 0 ||
      strcasecmp(typeStr, "smallint") == 0) {
    if (!IS_VALID_SMALLINT(value->valueint)) {
      tscError("OTD:0x%"PRIx64" JSON value(%"PRId64") cannot fit in type(smallint)", info->id, value->valueint);
      return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
    }
    pVal->type = TSDB_DATA_TYPE_SMALLINT;
    pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
    pVal->value = tcalloc(pVal->length, 1);
    *(int16_t *)(pVal->value) = (int16_t)(value->valueint);
    return TSDB_CODE_SUCCESS;
  }
  //int
  if (strcasecmp(typeStr, "i32") == 0 ||
      strcasecmp(typeStr, "int") == 0) {
    if (!IS_VALID_INT(value->valueint)) {
      tscError("OTD:0x%"PRIx64" JSON value(%"PRId64") cannot fit in type(int)", info->id, value->valueint);
      return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
    }
    pVal->type = TSDB_DATA_TYPE_INT;
    pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
    pVal->value = tcalloc(pVal->length, 1);
    *(int32_t *)(pVal->value) = (int32_t)(value->valueint);
    return TSDB_CODE_SUCCESS;
  }
  //bigint
  if (strcasecmp(typeStr, "i64") == 0 ||
      strcasecmp(typeStr, "bigint") == 0) {
    if (!IS_VALID_BIGINT(value->valueint)) {
      tscError("OTD:0x%"PRIx64" JSON value(%"PRId64") cannot fit in type(bigint)", info->id, value->valueint);
      return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
    }
    pVal->type = TSDB_DATA_TYPE_BIGINT;
    pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
    pVal->value = tcalloc(pVal->length, 1);
    *(int64_t *)(pVal->value) = (int64_t)(value->valueint);
    return TSDB_CODE_SUCCESS;
  }
  //float
  if (strcasecmp(typeStr, "f32") == 0 ||
      strcasecmp(typeStr, "float") == 0) {
    if (!IS_VALID_FLOAT(value->valuedouble)) {
      tscError("OTD:0x%"PRIx64" JSON value(%f) cannot fit in type(float)", info->id, value->valuedouble);
      return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
    }
    pVal->type = TSDB_DATA_TYPE_FLOAT;
    pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
    pVal->value = tcalloc(pVal->length, 1);
    *(float *)(pVal->value) = (float)(value->valuedouble);
    return TSDB_CODE_SUCCESS;
  }
  //double
  if (strcasecmp(typeStr, "f64") == 0 ||
      strcasecmp(typeStr, "double") == 0) {
    if (!IS_VALID_DOUBLE(value->valuedouble)) {
      tscError("OTD:0x%"PRIx64" JSON value(%f) cannot fit in type(double)", info->id, value->valuedouble);
      return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
    }
    pVal->type = TSDB_DATA_TYPE_DOUBLE;
    pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
    pVal->value = tcalloc(pVal->length, 1);
    *(double *)(pVal->value) = (double)(value->valuedouble);
    return TSDB_CODE_SUCCESS;
  }

  //if reach here means type is unsupported
  tscError("OTD:0x%"PRIx64" invalid type(%s) for JSON Number", info->id, typeStr);
  return TSDB_CODE_TSC_INVALID_JSON_TYPE;
}
int32_t convertJSONString(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLinesInfo* info) {
  if (strcasecmp(typeStr, "binary") == 0) {
    pVal->type = TSDB_DATA_TYPE_BINARY;
  } else if (strcasecmp(typeStr, "nchar") == 0) {
    pVal->type = TSDB_DATA_TYPE_NCHAR;
  } else {
    tscError("OTD:0x%"PRIx64" invalid type(%s) for JSON String", info->id, typeStr);
    return TSDB_CODE_TSC_INVALID_JSON_TYPE;
  }
  pVal->length = (int16_t)strlen(value->valuestring);
  pVal->value = tcalloc(pVal->length + 1, 1);
  memcpy(pVal->value, value->valuestring, pVal->length);
  return TSDB_CODE_SUCCESS;
}
int32_t parseValueFromJSONObj(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* info) {
  int32_t ret = TSDB_CODE_SUCCESS;
  int32_t size = cJSON_GetArraySize(root);

  if (size != OTD_JSON_SUB_FIELDS_NUM) {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  cJSON *value = cJSON_GetObjectItem(root, "value");
  if (value == NULL) {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  cJSON *type = cJSON_GetObjectItem(root, "type");
  if (!cJSON_IsString(type)) {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  switch (value->type) {
    case cJSON_True:
    case cJSON_False: {
      ret = convertJSONBool(pVal, type->valuestring, value->valueint, info);
      if (ret != TSDB_CODE_SUCCESS) {
        return ret;
      }
      break;
    }
    case cJSON_Number: {
      ret = convertJSONNumber(pVal, type->valuestring, value, info);
      if (ret != TSDB_CODE_SUCCESS) {
        return ret;
      }
      break;
    }
    case cJSON_String: {
      ret = convertJSONString(pVal, type->valuestring, value, info);
      if (ret != TSDB_CODE_SUCCESS) {
        return ret;
      }
      break;
    }
    default:
      return TSDB_CODE_TSC_INVALID_JSON_TYPE;
  }

  return TSDB_CODE_SUCCESS;
}
int32_t parseValueFromJSON(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* info) {
  int type = root->type;

  switch (type) {
    case cJSON_True:
    case cJSON_False: {
      pVal->type = TSDB_DATA_TYPE_BOOL;
      pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
      pVal->value = tcalloc(pVal->length, 1);
      *(bool *)(pVal->value) = root->valueint ? true : false;
      break;
    }
    case cJSON_Number: {
      //convert default JSON Number type to float
      pVal->type = TSDB_DATA_TYPE_FLOAT;
      pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
      pVal->value = tcalloc(pVal->length, 1);
      *(float *)(pVal->value) = (float)(root->valuedouble);
      break;
    }
    case cJSON_String: {
      //convert default JSON String type to nchar
      pVal->type = TSDB_DATA_TYPE_NCHAR;
      //pVal->length = wcslen((wchar_t *)root->valuestring) * TSDB_NCHAR_SIZE;
      pVal->length = (int16_t)strlen(root->valuestring);
      pVal->value = tcalloc(pVal->length + 1, 1);
      memcpy(pVal->value, root->valuestring, pVal->length);
      break;
    }
    case cJSON_Object: {
      int32_t ret = parseValueFromJSONObj(root, pVal, info);
      if (ret != TSDB_CODE_SUCCESS) {
        tscError("OTD:0x%"PRIx64" Failed to parse timestamp from JSON Obj", info->id);
        return ret;
      }
      break;
    }
    default:
      return TSDB_CODE_TSC_INVALID_JSON;
  }

  return TSDB_CODE_SUCCESS;
}
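
To make the default typing above concrete, here is an illustrative payload (not taken from the diff; the metric name and tag values are only examples) annotated with the TDengine types parseValueFromJSON() chooses when no nested {"value", "type"} object is supplied.

/* Illustrative only: default types chosen for plain JSON values (see parseValueFromJSON above). */
static const char *kDefaultTypingExample =
    "{"
    "  \"metric\": \"stb0_0\","
    "  \"timestamp\": 1626006833610123,"  /* plain number timestamp -> treated as microseconds */
    "  \"value\": 10,"                    /* JSON number -> FLOAT                              */
    "  \"tags\": {"
    "    \"t1\": true,"                   /* JSON bool   -> BOOL                               */
    "    \"t3\": 10,"                     /* JSON number -> FLOAT                              */
    "    \"t4\": \"abc\""                 /* JSON string -> NCHAR                              */
    "  }"
    "}";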
int32_t parseMetricValueFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, SSmlLinesInfo* info) {
  //skip timestamp
  TAOS_SML_KV *pVal = *pKVs + 1;
  char key[] = OTD_METRIC_VALUE_COLUMN_NAME;

  cJSON *metricVal = cJSON_GetObjectItem(root, "value");
  if (metricVal == NULL) {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  int32_t ret = parseValueFromJSON(metricVal, pVal, info);
  if (ret != TSDB_CODE_SUCCESS) {
    return ret;
  }

  pVal->key = tcalloc(sizeof(key), 1);
  memcpy(pVal->key, key, sizeof(key));

  *num_kvs += 1;
  return TSDB_CODE_SUCCESS;
}
int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, char **childTableName, SSmlLinesInfo* info) {
  int32_t ret = TSDB_CODE_SUCCESS;

  cJSON *tags = cJSON_GetObjectItem(root, "tags");
  if (tags == NULL || tags->type != cJSON_Object) {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  //only pick up the first ID value as child table name
  cJSON *id = cJSON_GetObjectItem(tags, "ID");
  if (id != NULL) {
    size_t idLen = strlen(id->valuestring);
    ret = isValidChildTableName(id->valuestring, (int16_t)idLen);
    if (ret != TSDB_CODE_SUCCESS) {
      return ret;
    }
    *childTableName = tcalloc(idLen + 1, sizeof(char));
    memcpy(*childTableName, id->valuestring, idLen);
    //remove all ID fields from tags list no case sensitive
    while (id != NULL) {
      cJSON_DeleteItemFromObject(tags, "ID");
      id = cJSON_GetObjectItem(tags, "ID");
    }
  }

  int32_t tagNum = cJSON_GetArraySize(tags);
  //at least one tag pair required
  if (tagNum <= 0) {
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  //allocate memory for tags
  *pKVs = tcalloc(tagNum, sizeof(TAOS_SML_KV));
  TAOS_SML_KV *pkv = *pKVs;

  for (int32_t i = 0; i < tagNum; ++i) {
    cJSON *tag = cJSON_GetArrayItem(tags, i);
    if (tag == NULL) {
      return TSDB_CODE_TSC_INVALID_JSON;
    }
    //key
    size_t keyLen = strlen(tag->string);
    pkv->key = tcalloc(keyLen + 1, sizeof(char));
    strncpy(pkv->key, tag->string, keyLen);
    //value
    ret = parseValueFromJSON(tag, pkv, info);
    if (ret != TSDB_CODE_SUCCESS) {
      return ret;
    }
    *num_kvs += 1;
    pkv++;
  }

  return ret;
}
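
The "ID" handling above deserves a concrete illustration: the first "ID" tag (matched case-insensitively by cJSON_GetObjectItem) names the child table, and every remaining ID/iD/id entry is deleted before the other tags are parsed. This is exactly what the stb0_5 case in the new Python test asserts; the payload below restates that case for illustration only.

/* Illustrative only: the first "ID" tag becomes the child table name (tb0_5 here),
   and all ID variants are removed before the remaining tags are parsed. */
static const char *kChildTableNameExample =
    "{"
    "  \"metric\": \"stb0_5\","
    "  \"timestamp\": 0,"
    "  \"value\": 123,"
    "  \"tags\": {\"ID\": \"tb0_5\", \"t1\": true, \"iD\": \"tb000\", \"id\": \"tb555\"}"
    "}";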
int32_t tscParseJSONPayload(cJSON *root, TAOS_SML_DATA_POINT* pSml, SSmlLinesInfo* info) {
  int32_t ret = TSDB_CODE_SUCCESS;

  if (!cJSON_IsObject(root)) {
    tscError("OTD:0x%"PRIx64" data point needs to be JSON object", info->id);
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  int32_t size = cJSON_GetArraySize(root);
  //outmost json fields has to be exactly 4
  if (size != OTD_JSON_FIELDS_NUM) {
    tscError("OTD:0x%"PRIx64" Invalid number of JSON fields in data point %d", info->id, size);
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  //Parse metric
  ret = parseMetricFromJSON(root, pSml, info);
  if (ret != TSDB_CODE_SUCCESS) {
    tscError("OTD:0x%"PRIx64" Unable to parse metric from JSON payload", info->id);
    return ret;
  }
  tscDebug("OTD:0x%"PRIx64" Parse metric from JSON payload finished", info->id);

  //Parse timestamp
  ret = parseTimestampFromJSON(root, &pSml->fields, &pSml->fieldNum, info);
  if (ret) {
    tscError("OTD:0x%"PRIx64" Unable to parse timestamp from JSON payload", info->id);
    return ret;
  }
  tscDebug("OTD:0x%"PRIx64" Parse timestamp from JSON payload finished", info->id);

  //Parse metric value
  ret = parseMetricValueFromJSON(root, &pSml->fields, &pSml->fieldNum, info);
  if (ret) {
    tscError("OTD:0x%"PRIx64" Unable to parse metric value from JSON payload", info->id);
    return ret;
  }
  tscDebug("OTD:0x%"PRIx64" Parse metric value from JSON payload finished", info->id);

  //Parse tags
  ret = parseTagsFromJSON(root, &pSml->tags, &pSml->tagNum, &pSml->childTableName, info);
  if (ret) {
    tscError("OTD:0x%"PRIx64" Unable to parse tags from JSON payload", info->id);
    return ret;
  }
  tscDebug("OTD:0x%"PRIx64" Parse tags from JSON payload finished", info->id);

  return TSDB_CODE_SUCCESS;
}
int32_t tscParseMultiJSONPayload(char* payload, SArray* points, SSmlLinesInfo* info) {
  int32_t payloadNum, ret;
  ret = TSDB_CODE_SUCCESS;

  if (payload == NULL) {
    tscError("OTD:0x%"PRIx64" empty JSON Payload", info->id);
    return TSDB_CODE_TSC_INVALID_JSON;
  }

  cJSON *root = cJSON_Parse(payload);
  //multiple data points must be sent in JSON array
  if (cJSON_IsObject(root)) {
    payloadNum = 1;
  } else if (cJSON_IsArray(root)) {
    payloadNum = cJSON_GetArraySize(root);
  } else {
    tscError("OTD:0x%"PRIx64" Invalid JSON Payload", info->id);
    ret = TSDB_CODE_TSC_INVALID_JSON;
    goto PARSE_JSON_OVER;
  }

  for (int32_t i = 0; i < payloadNum; ++i) {
    TAOS_SML_DATA_POINT point = {0};
    cJSON *dataPoint = (payloadNum == 1) ? root : cJSON_GetArrayItem(root, i);

    ret = tscParseJSONPayload(dataPoint, &point, info);
    if (ret != TSDB_CODE_SUCCESS) {
      tscError("OTD:0x%"PRIx64" JSON data point parse failed", info->id);
      destroySmlDataPoint(&point);
      goto PARSE_JSON_OVER;
    } else {
      tscDebug("OTD:0x%"PRIx64" JSON data point parse success", info->id);
    }
    taosArrayPush(points, &point);
  }

PARSE_JSON_OVER:
  cJSON_Delete(root);
  return ret;
}
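
As the parser above shows, a payload may be either a single JSON object or a JSON array of data points, each element being handed to tscParseJSONPayload() in turn. The batch form below is illustrative only; the second metric and its tag values are made up for the example.

/* Illustrative only: a batch payload, i.e. a JSON array of OpenTSDB-style data points. */
static const char *kBatchPayloadExample =
    "["
    "  {\"metric\": \"cpu_load_0\", \"timestamp\": 1626006833610123, \"value\": 55.5,"
    "   \"tags\": {\"host\": \"ubuntu\", \"interface\": \"eth0\"}},"
    "  {\"metric\": \"cpu_load_1\", \"timestamp\": 1626006833610123, \"value\": 25.5,"
    "   \"tags\": {\"host\": \"ubuntu\", \"interface\": \"eth1\"}}"
    "]";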
int taos_insert_json_payload(TAOS* taos, char* payload) {
  int32_t code = 0;

  SSmlLinesInfo* info = tcalloc(1, sizeof(SSmlLinesInfo));
  info->id = genUID();

  if (payload == NULL) {
    tscError("OTD:0x%"PRIx64" taos_insert_json_payload payload is NULL", info->id);
    tfree(info);
    code = TSDB_CODE_TSC_APP_ERROR;
    return code;
  }

  SArray* lpPoints = taosArrayInit(1, sizeof(TAOS_SML_DATA_POINT));
  if (lpPoints == NULL) {
    tscError("OTD:0x%"PRIx64" taos_insert_json_payload failed to allocate memory", info->id);
    tfree(info);
    return TSDB_CODE_TSC_OUT_OF_MEMORY;
  }

  tscDebug("OTD:0x%"PRIx64" taos_insert_telnet_lines begin inserting %d points", info->id, 1);
  code = tscParseMultiJSONPayload(payload, lpPoints, info);
  size_t numPoints = taosArrayGetSize(lpPoints);

  if (code != 0) {
    goto cleanup;
  }

  TAOS_SML_DATA_POINT* points = TARRAY_GET_START(lpPoints);
  code = tscSmlInsert(taos, points, (int)numPoints, info);
  if (code != 0) {
    tscError("OTD:0x%"PRIx64" taos_insert_json_payload error: %s", info->id, tstrerror((code)));
  }

cleanup:
  tscDebug("OTD:0x%"PRIx64" taos_insert_json_payload finish inserting 1 Point. code: %d", info->id, code);
  points = TARRAY_GET_START(lpPoints);
  numPoints = taosArrayGetSize(lpPoints);
  for (int i = 0; i < numPoints; ++i) {
    destroySmlDataPoint(points + i);
  }

  taosArrayDestroy(lpPoints);
  tfree(info);
  return code;
}
src/connector/python/taos/cinterface.py
@@ -835,8 +835,14 @@ def taos_insert_telnet_lines(connection, lines):
     p_lines = lines_type(*lines)
     errno = _libtaos.taos_insert_telnet_lines(connection, p_lines, num_of_lines)
     if errno != 0:
-        raise LinesError("insert telnet lines error", errno)
+        raise TelnetLinesError("insert telnet lines error", errno)
+
+
+def taos_insert_json_payload(connection, payload):
+    # type: (c_void_p, list[str] | tuple(str)) -> None
+    payload = payload.encode("utf-8")
+    errno = _libtaos.taos_insert_json_payload(connection, payload)
+    if errno != 0:
+        raise JsonPayloadError("insert json payload error", errno)


 class CTaosInterface(object):
     def __init__(self, config=None):
src/connector/python/taos/connection.py
@@ -154,6 +154,25 @@ class TaosConnection(object):
         """
         return taos_insert_telnet_lines(self._conn, lines)

+    def insert_json_payload(self, payload):
+        """OpenTSDB HTTP JSON format support
+
+        ## Example
+        "{
+            "metric": "cpu_load_0",
+            "timestamp": 1626006833610123,
+            "value": 55.5,
+            "tags": {
+                "host": "ubuntu",
+                "interface": "eth0",
+                "Id": "tb0"
+            }
+        }"
+        """
+        return taos_insert_json_payload(self._conn, payload)
+
     def cursor(self):
         # type: () -> TaosCursor
         """Return a new Cursor object using the connection."""
src/connector/python/taos/error.py
@@ -83,4 +83,14 @@ class ResultError(DatabaseError):
 class LinesError(DatabaseError):
     """taos_insert_lines errors."""
-    pass
\ No newline at end of file
+    pass
+
+
+class TelnetLinesError(DatabaseError):
+    """taos_insert_telnet_lines errors."""
+    pass
+
+
+class JsonPayloadError(DatabaseError):
+    """taos_insert_json_payload errors."""
+    pass
src/inc/taos.h
@@ -174,6 +174,8 @@ DLL_EXPORT int taos_insert_lines(TAOS* taos, char* lines[], int numLines);
 DLL_EXPORT int taos_insert_telnet_lines(TAOS* taos, char* lines[], int numLines);

+DLL_EXPORT int taos_insert_json_payload(TAOS* taos, char* payload);
+
 #ifdef __cplusplus
 }
 #endif
src/inc/taoserror.h
@@ -108,6 +108,9 @@ int32_t* taosGetErrno();
 #define TSDB_CODE_TSC_INVALID_TAG_LENGTH       TAOS_DEF_ERROR_CODE(0, 0x021E)  //"Invalid tag length")
 #define TSDB_CODE_TSC_INVALID_COLUMN_LENGTH    TAOS_DEF_ERROR_CODE(0, 0x021F)  //"Invalid column length")
 #define TSDB_CODE_TSC_DUP_TAG_NAMES            TAOS_DEF_ERROR_CODE(0, 0x0220)  //"duplicated tag names")
+#define TSDB_CODE_TSC_INVALID_JSON             TAOS_DEF_ERROR_CODE(0, 0x0221)  //"Invalid JSON format")
+#define TSDB_CODE_TSC_INVALID_JSON_TYPE        TAOS_DEF_ERROR_CODE(0, 0x0222)  //"Invalid JSON data type")
+#define TSDB_CODE_TSC_VALUE_OUT_OF_RANGE       TAOS_DEF_ERROR_CODE(0, 0x0223)  //"Value out of range")

 // mnode
 #define TSDB_CODE_MND_MSG_NOT_PROCESSED        TAOS_DEF_ERROR_CODE(0, 0x0300)  //"Message not processed")
src/util/src/terror.c
@@ -116,6 +116,9 @@ TAOS_DEFINE_ERROR(TSDB_CODE_TSC_DUP_COL_NAMES, "duplicated column nam
 TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_TAG_LENGTH,       "Invalid tag length")
 TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_COLUMN_LENGTH,    "Invalid column length")
 TAOS_DEFINE_ERROR(TSDB_CODE_TSC_DUP_TAG_NAMES,            "duplicated tag names")
+TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_JSON,             "Invalid JSON format")
+TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_JSON_TYPE,        "Invalid JSON data type")
+TAOS_DEFINE_ERROR(TSDB_CODE_TSC_VALUE_OUT_OF_RANGE,       "Value out of range")

 // mnode
 TAOS_DEFINE_ERROR(TSDB_CODE_MND_MSG_NOT_PROCESSED,        "Message not processed")
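
A short, hypothetical sketch of how a caller might map the three new client error codes back to their messages; it assumes the internal header taoserror.h is on the include path (applications can also simply log the integer returned by taos_insert_json_payload()).

#include <stdio.h>
#include "taoserror.h"

/* Hypothetical helper: report the new OpenTSDB-JSON error codes by name. */
static void report_json_insert_code(int32_t code) {
  if (code == TSDB_CODE_TSC_INVALID_JSON) {
    printf("Invalid JSON format\n");
  } else if (code == TSDB_CODE_TSC_INVALID_JSON_TYPE) {
    printf("Invalid JSON data type\n");
  } else if (code == TSDB_CODE_TSC_VALUE_OUT_OF_RANGE) {
    printf("Value out of range\n");
  } else if (code != 0) {
    printf("insert failed with code 0x%x\n", code);
  } else {
    printf("insert succeeded\n");
  }
}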
tests/examples/c/apitest.c
This diff is collapsed (+751 additions, -14 deletions) and is not expanded on this page.
tests/examples/c/makefile
@@ -6,8 +6,8 @@ TARGET=exe
 LFLAGS = '-Wl,-rpath,/usr/local/taos/driver/' -ltaos -lpthread -lm -lrt
 CFLAGS = -O3 -g -Wall -Wno-deprecated -fPIC -Wno-unused-result -Wconversion \
	-Wno-char-subscripts -D_REENTRANT -Wno-format -D_REENTRANT -DLINUX \
-	-Wno-unused-function -D_M_X64 -I/usr/local/taos/include -std=gnu99
+	-Wno-unused-function -D_M_X64 -I/usr/local/taos/include -std=gnu99 \
+	-I../../../deps/cJson/inc

 all: $(TARGET)

 exe:
tests/pytest/insert/insertJSONPayload.py
new file mode 100644
###################################################################
# Copyright (c) 2021 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys

from util.log import *
from util.cases import *
from util.sql import *


class TDTestCase:
    def init(self, conn, logSql):
        tdLog.debug("start to execute %s" % __file__)
        tdSql.init(conn.cursor(), logSql)
        self._conn = conn

    def run(self):
        print("running {}".format(__file__))
        tdSql.execute("drop database if exists test")
        tdSql.execute("create database if not exists test precision 'us'")
        tdSql.execute('use test')

### Default format ###
### metric value ###
        print("============= step1 : test metric value types ================")

        payload = '''
{
"metric": "stb0_0",
"timestamp": 1626006833610123,
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb0_0")
        tdSql.checkData(1, 1, "FLOAT")

        payload = '''
{
"metric": "stb0_1",
"timestamp": 1626006833610123,
"value": true,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb0_1")
        tdSql.checkData(1, 1, "BOOL")

        payload = '''
{
"metric": "stb0_2",
"timestamp": 1626006833610123,
"value": false,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb0_2")
        tdSql.checkData(1, 1, "BOOL")

        payload = '''
{
"metric": "stb0_3",
"timestamp": 1626006833610123,
"value": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>",
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb0_3")
        tdSql.checkData(1, 1, "NCHAR")

        ### timestamp 0 ###
        payload = '''
{
"metric": "stb0_4",
"timestamp": 0,
"value": 123,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))

        ### ID ###
        payload = '''
{
"metric": "stb0_5",
"timestamp": 0,
"value": 123,
"tags": {
"ID": "tb0_5",
"t1": true,
"iD": "tb000",
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>",
"id": "tb555"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("select tbname from stb0_5")
        tdSql.checkData(0, 0, "tb0_5")

        ### Nested format ###
        ### timestamp ###
        #seconds
        payload = '''
{
"metric": "stb1_0",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("select ts from stb1_0")
        tdSql.checkData(0, 0, "2021-07-11 20:33:53.000000")

        #milliseconds
        payload = '''
{
"metric": "stb1_1",
"timestamp": {
"value": 1626006833610,
"type": "ms"
},
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("select ts from stb1_1")
        tdSql.checkData(0, 0, "2021-07-11 20:33:53.610000")

        #microseconds
        payload = '''
{
"metric": "stb1_2",
"timestamp": {
"value": 1626006833610123,
"type": "us"
},
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("select ts from stb1_2")
        tdSql.checkData(0, 0, "2021-07-11 20:33:53.610123")

        #nanoseconds
        payload = '''
{
"metric": "stb1_3",
"timestamp": {
"value": 1.6260068336101233e+18,
"type": "ns"
},
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("select ts from stb1_3")
        tdSql.checkData(0, 0, "2021-07-11 20:33:53.610123")

        #now
        tdSql.execute('use test')
        payload = '''
{
"metric": "stb1_4",
"timestamp": {
"value": 0,
"type": "ns"
},
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))

        ### metric value ###
        payload = '''
{
"metric": "stb2_0",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": true,
"type": "bool"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb2_0")
        tdSql.checkData(1, 1, "BOOL")

        payload = '''
{
"metric": "stb2_1",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 127,
"type": "tinyint"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb2_1")
        tdSql.checkData(1, 1, "TINYINT")

        payload = '''
{
"metric": "stb2_2",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 32767,
"type": "smallint"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb2_2")
        tdSql.checkData(1, 1, "SMALLINT")

        payload = '''
{
"metric": "stb2_3",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 2147483647,
"type": "int"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb2_3")
        tdSql.checkData(1, 1, "INT")

        payload = '''
{
"metric": "stb2_4",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 9.2233720368547758e+18,
"type": "bigint"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb2_4")
        tdSql.checkData(1, 1, "BIGINT")

        payload = '''
{
"metric": "stb2_5",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 11.12345,
"type": "float"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb2_5")
        tdSql.checkData(1, 1, "FLOAT")

        payload = '''
{
"metric": "stb2_6",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 22.123456789,
"type": "double"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb2_6")
        tdSql.checkData(1, 1, "DOUBLE")

        payload = '''
{
"metric": "stb2_7",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>",
"type": "binary"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb2_7")
        tdSql.checkData(1, 1, "BINARY")

        payload = '''
{
"metric": "stb2_8",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": "你好",
"type": "nchar"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb2_8")
        tdSql.checkData(1, 1, "NCHAR")

        ### tag value ###
        payload = '''
{
"metric": "stb3_0",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": "hello",
"type": "nchar"
},
"tags": {
"t1": {
"value": true,
"type": "bool"
},
"t2": {
"value": 127,
"type": "tinyint"
},
"t3": {
"value": 32767,
"type": "smallint"
},
"t4": {
"value": 2147483647,
"type": "int"
},
"t5": {
"value": 9.2233720368547758e+18,
"type": "bigint"
},
"t6": {
"value": 11.12345,
"type": "float"
},
"t7": {
"value": 22.123456789,
"type": "double"
},
"t8": {
"value": "binary_val",
"type": "binary"
},
"t9": {
"value": "你好",
"type": "nchar"
}
}
}
'''
        code = self._conn.insert_json_payload(payload)
        print("insert_json_payload result {}".format(code))
        tdSql.query("describe stb3_0")
        tdSql.checkData(2, 1, "BOOL")
        tdSql.checkData(3, 1, "TINYINT")
        tdSql.checkData(4, 1, "SMALLINT")
        tdSql.checkData(5, 1, "INT")
        tdSql.checkData(6, 1, "BIGINT")
        tdSql.checkData(7, 1, "FLOAT")
        tdSql.checkData(8, 1, "DOUBLE")
        tdSql.checkData(9, 1, "BINARY")
        tdSql.checkData(10, 1, "NCHAR")

    def stop(self):
        tdSql.close()
        tdLog.success("%s successfully executed" % __file__)


tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())