Commit 13d8bb83 authored by: S shenglian zhou

Merge branch 'develop' into feature/szhou/regex-nmatch

......@@ -13,13 +13,13 @@ IF (TD_LINUX)
# set the static lib name
ADD_LIBRARY(taos_static STATIC ${SRC})
TARGET_LINK_LIBRARIES(taos_static common query trpc tutil pthread m rt ${VAR_TSZ})
TARGET_LINK_LIBRARIES(taos_static common query trpc tutil pthread m rt cJson ${VAR_TSZ})
SET_TARGET_PROPERTIES(taos_static PROPERTIES OUTPUT_NAME "taos_static")
SET_TARGET_PROPERTIES(taos_static PROPERTIES CLEAN_DIRECT_OUTPUT 1)
# generate dynamic library (*.so)
ADD_LIBRARY(taos SHARED ${SRC})
TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m rt)
TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m rt cJson)
IF (TD_LINUX_64)
TARGET_LINK_LIBRARIES(taos lua)
ENDIF ()
......@@ -39,13 +39,13 @@ ELSEIF (TD_DARWIN)
# set the static lib name
ADD_LIBRARY(taos_static STATIC ${SRC})
TARGET_LINK_LIBRARIES(taos_static common query trpc tutil pthread m lua)
TARGET_LINK_LIBRARIES(taos_static common query trpc tutil pthread m lua cJson)
SET_TARGET_PROPERTIES(taos_static PROPERTIES OUTPUT_NAME "taos_static")
SET_TARGET_PROPERTIES(taos_static PROPERTIES CLEAN_DIRECT_OUTPUT 1)
# generate dynamic library (*.dylib)
ADD_LIBRARY(taos SHARED ${SRC})
TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m lua)
TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m lua cJson)
SET_TARGET_PROPERTIES(taos PROPERTIES CLEAN_DIRECT_OUTPUT 1)
#set version of .dylib
......@@ -63,26 +63,26 @@ ELSEIF (TD_WINDOWS)
CONFIGURE_FILE("${TD_COMMUNITY_DIR}/src/client/src/taos.rc.in" "${TD_COMMUNITY_DIR}/src/client/src/taos.rc")
ADD_LIBRARY(taos_static STATIC ${SRC})
TARGET_LINK_LIBRARIES(taos_static trpc tutil query)
TARGET_LINK_LIBRARIES(taos_static trpc tutil query cJson)
# generate dynamic library (*.dll)
ADD_LIBRARY(taos SHARED ${SRC} ${TD_COMMUNITY_DIR}/src/client/src/taos.rc)
IF (NOT TD_GODLL)
SET_TARGET_PROPERTIES(taos PROPERTIES LINK_FLAGS /DEF:${TD_COMMUNITY_DIR}/src/client/src/taos.def)
ENDIF ()
TARGET_LINK_LIBRARIES(taos trpc tutil query lua)
TARGET_LINK_LIBRARIES(taos trpc tutil query lua cJson)
ELSEIF (TD_DARWIN)
SET(CMAKE_MACOSX_RPATH 1)
INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/deps/jni/linux)
ADD_LIBRARY(taos_static STATIC ${SRC})
TARGET_LINK_LIBRARIES(taos_static query trpc tutil pthread m lua)
TARGET_LINK_LIBRARIES(taos_static query trpc tutil pthread m lua cJson)
SET_TARGET_PROPERTIES(taos_static PROPERTIES OUTPUT_NAME "taos_static")
# generate dynamic library (*.dylib)
ADD_LIBRARY(taos SHARED ${SRC})
TARGET_LINK_LIBRARIES(taos query trpc tutil pthread m lua)
TARGET_LINK_LIBRARIES(taos query trpc tutil pthread m lua cJson)
SET_TARGET_PROPERTIES(taos PROPERTIES CLEAN_DIRECT_OUTPUT 1)
......
......@@ -2128,11 +2128,12 @@ int32_t tscParseLines(char* lines[], int numLines, SArray* points, SArray* faile
int taos_insert_lines(TAOS* taos, char* lines[], int numLines) {
int32_t code = 0;
SSmlLinesInfo* info = calloc(1, sizeof(SSmlLinesInfo));
SSmlLinesInfo* info = tcalloc(1, sizeof(SSmlLinesInfo));
info->id = genLinesSmlId();
if (numLines <= 0 || numLines > 65536) {
tscError("SML:0x%"PRIx64" taos_insert_lines numLines should be between 1 and 65536. numLines: %d", info->id, numLines);
tfree(info);
code = TSDB_CODE_TSC_APP_ERROR;
return code;
}
......@@ -2140,7 +2141,7 @@ int taos_insert_lines(TAOS* taos, char* lines[], int numLines) {
for (int i = 0; i < numLines; ++i) {
if (lines[i] == NULL) {
tscError("SML:0x%"PRIx64" taos_insert_lines line %d is NULL", info->id, i);
free(info);
tfree(info);
code = TSDB_CODE_TSC_APP_ERROR;
return code;
}
......@@ -2149,7 +2150,7 @@ int taos_insert_lines(TAOS* taos, char* lines[], int numLines) {
SArray* lpPoints = taosArrayInit(numLines, sizeof(TAOS_SML_DATA_POINT));
if (lpPoints == NULL) {
tscError("SML:0x%"PRIx64" taos_insert_lines failed to allocate memory", info->id);
free(info);
tfree(info);
return TSDB_CODE_TSC_OUT_OF_MEMORY;
}
......@@ -2177,7 +2178,7 @@ cleanup:
taosArrayDestroy(lpPoints);
free(info);
tfree(info);
return code;
}
......@@ -3,6 +3,7 @@
#include <stdlib.h>
#include <string.h>
#include "cJSON.h"
#include "hash.h"
#include "taos.h"
......@@ -12,9 +13,12 @@
#include "tscParseLine.h"
#define MAX_TELNET_FILEDS_NUM 2
#define OTS_TIMESTAMP_COLUMN_NAME "ts"
#define OTS_METRIC_VALUE_COLUMN_NAME "value"
#define OTD_MAX_FIELDS_NUM 2
#define OTD_JSON_SUB_FIELDS_NUM 2
#define OTD_JSON_FIELDS_NUM 4
#define OTD_TIMESTAMP_COLUMN_NAME "ts"
#define OTD_METRIC_VALUE_COLUMN_NAME "value"
/* telnet style API parser */
static uint64_t HandleId = 0;
......@@ -77,12 +81,12 @@ static int32_t parseTelnetTimeStamp(TAOS_SML_KV **pTS, int *num_kvs, const char
const char *start, *cur;
int32_t ret = TSDB_CODE_SUCCESS;
int len = 0;
char key[] = OTS_TIMESTAMP_COLUMN_NAME;
char key[] = OTD_TIMESTAMP_COLUMN_NAME;
char *value = NULL;
start = cur = *index;
//allocate fields for timestamp and value
*pTS = tcalloc(MAX_TELNET_FILEDS_NUM, sizeof(TAOS_SML_KV));
*pTS = tcalloc(OTD_MAX_FIELDS_NUM, sizeof(TAOS_SML_KV));
while(*cur != '\0') {
if (*cur == ' ') {
......@@ -123,7 +127,7 @@ static int32_t parseTelnetMetricValue(TAOS_SML_KV **pKVs, int *num_kvs, const ch
const char *start, *cur;
int32_t ret = TSDB_CODE_SUCCESS;
int len = 0;
char key[] = OTS_METRIC_VALUE_COLUMN_NAME;
char key[] = OTD_METRIC_VALUE_COLUMN_NAME;
char *value = NULL;
start = cur = *index;
......@@ -405,7 +409,7 @@ cleanup:
tscDebug("OTD:0x%"PRIx64" taos_insert_telnet_lines finish inserting %d lines. code: %d", info->id, numLines, code);
points = TARRAY_GET_START(lpPoints);
numPoints = taosArrayGetSize(lpPoints);
for (int i=0; i<numPoints; ++i) {
for (int i = 0; i < numPoints; ++i) {
destroySmlDataPoint(points+i);
}
......@@ -422,3 +426,548 @@ int taos_telnet_insert(TAOS* taos, TAOS_SML_DATA_POINT* points, int numPoint) {
tfree(info);
return code;
}
/* OpenTSDB HTTP JSON style API parser */
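/* Illustrative sketch, not part of the original diff: the parsers below consume OpenTSDB
 * HTTP-style JSON payloads shaped as follows (field names and values taken from the test
 * cases added later in this commit). "timestamp" and "value" may be plain scalars or
 * nested objects carrying an explicit "type":
 *
 *   {
 *     "metric": "stb2_5",
 *     "timestamp": { "value": 1626006833, "type": "s" },
 *     "value": { "value": 11.12345, "type": "float" },
 *     "tags": { "t1": true, "ID": "tb0_5" }
 *   }
 */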
int32_t parseMetricFromJSON(cJSON *root, TAOS_SML_DATA_POINT* pSml, SSmlLinesInfo* info) {
cJSON *metric = cJSON_GetObjectItem(root, "metric");
if (!cJSON_IsString(metric)) {
return TSDB_CODE_TSC_INVALID_JSON;
}
size_t stableLen = strlen(metric->valuestring);
if (stableLen > TSDB_TABLE_NAME_LEN) {
tscError("OTD:0x%"PRIx64" Metric cannot exceeds 193 characters in JSON", info->id);
return TSDB_CODE_TSC_INVALID_TABLE_ID_LENGTH;
}
pSml->stableName = tcalloc(stableLen + 1, sizeof(char));
if (pSml->stableName == NULL){
return TSDB_CODE_TSC_OUT_OF_MEMORY;
}
if (isdigit(metric->valuestring[0])) {
tscError("OTD:0x%"PRIx64" Metric cannnot start with digit in JSON", info->id);
tfree(pSml->stableName);
return TSDB_CODE_TSC_INVALID_JSON;
}
tstrncpy(pSml->stableName, metric->valuestring, stableLen + 1);
return TSDB_CODE_SUCCESS;
}
int32_t parseTimestampFromJSONObj(cJSON *root, int64_t *tsVal, SSmlLinesInfo* info) {
int32_t size = cJSON_GetArraySize(root);
if (size != OTD_JSON_SUB_FIELDS_NUM) {
return TSDB_CODE_TSC_INVALID_JSON;
}
cJSON *value = cJSON_GetObjectItem(root, "value");
if (!cJSON_IsNumber(value)) {
return TSDB_CODE_TSC_INVALID_JSON;
}
cJSON *type = cJSON_GetObjectItem(root, "type");
if (!cJSON_IsString(type)) {
return TSDB_CODE_TSC_INVALID_JSON;
}
*tsVal = value->valueint;
//if timestamp value is 0 use current system time
if (*tsVal == 0) {
*tsVal = taosGetTimestampNs();
return TSDB_CODE_SUCCESS;
}
size_t typeLen = strlen(type->valuestring);
if (typeLen == 1 && type->valuestring[0] == 's') {
//seconds
*tsVal = (int64_t)(*tsVal * 1e9);
} else if (typeLen == 2 && type->valuestring[1] == 's') {
switch (type->valuestring[0]) {
case 'm':
//milliseconds
*tsVal = convertTimePrecision(*tsVal, TSDB_TIME_PRECISION_MILLI, TSDB_TIME_PRECISION_NANO);
break;
case 'u':
//microseconds
*tsVal = convertTimePrecision(*tsVal, TSDB_TIME_PRECISION_MICRO, TSDB_TIME_PRECISION_NANO);
break;
case 'n':
//nanoseconds
*tsVal = *tsVal * 1;
break;
default:
return TSDB_CODE_TSC_INVALID_JSON;
}
}
return TSDB_CODE_SUCCESS;
}
int32_t parseTimestampFromJSON(cJSON *root, TAOS_SML_KV **pTS, int *num_kvs, SSmlLinesInfo* info) {
//Timestamp must be the first KV to parse
assert(*num_kvs == 0);
int64_t tsVal;
char key[] = OTD_TIMESTAMP_COLUMN_NAME;
cJSON *timestamp = cJSON_GetObjectItem(root, "timestamp");
if (cJSON_IsNumber(timestamp)) {
//timestamp value 0 indicates current system time
if (timestamp->valueint == 0) {
tsVal = taosGetTimestampNs();
} else {
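//a plain numeric timestamp is treated as microseconds and converted to nanoseconds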
tsVal = convertTimePrecision(timestamp->valueint, TSDB_TIME_PRECISION_MICRO, TSDB_TIME_PRECISION_NANO);
}
} else if (cJSON_IsObject(timestamp)) {
int32_t ret = parseTimestampFromJSONObj(timestamp, &tsVal, info);
if (ret != TSDB_CODE_SUCCESS) {
tscError("OTD:0x%"PRIx64" Failed to parse timestamp from JSON Obj", info->id);
return ret;
}
} else {
return TSDB_CODE_TSC_INVALID_JSON;
}
//allocate fields for timestamp and value
*pTS = tcalloc(OTD_MAX_FIELDS_NUM, sizeof(TAOS_SML_KV));
(*pTS)->key = tcalloc(sizeof(key), 1);
memcpy((*pTS)->key, key, sizeof(key));
(*pTS)->type = TSDB_DATA_TYPE_TIMESTAMP;
(*pTS)->length = (int16_t)tDataTypes[(*pTS)->type].bytes;
(*pTS)->value = tcalloc((*pTS)->length, 1);
memcpy((*pTS)->value, &tsVal, (*pTS)->length);
*num_kvs += 1;
return TSDB_CODE_SUCCESS;
}
int32_t convertJSONBool(TAOS_SML_KV *pVal, char* typeStr, int64_t valueInt, SSmlLinesInfo* info) {
if (strcasecmp(typeStr, "bool") != 0) {
tscError("OTD:0x%"PRIx64" invalid type(%s) for JSON Bool", info->id, typeStr);
return TSDB_CODE_TSC_INVALID_JSON_TYPE;
}
pVal->type = TSDB_DATA_TYPE_BOOL;
pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
pVal->value = tcalloc(pVal->length, 1);
*(bool *)(pVal->value) = valueInt ? true : false;
return TSDB_CODE_SUCCESS;
}
int32_t convertJSONNumber(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLinesInfo* info) {
//tinyint
if (strcasecmp(typeStr, "i8") == 0 ||
strcasecmp(typeStr, "tinyint") == 0) {
if (!IS_VALID_TINYINT(value->valueint)) {
tscError("OTD:0x%"PRIx64" JSON value(%"PRId64") cannot fit in type(tinyint)", info->id, value->valueint);
return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
}
pVal->type = TSDB_DATA_TYPE_TINYINT;
pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
pVal->value = tcalloc(pVal->length, 1);
*(int8_t *)(pVal->value) = (int8_t)(value->valueint);
return TSDB_CODE_SUCCESS;
}
//smallint
if (strcasecmp(typeStr, "i16") == 0 ||
strcasecmp(typeStr, "smallint") == 0) {
if (!IS_VALID_SMALLINT(value->valueint)) {
tscError("OTD:0x%"PRIx64" JSON value(%"PRId64") cannot fit in type(smallint)", info->id, value->valueint);
return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
}
pVal->type = TSDB_DATA_TYPE_SMALLINT;
pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
pVal->value = tcalloc(pVal->length, 1);
*(int16_t *)(pVal->value) = (int16_t)(value->valueint);
return TSDB_CODE_SUCCESS;
}
//int
if (strcasecmp(typeStr, "i32") == 0 ||
strcasecmp(typeStr, "int") == 0) {
if (!IS_VALID_INT(value->valueint)) {
tscError("OTD:0x%"PRIx64" JSON value(%"PRId64") cannot fit in type(int)", info->id, value->valueint);
return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
}
pVal->type = TSDB_DATA_TYPE_INT;
pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
pVal->value = tcalloc(pVal->length, 1);
*(int32_t *)(pVal->value) = (int32_t)(value->valueint);
return TSDB_CODE_SUCCESS;
}
//bigint
if (strcasecmp(typeStr, "i64") == 0 ||
strcasecmp(typeStr, "bigint") == 0) {
if (!IS_VALID_BIGINT(value->valueint)) {
tscError("OTD:0x%"PRIx64" JSON value(%"PRId64") cannot fit in type(bigint)", info->id, value->valueint);
return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
}
pVal->type = TSDB_DATA_TYPE_BIGINT;
pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
pVal->value = tcalloc(pVal->length, 1);
*(int64_t *)(pVal->value) = (int64_t)(value->valueint);
return TSDB_CODE_SUCCESS;
}
//float
if (strcasecmp(typeStr, "f32") == 0 ||
strcasecmp(typeStr, "float") == 0) {
if (!IS_VALID_FLOAT(value->valuedouble)) {
tscError("OTD:0x%"PRIx64" JSON value(%f) cannot fit in type(float)", info->id, value->valuedouble);
return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
}
pVal->type = TSDB_DATA_TYPE_FLOAT;
pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
pVal->value = tcalloc(pVal->length, 1);
*(float *)(pVal->value) = (float)(value->valuedouble);
return TSDB_CODE_SUCCESS;
}
//double
if (strcasecmp(typeStr, "f64") == 0 ||
strcasecmp(typeStr, "double") == 0) {
if (!IS_VALID_DOUBLE(value->valuedouble)) {
tscError("OTD:0x%"PRIx64" JSON value(%f) cannot fit in type(double)", info->id, value->valuedouble);
return TSDB_CODE_TSC_VALUE_OUT_OF_RANGE;
}
pVal->type = TSDB_DATA_TYPE_DOUBLE;
pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
pVal->value = tcalloc(pVal->length, 1);
*(double *)(pVal->value) = (double)(value->valuedouble);
return TSDB_CODE_SUCCESS;
}
//reaching here means the type is unsupported
tscError("OTD:0x%"PRIx64" invalid type(%s) for JSON Number", info->id, typeStr);
return TSDB_CODE_TSC_INVALID_JSON_TYPE;
}
int32_t convertJSONString(TAOS_SML_KV *pVal, char* typeStr, cJSON *value, SSmlLinesInfo* info) {
if (strcasecmp(typeStr, "binary") == 0) {
pVal->type = TSDB_DATA_TYPE_BINARY;
} else if (strcasecmp(typeStr, "nchar") == 0) {
pVal->type = TSDB_DATA_TYPE_NCHAR;
} else {
tscError("OTD:0x%"PRIx64" invalid type(%s) for JSON String", info->id, typeStr);
return TSDB_CODE_TSC_INVALID_JSON_TYPE;
}
pVal->length = (int16_t)strlen(value->valuestring);
pVal->value = tcalloc(pVal->length + 1, 1);
memcpy(pVal->value, value->valuestring, pVal->length);
return TSDB_CODE_SUCCESS;
}
int32_t parseValueFromJSONObj(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* info) {
int32_t ret = TSDB_CODE_SUCCESS;
int32_t size = cJSON_GetArraySize(root);
if (size != OTD_JSON_SUB_FIELDS_NUM) {
return TSDB_CODE_TSC_INVALID_JSON;
}
cJSON *value = cJSON_GetObjectItem(root, "value");
if (value == NULL) {
return TSDB_CODE_TSC_INVALID_JSON;
}
cJSON *type = cJSON_GetObjectItem(root, "type");
if (!cJSON_IsString(type)) {
return TSDB_CODE_TSC_INVALID_JSON;
}
switch (value->type) {
case cJSON_True:
case cJSON_False: {
ret = convertJSONBool(pVal, type->valuestring, value->valueint, info);
if (ret != TSDB_CODE_SUCCESS) {
return ret;
}
break;
}
case cJSON_Number: {
ret = convertJSONNumber(pVal, type->valuestring, value, info);
if (ret != TSDB_CODE_SUCCESS) {
return ret;
}
break;
}
case cJSON_String: {
ret = convertJSONString(pVal, type->valuestring, value, info);
if (ret != TSDB_CODE_SUCCESS) {
return ret;
}
break;
}
default:
return TSDB_CODE_TSC_INVALID_JSON_TYPE;
}
return TSDB_CODE_SUCCESS;
}
int32_t parseValueFromJSON(cJSON *root, TAOS_SML_KV *pVal, SSmlLinesInfo* info) {
int type = root->type;
switch (type) {
case cJSON_True:
case cJSON_False: {
pVal->type = TSDB_DATA_TYPE_BOOL;
pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
pVal->value = tcalloc(pVal->length, 1);
*(bool *)(pVal->value) = root->valueint ? true : false;
break;
}
case cJSON_Number: {
//convert default JSON Number type to float
pVal->type = TSDB_DATA_TYPE_FLOAT;
pVal->length = (int16_t)tDataTypes[pVal->type].bytes;
pVal->value = tcalloc(pVal->length, 1);
*(float *)(pVal->value) = (float)(root->valuedouble);
break;
}
case cJSON_String: {
//convert default JSON String type to nchar
pVal->type = TSDB_DATA_TYPE_NCHAR;
//pVal->length = wcslen((wchar_t *)root->valuestring) * TSDB_NCHAR_SIZE;
pVal->length = (int16_t)strlen(root->valuestring);
pVal->value = tcalloc(pVal->length + 1, 1);
memcpy(pVal->value, root->valuestring, pVal->length);
break;
}
case cJSON_Object: {
int32_t ret = parseValueFromJSONObj(root, pVal, info);
if (ret != TSDB_CODE_SUCCESS) {
tscError("OTD:0x%"PRIx64" Failed to parse timestamp from JSON Obj", info->id);
return ret;
}
break;
}
default:
return TSDB_CODE_TSC_INVALID_JSON;
}
return TSDB_CODE_SUCCESS;
}
int32_t parseMetricValueFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, SSmlLinesInfo* info) {
//skip timestamp
TAOS_SML_KV *pVal = *pKVs + 1;
char key[] = OTD_METRIC_VALUE_COLUMN_NAME;
cJSON *metricVal = cJSON_GetObjectItem(root, "value");
if (metricVal == NULL) {
return TSDB_CODE_TSC_INVALID_JSON;
}
int32_t ret = parseValueFromJSON(metricVal, pVal, info);
if (ret != TSDB_CODE_SUCCESS) {
return ret;
}
pVal->key = tcalloc(sizeof(key), 1);
memcpy(pVal->key, key, sizeof(key));
*num_kvs += 1;
return TSDB_CODE_SUCCESS;
}
int32_t parseTagsFromJSON(cJSON *root, TAOS_SML_KV **pKVs, int *num_kvs, char **childTableName, SSmlLinesInfo* info) {
int32_t ret = TSDB_CODE_SUCCESS;
cJSON *tags = cJSON_GetObjectItem(root, "tags");
if (tags == NULL || tags->type != cJSON_Object) {
return TSDB_CODE_TSC_INVALID_JSON;
}
//only pick up the first ID value as child table name
cJSON *id = cJSON_GetObjectItem(tags, "ID");
if (id != NULL) {
size_t idLen = strlen(id->valuestring);
ret = isValidChildTableName(id->valuestring, (int16_t)idLen);
if (ret != TSDB_CODE_SUCCESS) {
return ret;
}
*childTableName = tcalloc(idLen + 1, sizeof(char));
memcpy(*childTableName, id->valuestring, idLen);
//remove all ID fields from the tags list (case-insensitive)
while (id != NULL) {
cJSON_DeleteItemFromObject(tags, "ID");
id = cJSON_GetObjectItem(tags, "ID");
}
}
int32_t tagNum = cJSON_GetArraySize(tags);
//at least one tag pair required
if (tagNum <= 0) {
return TSDB_CODE_TSC_INVALID_JSON;
}
//allocate memory for tags
*pKVs = tcalloc(tagNum, sizeof(TAOS_SML_KV));
TAOS_SML_KV *pkv = *pKVs;
for (int32_t i = 0; i < tagNum; ++i) {
cJSON *tag = cJSON_GetArrayItem(tags, i);
if (tag == NULL) {
return TSDB_CODE_TSC_INVALID_JSON;
}
//key
size_t keyLen = strlen(tag->string);
pkv->key = tcalloc(keyLen + 1, sizeof(char));
strncpy(pkv->key, tag->string, keyLen);
//value
ret = parseValueFromJSON(tag, pkv, info);
if (ret != TSDB_CODE_SUCCESS) {
return ret;
}
*num_kvs += 1;
pkv++;
}
return ret;
}
int32_t tscParseJSONPayload(cJSON *root, TAOS_SML_DATA_POINT* pSml, SSmlLinesInfo* info) {
int32_t ret = TSDB_CODE_SUCCESS;
if (!cJSON_IsObject(root)) {
tscError("OTD:0x%"PRIx64" data point needs to be JSON object", info->id);
return TSDB_CODE_TSC_INVALID_JSON;
}
int32_t size = cJSON_GetArraySize(root);
//the outermost JSON object must have exactly 4 fields
if (size != OTD_JSON_FIELDS_NUM) {
tscError("OTD:0x%"PRIx64" Invalid number of JSON fields in data point %d", info->id, size);
return TSDB_CODE_TSC_INVALID_JSON;
}
//Parse metric
ret = parseMetricFromJSON(root, pSml, info);
if (ret != TSDB_CODE_SUCCESS) {
tscError("OTD:0x%"PRIx64" Unable to parse metric from JSON payload", info->id);
return ret;
}
tscDebug("OTD:0x%"PRIx64" Parse metric from JSON payload finished", info->id);
//Parse timestamp
ret = parseTimestampFromJSON(root, &pSml->fields, &pSml->fieldNum, info);
if (ret) {
tscError("OTD:0x%"PRIx64" Unable to parse timestamp from JSON payload", info->id);
return ret;
}
tscDebug("OTD:0x%"PRIx64" Parse timestamp from JSON payload finished", info->id);
//Parse metric value
ret = parseMetricValueFromJSON(root, &pSml->fields, &pSml->fieldNum, info);
if (ret) {
tscError("OTD:0x%"PRIx64" Unable to parse metric value from JSON payload", info->id);
return ret;
}
tscDebug("OTD:0x%"PRIx64" Parse metric value from JSON payload finished", info->id);
//Parse tags
ret = parseTagsFromJSON(root, &pSml->tags, &pSml->tagNum, &pSml->childTableName, info);
if (ret) {
tscError("OTD:0x%"PRIx64" Unable to parse tags from JSON payload", info->id);
return ret;
}
tscDebug("OTD:0x%"PRIx64" Parse tags from JSON payload finished", info->id);
return TSDB_CODE_SUCCESS;
}
int32_t tscParseMultiJSONPayload(char* payload, SArray* points, SSmlLinesInfo* info) {
int32_t payloadNum, ret;
ret = TSDB_CODE_SUCCESS;
if (payload == NULL) {
tscError("OTD:0x%"PRIx64" empty JSON Payload", info->id);
return TSDB_CODE_TSC_INVALID_JSON;
}
cJSON *root = cJSON_Parse(payload);
//multiple data points must be sent in JSON array
if (cJSON_IsObject(root)) {
payloadNum = 1;
} else if (cJSON_IsArray(root)) {
payloadNum = cJSON_GetArraySize(root);
} else {
tscError("OTD:0x%"PRIx64" Invalid JSON Payload", info->id);
ret = TSDB_CODE_TSC_INVALID_JSON;
goto PARSE_JSON_OVER;
}
for (int32_t i = 0; i < payloadNum; ++i) {
TAOS_SML_DATA_POINT point = {0};
cJSON *dataPoint = (payloadNum == 1) ? root : cJSON_GetArrayItem(root, i);
ret = tscParseJSONPayload(dataPoint, &point, info);
if (ret != TSDB_CODE_SUCCESS) {
tscError("OTD:0x%"PRIx64" JSON data point parse failed", info->id);
destroySmlDataPoint(&point);
goto PARSE_JSON_OVER;
} else {
tscDebug("OTD:0x%"PRIx64" JSON data point parse success", info->id);
}
taosArrayPush(points, &point);
}
PARSE_JSON_OVER:
cJSON_Delete(root);
return ret;
}
int taos_insert_json_payload(TAOS* taos, char* payload) {
int32_t code = 0;
SSmlLinesInfo* info = tcalloc(1, sizeof(SSmlLinesInfo));
info->id = genUID();
if (payload == NULL) {
tscError("OTD:0x%"PRIx64" taos_insert_json_payload payload is NULL", info->id);
tfree(info);
code = TSDB_CODE_TSC_APP_ERROR;
return code;
}
SArray* lpPoints = taosArrayInit(1, sizeof(TAOS_SML_DATA_POINT));
if (lpPoints == NULL) {
tscError("OTD:0x%"PRIx64" taos_insert_json_payload failed to allocate memory", info->id);
tfree(info);
return TSDB_CODE_TSC_OUT_OF_MEMORY;
}
tscDebug("OTD:0x%"PRIx64" taos_insert_telnet_lines begin inserting %d points", info->id, 1);
code = tscParseMultiJSONPayload(payload, lpPoints, info);
size_t numPoints = taosArrayGetSize(lpPoints);
if (code != 0) {
goto cleanup;
}
TAOS_SML_DATA_POINT* points = TARRAY_GET_START(lpPoints);
code = tscSmlInsert(taos, points, (int)numPoints, info);
if (code != 0) {
tscError("OTD:0x%"PRIx64" taos_insert_json_payload error: %s", info->id, tstrerror((code)));
}
cleanup:
tscDebug("OTD:0x%"PRIx64" taos_insert_json_payload finish inserting 1 Point. code: %d", info->id, code);
points = TARRAY_GET_START(lpPoints);
numPoints = taosArrayGetSize(lpPoints);
for (int i = 0; i < numPoints; ++i) {
destroySmlDataPoint(points+i);
}
taosArrayDestroy(lpPoints);
tfree(info);
return code;
}
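For reference, a minimal usage sketch of the new C entry point. This snippet is not part of the commit; the connection parameters are assumptions, and the payload mirrors the docstring example and test cases further down:

    #include <stdio.h>
    #include "taos.h"

    int main() {
        // assumed connection parameters; adjust host/user/password/db as needed
        TAOS *taos = taos_connect("localhost", "root", "taosdata", "test", 0);
        if (taos == NULL) {
            printf("failed to connect\n");
            return 1;
        }
        // one OpenTSDB-style data point; a JSON array of such objects is also accepted
        char payload[] =
            "{"
            "  \"metric\": \"cpu_load_0\","
            "  \"timestamp\": 1626006833610123,"
            "  \"value\": 55.5,"
            "  \"tags\": {\"host\": \"ubuntu\", \"interface\": \"eth0\", \"Id\": \"tb0\"}"
            "}";
        int code = taos_insert_json_payload(taos, payload);
        printf("taos_insert_json_payload returned %d\n", code);
        taos_close(taos);
        return 0;
    }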
......@@ -835,8 +835,14 @@ def taos_insert_telnet_lines(connection, lines):
p_lines = lines_type(*lines)
errno = _libtaos.taos_insert_telnet_lines(connection, p_lines, num_of_lines)
if errno != 0:
raise LinesError("insert telnet lines error", errno)
raise TelnetLinesError("insert telnet lines error", errno)
def taos_insert_json_payload(connection, payload):
# type: (c_void_p, str) -> None
payload = payload.encode("utf-8")
errno = _libtaos.taos_insert_json_payload(connection, payload)
if errno != 0:
raise JsonPayloadError("insert json payload error", errno)
class CTaosInterface(object):
def __init__(self, config=None):
......
......@@ -154,6 +154,25 @@ class TaosConnection(object):
"""
return taos_insert_telnet_lines(self._conn, lines)
def insert_json_payload(self, payload):
"""OpenTSDB HTTP JSON format support
## Example
"{
"metric": "cpu_load_0",
"timestamp": 1626006833610123,
"value": 55.5,
"tags":
{
"host": "ubuntu",
"interface": "eth0",
"Id": "tb0"
}
}"
"""
return taos_insert_json_payload(self._conn, payload)
def cursor(self):
# type: () -> TaosCursor
"""Return a new Cursor object using the connection."""
......
......@@ -83,4 +83,14 @@ class ResultError(DatabaseError):
class LinesError(DatabaseError):
"""taos_insert_lines errors."""
pass
\ No newline at end of file
pass
class TelnetLinesError(DatabaseError):
"""taos_insert_telnet_lines errors."""
pass
class JsonPayloadError(DatabaseError):
"""taos_insert_json_payload errors."""
pass
......@@ -174,6 +174,8 @@ DLL_EXPORT int taos_insert_lines(TAOS* taos, char* lines[], int numLines);
DLL_EXPORT int taos_insert_telnet_lines(TAOS* taos, char* lines[], int numLines);
DLL_EXPORT int taos_insert_json_payload(TAOS* taos, char* payload);
#ifdef __cplusplus
}
#endif
......
......@@ -108,6 +108,9 @@ int32_t* taosGetErrno();
#define TSDB_CODE_TSC_INVALID_TAG_LENGTH TAOS_DEF_ERROR_CODE(0, 0x021E) //"Invalid tag length")
#define TSDB_CODE_TSC_INVALID_COLUMN_LENGTH TAOS_DEF_ERROR_CODE(0, 0x021F) //"Invalid column length")
#define TSDB_CODE_TSC_DUP_TAG_NAMES TAOS_DEF_ERROR_CODE(0, 0x0220) //"duplicated tag names")
#define TSDB_CODE_TSC_INVALID_JSON TAOS_DEF_ERROR_CODE(0, 0x0221) //"Invalid JSON format")
#define TSDB_CODE_TSC_INVALID_JSON_TYPE TAOS_DEF_ERROR_CODE(0, 0x0222) //"Invalid JSON data type")
#define TSDB_CODE_TSC_VALUE_OUT_OF_RANGE TAOS_DEF_ERROR_CODE(0, 0x0223) //"Value out of range")
// mnode
#define TSDB_CODE_MND_MSG_NOT_PROCESSED TAOS_DEF_ERROR_CODE(0, 0x0300) //"Message not processed")
......
......@@ -116,6 +116,9 @@ TAOS_DEFINE_ERROR(TSDB_CODE_TSC_DUP_COL_NAMES, "duplicated column nam
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_TAG_LENGTH, "Invalid tag length")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_COLUMN_LENGTH, "Invalid column length")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_DUP_TAG_NAMES, "duplicated tag names")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_JSON, "Invalid JSON format")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_JSON_TYPE, "Invalid JSON data type")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_VALUE_OUT_OF_RANGE, "Value out of range")
// mnode
TAOS_DEFINE_ERROR(TSDB_CODE_MND_MSG_NOT_PROCESSED, "Message not processed")
......
......@@ -17,7 +17,7 @@
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>2.0.31</version>
<version>2.0.34</version>
</dependency>
</dependencies>
......
......@@ -7,6 +7,9 @@ public class JdbcDemo {
private static String host;
private static final String dbName = "test";
private static final String tbName = "weather";
private static final String user = "root";
private static final String password = "taosdata";
private Connection connection;
public static void main(String[] args) {
......@@ -30,10 +33,9 @@ public class JdbcDemo {
}
private void init() {
final String url = "jdbc:TAOS://" + host + ":6030/?user=root&password=taosdata";
final String url = "jdbc:TAOS://" + host + ":6030/?user=" + user + "&password=" + password;
// get connection
try {
Class.forName("com.taosdata.jdbc.TSDBDriver");
Properties properties = new Properties();
properties.setProperty("charset", "UTF-8");
properties.setProperty("locale", "en_US.UTF-8");
......@@ -42,8 +44,7 @@ public class JdbcDemo {
connection = DriverManager.getConnection(url, properties);
if (connection != null)
System.out.println("[ OK ] Connection established.");
} catch (ClassNotFoundException | SQLException e) {
System.out.println("[ ERROR! ] Connection establish failed.");
} catch (SQLException e) {
e.printStackTrace();
}
}
......@@ -74,7 +75,7 @@ public class JdbcDemo {
}
private void select() {
final String sql = "select * from "+ dbName + "." + tbName;
final String sql = "select * from " + dbName + "." + tbName;
executeQuery(sql);
}
......@@ -89,8 +90,6 @@ public class JdbcDemo {
}
}
/************************************************************************/
private void executeQuery(String sql) {
long start = System.currentTimeMillis();
try (Statement statement = connection.createStatement()) {
......@@ -117,7 +116,6 @@ public class JdbcDemo {
}
}
private void printSql(String sql, boolean succeed, long cost) {
System.out.println("[ " + (succeed ? "OK" : "ERROR!") + " ] time cost: " + cost + " ms, execute statement ====> " + sql);
}
......@@ -132,7 +130,6 @@ public class JdbcDemo {
long end = System.currentTimeMillis();
printSql(sql, false, (end - start));
e.printStackTrace();
}
}
......@@ -141,5 +138,4 @@ public class JdbcDemo {
System.exit(0);
}
}
\ No newline at end of file
}
......@@ -4,14 +4,15 @@ import java.sql.*;
import java.util.Properties;
public class JdbcRestfulDemo {
private static final String host = "127.0.0.1";
private static final String host = "localhost";
private static final String dbname = "test";
private static final String user = "root";
private static final String password = "taosdata";
public static void main(String[] args) {
try {
// load JDBC-restful driver
Class.forName("com.taosdata.jdbc.rs.RestfulDriver");
// use port 6041 in url when use JDBC-restful
String url = "jdbc:TAOS-RS://" + host + ":6041/?user=root&password=taosdata";
String url = "jdbc:TAOS-RS://" + host + ":6041/?user=" + user + "&password=" + password;
Properties properties = new Properties();
properties.setProperty("charset", "UTF-8");
......@@ -21,12 +22,12 @@ public class JdbcRestfulDemo {
Connection conn = DriverManager.getConnection(url, properties);
Statement stmt = conn.createStatement();
stmt.execute("drop database if exists restful_test");
stmt.execute("create database if not exists restful_test");
stmt.execute("use restful_test");
stmt.execute("create table restful_test.weather(ts timestamp, temperature float) tags(location nchar(64))");
stmt.executeUpdate("insert into t1 using restful_test.weather tags('北京') values(now, 18.2)");
ResultSet rs = stmt.executeQuery("select * from restful_test.weather");
stmt.execute("drop database if exists " + dbname);
stmt.execute("create database if not exists " + dbname);
stmt.execute("use " + dbname);
stmt.execute("create table " + dbname + ".weather(ts timestamp, temperature float) tags(location nchar(64))");
stmt.executeUpdate("insert into t1 using " + dbname + ".weather tags('北京') values(now, 18.2)");
ResultSet rs = stmt.executeQuery("select * from " + dbname + ".weather");
ResultSetMetaData meta = rs.getMetaData();
while (rs.next()) {
for (int i = 1; i <= meta.getColumnCount(); i++) {
......@@ -38,8 +39,6 @@ public class JdbcRestfulDemo {
rs.close();
stmt.close();
conn.close();
} catch (ClassNotFoundException e) {
e.printStackTrace();
} catch (SQLException e) {
e.printStackTrace();
}
......
......@@ -34,9 +34,8 @@ public class SubscribeDemo {
System.out.println(usage);
return;
}
/*********************************************************************************************/
try {
Class.forName("com.taosdata.jdbc.TSDBDriver");
Properties properties = new Properties();
properties.setProperty(TSDBDriver.PROPERTY_KEY_CHARSET, "UTF-8");
properties.setProperty(TSDBDriver.PROPERTY_KEY_LOCALE, "en_US.UTF-8");
......
......@@ -60,12 +60,15 @@
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>2.0.28</version>
<!-- <scope>system</scope>-->
<!-- <systemPath>${project.basedir}/src/main/resources/taos-jdbcdriver-2.0.28-dist.jar</systemPath>-->
<version>2.0.34</version>
</dependency>
<dependency>
......
......@@ -4,7 +4,7 @@ import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@MapperScan(basePackages = {"com.taosdata.example.springbootdemo.dao"})
@MapperScan(basePackages = {"com.taosdata.example.springbootdemo"})
@SpringBootApplication
public class SpringbootdemoApplication {
......
......@@ -15,35 +15,21 @@ public class WeatherController {
@Autowired
private WeatherService weatherService;
/**
* create database and table
*
* @return
*/
@GetMapping("/lastOne")
public Weather lastOne() {
return weatherService.lastOne();
}
@GetMapping("/init")
public int init() {
return weatherService.init();
}
/**
* Pagination Query
*
* @param limit
* @param offset
* @return
*/
@GetMapping("/{limit}/{offset}")
public List<Weather> queryWeather(@PathVariable Long limit, @PathVariable Long offset) {
return weatherService.query(limit, offset);
}
/**
* upload single weather info
*
* @param temperature
* @param humidity
* @return
*/
@PostMapping("/{temperature}/{humidity}")
public int saveWeather(@PathVariable float temperature, @PathVariable float humidity) {
return weatherService.save(temperature, humidity);
......
......@@ -8,6 +8,8 @@ import java.util.Map;
public interface WeatherMapper {
Map<String, Object> lastOne();
void dropDB();
void createDB();
......
......@@ -9,20 +9,48 @@
<result column="humidity" jdbcType="FLOAT" property="humidity"/>
</resultMap>
<select id="lastOne" resultType="java.util.Map">
select last_row(*), location, groupid
from test.weather
</select>
<update id="dropDB">
drop database if exists test
drop
database if exists test
</update>
<update id="createDB">
create database if not exists test
create
database if not exists test
</update>
<update id="createSuperTable">
create table if not exists test.weather(ts timestamp, temperature float, humidity float) tags(location nchar(64), groupId int)
create table if not exists test.weather
(
ts
timestamp,
temperature
float,
humidity
float,
note
binary
(
64
)) tags
(
location nchar
(
64
), groupId int)
</update>
<update id="createTable" parameterType="com.taosdata.example.springbootdemo.domain.Weather">
create table if not exists test.t#{groupId} using test.weather tags(#{location}, #{groupId})
create table if not exists test.t#{groupId} using test.weather tags
(
#{location},
#{groupId}
)
</update>
<select id="select" resultMap="BaseResultMap">
......@@ -36,25 +64,29 @@
</select>
<insert id="insert" parameterType="com.taosdata.example.springbootdemo.domain.Weather">
insert into test.t#{groupId} (ts, temperature, humidity) values (#{ts}, ${temperature}, ${humidity})
insert into test.t#{groupId} (ts, temperature, humidity, note)
values (#{ts}, ${temperature}, ${humidity}, #{note})
</insert>
<select id="getSubTables" resultType="String">
select tbname from test.weather
select tbname
from test.weather
</select>
<select id="count" resultType="int">
select count(*) from test.weather
select count(*)
from test.weather
</select>
<resultMap id="avgResultSet" type="com.taosdata.example.springbootdemo.domain.Weather">
<id column="ts" jdbcType="TIMESTAMP" property="ts" />
<result column="avg(temperature)" jdbcType="FLOAT" property="temperature" />
<result column="avg(humidity)" jdbcType="FLOAT" property="humidity" />
<id column="ts" jdbcType="TIMESTAMP" property="ts"/>
<result column="avg(temperature)" jdbcType="FLOAT" property="temperature"/>
<result column="avg(humidity)" jdbcType="FLOAT" property="humidity"/>
</resultMap>
<select id="avg" resultMap="avgResultSet">
select avg(temperature), avg(humidity)from test.weather interval(1m)
select avg(temperature), avg(humidity)
from test.weather interval(1m)
</select>
</mapper>
\ No newline at end of file
......@@ -11,6 +11,7 @@ public class Weather {
private Float temperature;
private Float humidity;
private String location;
private String note;
private int groupId;
public Weather() {
......@@ -61,4 +62,12 @@ public class Weather {
public void setGroupId(int groupId) {
this.groupId = groupId;
}
public String getNote() {
return note;
}
public void setNote(String note) {
this.note = note;
}
}
......@@ -29,6 +29,7 @@ public class WeatherService {
Weather weather = new Weather(new Timestamp(ts + (thirtySec * i)), 30 * random.nextFloat(), random.nextInt(100));
weather.setLocation(locations[random.nextInt(locations.length)]);
weather.setGroupId(i % locations.length);
weather.setNote("note-" + i);
weatherMapper.createTable(weather);
count += weatherMapper.insert(weather);
}
......@@ -58,4 +59,21 @@ public class WeatherService {
public List<Weather> avg() {
return weatherMapper.avg();
}
public Weather lastOne() {
Map<String, Object> result = weatherMapper.lastOne();
long ts = (long) result.get("ts");
float temperature = (float) result.get("temperature");
float humidity = (float) result.get("humidity");
String note = (String) result.get("note");
int groupId = (int) result.get("groupid");
String location = (String) result.get("location");
Weather weather = new Weather(new Timestamp(ts), temperature, humidity);
weather.setNote(note);
weather.setGroupId(groupId);
weather.setLocation(location);
return weather;
}
}
package com.taosdata.example.springbootdemo.util;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.springframework.stereotype.Component;
import java.sql.Timestamp;
import java.util.Map;
@Aspect
@Component
public class TaosAspect {
@Around("execution(java.util.Map<String,Object> com.taosdata.example.springbootdemo.dao.*.*(..))")
public Object handleType(ProceedingJoinPoint joinPoint) {
Map<String, Object> result = null;
try {
result = (Map<String, Object>) joinPoint.proceed();
for (String key : result.keySet()) {
Object obj = result.get(key);
if (obj instanceof byte[]) {
obj = new String((byte[]) obj);
result.put(key, obj);
}
if (obj instanceof Timestamp) {
obj = ((Timestamp) obj).getTime();
result.put(key, obj);
}
}
} catch (Throwable e) {
e.printStackTrace();
}
return result;
}
}
# datasource config - JDBC-JNI
#spring.datasource.driver-class-name=com.taosdata.jdbc.TSDBDriver
#spring.datasource.url=jdbc:TAOS://127.0.0.1:6030/test?timezone=UTC-8&charset=UTF-8&locale=en_US.UTF-8
#spring.datasource.url=jdbc:TAOS://localhost:6030/?timezone=UTC-8&charset=UTF-8&locale=en_US.UTF-8
#spring.datasource.username=root
#spring.datasource.password=taosdata
# datasource config - JDBC-RESTful
spring.datasource.driver-class-name=com.taosdata.jdbc.rs.RestfulDriver
spring.datasource.url=jdbc:TAOS-RS://master:6041/test?timezone=UTC-8&charset=UTF-8&locale=en_US.UTF-8
spring.datasource.url=jdbc:TAOS-RS://localhost:6041/test?timezone=UTC-8&charset=UTF-8&locale=en_US.UTF-8
spring.datasource.username=root
spring.datasource.password=taosdata
spring.datasource.druid.initial-size=5
spring.datasource.druid.min-idle=5
spring.datasource.druid.max-active=5
spring.datasource.druid.max-wait=30000
spring.datasource.druid.validation-query=select server_status();
spring.aop.auto=true
spring.aop.proxy-target-class=true
#mybatis
mybatis.mapper-locations=classpath:mapper/*.xml
logging.level.com.taosdata.jdbc.springbootdemo.dao=debug
This diff is collapsed.
......@@ -6,8 +6,8 @@ TARGET=exe
LFLAGS = '-Wl,-rpath,/usr/local/taos/driver/' -ltaos -lpthread -lm -lrt
CFLAGS = -O3 -g -Wall -Wno-deprecated -fPIC -Wno-unused-result -Wconversion \
-Wno-char-subscripts -D_REENTRANT -Wno-format -D_REENTRANT -DLINUX \
-Wno-unused-function -D_M_X64 -I/usr/local/taos/include -std=gnu99
-Wno-unused-function -D_M_X64 -I/usr/local/taos/include -std=gnu99 \
-I../../../deps/cJson/inc
all: $(TARGET)
exe:
......
###################################################################
# Copyright (c) 2021 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys
from util.log import *
from util.cases import *
from util.sql import *
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
self._conn = conn
def run(self):
print("running {}".format(__file__))
tdSql.execute("drop database if exists test")
tdSql.execute("create database if not exists test precision 'us'")
tdSql.execute('use test')
### Default format ###
### metric value ###
print("============= step1 : test metric value types ================")
payload = '''
{
"metric": "stb0_0",
"timestamp": 1626006833610123,
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb0_0")
tdSql.checkData(1, 1, "FLOAT")
payload = '''
{
"metric": "stb0_1",
"timestamp": 1626006833610123,
"value": true,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb0_1")
tdSql.checkData(1, 1, "BOOL")
payload = '''
{
"metric": "stb0_2",
"timestamp": 1626006833610123,
"value": false,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb0_2")
tdSql.checkData(1, 1, "BOOL")
payload = '''
{
"metric": "stb0_3",
"timestamp": 1626006833610123,
"value": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>",
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb0_3")
tdSql.checkData(1, 1, "NCHAR")
### timestamp 0 ###
payload = '''
{
"metric": "stb0_4",
"timestamp": 0,
"value": 123,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
### ID ###
payload = '''
{
"metric": "stb0_5",
"timestamp": 0,
"value": 123,
"tags": {
"ID": "tb0_5",
"t1": true,
"iD": "tb000",
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>",
"id": "tb555"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("select tbname from stb0_5")
tdSql.checkData(0, 0, "tb0_5")
### Nested format ###
### timestamp ###
#seconds
payload = '''
{
"metric": "stb1_0",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("select ts from stb1_0")
tdSql.checkData(0, 0, "2021-07-11 20:33:53.000000")
#milliseconds
payload = '''
{
"metric": "stb1_1",
"timestamp": {
"value": 1626006833610,
"type": "ms"
},
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("select ts from stb1_1")
tdSql.checkData(0, 0, "2021-07-11 20:33:53.610000")
#microseconds
payload = '''
{
"metric": "stb1_2",
"timestamp": {
"value": 1626006833610123,
"type": "us"
},
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("select ts from stb1_2")
tdSql.checkData(0, 0, "2021-07-11 20:33:53.610123")
#nanoseconds
payload = '''
{
"metric": "stb1_3",
"timestamp": {
"value": 1.6260068336101233e+18,
"type": "ns"
},
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("select ts from stb1_3")
tdSql.checkData(0, 0, "2021-07-11 20:33:53.610123")
#now
tdSql.execute('use test')
payload = '''
{
"metric": "stb1_4",
"timestamp": {
"value": 0,
"type": "ns"
},
"value": 10,
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
### metric value ###
payload = '''
{
"metric": "stb2_0",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": true,
"type": "bool"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb2_0")
tdSql.checkData(1, 1, "BOOL")
payload = '''
{
"metric": "stb2_1",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 127,
"type": "tinyint"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb2_1")
tdSql.checkData(1, 1, "TINYINT")
payload = '''
{
"metric": "stb2_2",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 32767,
"type": "smallint"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb2_2")
tdSql.checkData(1, 1, "SMALLINT")
payload = '''
{
"metric": "stb2_3",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 2147483647,
"type": "int"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb2_3")
tdSql.checkData(1, 1, "INT")
payload = '''
{
"metric": "stb2_4",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 9.2233720368547758e+18,
"type": "bigint"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb2_4")
tdSql.checkData(1, 1, "BIGINT")
payload = '''
{
"metric": "stb2_5",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 11.12345,
"type": "float"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb2_5")
tdSql.checkData(1, 1, "FLOAT")
payload = '''
{
"metric": "stb2_6",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": 22.123456789,
"type": "double"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb2_6")
tdSql.checkData(1, 1, "DOUBLE")
payload = '''
{
"metric": "stb2_7",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>",
"type": "binary"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb2_7")
tdSql.checkData(1, 1, "BINARY")
payload = '''
{
"metric": "stb2_8",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": "你好",
"type": "nchar"
},
"tags": {
"t1": true,
"t2": false,
"t3": 10,
"t4": "123_abc_.!@#$%^&*:;,./?|+-=()[]{}<>"
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb2_8")
tdSql.checkData(1, 1, "NCHAR")
### tag value ###
payload = '''
{
"metric": "stb3_0",
"timestamp": {
"value": 1626006833,
"type": "s"
},
"value": {
"value": "hello",
"type": "nchar"
},
"tags": {
"t1": {
"value": true,
"type": "bool"
},
"t2": {
"value": 127,
"type": "tinyint"
},
"t3": {
"value": 32767,
"type": "smallint"
},
"t4": {
"value": 2147483647,
"type": "int"
},
"t5": {
"value": 9.2233720368547758e+18,
"type": "bigint"
},
"t6": {
"value": 11.12345,
"type": "float"
},
"t7": {
"value": 22.123456789,
"type": "double"
},
"t8": {
"value": "binary_val",
"type": "binary"
},
"t9": {
"value": "你好",
"type": "nchar"
}
}
}
'''
code = self._conn.insert_json_payload(payload)
print("insert_json_payload result {}".format(code))
tdSql.query("describe stb3_0")
tdSql.checkData(2, 1, "BOOL")
tdSql.checkData(3, 1, "TINYINT")
tdSql.checkData(4, 1, "SMALLINT")
tdSql.checkData(5, 1, "INT")
tdSql.checkData(6, 1, "BIGINT")
tdSql.checkData(7, 1, "FLOAT")
tdSql.checkData(8, 1, "DOUBLE")
tdSql.checkData(9, 1, "BINARY")
tdSql.checkData(10, 1, "NCHAR")
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())