Commit 400989c0 authored by Haojun Liao

[td-13039] refactor scalar function.

Parent 21a0ee92
......@@ -93,6 +93,7 @@ void* tDecodeDataBlock(const void* buf, SSDataBlock* pBlock);
int32_t tEncodeDataBlocks(void** buf, const SArray* blocks);
void* tDecodeDataBlocks(const void* buf, SArray* blocks);
void colDataDestroy(SColumnInfoData* pColData) ;
static FORCE_INLINE void blockDestroyInner(SSDataBlock* pBlock) {
// WARNING: do not use info.numOfCols,
......@@ -100,13 +101,7 @@ static FORCE_INLINE void blockDestroyInner(SSDataBlock* pBlock) {
int32_t numOfOutput = taosArrayGetSize(pBlock->pDataBlock);
for (int32_t i = 0; i < numOfOutput; ++i) {
SColumnInfoData* pColInfoData = (SColumnInfoData*)taosArrayGet(pBlock->pDataBlock, i);
if (IS_VAR_DATA_TYPE(pColInfoData->info.type)) {
tfree(pColInfoData->varmeta.offset);
} else {
tfree(pColInfoData->nullbitmap);
}
tfree(pColInfoData->pData);
colDataDestroy(pColInfoData);
}
taosArrayDestroy(pBlock->pDataBlock);
......
......@@ -251,16 +251,9 @@ typedef struct SAggFunctionInfo {
} SAggFunctionInfo;
typedef struct SScalarParam {
void *data;
union {
SColumnInfoData *columnData;
void *data;
} orig;
char *bitmap;
bool dataInBlock;
int32_t num;
int32_t type;
int32_t bytes;
SColumnInfoData *columnData;
SHashObj *pHashFilter;
int32_t numOfRows;
} SScalarParam;
typedef struct SScalarFunctionInfo {
......
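The hunk above replaces the old raw-buffer fields of SScalarParam (data/type/bytes/num and the orig union) with a single SColumnInfoData pointer plus numOfRows. A minimal sketch, not part of this commit, of how a caller would now wrap an existing column as a scalar parameter (the helper name wrapColumnAsParam is hypothetical; error codes are reused from elsewhere in the diff):

// Hypothetical helper illustrating the refactored SScalarParam layout.
static int32_t wrapColumnAsParam(SColumnInfoData *pCol, int32_t numOfRows, SScalarParam *pParam) {
  if (pCol == NULL || pParam == NULL) {
    return TSDB_CODE_TSC_INVALID_OPERATION;   // existing error code, reused here for illustration
  }
  pParam->columnData = pCol;   // the column now carries type, bytes, null info and data
  pParam->numOfRows  = numOfRows;
  return TSDB_CODE_SUCCESS;
}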
......@@ -1241,6 +1241,16 @@ size_t blockDataGetCapacityInRow(const SSDataBlock* pBlock, size_t pageSize) {
return pageSize / (blockDataGetSerialRowSize(pBlock) + blockDataGetSerialMetaSize(pBlock));
}
void colDataDestroy(SColumnInfoData* pColData) {
if (IS_VAR_DATA_TYPE(pColData->info.type)) {
tfree(pColData->varmeta.offset);
} else {
tfree(pColData->nullbitmap);
}
tfree(pColData->pData);
}
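A hedged usage sketch (not in this commit): colDataDestroy() frees the buffers owned by an SColumnInfoData (pData plus either varmeta.offset or nullbitmap) but not the struct itself, so a heap-allocated column still needs an explicit free of the container:

SColumnInfoData *pCol = calloc(1, sizeof(SColumnInfoData));
pCol->info.type = TSDB_DATA_TYPE_BIGINT;
// ... populate pCol->pData / pCol->nullbitmap ...
colDataDestroy(pCol);   // releases the owned buffers
tfree(pCol);            // the container itself remains the caller's responsibility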
int32_t tEncodeDataBlock(void** buf, const SSDataBlock* pBlock) {
int64_t tbUid = pBlock->info.uid;
int16_t numOfCols = pBlock->info.numOfCols;
......
......@@ -3078,8 +3078,8 @@ static void arithmetic_function(SqlFunctionCtx *pCtx) {
GET_RES_INFO(pCtx)->numOfRes += pCtx->size;
//SScalarFunctionSupport *pSup = (SScalarFunctionSupport *)pCtx->param[1].pz;
SScalarParam output = {0};
output.data = pCtx->pOutput;
// SScalarParam output = {0};
// output.data = pCtx->pOutput;
//evaluateExprNodeTree(pSup->pExprInfo->pExpr, pCtx->size, &output, pSup, getArithColumnData);
}
......
......@@ -43,10 +43,12 @@ typedef struct SScalarCtx {
#define SCL_RET(c) do { int32_t _code = c; if (_code != TSDB_CODE_SUCCESS) { terrno = _code; } return _code; } while (0)
#define SCL_ERR_JRET(c) do { code = c; if (code != TSDB_CODE_SUCCESS) { terrno = code; goto _return; } } while (0)
int32_t doConvertDataType(SScalarParam* in, SScalarParam* out, SValueNode* pValueNode);
SColumnInfoData* createColumnInfoData(SDataType* pType, int32_t numOfRows);
int32_t sclMoveParamListData(SScalarParam *params, int32_t listNum, int32_t idx);
bool sclIsNull(SScalarParam* param, int32_t idx);
void sclSetNull(SScalarParam* param, int32_t idx);
//int32_t sclMoveParamListData(SScalarParam *params, int32_t listNum, int32_t idx);
//bool sclIsNull(SScalarParam* param, int32_t idx);
//void sclSetNull(SScalarParam* param, int32_t idx);
void sclFreeParam(SScalarParam *param);
#ifdef __cplusplus
......
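The filter hunks below switch from vectorConvertImpl() on raw buffers to the new doConvertDataType() declared here. A sketch of the call pattern they adopt, assuming doConvertDataType() reads the value from the SValueNode and writes the converted result into out.columnData (whose target type the caller pre-sets), and assuming sclFreeParam() releases both the buffers and the calloc'd SColumnInfoData containers:

SScalarParam in  = {.columnData = calloc(1, sizeof(SColumnInfoData))};
SScalarParam out = {.columnData = calloc(1, sizeof(SColumnInfoData))};
out.columnData->info.type = TSDB_DATA_TYPE_BIGINT;          // target type chosen by the caller

int32_t code = doConvertDataType(&in, &out, pValueNode);    // pValueNode: an existing SValueNode*
if (code != TSDB_CODE_SUCCESS) {
  sclFreeParam(&in);
  sclFreeParam(&out);
  return code;
}
// the converted value is now reachable through out.columnData->pData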
......@@ -1021,26 +1021,21 @@ int32_t fltAddGroupUnitFromNode(SFilterInfo *info, SNode* tree, SArray *group) {
if (node->opType == OP_TYPE_IN && (!IS_VAR_DATA_TYPE(type))) {
SNodeListNode *listNode = (SNodeListNode *)node->pRight;
SListCell *cell = listNode->pNodeList->pHead;
SScalarParam in = {.num = 1}, out = {.num = 1, .type = type};
SScalarParam in = {.columnData = calloc(1, sizeof(SColumnInfoData))}, out = {.columnData = calloc(1, sizeof(SColumnInfoData))};
out.columnData->info.type = type;
for (int32_t i = 0; i < listNode->pNodeList->length; ++i) {
SValueNode *valueNode = (SValueNode *)cell->pNode;
in.type = valueNode->node.resType.type;
in.bytes = valueNode->node.resType.bytes;
in.data = nodesGetValueFromNode(valueNode);
out.data = malloc(sizeof(int64_t));
code = vectorConvertImpl(&in, &out);
code = doConvertDataType(&in, &out, valueNode);
if (code) {
fltError("convert from %d to %d failed", in.type, out.type);
tfree(out.data);
// fltError("convert from %d to %d failed", in.type, out.type);
FLT_ERR_RET(code);
}
len = tDataTypes[type].bytes;
filterAddField(info, NULL, &out.data, FLD_TYPE_VALUE, &right, len, true);
filterAddField(info, NULL, (void**) &out.columnData->pData, FLD_TYPE_VALUE, &right, len, true);
filterAddUnit(info, OP_TYPE_EQUAL, &left, &right, &uidx);
SFilterGroup fgroup = {0};
......@@ -1054,7 +1049,6 @@ int32_t fltAddGroupUnitFromNode(SFilterInfo *info, SNode* tree, SArray *group) {
filterAddFieldFromNode(info, node->pRight, &right);
FLT_ERR_RET(filterAddUnit(info, node->opType, &left, &right, &uidx));
SFilterGroup fgroup = {0};
filterAddUnitToGroup(&fgroup, uidx);
......@@ -1080,7 +1074,6 @@ int32_t filterAddUnitFromUnit(SFilterInfo *dst, SFilterInfo *src, SFilterUnit* u
filterAddField(dst, NULL, &data, FLD_TYPE_VALUE, &right, POINTER_BYTES, false); // POINTER_BYTES should be sizeof(SHashObj), but POINTER_BYTES is also right.
t = FILTER_GET_FIELD(dst, right);
FILTER_SET_FLAG(t->flag, FLD_DATA_IS_HASH);
} else {
filterAddField(dst, NULL, &data, FLD_TYPE_VALUE, &right, varDataTLen(data), false);
......@@ -1101,14 +1094,12 @@ int32_t filterAddUnitFromUnit(SFilterInfo *dst, SFilterInfo *src, SFilterUnit* u
int32_t filterAddUnitRight(SFilterInfo *info, uint8_t optr, SFilterFieldId *right, uint32_t uidx) {
SFilterUnit *u = &info->units[uidx];
u->compare.optr2 = optr;
u->right2 = *right;
return TSDB_CODE_SUCCESS;
}
int32_t filterAddGroupUnitFromCtx(SFilterInfo *dst, SFilterInfo *src, SFilterRangeCtx *ctx, uint32_t cidx, SFilterGroup *g, int32_t optr, SArray *res) {
SFilterFieldId left, right, right2;
uint32_t uidx = 0;
......@@ -1800,9 +1791,12 @@ int32_t fltInitValFieldData(SFilterInfo *info) {
if (dType->type == type) {
assignVal(fi->data, nodesGetValueFromNode(var), dType->bytes, type);
} else {
SScalarParam in = {.data = nodesGetValueFromNode(var), .num = 1, .type = dType->type, .bytes = dType->bytes};
SScalarParam out = {.data = fi->data, .num = 1, .type = type};
if (vectorConvertImpl(&in, &out)) {
SScalarParam in = {.columnData = calloc(1, sizeof(SColumnInfoData))};
SScalarParam out = {.columnData = calloc(1, sizeof(SColumnInfoData))};
out.columnData->info.type = type;
int32_t code = doConvertDataType(&in, &out, var);
if (code != TSDB_CODE_SUCCESS) {
qError("convert value to type[%d] failed", type);
return TSDB_CODE_TSC_INVALID_OPERATION;
}
......@@ -3676,18 +3670,18 @@ bool filterExecute(SFilterInfo *info, SSDataBlock *pSrc, int8_t** p, SColumnData
FLT_ERR_RET(scalarCalculate(info->sclCtx.node, pList, &output));
taosArrayDestroy(pList);
*p = output.orig.data;
output.orig.data = NULL;
sclFreeParam(&output);
int8_t *r = output.data;
for (int32_t i = 0; i < output.num; ++i) {
if (0 == *(r+i)) {
return false;
}
}
// TODO Fix it
// *p = output.orig.data;
// output.orig.data = NULL;
//
// sclFreeParam(&output);
//
// int8_t *r = output.data;
// for (int32_t i = 0; i < output.num; ++i) {
// if (0 == *(r+i)) {
// return false;
// }
// }
return true;
}
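The disabled block above still reads the result through the removed output.data/orig.data fields, hence the TODO. A hedged sketch of how the per-row check might be restored once the boolean result lives in the output column (assuming scalarCalculate() produces an int8_t column with output.numOfRows entries):

int8_t *r = (int8_t *)output.columnData->pData;   // result column from scalarCalculate
for (int32_t i = 0; i < output.numOfRows; ++i) {
  if (r[i] == 0) {
    return false;                                  // at least one row filtered out
  }
}
return true;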
......
This diff is collapsed.
......@@ -2,15 +2,15 @@
#include "sclvector.h"
static void assignBasicParaInfo(struct SScalarParam* dst, const struct SScalarParam* src) {
dst->type = src->type;
dst->bytes = src->bytes;
dst->num = src->num;
// dst->type = src->type;
// dst->bytes = src->bytes;
// dst->num = src->num;
}
static void tceil(SScalarParam* pOutput, size_t numOfInput, const SScalarParam *pLeft) {
assignBasicParaInfo(pOutput, pLeft);
assert(numOfInput == 1);
#if 0
switch (pLeft->bytes) {
case TSDB_DATA_TYPE_FLOAT: {
float* p = (float*) pLeft->data;
......@@ -31,12 +31,14 @@ static void tceil(SScalarParam* pOutput, size_t numOfInput, const SScalarParam *
default:
memcpy(pOutput->data, pLeft->data, pLeft->num* pLeft->bytes);
}
#endif
}
static void tfloor(SScalarParam* pOutput, size_t numOfInput, const SScalarParam *pLeft) {
assignBasicParaInfo(pOutput, pLeft);
assert(numOfInput == 1);
#if 0
switch (pLeft->bytes) {
case TSDB_DATA_TYPE_FLOAT: {
float* p = (float*) pLeft->data;
......@@ -59,12 +61,13 @@ static void tfloor(SScalarParam* pOutput, size_t numOfInput, const SScalarParam
default:
memcpy(pOutput->data, pLeft->data, pLeft->num* pLeft->bytes);
}
#endif
}
static void _tabs(SScalarParam* pOutput, size_t numOfInput, const SScalarParam *pLeft) {
assignBasicParaInfo(pOutput, pLeft);
assert(numOfInput == 1);
#if 0
switch (pLeft->bytes) {
case TSDB_DATA_TYPE_FLOAT: {
float* p = (float*) pLeft->data;
......@@ -117,12 +120,13 @@ static void _tabs(SScalarParam* pOutput, size_t numOfInput, const SScalarParam *
default:
memcpy(pOutput->data, pLeft->data, pLeft->num* pLeft->bytes);
}
#endif
}
static void tround(SScalarParam* pOutput, size_t numOfInput, const SScalarParam *pLeft) {
assignBasicParaInfo(pOutput, pLeft);
assert(numOfInput == 1);
#if 0
switch (pLeft->bytes) {
case TSDB_DATA_TYPE_FLOAT: {
float* p = (float*) pLeft->data;
......@@ -143,22 +147,24 @@ static void tround(SScalarParam* pOutput, size_t numOfInput, const SScalarParam
default:
memcpy(pOutput->data, pLeft->data, pLeft->num* pLeft->bytes);
}
#endif
}
static void tlength(SScalarParam* pOutput, size_t numOfInput, const SScalarParam *pLeft) {
assert(numOfInput == 1);
#if 0
int64_t* out = (int64_t*) pOutput->data;
char* s = pLeft->data;
for(int32_t i = 0; i < pLeft->num; ++i) {
out[i] = varDataLen(POINTER_SHIFT(s, i * pLeft->bytes));
}
#endif
}
static void tconcat(SScalarParam* pOutput, size_t numOfInput, const SScalarParam *pLeft) {
assert(numOfInput > 0);
#if 0
int32_t rowLen = 0;
int32_t num = 1;
for(int32_t i = 0; i < numOfInput; ++i) {
......@@ -186,6 +192,7 @@ static void tconcat(SScalarParam* pOutput, size_t numOfInput, const SScalarParam
rstart += rowLen;
}
#endif
}
static void tltrim(SScalarParam* pOutput, size_t numOfInput, const SScalarParam *pLeft) {
......@@ -262,13 +269,12 @@ static void reverseCopy(char* dest, const char* src, int16_t type, int32_t numOf
}
static void setScalarFuncParam(SScalarParam* param, int32_t type, int32_t bytes, void* pInput, int32_t numOfRows) {
param->bytes = bytes;
param->type = type;
param->num = numOfRows;
param->data = pInput;
// param->bytes = bytes;
// param->type = type;
// param->num = numOfRows;
// param->data = pInput;
}
#if 0
int32_t evaluateExprNodeTree(tExprNode* pExprs, int32_t numOfRows, SScalarFuncParam* pOutput, void* param,
char* (*getSourceDataBlock)(void*, const char*, int32_t)) {
......
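The legacy buffer-based scalar functions (tceil, tfloor, _tabs, tround, tlength, tconcat, ...) are wrapped in #if 0 above pending a rewrite against the columnData-based SScalarParam. A hedged sketch of what the ported ceiling function could look like for the double case only (the name tceilColumn is hypothetical, null handling is omitted, and <math.h> is required for ceil()):

static void tceilColumn(SScalarParam *pOutput, size_t numOfInput, const SScalarParam *pLeft) {
  assert(numOfInput == 1);
  if (pLeft->columnData->info.type == TSDB_DATA_TYPE_DOUBLE) {
    const double *in  = (const double *)pLeft->columnData->pData;
    double       *out = (double *)pOutput->columnData->pData;  // assumed pre-allocated for numOfRows entries
    for (int32_t i = 0; i < pLeft->numOfRows; ++i) {
      out[i] = ceil(in[i]);
    }
    pOutput->numOfRows = pLeft->numOfRows;
  }
}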
This diff is collapsed.
......@@ -261,7 +261,7 @@ TEST(constantTest, bigint_add_bigint) {
ASSERT_EQ(nodeType(res), QUERY_NODE_VALUE);
SValueNode *v = (SValueNode *)res;
ASSERT_EQ(v->node.resType.type, TSDB_DATA_TYPE_DOUBLE);
ASSERT_EQ(v->datum.d, (scltLeftV + scltRightV));
ASSERT_FLOAT_EQ(v->datum.d, (scltLeftV + scltRightV));
nodesDestroyNode(res);
}
......@@ -277,7 +277,7 @@ TEST(constantTest, double_sub_bigint) {
ASSERT_EQ(nodeType(res), QUERY_NODE_VALUE);
SValueNode *v = (SValueNode *)res;
ASSERT_EQ(v->node.resType.type, TSDB_DATA_TYPE_DOUBLE);
ASSERT_EQ(v->datum.d, (scltLeftVd - scltRightV));
ASSERT_FLOAT_EQ(v->datum.d, (scltLeftVd - scltRightV));
nodesDestroyNode(res);
}
......@@ -959,7 +959,6 @@ TEST(columnTest, bigint_column_multi_binary_column) {
scltMakeColumnNode(&pRight, &src, TSDB_DATA_TYPE_BINARY, 5, rowNum, rightv);
scltMakeOpNode(&opNode, OP_TYPE_MULTI, TSDB_DATA_TYPE_DOUBLE, pLeft, pRight);
SArray *blockList = taosArrayInit(1, POINTER_BYTES);
taosArrayPush(blockList, &src);
SColumnInfo colInfo = createColumnInfo(1, TSDB_DATA_TYPE_DOUBLE, sizeof(double));
......