Commit 45481da2 authored by S shenglian zhou

Merge branch 'szhou/feature/support-math-functions' of...

Merge branch 'szhou/feature/support-math-functions' of github.com:taosdata/TDengine into szhou/feature/support-math-functions
...@@ -583,10 +583,15 @@ pipeline {
timeout(time: 55, unit: 'MINUTES'){
pre_test()
sh '''
cd ${WKC}/tests
./test-all.sh develop-test
'''
sh '''
date
cd ${WKC}/tests
./test-all.sh b6fq
date'''
}
}
}
...@@ -596,6 +601,10 @@ pipeline {
timeout(time: 55, unit: 'MINUTES'){
pre_test()
sh '''
cd ${WKC}/tests
./test-all.sh system-test
'''
sh '''
date
cd ${WKC}/tests
./test-all.sh b7fq
......
...@@ -557,7 +557,7 @@ KILL STREAM <stream-id>;
After TDengine starts, it automatically creates a monitoring database named log, and periodically writes the server's CPU, memory, disk space, bandwidth, request count, disk read/write speed, slow queries, and other information into this database. TDengine also records logs of important system operations (such as logging in and creating or deleting databases) as well as various error and alarm messages in the log database. System administrators can inspect this database directly from the CLI, or view the monitoring information through a graphical web interface.
Collection of this monitoring information is enabled by default; it can be turned off or on by changing the enableMonitor option in the configuration file.
Collection of this monitoring information is enabled by default; it can be turned off or on by changing the monitor option in the configuration file.
### TDinsight - a solution for monitoring TDengine with the monitoring database + Grafana
......
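As a quick reference for the renamed option, a minimal sketch of the corresponding taos.cfg entry; the file path, the value shown, and the restart step are assumptions, not part of this documentation change:

```
# /etc/taos/taos.cfg (assumed default location)
# 1 = collect monitoring data into the log database (default), 0 = disable
monitor 1

# assumed: restart taosd afterwards for the change to take effect
# sudo systemctl restart taosd
```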
...@@ -58,7 +58,6 @@ cp ${compile_dir}/../packaging/tools/startPre.sh ${pkg_dir}${install_home_pat
cp ${compile_dir}/../packaging/tools/set_core.sh ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/../packaging/tools/taosd-dump-cfg.gdb ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/bin/taosdemo ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/bin/taosd ${pkg_dir}${install_home_path}/bin
if [ -f "${compile_dir}/build/bin/taosadapter" ]; then
......
...@@ -518,7 +518,7 @@ if [ "$osType" != "Darwin" ]; then
cd ${top_dir}/src/kit/taos-tools/packaging/deb
[ -z "$taos_tools_ver" ] && taos_tools_ver="0.1.0"
taos_tools_ver=$(git describe --tags|sed -e 's/ver-//g') taos_tools_ver=$(git describe --tags|sed -e 's/ver-//g'|awk -F '-' '{print $1}')
${csudo} ./make-taos-tools-deb.sh ${top_dir} \
${compile_dir} ${output_dir} ${taos_tools_ver} ${cpuType} ${osType} ${verMode} ${verType}
fi
...@@ -541,7 +541,7 @@ if [ "$osType" != "Darwin" ]; then
cd ${top_dir}/src/kit/taos-tools/packaging/rpm
[ -z "$taos_tools_ver" ] && taos_tools_ver="0.1.0"
taos_tools_ver=$(git describe --tags|sed -e 's/ver-//g'|sed -e 's/-/_/g') taos_tools_ver=$(git describe --tags|sed -e 's/ver-//g'|awk -F '-' '{print $1}'|sed -e 's/-/_/g')
${csudo} ./make-taos-tools-rpm.sh ${top_dir} \
${compile_dir} ${output_dir} ${taos_tools_ver} ${cpuType} ${osType} ${verMode} ${verType}
fi
......
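A small illustration of what the added awk stage does to the tag reported by git describe; the tag string below is a made-up example of the ver-<version>-<n>-g<hash> form implied by the sed expression:

```
echo "ver-2.3.0.0-36-g45481da2" | sed -e 's/ver-//g'
# -> 2.3.0.0-36-g45481da2   (old pipeline: the hyphenated suffix leaks into the package version)
echo "ver-2.3.0.0-36-g45481da2" | sed -e 's/ver-//g' | awk -F '-' '{print $1}'
# -> 2.3.0.0                (new pipeline: only the leading version component is kept)
```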
...@@ -71,7 +71,6 @@ cp %{_compiledir}/build/bin/taosd %{buildroot}%{homepath}/bin
if [ -f %{_compiledir}/build/bin/taosadapter ]; then
cp %{_compiledir}/build/bin/taosadapter %{buildroot}%{homepath}/bin ||:
fi
cp %{_compiledir}/build/bin/taosdemo %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/lib/${libfile} %{buildroot}%{homepath}/driver
cp %{_compiledir}/../src/inc/taos.h %{buildroot}%{homepath}/include
cp %{_compiledir}/../src/inc/taosdef.h %{buildroot}%{homepath}/include
...@@ -196,7 +195,6 @@ if [ $1 -eq 0 ];then
${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${cfg_link_dir}/* || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
......
...@@ -887,8 +887,8 @@ function update_TDengine() {
fi
tar -zxf taos.tar.gz
install_jemalloc
install_avro lib #install_avro lib
install_avro lib64 #install_avro lib64
echo -e "${GREEN}Start to update TDengine...${NC}"
# Stop the service if running
...@@ -1001,8 +1001,8 @@ function install_TDengine() {
install_header
install_lib
install_jemalloc
install_avro lib #install_avro lib
install_avro lib64 #install_avro lib64
if [ "$pagMode" != "lite" ]; then
install_connector
......
...@@ -45,8 +45,9 @@ if [ "$osType" != "Darwin" ]; then
strip ${build_dir}/bin/taos
bin_files="${build_dir}/bin/taos ${script_dir}/remove_client.sh"
else
bin_files="${build_dir}/bin/taos ${build_dir}/bin/taosdemo \ bin_files="${script_dir}/remove_client.sh \
${script_dir}/remove_client.sh ${script_dir}/set_core.sh ${script_dir}/get_client.sh ${script_dir}/taosd-dump-cfg.gdb" ${script_dir}/set_core.sh \
${script_dir}/get_client.sh ${script_dir}/taosd-dump-cfg.gdb"
fi
lib_files="${build_dir}/lib/libtaos.so.${version}"
else
......
...@@ -31,23 +31,37 @@ else
install_dir="${release_dir}/TDengine-server-${version}"
fi
if [ -d ${top_dir}/src/kit/taos-tools/packaging/deb ]; then
cd ${top_dir}/src/kit/taos-tools/packaging/deb
[ -z "$taos_tools_ver" ] && taos_tools_ver="0.1.0"
taostools_ver=$(git describe --tags|sed -e 's/ver-//g'|awk -F '-' '{print $1}')
taostools_install_dir="${release_dir}/taos-tools-${taostools_ver}"
cd ${curr_dir}
else
taostools_install_dir="${release_dir}/taos-tools-${version}"
fi
# Directories and files
if [ "$pagMode" == "lite" ]; then
strip ${build_dir}/bin/taosd
strip ${build_dir}/bin/taos
# lite version doesn't include taosadapter, which will lead to no restful interface
bin_files="${build_dir}/bin/taosd ${build_dir}/bin/taos ${script_dir}/remove.sh ${script_dir}/startPre.sh"
taostools_bin_files=""
else
bin_files="${build_dir}/bin/taosd \
${build_dir}/bin/taos \
${build_dir}/bin/taosadapter \
${build_dir}/bin/taosdump \
${build_dir}/bin/taosdemo \
${build_dir}/bin/tarbitrator\
${script_dir}/remove.sh \
${script_dir}/set_core.sh \
${script_dir}/startPre.sh \
${script_dir}/taosd-dump-cfg.gdb"
taostools_bin_files=" ${build_dir}/bin/taosdump \
${build_dir}/bin/taosBenchmark"
fi
lib_files="${build_dir}/lib/libtaos.so.${version}"
...@@ -78,6 +92,7 @@ mkdir -p ${install_dir}
mkdir -p ${install_dir}/inc && cp ${header_files} ${install_dir}/inc
mkdir -p ${install_dir}/cfg && cp ${cfg_dir}/taos.cfg ${install_dir}/cfg/taos.cfg
if [ -f "${compile_dir}/test/cfg/taosadapter.toml" ]; then
cp ${compile_dir}/test/cfg/taosadapter.toml ${install_dir}/cfg || :
fi
...@@ -102,10 +117,29 @@ mkdir -p ${install_dir}/init.d && cp ${init_file_rpm} ${install_dir}/init.d/taos
mkdir -p ${install_dir}/init.d && cp ${init_file_tarbitrator_deb} ${install_dir}/init.d/tarbitratord.deb || :
mkdir -p ${install_dir}/init.d && cp ${init_file_tarbitrator_rpm} ${install_dir}/init.d/tarbitratord.rpm || :
if [ -f ${build_dir}/lib/libavro.so.23.0.0 ]; then if [ -n "${taostools_bin_files}" ]; then
mkdir -p ${install_dir}/avro/{lib,lib/pkgconfig} mkdir -p ${taostools_install_dir} || echo -e "failed to create ${taostools_install_dir}"
cp ${build_dir}/lib/libavro.* ${install_dir}/avro/lib mkdir -p ${taostools_install_dir}/bin \
cp ${build_dir}/lib/pkgconfig/avro-c.pc ${install_dir}/avro/lib/pkgconfig && cp ${taostools_bin_files} ${taostools_install_dir}/bin \
&& chmod a+x ${taostools_install_dir}/bin/* || :
[ -f ${taostools_install_dir}/bin/taosBenchmark ] && \
ln -sf ${taostools_install_dir}/bin/taosBenchmark \
${taostools_install_dir}/bin/taosdemo
if [ -f ${top_dir}/src/kit/taos-tools/packaging/tools/install-taostools.sh ]; then
cp ${top_dir}/src/kit/taos-tools/packaging/tools/install-taostools.sh \
${taostools_install_dir}/ > /dev/null \
&& chmod a+x {taostools_install_dir}/install-taostools.sh \
|| echo -e "failed to copy install-taostools.sh"
else
echo -e "install-taostools.sh not found"
fi
if [ -f ${build_dir}/lib/libavro.so.23.0.0 ]; then
mkdir -p ${taostools_install_dir}/avro/{lib,lib/pkgconfig} || echo -e "failed to create ${taostools_install_dir}/avro"
cp ${build_dir}/lib/libavro.* ${taostools_install_dir}/avro/lib
cp ${build_dir}/lib/pkgconfig/avro-c.pc ${taostools_install_dir}/avro/lib/pkgconfig
fi
fi
if [ -f ${build_dir}/bin/jemalloc-config ]; then
...@@ -235,6 +269,8 @@ cd ${release_dir}
# install_dir has been distinguishes cluster from edege, so comments this code
pkg_name=${install_dir}-${osType}-${cpuType}
taostools_pkg_name=${taostools_install_dir}-${osType}-${cpuType}
# if [ "$verMode" == "cluster" ]; then
# pkg_name=${install_dir}-${osType}-${cpuType}
# elif [ "$verMode" == "edge" ]; then
...@@ -246,8 +282,10 @@ pkg_name=${install_dir}-${osType}-${cpuType}
if [[ "$verType" == "beta" ]] || [[ "$verType" == "preRelease" ]]; then
pkg_name=${install_dir}-${verType}-${osType}-${cpuType}
taostools_pkg_name=${taostools_install_dir}-${verType}-${osType}-${cpuType}
elif [ "$verType" == "stable" ]; then
pkg_name=${pkg_name}
taostools_pkg_name=${taostools_pkg_name}
else
echo "unknow verType, nor stabel or beta"
exit 1
...@@ -264,4 +302,13 @@ if [ "$exitcode" != "0" ]; then
exit $exitcode
fi
if [ -n "${taostools_bin_files}" ]; then
tar -zcv -f "$(basename ${taostools_pkg_name}).tar.gz" $(basename ${taostools_install_dir}) --remove-files || :
exitcode=$?
if [ "$exitcode" != "0" ]; then
echo "tar ${taostools_pkg_name}.tar.gz error !!!"
exit $exitcode
fi
fi
cd ${curr_dir}
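For orientation, a sketch of the artifact name the new taos-tools branch of this script produces; all variable values here are hypothetical, chosen only to show how the pieces assemble:

```
# hypothetical values, for illustration only
release_dir=/tmp/release; taostools_ver=2.3.0.0; osType=Linux; cpuType=x64
taostools_install_dir="${release_dir}/taos-tools-${taostools_ver}"
taostools_pkg_name=${taostools_install_dir}-${osType}-${cpuType}
echo "$(basename ${taostools_pkg_name}).tar.gz"   # -> taos-tools-2.3.0.0-Linux-x64.tar.gz
```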
...@@ -27,6 +27,7 @@ install_nginxd_dir="/usr/local/nginxd"
service_config_dir="/etc/systemd/system"
taos_service_name="taosd"
taosadapter_service_name="taosadapter"
tarbitrator_service_name="tarbitratord"
nginx_service_name="nginxd"
csudo=""
...@@ -112,14 +113,20 @@ function clean_log() {
function clean_service_on_systemd() {
taosd_service_config="${service_config_dir}/${taos_service_name}.service"
taosadapter_service_config="${service_config_dir}/taosadapter.service"
if systemctl is-active --quiet ${taos_service_name}; then
echo "TDengine taosd is running, stopping it..."
${csudo} systemctl stop ${taos_service_name} &> /dev/null || echo &> /dev/null
fi
${csudo} systemctl disable ${taos_service_name} &> /dev/null || echo &> /dev/null
${csudo} rm -f ${taosd_service_config}
[ -f ${taosadapter_service_config} ] && ${sudo} rm -f ${taosadapter_service_config}
taosadapter_service_config="${service_config_dir}/taosadapter.service"
if systemctl is-active --quiet ${taosadapter_service_name}; then
echo "TDengine taosAdapter is running, stopping it..."
${csudo} systemctl stop ${taosadapter_service_name} &> /dev/null || echo &> /dev/null
fi
${csudo} systemctl disable ${taosadapter_service_name} &> /dev/null || echo &> /dev/null
[ -f ${taosadapter_service_config} ] && ${csudo} rm -f ${taosadapter_service_config}
tarbitratord_service_config="${service_config_dir}/${tarbitrator_service_name}.service"
if systemctl is-active --quiet ${tarbitrator_service_name}; then
......
...@@ -369,19 +369,28 @@ static int32_t handlePassword(SSqlCmd* pCmd, SStrToken* pPwd) {
// validate the out put field type for "UNION ALL" subclause
static int32_t normalizeVarDataTypeLength(SSqlCmd* pCmd) {
const char* msg1 = "columns in select clause not identical";
const char* msg2 = "too many select clause siblings, at most 100 allowed";
int32_t siblings = 0;
int32_t diffSize = 0;
// if there is only one element, the limit of clause is the limit of global result.
SQueryInfo* pQueryInfo1 = pCmd->pQueryInfo;
SQueryInfo* pSibling = pQueryInfo1->sibling;
// pQueryInfo1 itself
++siblings;
while(pSibling != NULL) {
int32_t ret = tscFieldInfoCompare(&pQueryInfo1->fieldsInfo, &pSibling->fieldsInfo, &diffSize);
if (ret != 0) {
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg1);
}
if (++siblings > 100) {
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg2);
}
pSibling = pSibling->sibling;
}
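To make the new guard concrete, a small sketch of how it surfaces to a user through the taos shell; the loop and the sub0…sub101 table names (mirroring the Python test case added later in this commit) are illustrative, not part of the change:

```
# build a UNION ALL query with more than 100 subclauses (102 here)
sql="select last(*) from sub0"
for i in $(seq 1 101); do
  sql="$sql union all select last(*) from sub$i"
done
# expected to be rejected by the client with:
#   "too many select clause siblings, at most 100 allowed"
taos -s "$sql"
```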
...@@ -7096,17 +7105,15 @@ int32_t validateLimitNode(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SSqlNode* pSqlN
// todo refactor
if (UTIL_TABLE_IS_SUPER_TABLE(pTableMetaInfo)) {
if (!tscQueryTags(pQueryInfo)) { // local handle the super table tag query if (tscIsProjectionQueryOnSTable(pQueryInfo, 0)) {
if (tscIsProjectionQueryOnSTable(pQueryInfo, 0)) { if (pQueryInfo->slimit.limit > 0 || pQueryInfo->slimit.offset > 0) {
if (pQueryInfo->slimit.limit > 0 || pQueryInfo->slimit.offset > 0) { return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg2);
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg2); }
}
// for projection query on super table, all queries are subqueries
if (tscNonOrderedProjectionQueryOnSTable(pQueryInfo, 0) &&
!TSDB_QUERY_HAS_TYPE(pQueryInfo->type, TSDB_QUERY_TYPE_JOIN_QUERY)) {
pQueryInfo->type |= TSDB_QUERY_TYPE_SUBQUERY;
}
}
}
...@@ -9351,7 +9358,7 @@ int32_t validateSqlNode(SSqlObj* pSql, SSqlNode* pSqlNode, SQueryInfo* pQueryInf
const char* msg4 = "interval query not supported, since the result of sub query not include valid timestamp column";
const char* msg5 = "only tag query not compatible with normal column filter";
const char* msg6 = "not support stddev/percentile/interp in the outer query yet";
const char* msg7 = "derivative/twa/irate requires timestamp column exists in subquery"; const char* msg7 = "derivative/twa/rate/irate/diff requires timestamp column exists in subquery";
const char* msg8 = "condition missing for join query";
const char* msg9 = "not support 3 level select";
...@@ -9436,7 +9443,8 @@ int32_t validateSqlNode(SSqlObj* pSql, SSqlNode* pSqlNode, SQueryInfo* pQueryInf
SExprInfo* pExpr = tscExprGet(pQueryInfo, i);
int32_t f = pExpr->base.functionId;
if (f == TSDB_FUNC_DERIVATIVE || f == TSDB_FUNC_TWA || f == TSDB_FUNC_IRATE) { if (f == TSDB_FUNC_DERIVATIVE || f == TSDB_FUNC_TWA || f == TSDB_FUNC_IRATE ||
f == TSDB_FUNC_RATE || f == TSDB_FUNC_DIFF) {
return invalidOperationMsg(tscGetErrorMsgPayload(pCmd), msg7);
}
}
......
...@@ -837,7 +837,11 @@ static int32_t serializeSqlExpr(SSqlExpr* pExpr, STableMetaInfo* pTableMetaInfo,
return TSDB_CODE_TSC_INVALID_OPERATION;
}
assert(pExpr->resColId < 0); if (pExpr->resColId >= 0) {
tscError("result column id underflowed: %d", pExpr->resColId);
return TSDB_CODE_TSC_RES_TOO_MANY;
}
SSqlExpr* pSqlExpr = (SSqlExpr *)(*pMsg);
SColIndex* pIndex = &pSqlExpr->colInfo;
......
...@@ -448,13 +448,14 @@ typedef struct {
#define kvRowSetNCols(r, n) kvRowNCols(r) = (n)
#define kvRowColIdx(r) (SColIdx *)POINTER_SHIFT(r, TD_KV_ROW_HEAD_SIZE)
#define kvRowValues(r) POINTER_SHIFT(r, TD_KV_ROW_HEAD_SIZE + sizeof(SColIdx) * kvRowNCols(r))
#define kvRowKeys(r) POINTER_SHIFT(r, *(uint16_t *)POINTER_SHIFT(r, TD_KV_ROW_HEAD_SIZE + sizeof(int16_t)))
#define kvRowCpy(dst, r) memcpy((dst), (r), kvRowLen(r))
#define kvRowColVal(r, colIdx) POINTER_SHIFT(kvRowValues(r), (colIdx)->offset)
#define kvRowColIdxAt(r, i) (kvRowColIdx(r) + (i))
#define kvRowFree(r) tfree(r)
#define kvRowEnd(r) POINTER_SHIFT(r, kvRowLen(r))
#define kvRowValLen(r) (kvRowLen(r) - TD_KV_ROW_HEAD_SIZE - sizeof(SColIdx) * kvRowNCols(r))
#define kvRowTKey(r) (*(TKEY *)(kvRowValues(r))) #define kvRowTKey(r) (*(TKEY *)(kvRowKeys(r)))
#define kvRowKey(r) tdGetKey(kvRowTKey(r))
#define kvRowDeleted(r) TKEY_IS_DELETED(kvRowTKey(r))
...@@ -652,7 +653,7 @@ static FORCE_INLINE char *memRowEnd(SMemRow row) {
#define memRowKvVersion(r) (*(int16_t *)POINTER_SHIFT(r, TD_MEM_ROW_TYPE_SIZE))
#define memRowVersion(r) (isDataRow(r) ? memRowDataVersion(r) : memRowKvVersion(r)) // schema version
#define memRowSetKvVersion(r, v) (memRowKvVersion(r) = (v))
#define memRowTuple(r) (isDataRow(r) ? dataRowTuple(memRowDataBody(r)) : kvRowValues(memRowKvBody(r))) #define memRowKeys(r) (isDataRow(r) ? dataRowTuple(memRowDataBody(r)) : kvRowKeys(memRowKvBody(r)))
#define memRowTKey(r) (isDataRow(r) ? dataRowTKey(memRowDataBody(r)) : kvRowTKey(memRowKvBody(r)))
#define memRowKey(r) (isDataRow(r) ? dataRowKey(memRowDataBody(r)) : kvRowKey(memRowKvBody(r)))
......
...@@ -9,15 +9,11 @@ import java.sql.SQLException;
import java.sql.Statement;
/**
* @author huolibo@qq.com * this class is an extension of {@link Statement}. e.g.:
* @version v1.0.0
* @JDK: 1.8
* @description: this class is an extension of {@link Statement}. use like:
* Statement statement = conn.createStatement();
* SchemalessStatement schemalessStatement = new SchemalessStatement(statement);
* schemalessStatement.execute(sql);
* schemalessStatement.executeSchemaless(lines, SchemalessProtocolType, SchemalessTimestampType); * schemalessStatement.insert(lines, SchemalessProtocolType, SchemalessTimestampType);
* @since 2021-11-03 17:10
*/
public class SchemalessStatement extends AbstractStatementWrapper {
public SchemalessStatement(Statement statement) {
...@@ -27,12 +23,12 @@ public class SchemalessStatement extends AbstractStatementWrapper {
/**
* batch insert schemaless lines
*
* @param lines schemaless data * @param lines schemaless lines
* @param protocolType schemaless type {@link SchemalessProtocolType}
* @param timestampType Time precision {@link SchemalessTimestampType}
* @throws SQLException execute insert exception
*/
public void executeSchemaless(String[] lines, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException { public void insert(String[] lines, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException {
Connection connection = this.getConnection();
if (connection instanceof TSDBConnection) {
TSDBConnection tsdbConnection = (TSDBConnection) connection;
...@@ -52,7 +48,7 @@ public class SchemalessStatement extends AbstractStatementWrapper {
* @param timestampType Time precision {@link SchemalessTimestampType}
* @throws SQLException execute insert exception
*/
public void executeSchemaless(String line, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException { public void insert(String line, SchemalessProtocolType protocolType, SchemalessTimestampType timestampType) throws SQLException {
executeSchemaless(new String[]{line}, protocolType, timestampType); insert(new String[]{line}, protocolType, timestampType);
}
}
...@@ -29,7 +29,7 @@ public class SchemalessInsertTest {
// when
try (Statement statement = conn.createStatement();
SchemalessStatement schemalessStatement = new SchemalessStatement(statement)) {
schemalessStatement.executeSchemaless(lines, SchemalessProtocolType.LINE, SchemalessTimestampType.NANO_SECONDS); schemalessStatement.insert(lines, SchemalessProtocolType.LINE, SchemalessTimestampType.NANO_SECONDS);
}
// then
...@@ -64,7 +64,7 @@ public class SchemalessInsertTest {
// when
try (Statement statement = conn.createStatement();
SchemalessStatement schemalessStatement = new SchemalessStatement(statement)) {
schemalessStatement.executeSchemaless(lines, SchemalessProtocolType.TELNET, SchemalessTimestampType.NOT_CONFIGURED); schemalessStatement.insert(lines, SchemalessProtocolType.TELNET, SchemalessTimestampType.NOT_CONFIGURED);
}
// then
...@@ -116,7 +116,7 @@ public class SchemalessInsertTest {
// when
try (Statement statement = conn.createStatement();
SchemalessStatement schemalessStatement = new SchemalessStatement(statement)) {
schemalessStatement.executeSchemaless(json, SchemalessProtocolType.JSON, SchemalessTimestampType.NOT_CONFIGURED); schemalessStatement.insert(json, SchemalessProtocolType.JSON, SchemalessTimestampType.NOT_CONFIGURED);
}
// then
......
...@@ -75,45 +75,46 @@ int32_t* taosGetErrno();
#define TSDB_CODE_REF_NOT_EXIST TAOS_DEF_ERROR_CODE(0, 0x010A) //"Ref is not there"
//client
#define TSDB_CODE_TSC_INVALID_OPERATION TAOS_DEF_ERROR_CODE(0, 0x0200) //"Invalid Operation" #define TSDB_CODE_TSC_INVALID_OPERATION TAOS_DEF_ERROR_CODE(0, 0x0200) //"Invalid Operation")
#define TSDB_CODE_TSC_INVALID_QHANDLE TAOS_DEF_ERROR_CODE(0, 0x0201) //"Invalid qhandle" #define TSDB_CODE_TSC_INVALID_QHANDLE TAOS_DEF_ERROR_CODE(0, 0x0201) //"Invalid qhandle")
#define TSDB_CODE_TSC_INVALID_TIME_STAMP TAOS_DEF_ERROR_CODE(0, 0x0202) //"Invalid combination of client/service time" #define TSDB_CODE_TSC_INVALID_TIME_STAMP TAOS_DEF_ERROR_CODE(0, 0x0202) //"Invalid combination of client/service time")
#define TSDB_CODE_TSC_INVALID_VALUE TAOS_DEF_ERROR_CODE(0, 0x0203) //"Invalid value in client" #define TSDB_CODE_TSC_INVALID_VALUE TAOS_DEF_ERROR_CODE(0, 0x0203) //"Invalid value in client")
#define TSDB_CODE_TSC_INVALID_VERSION TAOS_DEF_ERROR_CODE(0, 0x0204) //"Invalid client version" #define TSDB_CODE_TSC_INVALID_VERSION TAOS_DEF_ERROR_CODE(0, 0x0204) //"Invalid client version")
#define TSDB_CODE_TSC_INVALID_IE TAOS_DEF_ERROR_CODE(0, 0x0205) //"Invalid client ie" #define TSDB_CODE_TSC_INVALID_IE TAOS_DEF_ERROR_CODE(0, 0x0205) //"Invalid client ie")
#define TSDB_CODE_TSC_INVALID_FQDN TAOS_DEF_ERROR_CODE(0, 0x0206) //"Invalid host name" #define TSDB_CODE_TSC_INVALID_FQDN TAOS_DEF_ERROR_CODE(0, 0x0206) //"Invalid host name")
#define TSDB_CODE_TSC_INVALID_USER_LENGTH TAOS_DEF_ERROR_CODE(0, 0x0207) //"Invalid user name" #define TSDB_CODE_TSC_INVALID_USER_LENGTH TAOS_DEF_ERROR_CODE(0, 0x0207) //"Invalid user name")
#define TSDB_CODE_TSC_INVALID_PASS_LENGTH TAOS_DEF_ERROR_CODE(0, 0x0208) //"Invalid password" #define TSDB_CODE_TSC_INVALID_PASS_LENGTH TAOS_DEF_ERROR_CODE(0, 0x0208) //"Invalid password")
#define TSDB_CODE_TSC_INVALID_DB_LENGTH TAOS_DEF_ERROR_CODE(0, 0x0209) //"Database name too long" #define TSDB_CODE_TSC_INVALID_DB_LENGTH TAOS_DEF_ERROR_CODE(0, 0x0209) //"Database name too long")
#define TSDB_CODE_TSC_INVALID_TABLE_ID_LENGTH TAOS_DEF_ERROR_CODE(0, 0x020A) //"Table name too long" #define TSDB_CODE_TSC_INVALID_TABLE_ID_LENGTH TAOS_DEF_ERROR_CODE(0, 0x020A) //"Table name too long")
#define TSDB_CODE_TSC_INVALID_CONNECTION TAOS_DEF_ERROR_CODE(0, 0x020B) //"Invalid connection" #define TSDB_CODE_TSC_INVALID_CONNECTION TAOS_DEF_ERROR_CODE(0, 0x020B) //"Invalid connection")
#define TSDB_CODE_TSC_OUT_OF_MEMORY TAOS_DEF_ERROR_CODE(0, 0x020C) //"System out of memory" #define TSDB_CODE_TSC_OUT_OF_MEMORY TAOS_DEF_ERROR_CODE(0, 0x020C) //"System out of memory")
#define TSDB_CODE_TSC_NO_DISKSPACE TAOS_DEF_ERROR_CODE(0, 0x020D) //"System out of disk space" #define TSDB_CODE_TSC_NO_DISKSPACE TAOS_DEF_ERROR_CODE(0, 0x020D) //"System out of disk space")
#define TSDB_CODE_TSC_QUERY_CACHE_ERASED TAOS_DEF_ERROR_CODE(0, 0x020E) //"Query cache erased" #define TSDB_CODE_TSC_QUERY_CACHE_ERASED TAOS_DEF_ERROR_CODE(0, 0x020E) //"Query cache erased")
#define TSDB_CODE_TSC_QUERY_CANCELLED TAOS_DEF_ERROR_CODE(0, 0x020F) //"Query terminated" #define TSDB_CODE_TSC_QUERY_CANCELLED TAOS_DEF_ERROR_CODE(0, 0x020F) //"Query terminated")
#define TSDB_CODE_TSC_SORTED_RES_TOO_MANY TAOS_DEF_ERROR_CODE(0, 0x0210) //"Result set too large to be sorted" // too many result for ordered super table projection query #define TSDB_CODE_TSC_SORTED_RES_TOO_MANY TAOS_DEF_ERROR_CODE(0, 0x0210) //"Result set too large to be sorted") // too many result for ordered super table projection query
#define TSDB_CODE_TSC_APP_ERROR TAOS_DEF_ERROR_CODE(0, 0x0211) //"Application error" #define TSDB_CODE_TSC_APP_ERROR TAOS_DEF_ERROR_CODE(0, 0x0211) //"Application error")
#define TSDB_CODE_TSC_ACTION_IN_PROGRESS TAOS_DEF_ERROR_CODE(0, 0x0212) //"Action in progress" #define TSDB_CODE_TSC_ACTION_IN_PROGRESS TAOS_DEF_ERROR_CODE(0, 0x0212) //"Action in progress")
#define TSDB_CODE_TSC_DISCONNECTED TAOS_DEF_ERROR_CODE(0, 0x0213) //"Disconnected from service" #define TSDB_CODE_TSC_DISCONNECTED TAOS_DEF_ERROR_CODE(0, 0x0213) //"Disconnected from service")
#define TSDB_CODE_TSC_NO_WRITE_AUTH TAOS_DEF_ERROR_CODE(0, 0x0214) //"No write permission" #define TSDB_CODE_TSC_NO_WRITE_AUTH TAOS_DEF_ERROR_CODE(0, 0x0214) //"No write permission")
#define TSDB_CODE_TSC_CONN_KILLED TAOS_DEF_ERROR_CODE(0, 0x0215) //"Connection killed" #define TSDB_CODE_TSC_CONN_KILLED TAOS_DEF_ERROR_CODE(0, 0x0215) //"Connection killed")
#define TSDB_CODE_TSC_SQL_SYNTAX_ERROR TAOS_DEF_ERROR_CODE(0, 0x0216) //"Syntax error in SQL" #define TSDB_CODE_TSC_SQL_SYNTAX_ERROR TAOS_DEF_ERROR_CODE(0, 0x0216) //"Syntax error in SQL")
#define TSDB_CODE_TSC_DB_NOT_SELECTED TAOS_DEF_ERROR_CODE(0, 0x0217) //"Database not specified or available" #define TSDB_CODE_TSC_DB_NOT_SELECTED TAOS_DEF_ERROR_CODE(0, 0x0217) //"Database not specified or available")
#define TSDB_CODE_TSC_INVALID_TABLE_NAME TAOS_DEF_ERROR_CODE(0, 0x0218) //"Table does not exist" #define TSDB_CODE_TSC_INVALID_TABLE_NAME TAOS_DEF_ERROR_CODE(0, 0x0218) //"Table does not exist")
#define TSDB_CODE_TSC_EXCEED_SQL_LIMIT TAOS_DEF_ERROR_CODE(0, 0x0219) //"SQL statement too long check maxSQLLength config" #define TSDB_CODE_TSC_EXCEED_SQL_LIMIT TAOS_DEF_ERROR_CODE(0, 0x0219) //"SQL statement too long check maxSQLLength config")
#define TSDB_CODE_TSC_FILE_EMPTY TAOS_DEF_ERROR_CODE(0, 0x021A) //"File is empty" #define TSDB_CODE_TSC_FILE_EMPTY TAOS_DEF_ERROR_CODE(0, 0x021A) //"File is empty")
#define TSDB_CODE_TSC_LINE_SYNTAX_ERROR TAOS_DEF_ERROR_CODE(0, 0x021B) //"Syntax error in Line" #define TSDB_CODE_TSC_LINE_SYNTAX_ERROR TAOS_DEF_ERROR_CODE(0, 0x021B) //"Syntax error in Line")
#define TSDB_CODE_TSC_NO_META_CACHED TAOS_DEF_ERROR_CODE(0, 0x021C) //"No table meta cached" #define TSDB_CODE_TSC_NO_META_CACHED TAOS_DEF_ERROR_CODE(0, 0x021C) //"No table meta cached")
#define TSDB_CODE_TSC_DUP_COL_NAMES TAOS_DEF_ERROR_CODE(0, 0x021D) //"duplicated column names" #define TSDB_CODE_TSC_DUP_COL_NAMES TAOS_DEF_ERROR_CODE(0, 0x021D) //"duplicated column names")
#define TSDB_CODE_TSC_INVALID_TAG_LENGTH TAOS_DEF_ERROR_CODE(0, 0x021E) //"Invalid tag length" #define TSDB_CODE_TSC_INVALID_TAG_LENGTH TAOS_DEF_ERROR_CODE(0, 0x021E) //"Invalid tag length")
#define TSDB_CODE_TSC_INVALID_COLUMN_LENGTH TAOS_DEF_ERROR_CODE(0, 0x021F) //"Invalid column length" #define TSDB_CODE_TSC_INVALID_COLUMN_LENGTH TAOS_DEF_ERROR_CODE(0, 0x021F) //"Invalid column length")
#define TSDB_CODE_TSC_DUP_TAG_NAMES TAOS_DEF_ERROR_CODE(0, 0x0220) //"duplicated tag names" #define TSDB_CODE_TSC_DUP_TAG_NAMES TAOS_DEF_ERROR_CODE(0, 0x0220) //"duplicated tag names")
#define TSDB_CODE_TSC_INVALID_JSON TAOS_DEF_ERROR_CODE(0, 0x0221) //"Invalid JSON format" #define TSDB_CODE_TSC_INVALID_JSON TAOS_DEF_ERROR_CODE(0, 0x0221) //"Invalid JSON format")
#define TSDB_CODE_TSC_INVALID_JSON_TYPE TAOS_DEF_ERROR_CODE(0, 0x0222) //"Invalid JSON data type" #define TSDB_CODE_TSC_INVALID_JSON_TYPE TAOS_DEF_ERROR_CODE(0, 0x0222) //"Invalid JSON data type")
#define TSDB_CODE_TSC_INVALID_JSON_CONFIG TAOS_DEF_ERROR_CODE(0, 0x0223) //"Invalid JSON configuration" #define TSDB_CODE_TSC_INVALID_JSON_CONFIG TAOS_DEF_ERROR_CODE(0, 0x0223) //"Invalid JSON configuration")
#define TSDB_CODE_TSC_VALUE_OUT_OF_RANGE TAOS_DEF_ERROR_CODE(0, 0x0224) //"Value out of range" #define TSDB_CODE_TSC_VALUE_OUT_OF_RANGE TAOS_DEF_ERROR_CODE(0, 0x0224) //"Value out of range")
#define TSDB_CODE_TSC_INVALID_PROTOCOL_TYPE TAOS_DEF_ERROR_CODE(0, 0x0225) //"Invalid line protocol type" #define TSDB_CODE_TSC_INVALID_PROTOCOL_TYPE TAOS_DEF_ERROR_CODE(0, 0x0225) //"Invalid line protocol type")
#define TSDB_CODE_TSC_INVALID_PRECISION_TYPE TAOS_DEF_ERROR_CODE(0, 0x0226) //"Invalid timestamp precision type" #define TSDB_CODE_TSC_INVALID_PRECISION_TYPE TAOS_DEF_ERROR_CODE(0, 0x0226) //"Invalid timestamp precision type")
#define TSDB_CODE_TSC_RES_TOO_MANY TAOS_DEF_ERROR_CODE(0, 0x0227) //"Result set too large to be output")
// mnode
#define TSDB_CODE_MND_MSG_NOT_PROCESSED TAOS_DEF_ERROR_CODE(0, 0x0300) //"Message not processed"
......
Subproject commit b76b5a76756a5c6530ba1d418de51fd336ae23b1 Subproject commit 02837ce9e257aec65ba2995c6bebd72952f710a2
...@@ -1106,11 +1106,11 @@ static void minMax_function(SQLFunctionCtx *pCtx, char *pOutput, int32_t isMin,
if ((*retVal < pData[i]) ^ isMin) {
*retVal = pData[i];
TSKEY k = tsList[i]; if(tsList) {
TSKEY k = tsList[i];
DO_UPDATE_TAG_COLUMNS(pCtx, k);
}
}
*notNullElems += 1;
}
#if defined(_DEBUG_VIEW)
......
...@@ -564,7 +564,7 @@ static void tsdbFreeTableData(STableData *pTableData) {
}
}
static char *tsdbGetTsTupleKey(const void *data) { return memRowTuple((SMemRow)data); } static char *tsdbGetTsTupleKey(const void *data) { return memRowKeys((SMemRow)data); }
static int tsdbAdjustMemMaxTables(SMemTable *pMemTable, int maxTables) {
ASSERT(pMemTable->maxTables < maxTables);
......
...@@ -122,6 +122,7 @@ TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_JSON_CONFIG, "Invalid JSON configur
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_VALUE_OUT_OF_RANGE, "Value out of range")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_PROTOCOL_TYPE, "Invalid line protocol type")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_PRECISION_TYPE, "Invalid timestamp precision type")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_RES_TOO_MANY, "Result set too large to be output")
// mnode
TAOS_DEFINE_ERROR(TSDB_CODE_MND_MSG_NOT_PROCESSED, "Message not processed")
......
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys
from util.log import *
from util.cases import *
from util.sql import *
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
def run(self):
tdSql.prepare()
ret = tdSql.execute('create table tb (ts timestamp, speed int)')
insertRows = 10
tdLog.info("insert %d rows" % (insertRows))
for i in range(0, insertRows):
ret = tdSql.execute(
'insert into tb values (now + %dm, %d)' %
(i, i))
tdLog.info("insert earlier data")
tdSql.execute('insert into tb values (now - 5m , 10)')
tdSql.execute('insert into tb values (now - 6m , 10)')
tdSql.execute('insert into tb values (now - 7m , 10)')
tdSql.execute('insert into tb values (now - 8m , 10)')
tdSql.query("select * from tb")
tdSql.checkRows(insertRows + 4)
# test case for https://jira.taosdata.com:18080/browse/TD-3716:
tdSql.error("insert into tb(now, 1)")
# test case for TD-10717
tdSql.error("insert into tb values(now,1),,(now+1s,1)")
tdSql.execute("insert into tb values(now+2s,1),(now+3s,1),(now+4s,1)")
tdSql.query("select * from tb")
tdSql.checkRows(insertRows + 4 +3)
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
python3 test.py -f 1-insert/0-sql/basic.py
\ No newline at end of file
#!/usr/bin/python
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# install pip
# pip install src/connector/python/
# -*- coding: utf-8 -*-
import sys
import getopt
import subprocess
import time
from distutils.log import warn as printf
from fabric2 import Connection
sys.path.append("../pytest")
from util.log import *
from util.dnodes import *
from util.cases import *
import taos
if __name__ == "__main__":
fileName = "all"
deployPath = ""
masterIp = ""
testCluster = False
valgrind = 0
logSql = True
stop = 0
restart = False
windows = 0
opts, args = getopt.gnu_getopt(sys.argv[1:], 'f:p:m:l:scghrw', [
'file=', 'path=', 'master', 'logSql', 'stop', 'cluster', 'valgrind', 'help', 'windows'])
for key, value in opts:
if key in ['-h', '--help']:
tdLog.printNoPrefix(
'A collection of test cases written using Python')
tdLog.printNoPrefix('-f Name of test case file written by Python')
tdLog.printNoPrefix('-p Deploy Path for Simulator')
tdLog.printNoPrefix('-m Master Ip for Simulator')
tdLog.printNoPrefix('-l <True:False> logSql Flag')
tdLog.printNoPrefix('-s stop All dnodes')
tdLog.printNoPrefix('-c Test Cluster Flag')
tdLog.printNoPrefix('-g valgrind Test Flag')
tdLog.printNoPrefix('-r taosd restart test')
tdLog.printNoPrefix('-w taos on windows')
sys.exit(0)
if key in ['-r', '--restart']:
restart = True
if key in ['-f', '--file']:
fileName = value
if key in ['-p', '--path']:
deployPath = value
if key in ['-m', '--master']:
masterIp = value
if key in ['-l', '--logSql']:
if (value.upper() == "TRUE"):
logSql = True
elif (value.upper() == "FALSE"):
logSql = False
else:
tdLog.printNoPrefix("logSql value %s is invalid" % logSql)
sys.exit(0)
if key in ['-c', '--cluster']:
testCluster = True
if key in ['-g', '--valgrind']:
valgrind = 1
if key in ['-s', '--stop']:
stop = 1
if key in ['-w', '--windows']:
windows = 1
if (stop != 0):
if (valgrind == 0):
toBeKilled = "taosd"
else:
toBeKilled = "valgrind.bin"
killCmd = "ps -ef|grep -w %s| grep -v grep | awk '{print $2}' | xargs kill -TERM > /dev/null 2>&1" % toBeKilled
psCmd = "ps -ef|grep -w %s| grep -v grep | awk '{print $2}'" % toBeKilled
processID = subprocess.check_output(psCmd, shell=True)
while(processID):
os.system(killCmd)
time.sleep(1)
processID = subprocess.check_output(psCmd, shell=True)
for port in range(6030, 6041):
usePortPID = "lsof -i tcp:%d | grep LISTEn | awk '{print $2}'" % port
processID = subprocess.check_output(usePortPID, shell=True)
if processID:
killCmd = "kill -TERM %s" % processID
os.system(killCmd)
fuserCmd = "fuser -k -n tcp %d" % port
os.system(fuserCmd)
if valgrind:
time.sleep(2)
tdLog.info('stop All dnodes')
if masterIp == "":
host = '127.0.0.1'
else:
host = masterIp
tdLog.info("Procedures for tdengine deployed in %s" % (host))
if windows:
tdCases.logSql(logSql)
tdLog.info("Procedures for testing self-deployment")
td_clinet = TDSimClient("C:\\TDengine")
td_clinet.deploy()
remote_conn = Connection("root@%s"%host)
with remote_conn.cd('/var/lib/jenkins/workspace/TDinternal/community/tests/pytest'):
remote_conn.run("python3 ./test.py")
conn = taos.connect(
host="%s"%(host),
config=td_clinet.cfgDir)
tdCases.runOneWindows(conn, fileName)
else:
tdDnodes.init(deployPath)
tdDnodes.setTestCluster(testCluster)
tdDnodes.setValgrind(valgrind)
tdDnodes.stopAll()
is_test_framework = 0
key_word = 'tdCases.addLinux'
try:
if key_word in open(fileName).read():
is_test_framework = 1
except:
pass
if is_test_framework:
moduleName = fileName.replace(".py", "").replace("/", ".")
uModule = importlib.import_module(moduleName)
try:
ucase = uModule.TDTestCase()
tdDnodes.deploy(1,ucase.updatecfgDict)
except :
tdDnodes.deploy(1,{})
else:
pass
tdDnodes.deploy(1,{})
tdDnodes.start(1)
tdCases.logSql(logSql)
if testCluster:
tdLog.info("Procedures for testing cluster")
if fileName == "all":
tdCases.runAllCluster()
else:
tdCases.runOneCluster(fileName)
else:
tdLog.info("Procedures for testing self-deployment")
conn = taos.connect(
host,
config=tdDnodes.getSimCfgPath())
if fileName == "all":
tdCases.runAllLinux(conn)
else:
tdCases.runOneWindows(conn, fileName)
if restart:
if fileName == "all":
tdLog.info("not need to query ")
else:
sp = fileName.rsplit(".", 1)
if len(sp) == 2 and sp[1] == "py":
tdDnodes.stopAll()
tdDnodes.start(1)
time.sleep(1)
conn = taos.connect( host, config=tdDnodes.getSimCfgPath())
tdLog.info("Procedures for tdengine deployed in %s" % (host))
tdLog.info("query test after taosd restart")
tdCases.runOneLinux(conn, sp[0] + "_" + "restart.py")
else:
tdLog.info("not need to query")
conn.close()
...@@ -29,6 +29,7 @@ python3 ./test.py -f insert/in_function.py
python3 ./test.py -f insert/modify_column.py
#python3 ./test.py -f insert/line_insert.py
python3 ./test.py -f insert/specialSql.py
python3 ./test.py -f insert/timestamp.py
# timezone
......
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys
from util.log import *
from util.cases import *
from util.sql import *
from util.dnodes import *
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
self.ts = 1607281690000
def run(self):
tdSql.prepare()
# TS-806
tdLog.info("test case for TS-806")
# Case 1
tdSql.execute("create table t1(ts timestamp, c1 int)")
tdSql.execute("insert into t1(c1, ts) values(1, %d)" % self.ts)
tdSql.query("select * from t1")
tdSql.checkRows(1)
# Case 2
tdSql.execute("insert into t1(c1, ts) values(2, %d)(3, %d)" % (self.ts + 1000, self.ts + 2000))
tdSql.query("select * from t1")
tdSql.checkRows(3)
# Case 3
tdSql.execute("create table t2(ts timestamp, c1 timestamp)")
tdSql.execute(" insert into t2(c1, ts) values(%d, %d)" % (self.ts, self.ts + 5000))
tdSql.query("select * from t2")
tdSql.checkRows(1)
tdSql.execute(" insert into t2(c1, ts) values(%d, %d)(%d, %d)" % (self.ts, self.ts + 6000, self.ts + 3000, self.ts + 8000))
tdSql.query("select * from t2")
tdSql.checkRows(3)
# Case 4
tdSql.execute("create table stb(ts timestamp, c1 int, c2 binary(20)) tags(tstag timestamp, t1 int)")
tdSql.execute("insert into tb1(c2, ts, c1) using stb(t1, tstag) tags(1, now) values('test', %d, 1)" % self.ts)
tdSql.query("select * from stb")
tdSql.checkRows(1)
# Case 5
tdSql.execute("insert into tb1(c2, ts, c1) using stb(t1, tstag) tags(1, now) values('test', now, 1) tb2(c1, ts) using stb tags(now + 2m, 1000) values(1, now - 1h)")
tdSql.query("select * from stb")
tdSql.checkRows(3)
tdSql.execute(" insert into tb1(c2, ts, c1) using stb(t1, tstag) tags(1, now) values('test', now + 10s, 1) tb2(c1, ts) using stb(tstag) tags(now + 2m) values(1, now - 3h)(2, now - 2h)")
tdSql.query("select * from stb")
tdSql.checkRows(6)
# Case 6
tdSql.execute("create table stb2 (ts timestamp, c1 timestamp, c2 timestamp) tags(t1 timestamp, t2 timestamp)")
tdSql.execute(" insert into tb4(c1, c2, ts) using stb2(t2, t1) tags(now, now + 1h) values(now + 1s, now + 2s, now + 3s)(now -1s, now - 2s, now - 3s) tb5(c2, ts, c1) using stb2(t2) tags(now + 1h) values(now, now, now)")
tdSql.query("select * from stb2")
tdSql.checkRows(3)
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
...@@ -132,6 +132,25 @@ class TDTestCase:
tdSql.error("select top(tbcol1, 12) from st order by ts, tbcol1")
tdSql.error("select top(tbcol1, 2) from st1 group by tbcol1 order by tbcol2")
fun_list = ['avg','count','twa','sum','stddev','leastsquares','min',
'max','first','last','top','bottom','percentile','apercentile',
'last_row','diff','spread','distinct']
key = ['tbol','tagcol']
for i in range(1,15):
for k in key:
for j in fun_list:
if j == 'leastsquares':
pick_func=j+'('+ k + str(i) +',1,1)'
elif j == 'top' or j == 'bottom' : continue
elif j == 'percentile' or j == 'apercentile':
pick_func=j+'('+ k + str(i) +',1)'
else:
pick_func=j+'('+ k + str(i) +')'
sql = 'select %s from st group by %s order by %s' % (pick_func , k+str(i), k+str(i))
tdSql.error(sql)
sql = 'select %s from st6 group by %s order by %s ' % (pick_func , k+str(i), k+str(i))
tdSql.error(sql)
tdSql.query("select top(tbcol1, 2) from st1 group by tbcol2 order by tbcol2") tdSql.query("select top(tbcol1, 2) from st1 group by tbcol2 order by tbcol2")
tdSql.query("select top(tbcol1, 12) from st order by tbcol1, ts") tdSql.query("select top(tbcol1, 12) from st order by tbcol1, ts")
......
...@@ -103,10 +103,27 @@ class TDTestCase:
select count(*) as count, loc from st where ts between 1600000000000 and 1600000000010 group by loc''')
tdSql.checkRows(6)
# https://jira.taosdata.com:18080/browse/TS-715
tdLog.info("test case for TS-715")
sql = ""
tdSql.execute("create table st2(ts timestamp, c1 int, c2 int, c3 int) tags(loc nchar(20))")
for i in range(101):
if i == 0:
sql = "select last(*) from sub0 "
else:
sql += f"union all select last(*) from sub{i} "
tdSql.execute("create table sub%d using st2 tags('nchar%d')" % (i, i))
tdSql.execute("insert into sub%d values(%d, %d, %d, %d)" % (i, self.ts + i, i, i, i))
tdSql.error(sql)
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
\ No newline at end of file
...@@ -419,12 +419,14 @@ if $data03 != @20-09-15 00:00:00.000@ then
return -1
endi
sql select diff(val) from (select c1 val from nest_tb0); sql_error select diff(val) from (select c1 val from nest_tb0);
sql select diff(val) from (select ts,c1 val from nest_tb0);
if $rows != 9999 then
return -1
endi
if $data00 != @70-01-01 08:00:00.000@ then if $data00 != @20-09-15 00:01:00.000@ then
return -1
endi
......
...@@ -31,6 +31,8 @@ $tsu = $tsu + $ts0
#sql_error select top(c1, 1) from $stb where ts >= $ts0 and ts <= $tsu slimit 5 offset 1
#sql_error select bottom(c1, 1) from $stb where ts >= $ts0 and ts <= $tsu slimit 5 offset 1
sql_error select t1 from $stb slimit 5 offset 1;
### select from stb + group by + slimit offset
sql select max(c1), min(c2), avg(c3), sum(c4), spread(c5), sum(c6), count(c7), first(c8), last(c9) from $stb group by t1 slimit 5 soffset 0
if $rows != 5 then
......
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys
from util.log import *
from util.cases import *
from util.sql import *
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
def run(self):
tdSql.prepare()
ret = tdSql.execute('create table tb (ts timestamp, speed int)')
insertRows = 10
tdLog.info("insert %d rows" % (insertRows))
for i in range(0, insertRows):
ret = tdSql.execute(
'insert into tb values (now + %dm, %d)' %
(i, i))
tdLog.info("insert earlier data")
tdSql.execute('insert into tb values (now - 5m , 10)')
tdSql.execute('insert into tb values (now - 6m , 10)')
tdSql.execute('insert into tb values (now - 7m , 10)')
tdSql.execute('insert into tb values (now - 8m , 10)')
tdSql.query("select * from tb")
tdSql.checkRows(insertRows + 4)
# test case for https://jira.taosdata.com:18080/browse/TD-3716:
tdSql.error("insert into tb(now, 1)")
# test case for TD-10717
tdSql.error("insert into tb values(now,1),,(now+1s,1)")
tdSql.execute("insert into tb values(now+2s,1),(now+3s,1),(now+4s,1)")
tdSql.query("select * from tb")
tdSql.checkRows(insertRows + 4 +3)
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
python3 test.py -f 1-insert/0-sql/basic.py
\ No newline at end of file
#!/usr/bin/python
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# install pip
# pip install src/connector/python/
# -*- coding: utf-8 -*-
import sys
import getopt
import subprocess
import time
from distutils.log import warn as printf
from fabric2 import Connection
sys.path.append("../pytest")
from util.log import *
from util.dnodes import *
from util.cases import *
import taos
if __name__ == "__main__":
fileName = "all"
deployPath = ""
masterIp = ""
testCluster = False
valgrind = 0
logSql = True
stop = 0
restart = False
windows = 0
opts, args = getopt.gnu_getopt(sys.argv[1:], 'f:p:m:l:scghrw', [
'file=', 'path=', 'master', 'logSql', 'stop', 'cluster', 'valgrind', 'help', 'windows'])
for key, value in opts:
if key in ['-h', '--help']:
tdLog.printNoPrefix(
'A collection of test cases written using Python')
tdLog.printNoPrefix('-f Name of test case file written by Python')
tdLog.printNoPrefix('-p Deploy Path for Simulator')
tdLog.printNoPrefix('-m Master Ip for Simulator')
tdLog.printNoPrefix('-l <True:False> logSql Flag')
tdLog.printNoPrefix('-s stop All dnodes')
tdLog.printNoPrefix('-c Test Cluster Flag')
tdLog.printNoPrefix('-g valgrind Test Flag')
tdLog.printNoPrefix('-r taosd restart test')
tdLog.printNoPrefix('-w taos on windows')
sys.exit(0)
if key in ['-r', '--restart']:
restart = True
if key in ['-f', '--file']:
fileName = value
if key in ['-p', '--path']:
deployPath = value
if key in ['-m', '--master']:
masterIp = value
if key in ['-l', '--logSql']:
if (value.upper() == "TRUE"):
logSql = True
elif (value.upper() == "FALSE"):
logSql = False
else:
tdLog.printNoPrefix("logSql value %s is invalid" % logSql)
sys.exit(0)
if key in ['-c', '--cluster']:
testCluster = True
if key in ['-g', '--valgrind']:
valgrind = 1
if key in ['-s', '--stop']:
stop = 1
if key in ['-w', '--windows']:
windows = 1
if (stop != 0):
if (valgrind == 0):
toBeKilled = "taosd"
else:
toBeKilled = "valgrind.bin"
killCmd = "ps -ef|grep -w %s| grep -v grep | awk '{print $2}' | xargs kill -TERM > /dev/null 2>&1" % toBeKilled
psCmd = "ps -ef|grep -w %s| grep -v grep | awk '{print $2}'" % toBeKilled
processID = subprocess.check_output(psCmd, shell=True)
while(processID):
os.system(killCmd)
time.sleep(1)
processID = subprocess.check_output(psCmd, shell=True)
for port in range(6030, 6041):
usePortPID = "lsof -i tcp:%d | grep LISTEn | awk '{print $2}'" % port
processID = subprocess.check_output(usePortPID, shell=True)
if processID:
killCmd = "kill -TERM %s" % processID
os.system(killCmd)
fuserCmd = "fuser -k -n tcp %d" % port
os.system(fuserCmd)
if valgrind:
time.sleep(2)
tdLog.info('stop All dnodes')
if masterIp == "":
host = '127.0.0.1'
else:
host = masterIp
tdLog.info("Procedures for tdengine deployed in %s" % (host))
if windows:
tdCases.logSql(logSql)
tdLog.info("Procedures for testing self-deployment")
td_clinet = TDSimClient("C:\\TDengine")
td_clinet.deploy()
remote_conn = Connection("root@%s"%host)
with remote_conn.cd('/var/lib/jenkins/workspace/TDinternal/community/tests/pytest'):
remote_conn.run("python3 ./test.py")
conn = taos.connect(
host="%s"%(host),
config=td_clinet.cfgDir)
tdCases.runOneWindows(conn, fileName)
else:
tdDnodes.init(deployPath)
tdDnodes.setTestCluster(testCluster)
tdDnodes.setValgrind(valgrind)
tdDnodes.stopAll()
is_test_framework = 0
key_word = 'tdCases.addLinux'
try:
if key_word in open(fileName).read():
is_test_framework = 1
except:
pass
if is_test_framework:
moduleName = fileName.replace(".py", "").replace("/", ".")
uModule = importlib.import_module(moduleName)
try:
ucase = uModule.TDTestCase()
tdDnodes.deploy(1,ucase.updatecfgDict)
except :
tdDnodes.deploy(1,{})
else:
pass
tdDnodes.deploy(1,{})
tdDnodes.start(1)
tdCases.logSql(logSql)
if testCluster:
tdLog.info("Procedures for testing cluster")
if fileName == "all":
tdCases.runAllCluster()
else:
tdCases.runOneCluster(fileName)
else:
tdLog.info("Procedures for testing self-deployment")
conn = taos.connect(
host,
config=tdDnodes.getSimCfgPath())
if fileName == "all":
tdCases.runAllLinux(conn)
else:
tdCases.runOneWindows(conn, fileName)
if restart:
if fileName == "all":
tdLog.info("not need to query ")
else:
sp = fileName.rsplit(".", 1)
if len(sp) == 2 and sp[1] == "py":
tdDnodes.stopAll()
tdDnodes.start(1)
time.sleep(1)
conn = taos.connect( host, config=tdDnodes.getSimCfgPath())
tdLog.info("Procedures for tdengine deployed in %s" % (host))
tdLog.info("query test after taosd restart")
tdCases.runOneLinux(conn, sp[0] + "_" + "restart.py")
else:
tdLog.info("not need to query")
conn.close()
...@@ -158,7 +158,13 @@ function runPyCaseOneByOne {
}
function runPyCaseOneByOnefq() {
cd $tests_dir/pytest if [[ $3 =~ system ]] ; then
cd $tests_dir/system-test
elif [[ $3 =~ develop ]] ; then
cd $tests_dir/develop-test
else
cd $tests_dir/pytest
fi
if [[ $1 =~ full ]] ; then
start=1
end=`sed -n '$=' fulltest.sh`
...@@ -361,6 +367,12 @@ if [ "$2" != "sim" ] && [ "$2" != "jdbc" ] && [ "$2" != "unit" ] && [ "$2" != "
elif [ "$1" == "p4" ]; then
echo "### run Python_4 test ###"
runPyCaseOneByOnefq p4 1
elif [ "$1" == "system-test" ]; then
echo "### run system-test test ###"
runPyCaseOneByOnefq full 1 system
elif [ "$1" == "develop-test" ]; then
echo "### run develop-test test ###"
runPyCaseOneByOnefq full 1 develop
elif [ "$1" == "b2" ] || [ "$1" == "b3" ]; then elif [ "$1" == "b2" ] || [ "$1" == "b3" ]; then
exit $(($totalFailed + $totalPyFailed)) exit $(($totalFailed + $totalPyFailed))
elif [ "$1" == "smoke" ] || [ -z "$1" ]; then elif [ "$1" == "smoke" ] || [ -z "$1" ]; then
......
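With these hooks in place, the new suites can be run locally the same way the Jenkins stages above invoke them; WKC is the workspace root variable used by the pipeline:

```
cd ${WKC}/tests
./test-all.sh develop-test   # runs every case listed in develop-test/fulltest.sh
./test-all.sh system-test    # runs every case listed in system-test/fulltest.sh
```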