Commit 26b362bc authored by dapan1121

Merge branch 'develop' into feature/TD-6140

......@@ -16,9 +16,9 @@
[submodule "deps/TSZ"]
path = deps/TSZ
url = https://github.com/taosdata/TSZ.git
[submodule "deps/avro"]
path = deps/avro
url = https://github.com/apache/avro
[submodule "src/kit/taos-tools"]
path = src/kit/taos-tools
url = https://github.com/taosdata/taos-tools
[submodule "src/plugins/taosadapter"]
path = src/plugins/taosadapter
url = https://github.com/taosdata/taosadapter
......@@ -34,7 +34,7 @@ At the moment, TDengine only supports building and running on Linux systems. You
To build TDengine, use [CMake](https://cmake.org/) 3.0.2 or higher versions in the project directory.
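For reference, a minimal out-of-source build might look like the sketch below; the `debug` directory name and the option shown are illustrative assumptions, not requirements:
```bash
mkdir -p debug && cd debug
cmake ..        # append options such as -DBUILD_TOOLS=true if needed
make
```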
## Install tools
## Install build dependencies
### Ubuntu 16.04 and above & Debian:
```bash
......@@ -58,6 +58,12 @@ To install Apache Maven:
sudo apt-get install -y maven
```
#### Install build dependencies for taos-tools
To build the [taos-tools](https://github.com/taosdata/taos-tools) on Ubuntu/Debian, the following packages need to be installed.
```bash
sudo apt install libjansson-dev libsnappy-dev liblzma-dev libz-dev pkg-config
```
### Centos 7:
```bash
sudo yum install epel-release
......@@ -91,6 +97,12 @@ To install Apache Maven:
sudo dnf install -y maven
```
#### Install build dependencies for taos-tools
To build the [taos-tools](https://github.com/taosdata/taos-tools) on CentOS, the following packages need to be installed.
```bash
sudo yum install xz-devel snappy-devel jansson-devel pkgconfig libatomic
```
### Setup golang environment
TDengine includes a few components developed in Go. Please refer to the official documentation at golang.org for Go environment setup.
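As a rough, non-authoritative sketch (the module settings and proxy URL below are assumptions, not project requirements), a Go toolchain can be checked and configured like this:
```bash
# Verify that a Go toolchain is on the PATH.
go version

# Optionally enable modules and set a proxy if module downloads are slow;
# the proxy URL here is only an example.
go env -w GO111MODULE=on
go env -w GOPROXY=https://goproxy.cn,direct
```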
......@@ -108,7 +120,7 @@ git clone https://github.com/taosdata/TDengine.git
cd TDengine
```
The connectors for go & grafana have been moved to separated repositories,
The connectors for go & grafana and some tools have been moved to separate repositories,
so you should run this command in the TDengine directory to install them:
```bash
git submodule update --init --recursive
......
......@@ -160,6 +160,32 @@ IF (TD_BUILD_HTTP)
ADD_DEFINITIONS(-DHTTP_EMBEDDED)
ENDIF ()
IF ("${BUILD_TOOLS}" STREQUAL "")
IF (TD_LINUX)
IF (TD_ARM_32)
SET(BUILD_TOOLS "false")
ELSEIF (TD_ARM_64)
SET(BUILD_TOOLS "false")
ELSE ()
SET(BUILD_TOOLS "true")
ENDIF ()
ELSEIF (TD_DARWIN)
SET(BUILD_TOOLS "false")
ELSE ()
SET(BUILD_TOOLS "false")
ENDIF ()
ENDIF ()
IF ("${BUILD_TOOLS}" MATCHES "false")
MESSAGE("${Yellow} Will _not_ build taos_tools! ${ColourReset}")
SET(TD_TAOS_TOOLS FALSE)
ELSE ()
MESSAGE("")
MESSAGE("${Green} Will build taos_tools! ${ColourReset}")
MESSAGE("")
SET(TD_TAOS_TOOLS TRUE)
ENDIF ()
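As a usage note, the default selected above can be overridden explicitly at configure time; the commands below are a sketch assuming a standard out-of-source build directory:
```bash
# Skip building taos-tools:
cmake .. -DBUILD_TOOLS=false

# Force taos-tools on:
cmake .. -DBUILD_TOOLS=true
```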
IF (${BUILD_LUA} MATCHES "false")
SET(TD_BUILD_LUA FALSE)
ENDIF ()
......@@ -167,18 +193,10 @@ ENDIF ()
IF (TD_BUILD_LUA)
MESSAGE("Enable lua")
ADD_DEFINITIONS(-DLUA_EMBEDDED)
SET(LINK_LUA "lua")
ELSE ()
MESSAGE("Disable lua")
ENDIF ()
IF ("${AVRO_SUPPORT}" MATCHES "true")
SET(TD_AVRO_SUPPORT TRUE)
ELSEIF ("${AVRO_SUPPORT}" MATCHES "false")
SET(TD_AVRO_SUPPORT FALSE)
ENDIF ()
IF (TD_AVRO_SUPPORT)
ADD_DEFINITIONS(-DAVRO_SUPPORT)
SET(LINK_LUA "")
ENDIF ()
IF (TD_LINUX)
......
......@@ -98,9 +98,9 @@ ENDIF ()
SET(TD_BUILD_HTTP FALSE)
SET(TD_BUILD_LUA TRUE)
SET(TD_TAOS_TOOLS TRUE)
SET(TD_AVRO_SUPPORT FALSE)
SET(TD_BUILD_LUA TRUE)
SET(TD_MEMORY_SANITIZER FALSE)
IF (${MEMORY_SANITIZER} MATCHES "true")
......
......@@ -28,30 +28,6 @@ IF (TD_DARWIN AND TD_MQTT)
ADD_SUBDIRECTORY(MQTT-C)
ENDIF ()
IF (TD_AVRO_SUPPORT)
MESSAGE("")
MESSAGE("${Green} ENABLE avro format support ${ColourReset}")
MESSAGE("")
include(ExternalProject)
ExternalProject_Add(
apache-avro
PREFIX "avro"
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c
BUILD_IN_SOURCE 1
PATCH_COMMAND
COMMAND git clean -f -d
COMMAND sed -i.bak -e "/TARGETS avroappend/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
COMMAND sed -i.bak -e "/TARGETS avrocat/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
COMMAND sed -i.bak -e "/TARGETS avromod/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
COMMAND sed -i.bak -e "/TARGETS avropipe/d" ${CMAKE_CURRENT_SOURCE_DIR}/avro/lang/c/src/CMakeLists.txt
CONFIGURE_COMMAND cmake -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_BINARY_DIR}/build
)
ELSE ()
MESSAGE("")
MESSAGE("${Yellow} NO avro format support ${ColourReset}")
MESSAGE("")
ENDIF ()
IF (TD_LINUX_64 AND JEMALLOC_ENABLED)
MESSAGE("")
MESSAGE("${Green} ENABLE jemalloc ${ColourReset}")
......
Subproject commit a1fce29d9675b4dd95dfee9db32cc505d0b2227c
......@@ -110,7 +110,8 @@ TDengine是一个高效的存储、查询、分析时序大数据的平台,专
* [User Management](/administrator#user): add/delete TDengine users, modify user passwords
* [Data Import](/administrator#import): import data either from script files or from data files
* [Data Export](/administrator#export): export data table by table from the shell, or use the taosdump tool for various exports
* [System Monitor](/administrator#status): check existing connections, queries, streaming computations, logs, events, etc.
* [System Connection and Task Query Management](/administrator#status): check existing connections, queries, streaming computations, logs, events, etc.
* [System Monitor](/administrator#monitoring): system monitoring, cluster monitoring with TDinsight, etc.
* [Performance Optimization](/administrator#optimize): maintain and optimize long-running systems to guarantee performance
* [File Directory Structure](/administrator#directories): directories where TDengine data files and configuration files are located
* [Parameter Limits and Reserved Keywords](/administrator#keywords): list of TDengine parameter limits and reserved keywords
......
......@@ -553,7 +553,7 @@ KILL STREAM <stream-id>;
Forcibly stop the stream computation, where stream-id is the connection-id:stream-no string shown in SHOW STREAMS, such as 103:2; just copy and paste it.
## System Monitoring
## <a class="anchor" id="monitoring"></a>System Monitoring
After TDengine starts, it automatically creates a monitoring database named log and periodically writes the server's CPU, memory, disk space, bandwidth, request count, disk read/write speed, slow queries, and other information into it. TDengine also records logs of important system operations (such as logging in, creating or deleting databases) and various error alarms in the log database. A system administrator can view this database directly from the CLI, or view the monitoring information through a graphical web interface.
......
......@@ -1603,3 +1603,18 @@ TAOS SQL 支持对标签、TBNAME 进行 GROUP BY 操作,也支持普通列进
IS NOT NULL is supported for columns of all types. The not-empty expression is <>"", which applies only to columns of non-numeric types.
## Validity of Table (Column) Names
The naming rules for table (column) names in TDengine are as follows:
A name may only contain letters, digits, and underscores, may not begin with a digit, may not exceed 192 bytes, and is case-insensitive.
Rules for escaped table (column) names:
To support more forms of table (column) names, TDengine introduces a new escape character "`". It lets a table name avoid conflicts with keywords and exempts it from the validity checks above.
Escaped table (column) names are still subject to the length limit, and the escape characters are not counted toward the length. After the escape character is used, the content inside it is no longer normalized to a single case.
For example:
\`aBc\` and \`abc\` are different table (column) names, while abc and aBc are the same table (column) name.
Note that the content inside the escape characters must consist of printable characters.
Supported versions
The escape character feature is supported since version 2.3.0.1.
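A small illustration of these rules, written as taos CLI invocations; the database name demo is an assumption made only for this sketch:
```bash
# Without escaping, identifiers are case-insensitive: tb1 and TB1 refer to the same table.
taos -s 'CREATE TABLE demo.tb1 (ts TIMESTAMP, val INT);'

# With backtick escaping, case is preserved: `aBc` and `abc` are two different tables.
taos -s 'CREATE TABLE demo.`aBc` (ts TIMESTAMP, val INT);'
taos -s 'CREATE TABLE demo.`abc` (ts TIMESTAMP, val INT);'
```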
......@@ -106,7 +106,8 @@ TDengine is a highly efficient platform to store, query, and analyze time-series
- [User Management](/administrator#user): add/delete TDengine users, modify user password
- [Import Data](/administrator#import): import data into TDengine from either script or CSV file
- [Export Data](/administrator#export): export data either from TDengine shell or from the taosdump tool
- [System Monitor](/administrator#status): monitor the system connections, queries, streaming calculation, logs, and events
- [System Connection and Task Query Management](/administrator#status): show the system connections, queries, streaming calculation and others
- [System Monitor](/administrator#monitoring): monitor TDengine cluster with log database and TDinsight.
- [File Directory Structure](/administrator#directories): directories where TDengine data files and configuration files located
- [Parameter Limits and Reserved Keywords](/administrator#keywords): TDengine’s list of parameter limits and reserved keywords
......
......@@ -400,7 +400,7 @@ KILL STREAM <stream-id>;
Forcibly stop the stream computation, where stream-id is the connection-id:stream-no string displayed in SHOW STREAMS, such as 103:2; just copy and paste it.
## System Monitoring
## <a class="anchor" id="monitoring"></a>System Monitoring
After TDengine is started, it will automatically create a monitoring database named log and periodically write the server's CPU, memory, hard disk space, bandwidth, number of requests, disk read/write speed, slow queries, and other information into it. TDengine also records logs of important system operations (such as logging in, creating or deleting databases) and various error alarms in the log database. The system administrator can view this database directly from the CLI or view the monitoring information through a graphical web interface.
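As a quick illustration (assuming the CLI accepts semicolon-separated statements via -s; the table names inside the log database vary by version), the collected metrics can be inspected from the command line:
```bash
# The built-in monitoring database is named log.
taos -s 'SHOW DATABASES;'
# List the monitoring tables that TDengine maintains in it.
taos -s 'USE log; SHOW TABLES;'
```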
......
......@@ -142,11 +142,11 @@ function check_main_path() {
function check_bin_path() {
# check install bin dir and all sub dir
bin_dir=("taos" "taosd" "taosadapter" "taosdemo" "taosdump" "remove.sh" "tarbitrator" "set_core.sh")
bin_dir=("taos" "taosd" "taosadapter" "taosdemo" "remove.sh" "tarbitrator" "set_core.sh")
for i in "${bin_dir[@]}";do
check_file ${sbin_dir} $i
done
lbin_dir=("taos" "taosd" "taosadapter" "taosdemo" "taosdump" "rmtaos" "tarbitrator" "set_core")
lbin_dir=("taos" "taosd" "taosadapter" "taosdemo" "rmtaos" "tarbitrator" "set_core")
for i in "${lbin_dir[@]}";do
check_link ${bin_link_dir}/$i
done
......@@ -170,7 +170,7 @@ function check_lib_path() {
function check_header_path() {
# check all header
header_dir=("taos.h" "taoserror.h")
header_dir=("taos.h" "taosdef.h" "taoserror.h")
for i in "${header_dir[@]}";do
check_link ${inc_link_dir}/$i
done
......
......@@ -27,7 +27,6 @@ else
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${cfg_link_dir}/* || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${lib_link_dir}/libtaos.* || :
......
......@@ -59,7 +59,6 @@ cp ${compile_dir}/../packaging/tools/set_core.sh ${pkg_dir}${install_home_pat
cp ${compile_dir}/../packaging/tools/taosd-dump-cfg.gdb ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/bin/taosdemo ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/bin/taosdump ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/bin/taosd ${pkg_dir}${install_home_path}/bin
if [ -f "${compile_dir}/build/bin/taosadapter" ]; then
......@@ -69,6 +68,7 @@ fi
cp ${compile_dir}/build/bin/taos ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/lib/${libfile} ${pkg_dir}${install_home_path}/driver
cp ${compile_dir}/../src/inc/taos.h ${pkg_dir}${install_home_path}/include
cp ${compile_dir}/../src/inc/taosdef.h ${pkg_dir}${install_home_path}/include
cp ${compile_dir}/../src/inc/taoserror.h ${pkg_dir}${install_home_path}/include
cp -r ${top_dir}/tests/examples/* ${pkg_dir}${install_home_path}/examples
cp -r ${top_dir}/src/connector/python ${pkg_dir}${install_home_path}/connector
......@@ -78,16 +78,6 @@ cp ${compile_dir}/build/lib/taos-jdbcdriver*.* ${pkg_dir}${install_home_path}/c
install_user_local_path="/usr/local"
if [ -f ${compile_dir}/build/lib/libavro.so.23.0.0 ]; then
mkdir -p ${pkg_dir}${install_user_local_path}/lib
cp ${compile_dir}/build/lib/libavro.so.23.0.0 ${pkg_dir}${install_user_local_path}/lib/
ln -sf libavro.so.23.0.0 ${pkg_dir}${install_user_local_path}/lib/libavro.so.23
ln -sf libavro.so.23 ${pkg_dir}${install_user_local_path}/lib/libavro.so
fi
if [ -f ${compile_dir}/build/lib/libavro.a ]; then
cp ${compile_dir}/build/lib/libavro.a ${pkg_dir}${install_user_local_path}/lib/
fi
if [ -f ${compile_dir}/build/bin/jemalloc-config ]; then
mkdir -p ${pkg_dir}${install_user_local_path}/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3}
cp ${compile_dir}/build/bin/jemalloc-config ${pkg_dir}${install_user_local_path}/bin/
......@@ -128,13 +118,7 @@ chmod 755 ${pkg_dir}/DEBIAN/*
debver="Version: "$tdengine_ver
sed -i "2c$debver" ${pkg_dir}/DEBIAN/control
if [ -f ${compile_dir}/build/lib/libavro.so.23.0.0 ]; then
sed -i.bak "s/#Depends: no/Depends: libjansson4, libsnappy1v5/g" ${pkg_dir}/DEBIAN/control
fi
#get taos version, then set deb name
if [ "$verMode" == "cluster" ]; then
debname="TDengine-server-"${tdengine_ver}-${osType}-${cpuType}
elif [ "$verMode" == "edge" ]; then
......@@ -153,13 +137,11 @@ else
exit 1
fi
# make deb package
dpkg -b ${pkg_dir} $debname
echo "make deb package success!"
cp ${pkg_dir}/*.deb ${output_dir}
# clean tmep dir
# clean temp dir
rm -rf ${pkg_dir}
......@@ -7,19 +7,19 @@
# chkconfig: 2345 99 01
#
### BEGIN INIT INFO
# Provides: TDEngine
# Provides: TDengine
# Required-Start: $local_fs $network $syslog
# Required-Stop: $local_fs $network $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Starts TDEngine taosd
# Description: Starts TDEngine taosd, a time-series database engine
# Short-Description: Starts TDengine taosd
# Description: Starts TDengine taosd, a time-series database engine
### END INIT INFO
set -e
PATH="/bin:/usr/bin:/sbin:/usr/sbin"
NAME="TDEngine"
NAME="TDengine"
USER="root"
GROUP="root"
DAEMON="/usr/local/taos/bin/taosd"
......@@ -40,7 +40,7 @@ MAX_OPEN_FILES=65535
case "$1" in
start)
log_action_begin_msg "Starting TDEngine..."
log_action_begin_msg "Starting TDengine..."
$DAEMON_HTTPD &
if start-stop-daemon --test --start --chuid "$USER:$GROUP" --background --make-pidfile --pidfile "$PID_FILE" --exec "$DAEMON" -- $APPARGS &> /dev/null; then
......@@ -57,7 +57,7 @@ case "$1" in
;;
stop)
log_action_begin_msg "Stopping TDEngine..."
log_action_begin_msg "Stopping TDengine..."
pkill -9 $DAEMON_HTTPD_NAME
set +e
if [ -f "$PID_FILE" ]; then
......@@ -66,12 +66,12 @@ case "$1" in
log_action_cont_msg "TSD is not running but pid file exists, cleaning up"
elif [ $? -eq 3 ]; then
PID="`cat $PID_FILE`"
log_failure_msg "Failed to stop TDEngine (pid $PID)"
log_failure_msg "Failed to stop TDengine (pid $PID)"
exit 1
fi
rm -f "$PID_FILE"
else
log_action_cont_msg "TDEngine was not running"
log_action_cont_msg "TDengine was not running"
fi
log_action_end_msg 0
set -e
......
......@@ -408,13 +408,17 @@ fi
echo "build ${pagMode} package ..."
if [[ "$pagMode" == "lite" ]]; then
BUILD_HTTP=true
BUILD_TOOLS=false
else
BUILD_HTTP=false
BUILD_TOOLS=true
fi
# check support cpu type
if [[ "$cpuType" == "x64" ]] || [[ "$cpuType" == "aarch64" ]] || [[ "$cpuType" == "aarch32" ]] || [[ "$cpuType" == "mips64" ]] ; then
if [ "$verMode" != "cluster" ]; then
# community-version compile
cmake ../ -DCPUTYPE=${cpuType} -DOSTYPE=${osType} -DSOMODE=${soMode} -DDBNAME=${dbName} -DVERTYPE=${verType} -DVERDATE="${build_time}" -DGITINFO=${gitinfo} -DGITINFOI=${gitinfoOfInternal} -DVERNUMBER=${verNumber} -DVERCOMPATIBLE=${verNumberComp} -DPAGMODE=${pagMode} -DBUILD_HTTP=${BUILD_HTTP} ${allocator_macro}
cmake ../ -DCPUTYPE=${cpuType} -DOSTYPE=${osType} -DSOMODE=${soMode} -DDBNAME=${dbName} -DVERTYPE=${verType} -DVERDATE="${build_time}" -DGITINFO=${gitinfo} -DGITINFOI=${gitinfoOfInternal} -DVERNUMBER=${verNumber} -DVERCOMPATIBLE=${verNumberComp} -DPAGMODE=${pagMode} -DBUILD_HTTP=${BUILD_HTTP} -DBUILD_TOOLS=${BUILD_TOOLS} ${allocator_macro}
else
# enterprise-version compile
if [[ "$dbName" == "power" ]]; then
......@@ -473,7 +477,7 @@ if [[ "$cpuType" == "x64" ]] || [[ "$cpuType" == "aarch64" ]] || [[ "$cpuType" =
#sed -i "s/taos\.cfg/taos\.cfg/g" ${top_dir}/../enterprise/src/plugins/module/src/moduleMain.c
fi
cmake ../../ -DCPUTYPE=${cpuType} -DOSTYPE=${osType} -DSOMODE=${soMode} -DDBNAME=${dbName} -DVERTYPE=${verType} -DVERDATE="${build_time}" -DGITINFO=${gitinfo} -DGITINFOI=${gitinfoOfInternal} -DVERNUMBER=${verNumber} -DVERCOMPATIBLE=${verNumberComp} ${allocator_macro}
cmake ../../ -DCPUTYPE=${cpuType} -DOSTYPE=${osType} -DSOMODE=${soMode} -DDBNAME=${dbName} -DVERTYPE=${verType} -DVERDATE="${build_time}" -DGITINFO=${gitinfo} -DGITINFOI=${gitinfoOfInternal} -DVERNUMBER=${verNumber} -DVERCOMPATIBLE=${verNumberComp} -DBUILD_HTTP=${BUILD_HTTP} -DBUILD_TOOLS=${BUILD_TOOLS} ${allocator_macro}
fi
else
echo "input cpuType=${cpuType} error!!!"
......@@ -482,9 +486,9 @@ fi
if [[ "$allocator" == "jemalloc" ]]; then
# jemalloc need compile first, so disable parallel build
make V=1 && ${csudo} make install
make -j 8 && ${csudo} make install
else
make -j8 && ${csudo} make install
make -j 8 && ${csudo} make install
fi
cd ${curr_dir}
......@@ -503,6 +507,15 @@ if [ "$osType" != "Darwin" ]; then
${csudo} mkdir -p ${output_dir}
cd ${script_dir}/deb
${csudo} ./makedeb.sh ${compile_dir} ${output_dir} ${verNumber} ${cpuType} ${osType} ${verMode} ${verType}
if [ -d ${top_dir}/src/kit/taos-tools/packaging/deb ]; then
cd ${top_dir}/src/kit/taos-tools/packaging/deb
[ -z "$taos_tools_ver" ] && taos_tools_ver="0.1.0"
taos_tools_ver=$(git describe --tags|sed -e 's/ver-//g')
${csudo} ./make-taos-tools-deb.sh ${top_dir} \
${compile_dir} ${output_dir} ${taos_tools_ver} ${cpuType} ${osType} ${verMode} ${verType}
fi
else
echo "==========dpkg command not exist, so not release deb package!!!"
fi
......@@ -517,6 +530,15 @@ if [ "$osType" != "Darwin" ]; then
${csudo} mkdir -p ${output_dir}
cd ${script_dir}/rpm
${csudo} ./makerpm.sh ${compile_dir} ${output_dir} ${verNumber} ${cpuType} ${osType} ${verMode} ${verType}
if [ -d ${top_dir}/src/kit/taos-tools/packaging/rpm ]; then
cd ${top_dir}/src/kit/taos-tools/packaging/rpm
[ -z "$taos_tools_ver" ] && taos_tools_ver="0.1.0"
taos_tools_ver=$(git describe --tags|sed -e 's/ver-//g'|sed -e 's/-/_/g')
${csudo} ./make-taos-tools-rpm.sh ${top_dir} \
${compile_dir} ${output_dir} ${taos_tools_ver} ${cpuType} ${osType} ${verMode} ${verType}
fi
else
echo "==========rpmbuild command not exist, so not release rpm package!!!"
fi
......
......@@ -56,10 +56,6 @@ cd ${pkg_dir}
${csudo} mkdir -p BUILD BUILDROOT RPMS SOURCES SPECS SRPMS
if [ -f ${compile_dir}/build/lib/libavro.so.23.0.0 ]; then
sed -i.bak 's/#Requires:/Requires: jansson snappy/g' ${spec_file}
fi
${csudo} rpmbuild --define="_version ${tdengine_ver}" --define="_topdir ${pkg_dir}" --define="_compiledir ${compile_dir}" -bb ${spec_file}
# copy rpm package to output_dir, and modify package name, then clean temp dir
......
#!/bin/bash
#
# taosd This shell script takes care of starting and stopping TDEngine.
# taosd This shell script takes care of starting and stopping TDengine.
#
# chkconfig: 2345 99 01
# description: TDEngine is a districuted, scalable, high-performance Time Series Database
# (TSDB). More than just a pure database, TDEngine also provides the ability
# description: TDengine is a distributed, scalable, high-performance Time Series Database
# (TSDB). More than just a pure database, TDengine also provides the ability
# to do stream computing, aggregation etc.
#
#
......@@ -13,8 +13,8 @@
# Required-Start: $network $local_fs $remote_fs
# Required-Stop: $network $local_fs $remote_fs
# Short-Description: start and stop taosd
# Description: TDEngine is a districuted, scalable, high-performance Time Series Database
# (TSDB). More than just a pure database, TDEngine also provides the ability
# Description: TDengine is a distributed, scalable, high-performance Time Series Database
# (TSDB). More than just a pure database, TDengine also provides the ability
# to do stream computing, aggregation etc.
### END INIT INFO
......
......@@ -72,9 +72,9 @@ if [ -f %{_compiledir}/build/bin/taosadapter ]; then
cp %{_compiledir}/build/bin/taosadapter %{buildroot}%{homepath}/bin ||:
fi
cp %{_compiledir}/build/bin/taosdemo %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taosdump %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/lib/${libfile} %{buildroot}%{homepath}/driver
cp %{_compiledir}/../src/inc/taos.h %{buildroot}%{homepath}/include
cp %{_compiledir}/../src/inc/taosdef.h %{buildroot}%{homepath}/include
cp %{_compiledir}/../src/inc/taoserror.h %{buildroot}%{homepath}/include
cp -r %{_compiledir}/../src/connector/python %{buildroot}%{homepath}/connector
cp -r %{_compiledir}/../src/connector/go %{buildroot}%{homepath}/connector
......@@ -82,15 +82,6 @@ cp -r %{_compiledir}/../src/connector/nodejs %{buildroot}%{homepath}/conn
cp %{_compiledir}/build/lib/taos-jdbcdriver*.* %{buildroot}%{homepath}/connector ||:
cp -r %{_compiledir}/../tests/examples/* %{buildroot}%{homepath}/examples
if [ -f %{_compiledir}/build/lib/libavro.so.23.0.0 ]; then
cp %{_compiledir}/build/lib/libavro.so.23.0.0 %{buildroot}%{homepath}/driver
ln -sf libavro.so.23.0.0 %{buildroot}%{homepath}/driver/libavro.so.23
ln -sf libavro.so.23 %{buildroot}%{homepath}/driver/libavro.so
fi
if [ -f %{_compiledir}/build/lib/libavro.a ]; then
cp %{_compiledir}/build/lib/libavro.a %{buildroot}%{homepath}/driver
fi
if [ -f %{_compiledir}/build/bin/jemalloc-config ]; then
mkdir -p %{buildroot}%{userlocalpath}/bin
mkdir -p %{buildroot}%{userlocalpath}/lib
......@@ -206,9 +197,9 @@ if [ $1 -eq 0 ];then
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${cfg_link_dir}/* || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${lib_link_dir}/libtaos.* || :
......
......@@ -236,6 +236,29 @@ function install_lib() {
${csudo} ldconfig
}
function install_avro() {
if [ "$osType" != "Darwin" ]; then
avro_dir=${script_dir}/avro
if [ -f "${avro_dir}/lib/libavro.so.23.0.0" ] && [ -d /usr/local/$1 ]; then
${csudo} /usr/bin/install -c -d /usr/local/$1
${csudo} /usr/bin/install -c -m 755 ${avro_dir}/lib/libavro.so.23.0.0 /usr/local/$1
${csudo} ln -sf /usr/local/$1/libavro.so.23.0.0 /usr/local/$1/libavro.so.23
${csudo} ln -sf /usr/local/$1/libavro.so.23 /usr/local/$1/libavro.so
${csudo} /usr/bin/install -c -d /usr/local/$1
[ -f ${avro_dir}/lib/libavro.a ] &&
${csudo} /usr/bin/install -c -m 755 ${avro_dir}/lib/libavro.a /usr/local/$1
if [ -d /etc/ld.so.conf.d ]; then
echo "/usr/local/$1" | ${csudo} tee /etc/ld.so.conf.d/libavro.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/libavro.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
fi
fi
fi
}
function install_jemalloc() {
jemalloc_dir=${script_dir}/jemalloc
......@@ -281,7 +304,7 @@ function install_jemalloc() {
fi
if [ -d /etc/ld.so.conf.d ]; then
${csudo} echo "/usr/local/lib" > /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......@@ -290,9 +313,10 @@ function install_jemalloc() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......@@ -848,6 +872,8 @@ function update_TDengine() {
fi
tar -zxf taos.tar.gz
install_jemalloc
install_avro lib
install_avro lib64
echo -e "${GREEN}Start to update TDengine...${NC}"
# Stop the service if running
......@@ -960,6 +986,9 @@ function install_TDengine() {
install_header
install_lib
install_jemalloc
install_avro lib
install_avro lib64
if [ "$pagMode" != "lite" ]; then
install_connector
fi
......
......@@ -116,9 +116,10 @@ function install_bin() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......@@ -167,7 +168,7 @@ function install_jemalloc() {
fi
if [ -d /etc/ld.so.conf.d ]; then
${csudo} echo "/usr/local/lib" > /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......
......@@ -160,7 +160,7 @@ function install_jemalloc() {
fi
if [ -d /etc/ld.so.conf.d ]; then
${csudo} echo "/usr/local/lib" > /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......@@ -169,9 +169,10 @@ function install_jemalloc() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......
......@@ -116,9 +116,10 @@ function install_bin() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......
......@@ -116,9 +116,10 @@ function install_bin() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......
......@@ -86,7 +86,6 @@ function install_bin() {
${csudo} rm -f ${bin_link_dir}/taos || :
if [ "$osType" != "Darwin" ]; then
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
fi
${csudo} rm -f ${bin_link_dir}/rmtaos || :
${csudo} rm -f ${bin_link_dir}/set_core || :
......@@ -97,7 +96,6 @@ function install_bin() {
[ -x ${install_main_dir}/bin/taos ] && ${csudo} ln -s ${install_main_dir}/bin/taos ${bin_link_dir}/taos || :
if [ "$osType" != "Darwin" ]; then
[ -x ${install_main_dir}/bin/taosdemo ] && ${csudo} ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo || :
[ -x ${install_main_dir}/bin/taosdump ] && ${csudo} ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump || :
fi
[ -x ${install_main_dir}/bin/remove_client.sh ] && ${csudo} ln -s ${install_main_dir}/bin/remove_client.sh ${bin_link_dir}/rmtaos || :
[ -x ${install_main_dir}/bin/set_core.sh ] && ${csudo} ln -s ${install_main_dir}/bin/set_core.sh ${bin_link_dir}/set_core || :
......@@ -128,7 +126,7 @@ function install_lib() {
${csudo} ln -s ${install_main_dir}/driver/libtaos.* ${lib_link_dir}/libtaos.1.dylib
${csudo} ln -s ${lib_link_dir}/libtaos.1.dylib ${lib_link_dir}/libtaos.dylib
fi
if [ "$osType" != "Darwin" ]; then
${csudo} ldconfig
else
......@@ -137,9 +135,10 @@ function install_lib() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......@@ -188,7 +187,7 @@ function install_jemalloc() {
fi
if [ -d /etc/ld.so.conf.d ]; then
${csudo} echo "/usr/local/lib" > /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......
......@@ -133,9 +133,10 @@ function install_lib() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......@@ -184,7 +185,7 @@ function install_jemalloc() {
fi
if [ -d /etc/ld.so.conf.d ]; then
${csudo} echo "/usr/local/lib" > /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......
......@@ -129,9 +129,10 @@ function install_lib() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......
......@@ -133,9 +133,10 @@ function install_lib() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......
......@@ -263,7 +263,7 @@ function install_jemalloc() {
fi
if [ -d /etc/ld.so.conf.d ]; then
${csudo} echo "/usr/local/lib" > /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......
......@@ -263,7 +263,7 @@ function install_jemalloc() {
fi
if [ -d /etc/ld.so.conf.d ]; then
${csudo} echo "/usr/local/lib" > /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......
......@@ -258,7 +258,7 @@ function install_jemalloc() {
fi
if [ -d /etc/ld.so.conf.d ]; then
${csudo} echo "/usr/local/lib" > /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......@@ -267,9 +267,10 @@ function install_jemalloc() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......
......@@ -212,9 +212,10 @@ function install_lib() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......@@ -263,7 +264,7 @@ function install_jemalloc() {
fi
if [ -d /etc/ld.so.conf.d ]; then
${csudo} echo "/usr/local/lib" > /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......
......@@ -214,9 +214,10 @@ function install_lib() {
}
function install_header() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${script_dir}/inc/* ${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
}
......@@ -265,7 +266,7 @@ function install_jemalloc() {
fi
if [ -d /etc/ld.so.conf.d ]; then
${csudo} echo "/usr/local/lib" > /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......
......@@ -4,7 +4,7 @@
# is required to use systemd to manage services at boot
set -e
# set -x
#set -x
# -----------------------Variables definition
source_dir=$1
......@@ -232,7 +232,7 @@ function install_jemalloc() {
/usr/local/lib/pkgconfig
fi
if [ -d /etc/ld.so.conf.d ]; then
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf
echo "/usr/local/lib" | ${csudo} tee /etc/ld.so.conf.d/jemalloc.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/jemalloc.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......@@ -253,7 +253,7 @@ function install_jemalloc() {
function install_avro() {
if [ "$osType" != "Darwin" ]; then
if [ -f "${binary_dir}/build/$1/libavro.so.23.0.0" ]; then
if [ -f "${binary_dir}/build/$1/libavro.so.23.0.0" ] && [ -d /usr/local/$1 ]; then
${csudo} /usr/bin/install -c -d /usr/local/$1
${csudo} /usr/bin/install -c -m 755 ${binary_dir}/build/$1/libavro.so.23.0.0 /usr/local/$1
${csudo} ln -sf libavro.so.23.0.0 /usr/local/$1/libavro.so.23
......@@ -263,7 +263,7 @@ function install_avro() {
${csudo} /usr/bin/install -c -m 755 ${binary_dir}/build/$1/libavro.a /usr/local/$1
if [ -d /etc/ld.so.conf.d ]; then
echo "/usr/local/$1" | ${csudo} tee /etc/ld.so.conf.d/libavro.conf
echo "/usr/local/$1" | ${csudo} tee /etc/ld.so.conf.d/libavro.conf > /dev/null || echo -e "failed to write /etc/ld.so.conf.d/libavro.conf"
${csudo} ldconfig
else
echo "/etc/ld.so.conf.d not found!"
......@@ -329,15 +329,16 @@ function install_lib() {
function install_header() {
if [ "$osType" != "Darwin" ]; then
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${source_dir}/src/inc/taos.h ${source_dir}/src/inc/taoserror.h \
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h || :
${csudo} cp -f ${source_dir}/src/inc/taos.h ${source_dir}/src/inc/taosdef.h ${source_dir}/src/inc/taoserror.h \
${install_main_dir}/include && ${csudo} chmod 644 ${install_main_dir}/include/*
${csudo} ln -s ${install_main_dir}/include/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${install_main_dir}/include/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${install_main_dir}/include/taoserror.h ${inc_link_dir}/taoserror.h
else
${csudo} cp -f ${source_dir}/src/inc/taos.h ${source_dir}/src/inc/taoserror.h \
${csudo} cp -f ${source_dir}/src/inc/taos.h ${source_dir}/src/inc/taosdef.h ${source_dir}/src/inc/taoserror.h \
${install_main_dir}/include \
|| ${csudo} cp -f ${source_dir}/src/inc/taos.h ${source_dir}/src/inc/taoserror.h \
|| ${csudo} cp -f ${source_dir}/src/inc/taos.h ${source_dir}/src/inc/taosdef.h ${source_dir}/src/inc/taoserror.h \
${install_main_2_dir}/include \
&& ${csudo} chmod 644 ${install_main_dir}/include/* \
|| ${csudo} chmod 644 ${install_main_2_dir}/include/*
......
......@@ -34,7 +34,7 @@ fi
bin_files="${build_dir}/bin/tarbitrator ${script_dir}/remove_arbi.sh"
install_files="${script_dir}/install_arbi.sh"
#header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taoserror.h"
#header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taosdef.h ${code_dir}/inc/taoserror.h"
init_file_tarbitrator_deb=${script_dir}/../deb/tarbitratord
init_file_tarbitrator_rpm=${script_dir}/../rpm/tarbitratord
......
......@@ -44,7 +44,7 @@ mkdir -p ${install_dir}/bin && cp ${bin_files} ${install_dir}/bin && chmod a+x $
mkdir -p ${install_dir}/init.d && cp ${init_file_tarbitrator_deb} ${install_dir}/init.d/tarbitratord.deb || :
mkdir -p ${install_dir}/init.d && cp ${init_file_tarbitrator_rpm} ${install_dir}/init.d/tarbitratord.rpm || :
cd ${release_dir}
cd ${release_dir}
if [ "$verMode" == "cluster" ]; then
pkg_name=${install_dir}-${osType}-${cpuType}
......@@ -57,8 +57,8 @@ fi
if [ "$verType" == "beta" ]; then
pkg_name=${pkg_name}-${verType}
elif [ "$verType" == "stable" ]; then
pkg_name=${pkg_name}
elif [ "$verType" == "stable" ]; then
pkg_name=${pkg_name}
else
echo "unknow verType, nor stabel or beta"
exit 1
......
......@@ -44,7 +44,7 @@ mkdir -p ${install_dir}/bin && cp ${bin_files} ${install_dir}/bin && chmod a+x $
mkdir -p ${install_dir}/init.d && cp ${init_file_tarbitrator_deb} ${install_dir}/init.d/tarbitratord.deb || :
mkdir -p ${install_dir}/init.d && cp ${init_file_tarbitrator_rpm} ${install_dir}/init.d/tarbitratord.rpm || :
cd ${release_dir}
cd ${release_dir}
if [ "$verMode" == "cluster" ]; then
pkg_name=${install_dir}-${osType}-${cpuType}
......@@ -57,8 +57,8 @@ fi
if [ "$verType" == "beta" ]; then
pkg_name=${pkg_name}-${verType}
elif [ "$verType" == "stable" ]; then
pkg_name=${pkg_name}
elif [ "$verType" == "stable" ]; then
pkg_name=${pkg_name}
else
echo "unknow verType, nor stabel or beta"
exit 1
......
......@@ -44,7 +44,7 @@ mkdir -p ${install_dir}/bin && cp ${bin_files} ${install_dir}/bin && chmod a+x $
mkdir -p ${install_dir}/init.d && cp ${init_file_tarbitrator_deb} ${install_dir}/init.d/tarbitratord.deb || :
mkdir -p ${install_dir}/init.d && cp ${init_file_tarbitrator_rpm} ${install_dir}/init.d/tarbitratord.rpm || :
cd ${release_dir}
cd ${release_dir}
if [ "$verMode" == "cluster" ]; then
pkg_name=${install_dir}-${osType}-${cpuType}
......@@ -57,8 +57,8 @@ fi
if [ "$verType" == "beta" ]; then
pkg_name=${pkg_name}-${verType}
elif [ "$verType" == "stable" ]; then
pkg_name=${pkg_name}
elif [ "$verType" == "stable" ]; then
pkg_name=${pkg_name}
else
echo "unknow verType, nor stabel or beta"
exit 1
......
......@@ -45,7 +45,7 @@ if [ "$osType" != "Darwin" ]; then
strip ${build_dir}/bin/taos
bin_files="${build_dir}/bin/taos ${script_dir}/remove_client.sh"
else
bin_files="${build_dir}/bin/taos ${build_dir}/bin/taosdump ${build_dir}/bin/taosdemo \
bin_files="${build_dir}/bin/taos ${build_dir}/bin/taosdemo \
${script_dir}/remove_client.sh ${script_dir}/set_core.sh ${script_dir}/get_client.sh ${script_dir}/taosd-dump-cfg.gdb"
fi
lib_files="${build_dir}/lib/libtaos.so.${version}"
......@@ -54,7 +54,7 @@ else
lib_files="${build_dir}/lib/libtaos.${version}.dylib"
fi
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taoserror.h"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taosdef.h ${code_dir}/inc/taoserror.h"
if [ "$verMode" == "cluster" ]; then
cfg_dir="${top_dir}/../enterprise/packaging/cfg"
else
......
......@@ -53,7 +53,7 @@ else
lib_files="${build_dir}/lib/libtaos.${version}.dylib"
fi
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taoserror.h"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taosdef.h ${code_dir}/inc/taoserror.h"
if [ "$verMode" == "cluster" ]; then
cfg_dir="${top_dir}/../enterprise/packaging/cfg"
else
......
......@@ -46,7 +46,7 @@ else
lib_files="${build_dir}/lib/libtaos.${version}.dylib"
fi
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taoserror.h"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taosdef.h ${code_dir}/inc/taoserror.h"
if [ "$verMode" == "cluster" ]; then
cfg_dir="${top_dir}/../enterprise/packaging/cfg"
else
......
......@@ -46,7 +46,7 @@ else
lib_files="${build_dir}/lib/libtaos.${version}.dylib"
fi
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taoserror.h"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taosdef.h ${code_dir}/inc/taoserror.h"
if [ "$verMode" == "cluster" ]; then
cfg_dir="${top_dir}/../enterprise/packaging/cfg"
else
......
......@@ -51,7 +51,7 @@ else
fi
lib_files="${build_dir}/lib/libtaos.so.${version}"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taoserror.h"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taosdef.h ${code_dir}/inc/taoserror.h"
if [ "$verMode" == "cluster" ]; then
cfg_dir="${top_dir}/../enterprise/packaging/cfg"
else
......@@ -92,6 +92,12 @@ mkdir -p ${install_dir}/init.d && cp ${init_file_rpm} ${install_dir}/init.d/taos
mkdir -p ${install_dir}/init.d && cp ${init_file_tarbitrator_deb} ${install_dir}/init.d/tarbitratord.deb || :
mkdir -p ${install_dir}/init.d && cp ${init_file_tarbitrator_rpm} ${install_dir}/init.d/tarbitratord.rpm || :
if [ -f ${build_dir}/lib/libavro.so.23.0.0 ]; then
mkdir -p ${install_dir}/avro/{lib,lib/pkgconfig}
cp ${build_dir}/lib/libavro.* ${install_dir}/avro/lib
cp ${build_dir}/lib/pkgconfig/avro-c.pc ${install_dir}/avro/lib/pkgconfig
fi
if [ -f ${build_dir}/bin/jemalloc-config ]; then
mkdir -p ${install_dir}/jemalloc/{bin,lib,lib/pkgconfig,include/jemalloc,share/doc/jemalloc,share/man/man3}
cp ${build_dir}/bin/jemalloc-config ${install_dir}/jemalloc/bin
......
......@@ -32,7 +32,7 @@ else
fi
lib_files="${build_dir}/lib/libtaos.so.${version}"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taoserror.h"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taosdef.h ${code_dir}/inc/taoserror.h"
if [ "$verMode" == "cluster" ]; then
cfg_dir="${top_dir}/../enterprise/packaging/cfg"
else
......
......@@ -32,7 +32,7 @@ else
fi
lib_files="${build_dir}/lib/libtaos.so.${version}"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taoserror.h"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taosdef.h ${code_dir}/inc/taoserror.h"
if [ "$verMode" == "cluster" ]; then
cfg_dir="${top_dir}/../enterprise/packaging/cfg"
else
......
......@@ -32,7 +32,7 @@ else
fi
lib_files="${build_dir}/lib/libtaos.so.${version}"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taoserror.h"
header_files="${code_dir}/inc/taos.h ${code_dir}/inc/taosdef.h ${code_dir}/inc/taoserror.h"
if [ "$verMode" == "cluster" ]; then
cfg_dir="${top_dir}/../enterprise/packaging/cfg"
else
......@@ -59,7 +59,6 @@ else
cp ${script_dir}/remove_tq.sh ${install_dir}/bin
cp ${build_dir}/bin/taosadapter ${install_dir}/bin/taosadapter ||:
cp ${build_dir}/bin/taosdemo ${install_dir}/bin/tqdemo
cp ${build_dir}/bin/taosdump ${install_dir}/bin/tqdump
cp ${build_dir}/bin/tarbitrator ${install_dir}/bin
cp ${script_dir}/set_core.sh ${install_dir}/bin
cp ${script_dir}/get_client.sh ${install_dir}/bin
......
......@@ -81,29 +81,12 @@ function kill_taosd() {
}
function install_include() {
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taoserror.h|| :
${csudo} rm -f ${inc_link_dir}/taos.h ${inc_link_dir}/taosdef.h ${inc_link_dir}/taoserror.h|| :
${csudo} ln -s ${inc_dir}/taos.h ${inc_link_dir}/taos.h
${csudo} ln -s ${inc_dir}/taosdef.h ${inc_link_dir}/taosdef.h
${csudo} ln -s ${inc_dir}/taoserror.h ${inc_link_dir}/taoserror.h
}
function install_avro_lib() {
${csudo} rm -f ${lib_link_dir}/libavro* || :
${csudo} rm -f ${lib64_link_dir}/libavro* || :
if [[ -f ${lib_dir}/libavro.so.23.0.0 ]]; then
${csudo} ln -s ${lib_dir}/libavro.so.23.0.0 ${lib_link_dir}/libavro.so.23.0.0
${csudo} ln -s ${lib_link_dir}/libavro.so.23.0.0 ${lib_link_dir}/libavro.so.23
${csudo} ln -s ${lib_link_dir}/libavro.so.23 ${lib_link_dir}/libavro.so
if [[ -d ${lib64_link_dir} && ! -e ${lib64_link_dir}/libavro.so ]]; then
${csudo} ln -s ${lib_dir}/libavro.so.23.0.0 ${lib64_link_dir}/libavro.so.23.0.0 || :
${csudo} ln -s ${lib64_link_dir}/libavro.so.23.0.0 ${lib64_link_dir}/libavro.so.23 || :
${csudo} ln -s ${lib64_link_dir}/libavro.so.23 ${lib64_link_dir}/libavro.so || :
fi
fi
${csudo} ldconfig
}
function install_lib() {
${csudo} rm -f ${lib_link_dir}/libtaos* || :
${csudo} rm -f ${lib64_link_dir}/libtaos* || :
......@@ -503,7 +486,6 @@ function install_TDengine() {
# Install include, lib, binary and service
install_include
install_lib
install_avro_lib
install_bin
install_config
install_taosadapter_config
......
......@@ -122,10 +122,10 @@ ${csudo} rm -f ${bin_link_dir}/taos || :
${csudo} rm -f ${bin_link_dir}/taosd || :
${csudo} rm -f ${bin_link_dir}/taosadapter || :
${csudo} rm -f ${bin_link_dir}/taosdemo || :
${csudo} rm -f ${bin_link_dir}/taosdump || :
${csudo} rm -f ${bin_link_dir}/set_core || :
${csudo} rm -f ${cfg_link_dir}/*.new || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${lib_link_dir}/libtaos.* || :
${csudo} rm -f ${lib64_link_dir}/libtaos.* || :
......
......@@ -96,6 +96,7 @@ function clean_lib() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
......
......@@ -57,6 +57,7 @@ function clean_bin() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
......
......@@ -57,6 +57,7 @@ function clean_bin() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
......
......@@ -57,6 +57,7 @@ function clean_bin() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
......
......@@ -57,6 +57,7 @@ function clean_bin() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
......
......@@ -52,13 +52,14 @@ function clean_lib() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
function clean_config() {
# Remove link
${csudo} rm -f ${cfg_link_dir}/* || :
${csudo} rm -f ${cfg_link_dir}/* || :
}
function clean_log() {
......
......@@ -52,13 +52,14 @@ function clean_lib() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
function clean_config() {
# Remove link
${csudo} rm -f ${cfg_link_dir}/* || :
${csudo} rm -f ${cfg_link_dir}/* || :
}
function clean_log() {
......
......@@ -46,13 +46,14 @@ function clean_lib() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
function clean_config() {
# Remove link
${csudo} rm -f ${cfg_link_dir}/* || :
${csudo} rm -f ${cfg_link_dir}/* || :
}
function clean_log() {
......
......@@ -52,13 +52,14 @@ function clean_lib() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
function clean_config() {
# Remove link
${csudo} rm -f ${cfg_link_dir}/* || :
${csudo} rm -f ${cfg_link_dir}/* || :
}
function clean_log() {
......
......@@ -88,6 +88,7 @@ function clean_lib() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
......
......@@ -85,6 +85,7 @@ function clean_lib() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
......
......@@ -87,13 +87,14 @@ function clean_lib() {
function clean_header() {
# Remove link
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
${csudo} rm -f ${inc_link_dir}/taos.h || :
${csudo} rm -f ${inc_link_dir}/taosdef.h || :
${csudo} rm -f ${inc_link_dir}/taoserror.h || :
}
function clean_config() {
# Remove link
${csudo} rm -f ${cfg_link_dir}/* || :
${csudo} rm -f ${cfg_link_dir}/* || :
}
function clean_log() {
......@@ -109,7 +110,7 @@ function clean_service_on_systemd() {
fi
${csudo} systemctl disable ${tq_service_name} &> /dev/null || echo &> /dev/null
${csudo} rm -f ${tq_service_config}
tarbitratord_service_config="${service_config_dir}/${tarbitrator_service_name}.service"
if systemctl is-active --quiet ${tarbitrator_service_name}; then
echo "TQ tarbitrator is running, stopping it..."
......
......@@ -26,7 +26,7 @@ IF (TD_LINUX)
ADD_LIBRARY(taos SHARED ${SRC})
TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m rt cJson)
IF (TD_LINUX_64)
TARGET_LINK_LIBRARIES(taos lua cJson)
TARGET_LINK_LIBRARIES(taos ${LINK_LUA} cJson)
ENDIF ()
SET_TARGET_PROPERTIES(taos PROPERTIES CLEAN_DIRECT_OUTPUT 1)
......@@ -45,13 +45,13 @@ ELSEIF (TD_DARWIN)
# set the static lib name
ADD_LIBRARY(taos_static STATIC ${SRC})
TARGET_LINK_LIBRARIES(taos_static common query trpc tutil pthread m lua cJson)
TARGET_LINK_LIBRARIES(taos_static common query trpc tutil pthread m ${LINK_LUA} cJson)
SET_TARGET_PROPERTIES(taos_static PROPERTIES OUTPUT_NAME "taos_static")
SET_TARGET_PROPERTIES(taos_static PROPERTIES CLEAN_DIRECT_OUTPUT 1)
# generate dynamic library (*.dylib)
ADD_LIBRARY(taos SHARED ${SRC})
TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m lua cJson)
TARGET_LINK_LIBRARIES(taos common query trpc tutil pthread m ${LINK_LUA} cJson)
SET_TARGET_PROPERTIES(taos PROPERTIES CLEAN_DIRECT_OUTPUT 1)
#set version of .dylib
......@@ -77,7 +77,7 @@ ELSEIF (TD_WINDOWS)
IF (NOT TD_GODLL)
SET_TARGET_PROPERTIES(taos PROPERTIES LINK_FLAGS /DEF:${TD_COMMUNITY_DIR}/src/client/src/taos.def)
ENDIF ()
TARGET_LINK_LIBRARIES(taos trpc tutil query lua cJson)
TARGET_LINK_LIBRARIES(taos trpc tutil query ${LINK_LUA} cJson)
ELSEIF (TD_DARWIN)
SET(CMAKE_MACOSX_RPATH 1)
......@@ -85,12 +85,12 @@ ELSEIF (TD_DARWIN)
INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/deps/cJson/inc)
ADD_LIBRARY(taos_static STATIC ${SRC})
TARGET_LINK_LIBRARIES(taos_static query trpc tutil pthread m lua cJson)
TARGET_LINK_LIBRARIES(taos_static query trpc tutil pthread m ${LINK_LUA} cJson)
SET_TARGET_PROPERTIES(taos_static PROPERTIES OUTPUT_NAME "taos_static")
# generate dynamic library (*.dylib)
ADD_LIBRARY(taos SHARED ${SRC})
TARGET_LINK_LIBRARIES(taos query trpc tutil pthread m lua cJson)
TARGET_LINK_LIBRARIES(taos query trpc tutil pthread m ${LINK_LUA} cJson)
SET_TARGET_PROPERTIES(taos PROPERTIES CLEAN_DIRECT_OUTPUT 1)
......
......@@ -2293,7 +2293,15 @@ void tscFirstRoundRetrieveCallback(void* param, TAOS_RES* tres, int numOfRows) {
if (row[i] == NULL) {
setNull(p + offset, pExpr->base.resType, pExpr->base.resBytes);
} else {
memcpy(p + offset, row[i], length[i]);
if(pExpr->base.resType == TSDB_DATA_TYPE_NCHAR){
int32_t output = 0;
bool ret = taosMbsToUcs4(row[i], length[i], p + offset, pExpr->base.resBytes, &output);
if (!ret) {
tscError("stddev convert tag error:%d", ret);
}
}else{
memcpy(p + offset, row[i], length[i]);
}
}
offset += pExpr->base.resBytes;
}
......
......@@ -19,10 +19,14 @@ ENDIF ()
ADD_EXECUTABLE(taosd ${SRC})
IF (TD_LINUX_64 AND JEMALLOC_ENABLED)
ADD_DEPENDENCIES(taosd jemalloc)
ENDIF ()
IF (TD_BUILD_HTTP)
TARGET_LINK_LIBRARIES(taosd mnode monitor http tsdb twal vnode cJson lua lz4 balance sync ${LINK_JEMALLOC})
TARGET_LINK_LIBRARIES(taosd mnode monitor http tsdb twal vnode cJson ${LINK_LUA} lz4 balance sync ${LINK_JEMALLOC})
ELSE ()
TARGET_LINK_LIBRARIES(taosd mnode monitor tsdb twal vnode cJson lua lz4 balance sync ${LINK_JEMALLOC})
TARGET_LINK_LIBRARIES(taosd mnode monitor tsdb twal vnode cJson ${LINK_LUA} lz4 balance sync ${LINK_JEMALLOC})
ENDIF ()
IF (TD_SOMODE_STATIC)
......
......@@ -208,6 +208,8 @@ DLL_EXPORT int taos_load_table_info(TAOS *taos, const char* tableNameList);
DLL_EXPORT TAOS_RES *taos_schemaless_insert(TAOS* taos, char* lines[], int numLines, int protocol, int precision);
DLL_EXPORT int32_t taos_parse_time(char* timestr, int64_t* time, int32_t len, int32_t timePrec, int8_t dayligth);
#ifdef __cplusplus
}
#endif
......
......@@ -3,5 +3,10 @@ PROJECT(TDengine)
ADD_SUBDIRECTORY(shell)
ADD_SUBDIRECTORY(taosdemo)
ADD_SUBDIRECTORY(taosdump)
IF (TD_TAOS_TOOLS)
INCLUDE_DIRECTORIES(${CMAKE_BINARY_DIR}/src/kit/taos_tools/deps/avro/lang/c/src)
ADD_SUBDIRECTORY(taos-tools)
ENDIF ()
ADD_SUBDIRECTORY(taospack)
......@@ -19,9 +19,9 @@ ELSE ()
ENDIF ()
IF (TD_SOMODE_STATIC)
TARGET_LINK_LIBRARIES(shell taos_static cJson lua ${LINK_JEMALLOC})
TARGET_LINK_LIBRARIES(shell taos_static cJson ${LINK_LUA} ${LINK_JEMALLOC})
ELSE ()
TARGET_LINK_LIBRARIES(shell taos cJson lua ${LINK_JEMALLOC})
TARGET_LINK_LIBRARIES(shell taos cJson ${LINK_LUA} ${LINK_JEMALLOC})
ENDIF ()
SET_TARGET_PROPERTIES(shell PROPERTIES OUTPUT_NAME taos)
......
Subproject commit 7ed1e6b485d04cc93c4bb0bc9e844086da1b4714
......@@ -736,10 +736,12 @@ int parse_args(int argc, char *argv[]) {
g_args.columnCount = MAX_NUM_COLUMNS;
}
for (int col = DEFAULT_DATATYPE_NUM; col < g_args.columnCount;
for (int col = 0; col < g_args.columnCount;
col++) {
g_args.dataType[col] = "INT";
g_args.data_type[col] = TSDB_DATA_TYPE_INT;
if (g_args.data_type[col] == TSDB_DATA_TYPE_NULL) {
g_args.dataType[col] = "INT";
g_args.data_type[col] = TSDB_DATA_TYPE_INT;
}
}
for (int col = g_args.columnCount; col < MAX_NUM_COLUMNS; col++) {
g_args.dataType[col] = NULL;
......@@ -830,8 +832,10 @@ int parse_args(int argc, char *argv[]) {
g_args.data_type[0] = TSDB_DATA_TYPE_NULL;
}
g_args.dataType[0] = dataType;
g_args.dataType[1] = NULL;
g_args.data_type[1] = TSDB_DATA_TYPE_NULL;
if (g_args.data_type[1] != TSDB_DATA_TYPE_INT) {
g_args.dataType[1] = NULL;
g_args.data_type[1] = TSDB_DATA_TYPE_NULL;
}
} else {
// more than one col
int index = 0;
......@@ -899,8 +903,10 @@ int parse_args(int argc, char *argv[]) {
token = strsep(&running, ",");
if (index >= MAX_NUM_COLUMNS) break;
}
g_args.dataType[index] = NULL;
g_args.data_type[index] = TSDB_DATA_TYPE_NULL;
if (g_args.data_type[index] != TSDB_DATA_TYPE_INT) {
g_args.dataType[index] = NULL;
g_args.data_type[index] = TSDB_DATA_TYPE_NULL;
}
}
} else if ((0 == strncmp(argv[i], "-w", strlen("-w"))) ||
(0 ==
......
CMAKE_MINIMUM_REQUIRED(VERSION 3.0...3.20)
PROJECT(TDengine)
INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/client/inc)
INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/query/inc)
INCLUDE_DIRECTORIES(${CMAKE_BINARY_DIR}/build/include)
INCLUDE_DIRECTORIES(inc)
AUX_SOURCE_DIRECTORY(. SRC)
FIND_PACKAGE(Git)
IF(GIT_FOUND)
EXECUTE_PROCESS(
COMMAND ${GIT_EXECUTABLE} log --pretty=oneline -n 1 ${CMAKE_CURRENT_LIST_DIR}/taosdump.c
WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
RESULT_VARIABLE RESULT
OUTPUT_VARIABLE TAOSDUMP_COMMIT_SHA1
)
IF ("${TAOSDUMP_COMMIT_SHA1}" STREQUAL "")
SET(TAOSDUMP_COMMIT_SHA1 "unknown")
ELSE ()
STRING(SUBSTRING "${TAOSDUMP_COMMIT_SHA1}" 0 7 TAOSDUMP_COMMIT_SHA1)
STRING(STRIP "${TAOSDUMP_COMMIT_SHA1}" TAOSDUMP_COMMIT_SHA1)
ENDIF ()
EXECUTE_PROCESS(
COMMAND ${GIT_EXECUTABLE} status -z -s ${CMAKE_CURRENT_LIST_DIR}/taosdump.c
RESULT_VARIABLE RESULT
OUTPUT_VARIABLE TAOSDUMP_STATUS
)
IF (TD_LINUX)
EXECUTE_PROCESS(
COMMAND bash "-c" "echo '${TAOSDUMP_STATUS}' | awk '{print $1}'"
RESULT_VARIABLE RESULT
OUTPUT_VARIABLE TAOSDUMP_STATUS
)
ENDIF (TD_LINUX)
ELSE()
MESSAGE("Git not found")
SET(TAOSDUMP_COMMIT_SHA1 "unknown")
SET(TAOSDUMP_STATUS "unknown")
ENDIF (GIT_FOUND)
MESSAGE("taosdump's latest commit in short is:" ${TAOSDUMP_COMMIT_SHA1})
STRING(STRIP "${TAOSDUMP_STATUS}" TAOSDUMP_STATUS)
IF (TAOSDUMP_STATUS MATCHES "M")
SET(TAOSDUMP_STATUS "modified")
ELSE()
SET(TAOSDUMP_STATUS "")
ENDIF ()
MESSAGE("taosdump's status is:" ${TAOSDUMP_STATUS})
ADD_DEFINITIONS(-DTAOSDUMP_COMMIT_SHA1="${TAOSDUMP_COMMIT_SHA1}")
ADD_DEFINITIONS(-DTAOSDUMP_STATUS="${TAOSDUMP_STATUS}")
MESSAGE("TD_VER_NUMBER is:" ${TD_VER_NUMBER})
IF ("${TD_VER_NUMBER}" STREQUAL "")
SET(TD_VERSION_NUMBER "TDengine-version-unknown")
ELSE()
SET(TD_VERSION_NUMBER ${TD_VER_NUMBER})
ENDIF ()
MESSAGE("TD_VERSION_NUMBER is:" ${TD_VERSION_NUMBER})
ADD_DEFINITIONS(-DTD_VERNUMBER="${TD_VERSION_NUMBER}")
LINK_DIRECTORIES(${CMAKE_BINARY_DIR}/build/lib ${CMAKE_BINARY_DIR}/build/lib64)
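# Link taosdump against the TDengine client library; avro and jansson are added only when TD_AVRO_SUPPORT is enabled.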
IF (TD_LINUX)
ADD_EXECUTABLE(taosdump ${SRC})
IF (TD_SOMODE_STATIC)
IF (TD_AVRO_SUPPORT)
TARGET_LINK_LIBRARIES(taosdump taos_static avro jansson)
ELSE ()
TARGET_LINK_LIBRARIES(taosdump taos_static)
ENDIF()
ELSE ()
IF (TD_AVRO_SUPPORT)
TARGET_LINK_LIBRARIES(taosdump taos avro jansson)
ELSE ()
TARGET_LINK_LIBRARIES(taosdump taos)
ENDIF ()
ENDIF ()
ENDIF ()
IF (TD_DARWIN)
# taosdump is not built on macOS because <argp.h> is missing there
# ADD_EXECUTABLE(taosdump ${SRC})
# IF (TD_SOMODE_STATIC)
# TARGET_LINK_LIBRARIES(taosdump taos_static jansson)
# ELSE ()
# TARGET_LINK_LIBRARIES(taosdump taos jansson)
# ENDIF ()
ENDIF ()
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <stdio.h>
#include <pthread.h>
#include <iconv.h>
#include <sys/stat.h>
#include <sys/syscall.h>
#include "os.h"
#include "taos.h"
#include "taosdef.h"
#include "taosmsg.h"
#include "tglobal.h"
#include "tsclient.h"
#include "tsdb.h"
#include "tutil.h"
static char **g_tsDumpInSqlFiles = NULL;
static char g_tsCharset[63] = {0};
#ifdef AVRO_SUPPORT
#include <avro.h>
#include <jansson.h>
static char **g_tsDumpInAvroFiles = NULL;
static void print_json_aux(json_t *element, int indent);
#endif /* AVRO_SUPPORT */
#define TSDB_SUPPORT_NANOSECOND 1
#define MAX_FILE_NAME_LEN 256 // max file name length on linux is 255
#define MAX_PATH_LEN 4096 // max path length on linux is 4095
#define COMMAND_SIZE 65536
#define MAX_RECORDS_PER_REQ 32766
//#define DEFAULT_DUMP_FILE "taosdump.sql"
// for strncpy buffer overflow
#define min(a, b) (((a) < (b)) ? (a) : (b))
static int converStringToReadable(char *str, int size, char *buf, int bufsize);
static int convertNCharToReadable(char *str, int size, char *buf, int bufsize);
typedef struct {
short bytes;
int8_t type;
} SOColInfo;
#define debugPrint(fmt, ...) \
do { if (g_args.debug_print || g_args.verbose_print) \
fprintf(stderr, "DEBG: "fmt, __VA_ARGS__); } while(0)
#define verbosePrint(fmt, ...) \
do { if (g_args.verbose_print) \
fprintf(stderr, "VERB: "fmt, __VA_ARGS__); } while(0)
#define performancePrint(fmt, ...) \
do { if (g_args.performance_print) \
fprintf(stderr, "PERF: "fmt, __VA_ARGS__); } while(0)
#define warnPrint(fmt, ...) \
do { fprintf(stderr, "\033[33m"); \
fprintf(stderr, "WARN: "fmt, __VA_ARGS__); \
fprintf(stderr, "\033[0m"); } while(0)
#define errorPrint(fmt, ...) \
do { fprintf(stderr, "\033[31m"); \
fprintf(stderr, "ERROR: "fmt, __VA_ARGS__); \
fprintf(stderr, "\033[0m"); } while(0)
#define okPrint(fmt, ...) \
do { fprintf(stderr, "\033[32m"); \
fprintf(stderr, "OK: "fmt, __VA_ARGS__); \
fprintf(stderr, "\033[0m"); } while(0)
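// The *Print macros above emit color-coded, leveled messages on stderr; debug output is enabled with -g and verbose output with -gg (see parse_args below).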
static bool isStringNumber(char *input)
{
int len = strlen(input);
if (0 == len) {
return false;
}
for (int i = 0; i < len; i++) {
if (!isdigit(input[i]))
return false;
}
return true;
}
// -------------------------- SHOW DATABASE INTERFACE-----------------------
enum _show_db_index {
TSDB_SHOW_DB_NAME_INDEX,
TSDB_SHOW_DB_CREATED_TIME_INDEX,
TSDB_SHOW_DB_NTABLES_INDEX,
TSDB_SHOW_DB_VGROUPS_INDEX,
TSDB_SHOW_DB_REPLICA_INDEX,
TSDB_SHOW_DB_QUORUM_INDEX,
TSDB_SHOW_DB_DAYS_INDEX,
TSDB_SHOW_DB_KEEP_INDEX,
TSDB_SHOW_DB_CACHE_INDEX,
TSDB_SHOW_DB_BLOCKS_INDEX,
TSDB_SHOW_DB_MINROWS_INDEX,
TSDB_SHOW_DB_MAXROWS_INDEX,
TSDB_SHOW_DB_WALLEVEL_INDEX,
TSDB_SHOW_DB_FSYNC_INDEX,
TSDB_SHOW_DB_COMP_INDEX,
TSDB_SHOW_DB_CACHELAST_INDEX,
TSDB_SHOW_DB_PRECISION_INDEX,
TSDB_SHOW_DB_UPDATE_INDEX,
TSDB_SHOW_DB_STATUS_INDEX,
TSDB_MAX_SHOW_DB
};
// -----------------------------------------SHOW TABLES CONFIGURE -------------------------------------
enum _show_tables_index {
TSDB_SHOW_TABLES_NAME_INDEX,
TSDB_SHOW_TABLES_CREATED_TIME_INDEX,
TSDB_SHOW_TABLES_COLUMNS_INDEX,
TSDB_SHOW_TABLES_METRIC_INDEX,
TSDB_SHOW_TABLES_UID_INDEX,
TSDB_SHOW_TABLES_TID_INDEX,
TSDB_SHOW_TABLES_VGID_INDEX,
TSDB_MAX_SHOW_TABLES
};
// ---------------------------------- DESCRIBE STABLE CONFIGURE ------------------------------
enum _describe_table_index {
TSDB_DESCRIBE_METRIC_FIELD_INDEX,
TSDB_DESCRIBE_METRIC_TYPE_INDEX,
TSDB_DESCRIBE_METRIC_LENGTH_INDEX,
TSDB_DESCRIBE_METRIC_NOTE_INDEX,
TSDB_MAX_DESCRIBE_METRIC
};
#define COL_NOTE_LEN 4
#define COL_TYPEBUF_LEN 16
#define COL_VALUEBUF_LEN 32
typedef struct {
char field[TSDB_COL_NAME_LEN];
char type[COL_TYPEBUF_LEN];
int length;
char note[COL_NOTE_LEN];
char value[COL_VALUEBUF_LEN];
char *var_value;
} ColDes;
typedef struct {
char name[TSDB_TABLE_NAME_LEN];
ColDes cols[];
} TableDef;
extern char version[];
#define DB_PRECISION_LEN 8
#define DB_STATUS_LEN 16
typedef struct {
char name[TSDB_TABLE_NAME_LEN];
bool belongStb;
char stable[TSDB_TABLE_NAME_LEN];
} TableInfo;
typedef struct {
char name[TSDB_TABLE_NAME_LEN];
char stable[TSDB_TABLE_NAME_LEN];
} TableRecord;
typedef struct {
bool isStb;
bool belongStb;
int64_t dumpNtbCount;
TableRecord **dumpNtbInfos;
TableRecord tableRecord;
} TableRecordInfo;
typedef struct {
char name[TSDB_DB_NAME_LEN];
char create_time[32];
int64_t ntables;
int32_t vgroups;
int16_t replica;
int16_t quorum;
int16_t days;
char keeplist[32];
//int16_t daysToKeep;
//int16_t daysToKeep1;
//int16_t daysToKeep2;
int32_t cache; //MB
int32_t blocks;
int32_t minrows;
int32_t maxrows;
int8_t wallevel;
int32_t fsync;
int8_t comp;
int8_t cachelast;
char precision[DB_PRECISION_LEN]; // time resolution
int8_t update;
char status[DB_STATUS_LEN];
int64_t dumpTbCount;
TableRecordInfo **dumpTbInfos;
} SDbInfo;
typedef struct {
pthread_t threadID;
int32_t threadIndex;
char dbName[TSDB_DB_NAME_LEN];
char stbName[TSDB_TABLE_NAME_LEN];
int precision;
TAOS *taos;
int64_t rowsOfDumpOut;
int64_t count;
int64_t from;
} threadInfo;
typedef struct {
int64_t totalRowsOfDumpOut;
int64_t totalChildTblsOfDumpOut;
int32_t totalSuperTblsOfDumpOut;
int32_t totalDatabasesOfDumpOut;
} resultStatistics;
#ifdef AVRO_SUPPORT
enum enAvro_Codec {
AVRO_CODEC_START = 0,
AVRO_CODEC_NULL = AVRO_CODEC_START,
AVRO_CODEC_DEFLATE,
AVRO_CODEC_SNAPPY,
AVRO_CODEC_LZMA,
AVRO_CODEC_UNKNOWN = 255
};
char *g_avro_codec[] = {
"null",
"deflate",
"snappy",
"lzma",
"unknown"
};
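// g_avro_codec maps enAvro_Codec values to the codec names passed to avro_file_writer_create_with_codec() (see writeResultToAvro below).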
/* avro section begin */
#define RECORD_NAME_LEN 64
#define FIELD_NAME_LEN 64
#define TYPE_NAME_LEN 16
typedef struct FieldStruct_S {
char name[FIELD_NAME_LEN];
char type[TYPE_NAME_LEN];
} FieldStruct;
typedef struct RecordSchema_S {
char name[RECORD_NAME_LEN];
char *fields;
int num_fields;
} RecordSchema;
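// RecordSchema.fields is a packed array of FieldStruct entries addressed by byte offset; see parse_json_to_recordschema() and dumpInOneAvroFile().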
/* avro section end */
#endif
static int64_t g_totalDumpOutRows = 0;
SDbInfo **g_dbInfos = NULL;
TableInfo *g_tablesList = NULL;
const char *argp_program_version = version;
#ifdef _TD_PRO_
const char *argp_program_bug_address = "<support@hanatech.com.cn>";
#elif (_TD_KH_ == true)
const char *argp_program_bug_address = "<support@wellintech.com>";
#elif (_TD_JH_ == true)
const char *argp_program_bug_address = "<jhkj@njsteel.com.cn>";
#else
const char *argp_program_bug_address = "<support@taosdata.com>";
#endif
/* Program documentation. */
static char doc[] = "";
/* "Argp example #4 -- a program with somewhat more complicated\ */
/* options\ */
/* \vThis part of the documentation comes *after* the options;\ */
/* note that the text is automatically filled, but it's possible\ */
/* to force a line-break, e.g.\n<-- here."; */
/* A description of the arguments we accept. */
static char args_doc[] = "dbname [tbname ...]\n--databases db1,db2,... \n--all-databases\n-i inpath\n-o outpath";
/* Keys for options without short-options. */
#define OPT_ABORT 1 /* --abort */
/* The options we understand. */
static struct argp_option options[] = {
// connection option
{"host", 'h', "HOST", 0, "Server host dumping data from. Default is localhost.", 0},
{"user", 'u', "USER", 0, "User name used to connect to server. Default is root.", 0},
#ifdef _TD_POWER_
{"password", 'p', 0, 0, "User password to connect to server. Default is powerdb.", 0},
#elif (_TD_TQ_ == true)
{"password", 'p', 0, 0, "User password to connect to server. Default is tqueue.", 0},
#elif (_TD_PRO_ == true)
{"password", 'p', 0, 0, "User password to connect to server. Default is prodb.", 0},
#elif (_TD_KH_ == true)
{"password", 'p', 0, 0, "User password to connect to server. Default is khroot.", 0},
#elif (_TD_JH_ == true)
{"password", 'p', 0, 0, "User password to connect to server. Default is jhdata.", 0},
#else
{"password", 'p', 0, 0, "User password to connect to server. Default is taosdata.", 0},
#endif
{"port", 'P', "PORT", 0, "Port to connect", 0},
{"mysqlFlag", 'q', "MYSQLFLAG", 0, "mysqlFlag, Default is 0", 0},
// input/output file
{"outpath", 'o', "OUTPATH", 0, "Output file path.", 1},
{"inpath", 'i', "INPATH", 0, "Input file path.", 1},
{"resultFile", 'r', "RESULTFILE", 0, "DumpOut/In Result file path and name.", 1},
#ifdef _TD_POWER_
{"config-dir", 'c', "CONFIG_DIR", 0, "Configure directory. Default is /etc/power/power.cfg.", 1},
#elif (_TD_TQ_ == true)
{"config-dir", 'c', "CONFIG_DIR", 0, "Configure directory. Default is /etc/tq/tq.cfg.", 1},
#elif (_TD_PRO_ == true)
{"config-dir", 'c', "CONFIG_DIR", 0, "Configure directory. Default is /etc/ProDB/prodb.cfg.", 1},
#elif (_TD_KH_ == true)
{"config-dir", 'c', "CONFIG_DIR", 0, "Configure directory. Default is /etc/kinghistorian/kinghistorian.cfg.", 1},
#elif (_TD_JH_ == true)
{"config-dir", 'c', "CONFIG_DIR", 0, "Configure directory. Default is /etc/jh_taos/taos.cfg.", 1},
#else
{"config-dir", 'c', "CONFIG_DIR", 0, "Configure directory. Default is /etc/taos/taos.cfg.", 1},
#endif
{"encode", 'e', "ENCODE", 0, "Input file encoding.", 1},
// dump unit options
{"all-databases", 'A', 0, 0, "Dump all databases.", 2},
{"databases", 'D', "DATABASES", 0, "Dump inputed databases. Use comma to seprate databases\' name.", 2},
{"allow-sys", 'a', 0, 0, "Allow to dump sys database", 2},
// dump format options
{"schemaonly", 's', 0, 0, "Only dump schema.", 2},
{"without-property", 'N', 0, 0, "Dump schema without properties.", 2},
#ifdef AVRO_SUPPORT
{"avro", 'v', 0, 0, "Dump apache avro format data file. By default, dump sql command sequence.", 3},
{"avro-codec", 'd', "snappy", 0, "Choose an avro codec among null, deflate, snappy, and lzma.", 4},
#endif
{"start-time", 'S', "START_TIME", 0, "Start time to dump. Either epoch or ISO8601/RFC3339 format is acceptable. ISO8601 format example: 2017-10-01T00:00:00.000+0800 or 2017-10-0100:00:00:000+0800 or '2017-10-01 00:00:00.000+0800'", 8},
{"end-time", 'E', "END_TIME", 0, "End time to dump. Either epoch or ISO8601/RFC3339 format is acceptable. ISO8601 format example: 2017-10-01T00:00:00.000+0800 or 2017-10-0100:00:00.000+0800 or '2017-10-01 00:00:00.000+0800'", 9},
{"data-batch", 'B', "DATA_BATCH", 0, "Number of data point per insert statement. Max value is 32766. Default is 1.", 10},
{"max-sql-len", 'L', "SQL_LEN", 0, "Max length of one sql. Default is 65480.", 10},
{"table-batch", 't', "TABLE_BATCH", 0, "Number of table dumpout into one output file. Default is 1.", 10},
{"thread_num", 'T', "THREAD_NUM", 0, "Number of thread for dump in file. Default is 5.", 10},
{"debug", 'g', 0, 0, "Print debug info.", 15},
{0}
};
#define HUMAN_TIME_LEN 28
/* Used by main to communicate with parse_opt. */
typedef struct arguments {
// connection option
char *host;
char *user;
char password[SHELL_MAX_PASSWORD_LEN];
uint16_t port;
uint16_t mysqlFlag;
// output file
char outpath[MAX_FILE_NAME_LEN];
char inpath[MAX_FILE_NAME_LEN];
// result file
char *resultFile;
char *encode;
// dump unit option
bool all_databases;
bool databases;
char *databasesSeq;
// dump format option
bool schemaonly;
bool with_property;
#ifdef AVRO_SUPPORT
bool avro;
int avro_codec;
#endif
int64_t start_time;
char humanStartTime[HUMAN_TIME_LEN];
int64_t end_time;
char humanEndTime[HUMAN_TIME_LEN];
char precision[8];
int32_t data_batch;
int32_t max_sql_len;
int32_t table_batch; // num of table which will be dump into one output file.
bool allow_sys;
// other options
int32_t thread_num;
int abort;
char **arg_list;
int arg_list_len;
bool isDumpIn;
bool debug_print;
bool verbose_print;
bool performance_print;
int dumpDbCount;
} SArguments;
/* Our argp parser. */
static error_t parse_opt(int key, char *arg, struct argp_state *state);
static struct argp argp = {options, parse_opt, args_doc, doc};
static resultStatistics g_resultStatistics = {0};
static FILE *g_fpOfResult = NULL;
static int g_numOfCores = 1;
struct arguments g_args = {
// connection option
NULL,
"root",
#ifdef _TD_POWER_
"powerdb",
#elif (_TD_TQ_ == true)
"tqueue",
#elif (_TD_PRO_ == true)
"prodb",
#elif (_TD_KH_ == true)
"khroot",
#elif (_TD_JH_ == true)
"jhdata",
#else
"taosdata",
#endif
0,
0,
// outpath and inpath
"",
"",
"./dump_result.txt",
NULL,
// dump unit option
false, // all_databases
false, // databases
NULL, // databasesSeq
// dump format option
false, // schemaonly
true, // with_property
#ifdef AVRO_SUPPORT
false, // avro
AVRO_CODEC_SNAPPY, // avro_codec
#endif
-INT64_MAX + 1, // start_time
{0}, // humanStartTime
INT64_MAX, // end_time
{0}, // humanEndTime
"ms", // precision
1, // data_batch
TSDB_MAX_SQL_LEN, // max_sql_len
1, // table_batch
false, // allow_sys
// other options
8, // thread_num
0, // abort
NULL, // arg_list
0, // arg_list_len
false, // isDumpIn
false, // debug_print
false, // verbose_print
false, // performance_print
0, // dumpDbCount
};
// taosdump commit hash, version, and status default to "unknown" when not provided at build time
#ifndef TAOSDUMP_COMMIT_SHA1
#define TAOSDUMP_COMMIT_SHA1 "unknown"
#endif
#ifndef TD_VERNUMBER
#define TD_VERNUMBER "unknown"
#endif
#ifndef TAOSDUMP_STATUS
#define TAOSDUMP_STATUS "unknown"
#endif
static void printVersion() {
char tdengine_ver[] = TD_VERNUMBER;
char taosdump_ver[] = TAOSDUMP_COMMIT_SHA1;
char taosdump_status[] = TAOSDUMP_STATUS;
if (strlen(taosdump_status) == 0) {
printf("taosdump version %s-%s\n",
tdengine_ver, taosdump_ver);
} else {
printf("taosdump version %s-%s, status:%s\n",
tdengine_ver, taosdump_ver, taosdump_status);
}
}
void errorWrongValue(char *program, char *wrong_arg, char *wrong_value)
{
fprintf(stderr, "%s %s: %s is an invalid value\n", program, wrong_arg, wrong_value);
fprintf(stderr, "Try `taosdump --help' or `taosdump --usage' for more information.\n");
}
static void errorUnrecognized(char *program, char *wrong_arg)
{
fprintf(stderr, "%s: unrecognized options '%s'\n", program, wrong_arg);
fprintf(stderr, "Try `taosdump --help' or `taosdump --usage' for more information.\n");
}
static void errorPrintReqArg(char *program, char *wrong_arg)
{
fprintf(stderr,
"%s: option requires an argument -- '%s'\n",
program, wrong_arg);
fprintf(stderr,
"Try `taosdump --help' or `taosdump --usage' for more information.\n");
}
static void errorPrintReqArg2(char *program, char *wrong_arg)
{
fprintf(stderr,
"%s: option requires a number argument '-%s'\n",
program, wrong_arg);
fprintf(stderr,
"Try `taosdump --help' or `taosdump --usage' for more information.\n");
}
static void errorPrintReqArg3(char *program, char *wrong_arg)
{
fprintf(stderr,
"%s: option '%s' requires an argument\n",
program, wrong_arg);
fprintf(stderr,
"Try `taosdump --help' or `taosdump --usage' for more information.\n");
}
/* Parse a single option. */
static error_t parse_opt(int key, char *arg, struct argp_state *state) {
/* Get the input argument from argp_parse, which we
know is a pointer to our arguments structure. */
wordexp_t full_path;
switch (key) {
// connection option
case 'a':
g_args.allow_sys = true;
break;
case 'h':
g_args.host = arg;
break;
case 'u':
g_args.user = arg;
break;
case 'p':
break;
case 'P':
if (!isStringNumber(arg)) {
errorPrintReqArg2("taosdump", "P");
exit(EXIT_FAILURE);
}
uint64_t port = atoi(arg);
if (port > 65535) {
errorWrongValue("taosdump", "-P or --port", arg);
exit(EXIT_FAILURE);
}
g_args.port = (uint16_t)port;
break;
case 'q':
g_args.mysqlFlag = atoi(arg);
break;
case 'o':
if (wordexp(arg, &full_path, 0) != 0) {
errorPrint("Invalid path %s\n", arg);
return -1;
}
if (full_path.we_wordv[0]) {
tstrncpy(g_args.outpath, full_path.we_wordv[0],
MAX_FILE_NAME_LEN);
wordfree(&full_path);
} else {
errorPrintReqArg3("taosdump", "-o or --outpath");
exit(EXIT_FAILURE);
}
break;
case 'g':
g_args.debug_print = true;
break;
case 'i':
g_args.isDumpIn = true;
if (wordexp(arg, &full_path, 0) != 0) {
errorPrint("Invalid path %s\n", arg);
return -1;
}
if (full_path.we_wordv[0]) {
tstrncpy(g_args.inpath, full_path.we_wordv[0],
MAX_FILE_NAME_LEN);
wordfree(&full_path);
} else {
errorPrintReqArg3("taosdump", "-i or --inpath");
exit(EXIT_FAILURE);
}
break;
#ifdef AVRO_SUPPORT
case 'v':
g_args.avro = true;
break;
case 'd':
for (int i = AVRO_CODEC_START; i < AVRO_CODEC_UNKNOWN; i ++) {
if (0 == strcmp(arg, g_avro_codec[i])) {
g_args.avro_codec = i;
break;
}
}
break;
#endif
case 'r':
g_args.resultFile = arg;
break;
case 'c':
if (0 == strlen(arg)) {
errorPrintReqArg3("taosdump", "-c or --config-dir");
exit(EXIT_FAILURE);
}
if (wordexp(arg, &full_path, 0) != 0) {
errorPrint("Invalid path %s\n", arg);
exit(EXIT_FAILURE);
}
tstrncpy(configDir, full_path.we_wordv[0], MAX_FILE_NAME_LEN);
wordfree(&full_path);
break;
case 'e':
g_args.encode = arg;
break;
// dump unit option
case 'A':
break;
case 'D':
g_args.databases = true;
break;
// dump format option
case 's':
g_args.schemaonly = true;
break;
case 'N':
g_args.with_property = false;
break;
case 'S':
// parse time here.
break;
case 'E':
break;
case 'B':
g_args.data_batch = atoi(arg);
if (g_args.data_batch > MAX_RECORDS_PER_REQ) {
g_args.data_batch = MAX_RECORDS_PER_REQ;
}
break;
case 'L':
{
int32_t len = atoi(arg);
if (len > TSDB_MAX_ALLOWED_SQL_LEN) {
len = TSDB_MAX_ALLOWED_SQL_LEN;
} else if (len < TSDB_MAX_SQL_LEN) {
len = TSDB_MAX_SQL_LEN;
}
g_args.max_sql_len = len;
break;
}
case 't':
g_args.table_batch = atoi(arg);
break;
case 'T':
if (!isStringNumber(arg)) {
errorPrint("%s", "\n\t-T need a number following!\n");
exit(EXIT_FAILURE);
}
g_args.thread_num = atoi(arg);
break;
case OPT_ABORT:
g_args.abort = 1;
break;
case ARGP_KEY_ARG:
if (strlen(state->argv[state->next - 1])) {
g_args.arg_list = &state->argv[state->next - 1];
g_args.arg_list_len = state->argc - state->next + 1;
}
state->next = state->argc;
break;
default:
return ARGP_ERR_UNKNOWN;
}
return 0;
}
static void freeTbDes(TableDef *tableDes)
{
for (int i = 0; i < TSDB_MAX_COLUMNS; i ++) {
if (tableDes->cols[i].var_value) {
free(tableDes->cols[i].var_value);
}
}
free(tableDes);
}
static int queryDbImpl(TAOS *taos, char *command) {
TAOS_RES *res = NULL;
int32_t code = -1;
res = taos_query(taos, command);
code = taos_errno(res);
if (code != 0) {
errorPrint("Failed to run <%s>, reason: %s\n",
command, taos_errstr(res));
taos_free_result(res);
//taos_close(taos);
return code;
}
taos_free_result(res);
return 0;
}
static void parse_args(
int argc, char *argv[], SArguments *arguments) {
for (int i = 1; i < argc; i++) {
if ((strncmp(argv[i], "-p", 2) == 0)
|| (strncmp(argv[i], "--password", 10) == 0)) {
if ((strlen(argv[i]) == 2)
|| (strncmp(argv[i], "--password", 10) == 0)) {
printf("Enter password: ");
taosSetConsoleEcho(false);
if(scanf("%20s", arguments->password) > 1) {
errorPrint("%s() LN%d, password read error!\n", __func__, __LINE__);
}
taosSetConsoleEcho(true);
} else {
tstrncpy(arguments->password, (char *)(argv[i] + 2),
SHELL_MAX_PASSWORD_LEN);
strcpy(argv[i], "-p");
}
} else if (strcmp(argv[i], "-gg") == 0) {
arguments->verbose_print = true;
strcpy(argv[i], "");
} else if (strcmp(argv[i], "-PP") == 0) {
arguments->performance_print = true;
strcpy(argv[i], "");
} else if ((strcmp(argv[i], "-A") == 0)
|| (0 == strncmp(
argv[i], "--all-database",
strlen("--all-database")))) {
g_args.all_databases = true;
} else if ((strncmp(argv[i], "-D", strlen("-D")) == 0)
|| (0 == strncmp(
argv[i], "--database",
strlen("--database")))) {
if (2 == strlen(argv[i])) {
if (argc == i+1) {
errorPrintReqArg(argv[0], "D");
exit(EXIT_FAILURE);
}
arguments->databasesSeq = argv[++i];
} else if (0 == strncmp(argv[i], "--databases=", strlen("--databases="))) {
arguments->databasesSeq = (char *)(argv[i] + strlen("--databases="));
} else if (0 == strncmp(argv[i], "-D", strlen("-D"))) {
arguments->databasesSeq = (char *)(argv[i] + strlen("-D"));
} else if (strlen("--databases") == strlen(argv[i])) {
if (argc == i+1) {
errorPrintReqArg3(argv[0], "--databases");
exit(EXIT_FAILURE);
}
arguments->databasesSeq = argv[++i];
} else {
errorUnrecognized(argv[0], argv[i]);
exit(EXIT_FAILURE);
}
g_args.databases = true;
} else if (0 == strncmp(argv[i], "--version", strlen("--version")) ||
0 == strncmp(argv[i], "-V", strlen("-V"))) {
printVersion();
exit(EXIT_SUCCESS);
} else {
continue;
}
}
}
static void copyHumanTimeToArg(char *timeStr, bool isStartTime)
{
if (isStartTime)
tstrncpy(g_args.humanStartTime, timeStr, HUMAN_TIME_LEN);
else
tstrncpy(g_args.humanEndTime, timeStr, HUMAN_TIME_LEN);
}
static void copyTimestampToArg(char *timeStr, bool isStartTime)
{
if (isStartTime)
g_args.start_time = atol(timeStr);
else
g_args.end_time = atol(timeStr);
}
static void parse_timestamp(
int argc, char *argv[], SArguments *arguments) {
for (int i = 1; i < argc; i++) {
char *tmp;
bool isStartTime = false;
bool isEndTime = false;
if (strcmp(argv[i], "-S") == 0) {
isStartTime = true;
} else if (strcmp(argv[i], "-E") == 0) {
isEndTime = true;
}
if (isStartTime || isEndTime) {
if (NULL == argv[i+1]) {
errorPrint("%s need a valid value following!\n", argv[i]);
exit(-1);
}
tmp = strdup(argv[i+1]);
if (strchr(tmp, ':') && strchr(tmp, '-')) {
copyHumanTimeToArg(tmp, isStartTime);
} else {
copyTimestampToArg(tmp, isStartTime);
}
free(tmp);
}
}
}
static int getPrecisionByString(char *precision)
{
if (0 == strncasecmp(precision,
"ms", 2)) {
return TSDB_TIME_PRECISION_MILLI;
} else if (0 == strncasecmp(precision,
"us", 2)) {
return TSDB_TIME_PRECISION_MICRO;
#if TSDB_SUPPORT_NANOSECOND == 1
} else if (0 == strncasecmp(precision,
"ns", 2)) {
return TSDB_TIME_PRECISION_NANO;
#endif
} else {
errorPrint("Invalid time precision: %s",
precision);
}
return -1;
}
static void freeDbInfos() {
if (g_dbInfos == NULL) return;
for (int i = 0; i < g_args.dumpDbCount; i++)
tfree(g_dbInfos[i]);
tfree(g_dbInfos);
}
// check whether the table is a normal table or a super table
static int getTableRecordInfo(
char *dbName,
char *table, TableRecordInfo *pTableRecordInfo) {
TAOS *taos = taos_connect(g_args.host, g_args.user, g_args.password,
dbName, g_args.port);
if (taos == NULL) {
errorPrint("Failed to connect to server %s\n", g_args.host);
return -1;
}
TAOS_ROW row = NULL;
bool isSet = false;
TAOS_RES *result = NULL;
memset(pTableRecordInfo, 0, sizeof(TableRecordInfo));
char command[COMMAND_SIZE];
sprintf(command, "USE %s", dbName);
result = taos_query(taos, command);
int32_t code = taos_errno(result);
if (code != 0) {
errorPrint("invalid database %s, reason: %s\n",
dbName, taos_errstr(result));
return 0;
}
sprintf(command, "SHOW TABLES LIKE \'%s\'", table);
result = taos_query(taos, command);
code = taos_errno(result);
if (code != 0) {
errorPrint("%s() LN%d, failed to run command <%s>. reason: %s\n",
__func__, __LINE__, command, taos_errstr(result));
taos_free_result(result);
return -1;
}
TAOS_FIELD *fields = taos_fetch_fields(result);
while ((row = taos_fetch_row(result)) != NULL) {
isSet = true;
pTableRecordInfo->isStb = false;
tstrncpy(pTableRecordInfo->tableRecord.name,
(char *)row[TSDB_SHOW_TABLES_NAME_INDEX],
min(TSDB_TABLE_NAME_LEN,
fields[TSDB_SHOW_TABLES_NAME_INDEX].bytes + 1));
if (strlen((char *)row[TSDB_SHOW_TABLES_METRIC_INDEX]) > 0) {
pTableRecordInfo->belongStb = true;
tstrncpy(pTableRecordInfo->tableRecord.stable,
(char *)row[TSDB_SHOW_TABLES_METRIC_INDEX],
min(TSDB_TABLE_NAME_LEN,
fields[TSDB_SHOW_TABLES_METRIC_INDEX].bytes + 1));
} else {
pTableRecordInfo->belongStb = false;
}
break;
}
taos_free_result(result);
result = NULL;
if (isSet) {
return 0;
}
sprintf(command, "SHOW STABLES LIKE \'%s\'", table);
result = taos_query(taos, command);
code = taos_errno(result);
if (code != 0) {
errorPrint("%s() LN%d, failed to run command <%s>. reason: %s\n",
__func__, __LINE__, command, taos_errstr(result));
taos_free_result(result);
return -1;
}
while ((row = taos_fetch_row(result)) != NULL) {
isSet = true;
pTableRecordInfo->isStb = true;
tstrncpy(pTableRecordInfo->tableRecord.stable, table,
TSDB_TABLE_NAME_LEN);
break;
}
taos_free_result(result);
result = NULL;
if (isSet) {
return 0;
}
errorPrint("%s() LN%d, invalid table/stable %s\n",
__func__, __LINE__, table);
return -1;
}
static int inDatabasesSeq(
char *name,
int len)
{
if (strstr(g_args.databasesSeq, ",") == NULL) {
if (0 == strncmp(g_args.databasesSeq, name, len)) {
return 0;
}
} else {
char *dupSeq = strdup(g_args.databasesSeq);
char *running = dupSeq;
char *dbname = strsep(&running, ",");
while (dbname) {
if (0 == strncmp(dbname, name, len)) {
tfree(dupSeq);
return 0;
}
dbname = strsep(&running, ",");
}
}
return -1;
}
static int getDumpDbCount()
{
int count = 0;
TAOS *taos = NULL;
TAOS_RES *result = NULL;
char *command = "show databases";
TAOS_ROW row;
/* Connect to server */
taos = taos_connect(g_args.host, g_args.user, g_args.password,
NULL, g_args.port);
if (NULL == taos) {
errorPrint("Failed to connect to server %s\n", g_args.host);
return 0;
}
result = taos_query(taos, command);
int32_t code = taos_errno(result);
if (0 != code) {
errorPrint("%s() LN%d, failed to run command <%s>, reason: %s\n",
__func__, __LINE__, command, taos_errstr(result));
taos_close(taos);
return 0;
}
TAOS_FIELD *fields = taos_fetch_fields(result);
while ((row = taos_fetch_row(result)) != NULL) {
// skip the system database 'log' unless dumping it is explicitly allowed
if ((strncasecmp(row[TSDB_SHOW_DB_NAME_INDEX], "log",
fields[TSDB_SHOW_DB_NAME_INDEX].bytes) == 0)
&& (!g_args.allow_sys)) {
continue;
}
if (g_args.databases) { // input multi dbs
if (inDatabasesSeq(
(char *)row[TSDB_SHOW_DB_NAME_INDEX],
fields[TSDB_SHOW_DB_NAME_INDEX].bytes) != 0)
continue;
} else if (!g_args.all_databases) { // only input one db
if (strncasecmp(g_args.arg_list[0],
(char *)row[TSDB_SHOW_DB_NAME_INDEX],
fields[TSDB_SHOW_DB_NAME_INDEX].bytes) != 0)
continue;
}
count++;
}
if (count == 0) {
errorPrint("%d databases valid to dump\n", count);
}
taos_close(taos);
return count;
}
static void dumpCreateMTableClause(
char* dbName,
char *stable,
TableDef *tableDes,
int numOfCols,
FILE *fp
) {
int counter = 0;
int count_temp = 0;
char* tmpBuf = (char *)malloc(COMMAND_SIZE);
if (tmpBuf == NULL) {
errorPrint("%s() LN%d, failed to allocate %d memory\n",
__func__, __LINE__, COMMAND_SIZE);
return;
}
char *pstr = NULL;
pstr = tmpBuf;
pstr += sprintf(tmpBuf,
"CREATE TABLE IF NOT EXISTS %s.%s USING %s.%s TAGS (",
dbName, tableDes->name, dbName, stable);
for (; counter < numOfCols; counter++) {
if (tableDes->cols[counter].note[0] != '\0') break;
}
assert(counter < numOfCols);
count_temp = counter;
for (; counter < numOfCols; counter++) {
if (counter != count_temp) {
if (0 == strcasecmp(tableDes->cols[counter].type, "binary")
|| 0 == strcasecmp(tableDes->cols[counter].type, "nchar")) {
//pstr += sprintf(pstr, ", \'%s\'", tableDes->cols[counter].note);
if (tableDes->cols[counter].var_value) {
pstr += sprintf(pstr, ", \'%s\'",
tableDes->cols[counter].var_value);
} else {
pstr += sprintf(pstr, ", \'%s\'", tableDes->cols[counter].value);
}
} else {
pstr += sprintf(pstr, ", \'%s\'", tableDes->cols[counter].value);
}
} else {
if (0 == strcasecmp(tableDes->cols[counter].type, "binary")
|| 0 == strcasecmp(tableDes->cols[counter].type, "nchar")) {
//pstr += sprintf(pstr, "\'%s\'", tableDes->cols[counter].note);
if (tableDes->cols[counter].var_value) {
pstr += sprintf(pstr, "\'%s\'", tableDes->cols[counter].var_value);
} else {
pstr += sprintf(pstr, "\'%s\'", tableDes->cols[counter].value);
}
} else {
pstr += sprintf(pstr, "\'%s\'", tableDes->cols[counter].value);
}
/* pstr += sprintf(pstr, "%s", tableDes->cols[counter].note); */
}
/* if (strcasecmp(tableDes->cols[counter].type, "binary") == 0 || strcasecmp(tableDes->cols[counter].type, "nchar")
* == 0) { */
/* pstr += sprintf(pstr, "(%d)", tableDes->cols[counter].length); */
/* } */
}
pstr += sprintf(pstr, ");");
fprintf(fp, "%s\n", tmpBuf);
free(tmpBuf);
}
static int64_t getNtbCountOfStb(char *dbName, char *stbName)
{
TAOS *taos = taos_connect(g_args.host, g_args.user, g_args.password,
dbName, g_args.port);
if (taos == NULL) {
errorPrint("Failed to connect to server %s\n", g_args.host);
return -1;
}
int64_t count = 0;
char command[COMMAND_SIZE];
sprintf(command, "SELECT COUNT(TBNAME) FROM %s.%s", dbName, stbName);
TAOS_RES *res = taos_query(taos, command);
int32_t code = taos_errno(res);
if (code != 0) {
errorPrint("%s() LN%d, failed to run command <%s>. reason: %s\n",
__func__, __LINE__, command, taos_errstr(res));
taos_free_result(res);
taos_close(taos);
return -1;
}
TAOS_ROW row = NULL;
if ((row = taos_fetch_row(res)) != NULL) {
count = *(int64_t*)row[TSDB_SHOW_TABLES_NAME_INDEX];
}
taos_free_result(res);
taos_close(taos);
return count;
}
static int getTableDes(
TAOS *taos,
char* dbName, char *table,
TableDef *tableDes, bool isSuperTable) {
TAOS_ROW row = NULL;
TAOS_RES* res = NULL;
int colCount = 0;
char sqlstr[COMMAND_SIZE];
sprintf(sqlstr, "describe %s.%s;", dbName, table);
res = taos_query(taos, sqlstr);
int32_t code = taos_errno(res);
if (code != 0) {
errorPrint("%s() LN%d, failed to run command <%s>, reason: %s\n",
__func__, __LINE__, sqlstr, taos_errstr(res));
taos_free_result(res);
return -1;
}
TAOS_FIELD *fields = taos_fetch_fields(res);
tstrncpy(tableDes->name, table, TSDB_TABLE_NAME_LEN);
while ((row = taos_fetch_row(res)) != NULL) {
tstrncpy(tableDes->cols[colCount].field,
(char *)row[TSDB_DESCRIBE_METRIC_FIELD_INDEX],
min(TSDB_COL_NAME_LEN,
fields[TSDB_DESCRIBE_METRIC_FIELD_INDEX].bytes + 1));
tstrncpy(tableDes->cols[colCount].type,
(char *)row[TSDB_DESCRIBE_METRIC_TYPE_INDEX],
min(16, fields[TSDB_DESCRIBE_METRIC_TYPE_INDEX].bytes + 1));
tableDes->cols[colCount].length =
*((int *)row[TSDB_DESCRIBE_METRIC_LENGTH_INDEX]);
tstrncpy(tableDes->cols[colCount].note,
(char *)row[TSDB_DESCRIBE_METRIC_NOTE_INDEX],
min(COL_NOTE_LEN,
fields[TSDB_DESCRIBE_METRIC_NOTE_INDEX].bytes + 1));
colCount++;
}
taos_free_result(res);
res = NULL;
if (isSuperTable) {
return colCount;
}
// if the child table has tags, run 'SELECT tagName FROM table' to fetch each tag value
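// e.g. (hypothetical names) SELECT location FROM power.d1001 returns the tag value bound to the child table d1001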
for (int i = 0 ; i < colCount; i++) {
if (strcmp(tableDes->cols[i].note, "TAG") != 0) continue;
sprintf(sqlstr, "select %s from %s.%s",
tableDes->cols[i].field, dbName, table);
res = taos_query(taos, sqlstr);
code = taos_errno(res);
if (code != 0) {
errorPrint("%s() LN%d, failed to run command <%s>, reason: %s\n",
__func__, __LINE__, sqlstr, taos_errstr(res));
taos_free_result(res);
taos_close(taos);
return -1;
}
fields = taos_fetch_fields(res);
row = taos_fetch_row(res);
if (NULL == row) {
errorPrint("%s() LN%d, fetch failed to run command <%s>, reason:%s\n",
__func__, __LINE__, sqlstr, taos_errstr(res));
taos_free_result(res);
taos_close(taos);
return -1;
}
if (row[TSDB_SHOW_TABLES_NAME_INDEX] == NULL) {
sprintf(tableDes->cols[i].note, "%s", "NUL");
sprintf(tableDes->cols[i].value, "%s", "NULL");
taos_free_result(res);
res = NULL;
continue;
}
int32_t* length = taos_fetch_lengths(res);
switch (fields[0].type) {
case TSDB_DATA_TYPE_BOOL:
sprintf(tableDes->cols[i].value, "%d",
((((int32_t)(*((char *)
row[TSDB_SHOW_TABLES_NAME_INDEX])))==1)
?1:0));
break;
case TSDB_DATA_TYPE_TINYINT:
sprintf(tableDes->cols[i].value, "%d",
*((int8_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]));
break;
case TSDB_DATA_TYPE_SMALLINT:
sprintf(tableDes->cols[i].value, "%d",
*((int16_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]));
break;
case TSDB_DATA_TYPE_INT:
sprintf(tableDes->cols[i].value, "%d",
*((int32_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]));
break;
case TSDB_DATA_TYPE_BIGINT:
sprintf(tableDes->cols[i].value, "%" PRId64 "",
*((int64_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]));
break;
case TSDB_DATA_TYPE_FLOAT:
sprintf(tableDes->cols[i].value, "%f",
GET_FLOAT_VAL(row[TSDB_SHOW_TABLES_NAME_INDEX]));
break;
case TSDB_DATA_TYPE_DOUBLE:
sprintf(tableDes->cols[i].value, "%f",
GET_DOUBLE_VAL(row[TSDB_SHOW_TABLES_NAME_INDEX]));
break;
case TSDB_DATA_TYPE_BINARY:
memset(tableDes->cols[i].value, 0,
sizeof(tableDes->cols[i].value));
int len = strlen((char *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
// FIXME for long value
if (len < (COL_VALUEBUF_LEN - 2)) {
converStringToReadable(
(char *)row[TSDB_SHOW_TABLES_NAME_INDEX],
length[0],
tableDes->cols[i].value,
len);
} else {
tableDes->cols[i].var_value = calloc(1, len * 2);
if (tableDes->cols[i].var_value == NULL) {
errorPrint("%s() LN%d, memory alalocation failed!\n",
__func__, __LINE__);
taos_free_result(res);
return -1;
}
converStringToReadable((char *)row[0],
length[0],
(char *)(tableDes->cols[i].var_value), len);
}
break;
case TSDB_DATA_TYPE_NCHAR:
memset(tableDes->cols[i].value, 0,
sizeof(tableDes->cols[i].value));
int nlen = strlen((char *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
if (nlen < (COL_VALUEBUF_LEN-2)) {
char tbuf[COL_VALUEBUF_LEN-2]; // reserve 2 bytes for the surrounding quotes
convertNCharToReadable(
(char *)row[TSDB_SHOW_TABLES_NAME_INDEX],
length[0], tbuf, COL_VALUEBUF_LEN-2);
sprintf(tableDes->cols[i].value, "%s", tbuf);
} else {
tableDes->cols[i].var_value = calloc(1, nlen * 4);
if (tableDes->cols[i].var_value == NULL) {
errorPrint("%s() LN%d, memory alalocation failed!\n",
__func__, __LINE__);
taos_free_result(res);
return -1;
}
converStringToReadable(
(char *)row[TSDB_SHOW_TABLES_NAME_INDEX],
length[0],
(char *)(tableDes->cols[i].var_value), nlen);
}
break;
case TSDB_DATA_TYPE_TIMESTAMP:
sprintf(tableDes->cols[i].value, "%" PRId64 "",
*(int64_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
#if 0
if (!g_args.mysqlFlag) {
sprintf(tableDes->cols[i].value, "%" PRId64 "",
*(int64_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
} else {
char buf[64] = "\0";
int64_t ts = *((int64_t *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
time_t tt = (time_t)(ts / 1000);
struct tm *ptm = localtime(&tt);
strftime(buf, 64, "%y-%m-%d %H:%M:%S", ptm);
sprintf(tableDes->cols[i].value, "\'%s.%03d\'", buf,
(int)(ts % 1000));
}
#endif
break;
default:
break;
}
taos_free_result(res);
}
return colCount;
}
static int dumpCreateTableClause(TableDef *tableDes, int numOfCols,
FILE *fp, char* dbName) {
int counter = 0;
int count_temp = 0;
char sqlstr[COMMAND_SIZE];
char* pstr = sqlstr;
pstr += sprintf(sqlstr, "CREATE TABLE IF NOT EXISTS %s.%s",
dbName, tableDes->name);
for (; counter < numOfCols; counter++) {
if (tableDes->cols[counter].note[0] != '\0') break;
if (counter == 0) {
pstr += sprintf(pstr, " (%s %s",
tableDes->cols[counter].field, tableDes->cols[counter].type);
} else {
pstr += sprintf(pstr, ", %s %s",
tableDes->cols[counter].field, tableDes->cols[counter].type);
}
if (0 == strcasecmp(tableDes->cols[counter].type, "binary")
|| 0 == strcasecmp(tableDes->cols[counter].type, "nchar")) {
pstr += sprintf(pstr, "(%d)", tableDes->cols[counter].length);
}
}
count_temp = counter;
for (; counter < numOfCols; counter++) {
if (counter == count_temp) {
pstr += sprintf(pstr, ") TAGS (%s %s",
tableDes->cols[counter].field, tableDes->cols[counter].type);
} else {
pstr += sprintf(pstr, ", %s %s",
tableDes->cols[counter].field, tableDes->cols[counter].type);
}
if (0 == strcasecmp(tableDes->cols[counter].type, "binary")
|| 0 == strcasecmp(tableDes->cols[counter].type, "nchar")) {
pstr += sprintf(pstr, "(%d)", tableDes->cols[counter].length);
}
}
pstr += sprintf(pstr, ");");
debugPrint("%s() LN%d, write string: %s\n", __func__, __LINE__, sqlstr);
return fprintf(fp, "%s\n\n", sqlstr);
}
static int dumpStableClasuse(TAOS *taos, SDbInfo *dbInfo, char *stbName, FILE *fp)
{
uint64_t sizeOfTableDes =
(uint64_t)(sizeof(TableDef) + sizeof(ColDes) * TSDB_MAX_COLUMNS);
TableDef *tableDes = (TableDef *)calloc(1, sizeOfTableDes);
if (NULL == tableDes) {
errorPrint("%s() LN%d, failed to allocate %"PRIu64" memory\n",
__func__, __LINE__, sizeOfTableDes);
exit(-1);
}
int colCount = getTableDes(taos, dbInfo->name,
stbName, tableDes, true);
if (colCount < 0) {
free(tableDes);
errorPrint("%s() LN%d, failed to get stable[%s] schema\n",
__func__, __LINE__, stbName);
exit(-1);
}
dumpCreateTableClause(tableDes, colCount, fp, dbInfo->name);
free(tableDes);
return 0;
}
static int64_t dumpCreateSTableClauseOfDb(
SDbInfo *dbInfo, FILE *fp)
{
TAOS *taos = taos_connect(g_args.host,
g_args.user, g_args.password, dbInfo->name, g_args.port);
if (NULL == taos) {
errorPrint(
"Failed to connect to server %s by specified database %s\n",
g_args.host, dbInfo->name);
return 0;
}
TAOS_ROW row;
char command[COMMAND_SIZE] = {0};
sprintf(command, "SHOW %s.STABLES", dbInfo->name);
TAOS_RES* res = taos_query(taos, command);
int32_t code = taos_errno(res);
if (code != 0) {
errorPrint("%s() LN%d, failed to run command <%s>, reason: %s\n",
__func__, __LINE__, command, taos_errstr(res));
taos_free_result(res);
taos_close(taos);
exit(-1);
}
int64_t superTblCnt = 0;
while ((row = taos_fetch_row(res)) != NULL) {
if (0 == dumpStableClasuse(taos, dbInfo,
row[TSDB_SHOW_TABLES_NAME_INDEX], fp)) {
superTblCnt ++;
}
}
taos_free_result(res);
fprintf(g_fpOfResult,
"# super table counter: %"PRId64"\n",
superTblCnt);
g_resultStatistics.totalSuperTblsOfDumpOut += superTblCnt;
taos_close(taos);
return superTblCnt;
}
static void dumpCreateDbClause(
SDbInfo *dbInfo, bool isDumpProperty, FILE *fp) {
char sqlstr[TSDB_MAX_SQL_LEN] = {0};
char *pstr = sqlstr;
pstr += sprintf(pstr, "CREATE DATABASE IF NOT EXISTS %s ", dbInfo->name);
if (isDumpProperty) {
pstr += sprintf(pstr,
"REPLICA %d QUORUM %d DAYS %d KEEP %s CACHE %d BLOCKS %d MINROWS %d MAXROWS %d FSYNC %d CACHELAST %d COMP %d PRECISION '%s' UPDATE %d",
dbInfo->replica, dbInfo->quorum, dbInfo->days,
dbInfo->keeplist,
dbInfo->cache,
dbInfo->blocks, dbInfo->minrows, dbInfo->maxrows,
dbInfo->fsync,
dbInfo->cachelast,
dbInfo->comp, dbInfo->precision, dbInfo->update);
}
pstr += sprintf(pstr, ";");
fprintf(fp, "%s\n\n", sqlstr);
}
static FILE* openDumpInFile(char *fptr) {
wordexp_t full_path;
if (wordexp(fptr, &full_path, 0) != 0) {
errorPrint("illegal file name: %s\n", fptr);
return NULL;
}
char *fname = full_path.we_wordv[0];
FILE *f = NULL;
if ((fname) && (strlen(fname) > 0)) {
f = fopen(fname, "r");
if (f == NULL) {
errorPrint("%s() LN%d, failed to open file %s\n",
__func__, __LINE__, fname);
}
}
wordfree(&full_path);
return f;
}
static uint64_t getFilesNum(char *ext)
{
uint64_t count = 0;
int namelen, extlen;
struct dirent *pDirent;
DIR *pDir;
extlen = strlen(ext);
bool isSql = (0 == strcmp(ext, "sql"));
pDir = opendir(g_args.inpath);
if (pDir != NULL) {
while ((pDirent = readdir(pDir)) != NULL) {
namelen = strlen (pDirent->d_name);
if (namelen > extlen) {
if (strcmp (ext, &(pDirent->d_name[namelen - extlen])) == 0) {
if (isSql) {
if (0 == strcmp(pDirent->d_name, "dbs.sql")) {
continue;
}
}
verbosePrint("%s found\n", pDirent->d_name);
count ++;
}
}
}
closedir (pDir);
}
debugPrint("%"PRId64" .%s files found!\n", count, ext);
return count;
}
static void freeFileList(char **fileList, int64_t count)
{
for (int64_t i = 0; i < count; i++) {
tfree(fileList[i]);
}
tfree(fileList);
}
static void createDumpinList(char *ext, int64_t count)
{
bool isSql = (0 == strcmp(ext, "sql"));
if (isSql) {
g_tsDumpInSqlFiles = (char **)calloc(count, sizeof(char *));
assert(g_tsDumpInSqlFiles);
for (int64_t i = 0; i < count; i++) {
g_tsDumpInSqlFiles[i] = calloc(1, MAX_FILE_NAME_LEN);
assert(g_tsDumpInSqlFiles[i]);
}
}
#ifdef AVRO_SUPPORT
else {
g_tsDumpInAvroFiles = (char **)calloc(count, sizeof(char *));
assert(g_tsDumpInAvroFiles);
for (int64_t i = 0; i < count; i++) {
g_tsDumpInAvroFiles[i] = calloc(1, MAX_FILE_NAME_LEN);
assert(g_tsDumpInAvroFiles[i]);
}
}
#endif
int namelen, extlen;
struct dirent *pDirent;
DIR *pDir;
extlen = strlen(ext);
count = 0;
pDir = opendir(g_args.inpath);
if (pDir != NULL) {
while ((pDirent = readdir(pDir)) != NULL) {
namelen = strlen (pDirent->d_name);
if (namelen > extlen) {
if (strcmp (ext, &(pDirent->d_name[namelen - extlen])) == 0) {
verbosePrint("%s found\n", pDirent->d_name);
if (isSql) {
if (0 == strcmp(pDirent->d_name, "dbs.sql")) {
continue;
}
strncpy(g_tsDumpInSqlFiles[count++], pDirent->d_name, MAX_FILE_NAME_LEN);
}
#ifdef AVRO_SUPPORT
else {
strncpy(g_tsDumpInAvroFiles[count++], pDirent->d_name, MAX_FILE_NAME_LEN);
}
#endif
}
}
}
closedir (pDir);
}
debugPrint("%"PRId64" .%s files filled to list!\n", count, ext);
}
#ifdef AVRO_SUPPORT
static int convertTbDesToJson(
char *dbName, char *tbName, TableDef *tableDes, int colCount,
char **jsonSchema)
{
// {
// "type": "record",
// "name": "dbname.tbname",
// "fields": [
// {
// "name": "col0 name",
// "type": "long"
// },
// {
// "name": "col1 name",
// "type": "int"
// },
// {
// "name": "col2 name",
// "type": "float"
// },
// {
// "name": "col3 name",
// "type": "boolean"
// },
// ...
// {
// "name": "coln name",
// "type": "string"
// }
// ]
// }
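// The buffer allocated below is a rough upper bound: fixed JSON framing plus per-column space for each field's name and type.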
*jsonSchema = (char *)calloc(1,
17 + TSDB_DB_NAME_LEN /* dbname section */
+ 17 /* type: record */
+ 11 + TSDB_TABLE_NAME_LEN /* tbname section */
+ 10 /* fields section */
+ (TSDB_COL_NAME_LEN + 11 + 16) * colCount + 4); /* fields section */
if (*jsonSchema == NULL) {
errorPrint("%s() LN%d, memory allocation failed!\n", __func__, __LINE__);
return -1;
}
char *pstr = *jsonSchema;
pstr += sprintf(pstr,
"{\"type\": \"record\", \"name\": \"%s.%s\", \"fields\": [",
dbName, tbName);
for (int i = 0; i < colCount; i ++) {
if (0 == i) {
pstr += sprintf(pstr,
"{\"name\": \"%s\", \"type\": \"%s\"",
tableDes->cols[i].field, "long");
} else {
if (strcasecmp(tableDes->cols[i].type, "binary") == 0) {
pstr += sprintf(pstr,
"{\"name\": \"%s\", \"type\": \"%s\"",
tableDes->cols[i].field, "string");
} else if (strcasecmp(tableDes->cols[i].type, "nchar") == 0) {
pstr += sprintf(pstr,
"{\"name\": \"%s\", \"type\": \"%s\"",
tableDes->cols[i].field, "bytes");
} else if (strcasecmp(tableDes->cols[i].type, "bool") == 0) {
pstr += sprintf(pstr,
"{\"name\": \"%s\", \"type\": \"%s\"",
tableDes->cols[i].field, "boolean");
} else if (strcasecmp(tableDes->cols[i].type, "tinyint") == 0) {
pstr += sprintf(pstr,
"{\"name\": \"%s\", \"type\": \"%s\"",
tableDes->cols[i].field, "int");
} else if (strcasecmp(tableDes->cols[i].type, "smallint") == 0) {
pstr += sprintf(pstr,
"{\"name\": \"%s\", \"type\": \"%s\"",
tableDes->cols[i].field, "int");
} else if (strcasecmp(tableDes->cols[i].type, "bigint") == 0) {
pstr += sprintf(pstr,
"{\"name\": \"%s\", \"type\": \"%s\"",
tableDes->cols[i].field, "long");
} else if (strcasecmp(tableDes->cols[i].type, "timestamp") == 0) {
pstr += sprintf(pstr,
"{\"name\": \"%s\", \"type\": \"%s\"",
tableDes->cols[i].field, "long");
} else {
pstr += sprintf(pstr,
"{\"name\": \"%s\", \"type\": \"%s\"",
tableDes->cols[i].field,
strtolower(tableDes->cols[i].type, tableDes->cols[i].type));
}
}
if ((i != (colCount -1))
&& (strcmp(tableDes->cols[i + 1].note, "TAG") != 0)) {
pstr += sprintf(pstr, "},");
} else {
pstr += sprintf(pstr, "}");
break;
}
}
pstr += sprintf(pstr, "]}");
debugPrint("%s() LN%d, jsonSchema:\n %s\n", __func__, __LINE__, *jsonSchema);
return 0;
}
static void print_json_indent(int indent) {
int i;
for (i = 0; i < indent; i++) {
putchar(' ');
}
}
const char *json_plural(size_t count) { return count == 1 ? "" : "s"; }
static void print_json_object(json_t *element, int indent) {
size_t size;
const char *key;
json_t *value;
print_json_indent(indent);
size = json_object_size(element);
printf("JSON Object of %lld pair%s:\n", (long long)size, json_plural(size));
json_object_foreach(element, key, value) {
print_json_indent(indent + 2);
printf("JSON Key: \"%s\"\n", key);
print_json_aux(value, indent + 2);
}
}
static void print_json_array(json_t *element, int indent) {
size_t i;
size_t size = json_array_size(element);
print_json_indent(indent);
printf("JSON Array of %lld element%s:\n", (long long)size, json_plural(size));
for (i = 0; i < size; i++) {
print_json_aux(json_array_get(element, i), indent + 2);
}
}
static void print_json_string(json_t *element, int indent) {
print_json_indent(indent);
printf("JSON String: \"%s\"\n", json_string_value(element));
}
static void print_json_integer(json_t *element, int indent) {
print_json_indent(indent);
printf("JSON Integer: \"%" JSON_INTEGER_FORMAT "\"\n", json_integer_value(element));
}
static void print_json_real(json_t *element, int indent) {
print_json_indent(indent);
printf("JSON Real: %f\n", json_real_value(element));
}
static void print_json_true(json_t *element, int indent) {
(void)element;
print_json_indent(indent);
printf("JSON True\n");
}
static void print_json_false(json_t *element, int indent) {
(void)element;
print_json_indent(indent);
printf("JSON False\n");
}
static void print_json_null(json_t *element, int indent) {
(void)element;
print_json_indent(indent);
printf("JSON Null\n");
}
static void print_json_aux(json_t *element, int indent)
{
switch(json_typeof(element)) {
case JSON_OBJECT:
print_json_object(element, indent);
break;
case JSON_ARRAY:
print_json_array(element, indent);
break;
case JSON_STRING:
print_json_string(element, indent);
break;
case JSON_INTEGER:
print_json_integer(element, indent);
break;
case JSON_REAL:
print_json_real(element, indent);
break;
case JSON_TRUE:
print_json_true(element, indent);
break;
case JSON_FALSE:
print_json_false(element, indent);
break;
case JSON_NULL:
print_json_null(element, indent);
break;
default:
fprintf(stderr, "unrecongnized JSON type %d\n", json_typeof(element));
}
}
static void print_json(json_t *root) { print_json_aux(root, 0); }
static json_t *load_json(char *jsonbuf)
{
json_t *root;
json_error_t error;
root = json_loads(jsonbuf, 0, &error);
if (root) {
return root;
} else {
fprintf(stderr, "json error on line %d: %s\n", error.line, error.text);
return NULL;
}
}
static RecordSchema *parse_json_to_recordschema(json_t *element)
{
RecordSchema *recordSchema = malloc(sizeof(RecordSchema));
assert(recordSchema);
if (JSON_OBJECT != json_typeof(element)) {
fprintf(stderr, "%s() LN%d, json passed is not an object\n",
__func__, __LINE__);
return NULL;
}
const char *key;
json_t *value;
json_object_foreach(element, key, value) {
if (0 == strcmp(key, "name")) {
tstrncpy(recordSchema->name, json_string_value(value), RECORD_NAME_LEN-1);
} else if (0 == strcmp(key, "fields")) {
if (JSON_ARRAY == json_typeof(value)) {
size_t i;
size_t size = json_array_size(value);
verbosePrint("%s() LN%d, JSON Array of %lld element%s:\n",
__func__, __LINE__,
(long long)size, json_plural(size));
recordSchema->num_fields = size;
recordSchema->fields = malloc(sizeof(FieldStruct) * size);
assert(recordSchema->fields);
for (i = 0; i < size; i++) {
FieldStruct *field = (FieldStruct *)(recordSchema->fields + sizeof(FieldStruct) * i);
json_t *arr_element = json_array_get(value, i);
const char *ele_key;
json_t *ele_value;
json_object_foreach(arr_element, ele_key, ele_value) {
if (0 == strcmp(ele_key, "name")) {
tstrncpy(field->name, json_string_value(ele_value), FIELD_NAME_LEN-1);
} else if (0 == strcmp(ele_key, "type")) {
if (JSON_STRING == json_typeof(ele_value)) {
tstrncpy(field->type, json_string_value(ele_value), TYPE_NAME_LEN-1);
} else if (JSON_OBJECT == json_typeof(ele_value)) {
const char *obj_key;
json_t *obj_value;
json_object_foreach(ele_value, obj_key, obj_value) {
if (0 == strcmp(obj_key, "type")) {
if (JSON_STRING == json_typeof(obj_value)) {
tstrncpy(field->type,
json_string_value(obj_value), TYPE_NAME_LEN-1);
}
}
}
}
}
}
}
} else {
fprintf(stderr, "%s() LN%d, fields have no array\n",
__func__, __LINE__);
return NULL;
}
break;
}
}
return recordSchema;
}
static void freeRecordSchema(RecordSchema *recordSchema)
{
if (recordSchema) {
if (recordSchema->fields) {
free(recordSchema->fields);
}
free(recordSchema);
}
}
static int64_t writeResultToAvro(
char *avroFilename,
char *jsonSchema,
TAOS_RES *res)
{
avro_schema_t schema;
if (avro_schema_from_json_length(jsonSchema, strlen(jsonSchema), &schema)) {
errorPrint("%s() LN%d, Unable to parse:\n%s \nto schema\nerror message: %s\n",
__func__, __LINE__, jsonSchema, avro_strerror());
exit(EXIT_FAILURE);
}
json_t *json_root = load_json(jsonSchema);
debugPrint("\n%s() LN%d\n *** Schema parsed:\n", __func__, __LINE__);
RecordSchema *recordSchema;
if (json_root) {
if (g_args.debug_print || g_args.verbose_print) {
print_json(json_root);
}
recordSchema = parse_json_to_recordschema(json_root);
if (NULL == recordSchema) {
fprintf(stderr, "Failed to parse json to recordschema\n");
exit(EXIT_FAILURE);
}
json_decref(json_root);
} else {
errorPrint("json:\n%s\n can't be parsed by jansson\n", jsonSchema);
exit(EXIT_FAILURE);
}
avro_file_writer_t db;
int rval = avro_file_writer_create_with_codec
(avroFilename, schema, &db, g_avro_codec[g_args.avro_codec], 0);
if (rval) {
errorPrint("There was an error creating %s. reason: %s\n",
avroFilename, avro_strerror());
exit(EXIT_FAILURE);
}
TAOS_ROW row = NULL;
int numFields = taos_field_count(res);
assert(numFields > 0);
TAOS_FIELD *fields = taos_fetch_fields(res);
avro_value_iface_t *wface =
avro_generic_class_from_schema(schema);
avro_value_t record;
avro_generic_value_new(wface, &record);
int64_t count = 0;
while ((row = taos_fetch_row(res)) != NULL) {
avro_value_t value;
for (int col = 0; col < numFields; col++) {
if (0 != avro_value_get_by_name(
&record, fields[col].name, &value, NULL)) {
errorPrint("%s() LN%d, avro_value_get_by_name(..%s..) failed",
__func__, __LINE__, fields[col].name);
continue;
}
int len;
switch (fields[col].type) {
case TSDB_DATA_TYPE_BOOL:
if (NULL == row[col]) {
avro_value_set_int(&value, TSDB_DATA_BOOL_NULL);
} else {
avro_value_set_boolean(&value,
((((int32_t)(*((char *)row[col])))==1)?1:0));
}
break;
case TSDB_DATA_TYPE_TINYINT:
if (NULL == row[col]) {
avro_value_set_int(&value, TSDB_DATA_TINYINT_NULL);
} else {
avro_value_set_int(&value, *((int8_t *)row[col]));
}
break;
case TSDB_DATA_TYPE_SMALLINT:
if (NULL == row[col]) {
avro_value_set_int(&value, TSDB_DATA_SMALLINT_NULL);
} else {
avro_value_set_int(&value, *((int16_t *)row[col]));
}
break;
case TSDB_DATA_TYPE_INT:
if (NULL == row[col]) {
avro_value_set_int(&value, TSDB_DATA_INT_NULL);
} else {
avro_value_set_int(&value, *((int32_t *)row[col]));
}
break;
case TSDB_DATA_TYPE_BIGINT:
if (NULL == row[col]) {
avro_value_set_long(&value, TSDB_DATA_BIGINT_NULL);
} else {
avro_value_set_long(&value, *((int64_t *)row[col]));
}
break;
case TSDB_DATA_TYPE_FLOAT:
if (NULL == row[col]) {
avro_value_set_float(&value, TSDB_DATA_FLOAT_NULL);
} else {
avro_value_set_float(&value, GET_FLOAT_VAL(row[col]));
}
break;
case TSDB_DATA_TYPE_DOUBLE:
if (NULL == row[col]) {
avro_value_set_double(&value, TSDB_DATA_DOUBLE_NULL);
} else {
avro_value_set_double(&value, GET_DOUBLE_VAL(row[col]));
}
break;
case TSDB_DATA_TYPE_BINARY:
if (NULL == row[col]) {
avro_value_set_string(&value,
(char *)NULL);
} else {
avro_value_set_string(&value, (char *)row[col]);
}
break;
case TSDB_DATA_TYPE_NCHAR:
if (NULL == row[col]) {
avro_value_set_bytes(&value,
(void*)NULL,0);
} else {
len = strlen((char*)row[col]);
avro_value_set_bytes(&value, (void*)(row[col]),len);
}
break;
case TSDB_DATA_TYPE_TIMESTAMP:
if (NULL == row[col]) {
avro_value_set_long(&value, TSDB_DATA_BIGINT_NULL);
} else {
avro_value_set_long(&value, *((int64_t *)row[col]));
}
break;
default:
break;
}
}
if (0 != avro_file_writer_append_value(db, &record)) {
errorPrint("%s() LN%d, Unable to write record to file. Message: %s\n",
__func__, __LINE__,
avro_strerror());
} else {
count ++;
}
}
avro_value_decref(&record);
avro_value_iface_decref(wface);
freeRecordSchema(recordSchema);
avro_file_writer_close(db);
avro_schema_decref(schema);
return count;
}
void freeBindArray(char *bindArray, int onlyCol)
{
TAOS_BIND *bind;
for (int j = 0; j < onlyCol; j++) {
bind = (TAOS_BIND *)((char *)bindArray + (sizeof(TAOS_BIND) * j));
if ((TSDB_DATA_TYPE_BINARY != bind->buffer_type)
&& (TSDB_DATA_TYPE_NCHAR != bind->buffer_type)) {
tfree(bind->buffer);
}
}
}
static int dumpInOneAvroFile(char* fcharset,
char* encode, char *avroFilepath)
{
debugPrint("avroFilepath: %s\n", avroFilepath);
avro_file_reader_t reader;
if(avro_file_reader(avroFilepath, &reader)) {
fprintf(stderr, "Unable to open avro file %s: %s\n",
avroFilepath, avro_strerror());
return -1;
}
int buf_len = TSDB_MAX_COLUMNS * (TSDB_COL_NAME_LEN + 11 + 16) + 4;
char *jsonbuf = calloc(1, buf_len);
assert(jsonbuf);
avro_writer_t jsonwriter = avro_writer_memory(jsonbuf, buf_len);
avro_schema_t schema;
schema = avro_file_reader_get_writer_schema(reader);
avro_schema_to_json(schema, jsonwriter);
if (0 == strlen(jsonbuf)) {
errorPrint("Failed to parse avro file: %s schema. reason: %s\n",
avroFilepath, avro_strerror());
avro_schema_decref(schema);
avro_file_reader_close(reader);
avro_writer_free(jsonwriter);
return -1;
}
debugPrint("Schema:\n %s\n", jsonbuf);
json_t *json_root = load_json(jsonbuf);
debugPrint("\n%s() LN%d\n *** Schema parsed:\n", __func__, __LINE__);
if (g_args.debug_print) {
print_json(json_root);
}
const char *namespace = avro_schema_namespace((const avro_schema_t)schema);
debugPrint("Namespace: %s\n", namespace);
TAOS *taos = taos_connect(g_args.host, g_args.user, g_args.password,
namespace, g_args.port);
if (taos == NULL) {
errorPrint("Failed to connect to server %s\n", g_args.host);
return -1;
}
TAOS_STMT *stmt = taos_stmt_init(taos);
if (NULL == stmt) {
taos_close(taos);
errorPrint("%s() LN%d, stmt init failed! reason: %s\n",
__func__, __LINE__, taos_errstr(NULL));
return -1;
}
RecordSchema *recordSchema = parse_json_to_recordschema(json_root);
if (NULL == recordSchema) {
errorPrint("Failed to parse json to recordschema. reason: %s\n",
avro_strerror());
avro_schema_decref(schema);
avro_file_reader_close(reader);
avro_writer_free(jsonwriter);
return -1;
}
json_decref(json_root);
TableDef *tableDes = (TableDef *)calloc(1, sizeof(TableDef)
+ sizeof(ColDes) * TSDB_MAX_COLUMNS);
int allColCount = getTableDes(taos, (char *)namespace, recordSchema->name, tableDes, false);
if (allColCount < 0) {
errorPrint("%s() LN%d, failed to get table[%s] schema\n",
__func__,
__LINE__,
recordSchema->name);
free(tableDes);
freeRecordSchema(recordSchema);
avro_schema_decref(schema);
avro_file_reader_close(reader);
avro_writer_free(jsonwriter);
return -1;
}
char *stmtBuffer = calloc(1, TSDB_MAX_ALLOWED_SQL_LEN);
assert(stmtBuffer);
char *pstr = stmtBuffer;
pstr += sprintf(pstr, "INSERT INTO ? VALUES(?");
int onlyCol = 1; // at least timestamp
for (int col = 1; col < allColCount; col++) {
if (strcmp(tableDes->cols[col].note, "TAG") == 0) continue;
pstr += sprintf(pstr, ",?");
onlyCol ++;
}
pstr += sprintf(pstr, ")");
if (0 != taos_stmt_prepare(stmt, stmtBuffer, 0)) {
errorPrint("Failed to execute taos_stmt_prepare(). reason: %s\n",
taos_stmt_errstr(stmt));
free(stmtBuffer);
free(tableDes);
freeRecordSchema(recordSchema);
avro_schema_decref(schema);
avro_file_reader_close(reader);
avro_writer_free(jsonwriter);
return -1;
}
if (0 != taos_stmt_set_tbname(stmt, recordSchema->name)) {
errorPrint("Failed to execute taos_stmt_set_tbname(%s). reason: %s\n",
recordSchema->name, taos_stmt_errstr(stmt));
free(stmtBuffer);
free(tableDes);
avro_schema_decref(schema);
avro_file_reader_close(reader);
avro_writer_free(jsonwriter);
return -1;
}
avro_value_iface_t *value_class = avro_generic_class_from_schema(schema);
avro_value_t value;
avro_generic_value_new(value_class, &value);
char *bindArray =
malloc(sizeof(TAOS_BIND) * onlyCol);
assert(bindArray);
int success = 0;
int failed = 0;
while(!avro_file_reader_read_value(reader, &value)) {
memset(bindArray, 0, sizeof(TAOS_BIND) * onlyCol);
TAOS_BIND *bind;
for (int i = 0; i < recordSchema->num_fields; i++) {
bind = (TAOS_BIND *)((char *)bindArray + (sizeof(TAOS_BIND) * i));
avro_value_t field_value;
FieldStruct *field = (FieldStruct *)(recordSchema->fields + sizeof(FieldStruct) * i);
bind->is_null = NULL;
int is_null = 1;
if (0 == i) {
int64_t *ts = malloc(sizeof(int64_t));
assert(ts);
avro_value_get_by_name(&value, field->name, &field_value, NULL);
avro_value_get_long(&field_value, ts);
bind->buffer_type = TSDB_DATA_TYPE_TIMESTAMP;
bind->buffer_length = sizeof(int64_t);
bind->buffer = ts;
bind->length = &bind->buffer_length;
} else if (0 == avro_value_get_by_name(
&value, field->name, &field_value, NULL)) {
if (0 == strcasecmp(tableDes->cols[i].type, "int")) {
int32_t *n32 = malloc(sizeof(int32_t));
assert(n32);
avro_value_get_int(&field_value, n32);
debugPrint("%d | ", *n32);
bind->buffer_type = TSDB_DATA_TYPE_INT;
bind->buffer_length = sizeof(int32_t);
bind->buffer = n32;
} else if (0 == strcasecmp(tableDes->cols[i].type, "tinyint")) {
int32_t *n8 = malloc(sizeof(int32_t));
assert(n8);
avro_value_get_int(&field_value, n8);
debugPrint("%d | ", *n8);
bind->buffer_type = TSDB_DATA_TYPE_TINYINT;
bind->buffer_length = sizeof(int8_t);
bind->buffer = (int8_t *)n8;
} else if (0 == strcasecmp(tableDes->cols[i].type, "smallint")) {
int32_t *n16 = malloc(sizeof(int32_t));
assert(n16);
avro_value_get_int(&field_value, n16);
debugPrint("%d | ", *n16);
bind->buffer_type = TSDB_DATA_TYPE_SMALLINT;
bind->buffer_length = sizeof(int16_t);
bind->buffer = (int32_t*)n16;
} else if (0 == strcasecmp(tableDes->cols[i].type, "bigint")) {
int64_t *n64 = malloc(sizeof(int64_t));
assert(n64);
avro_value_get_long(&field_value, n64);
debugPrint("%"PRId64" | ", *n64);
bind->buffer_type = TSDB_DATA_TYPE_BIGINT;
bind->buffer_length = sizeof(int64_t);
bind->buffer = n64;
} else if (0 == strcasecmp(tableDes->cols[i].type, "timestamp")) {
int64_t *n64 = malloc(sizeof(int64_t));
assert(n64);
avro_value_get_long(&field_value, n64);
debugPrint("%"PRId64" | ", *n64);
bind->buffer_type = TSDB_DATA_TYPE_TIMESTAMP;
bind->buffer_length = sizeof(int64_t);
bind->buffer = n64;
} else if (0 == strcasecmp(tableDes->cols[i].type, "float")) {
float *f = malloc(sizeof(float));
assert(f);
avro_value_get_float(&field_value, f);
if (TSDB_DATA_FLOAT_NULL == *f) {
debugPrint("%s | ", "NULL");
bind->is_null = &is_null;
} else {
debugPrint("%f | ", *f);
bind->buffer = f;
}
bind->buffer_type = TSDB_DATA_TYPE_FLOAT;
bind->buffer_length = sizeof(float);
} else if (0 == strcasecmp(tableDes->cols[i].type, "double")) {
double *dbl = malloc(sizeof(double));
assert(dbl);
avro_value_get_double(&field_value, dbl);
if (TSDB_DATA_DOUBLE_NULL == *dbl) {
debugPrint("%s | ", "NULL");
bind->is_null = &is_null;
} else {
debugPrint("%f | ", *dbl);
bind->buffer = dbl;
}
bind->buffer_type = TSDB_DATA_TYPE_DOUBLE;
bind->buffer_length = sizeof(double);
} else if (0 == strcasecmp(tableDes->cols[i].type, "binary")) {
size_t size;
char *buf = NULL;
avro_value_get_string(&field_value, (const char **)&buf, &size);
debugPrint("%s | ", (char *)buf);
bind->buffer_type = TSDB_DATA_TYPE_BINARY;
bind->buffer_length = tableDes->cols[i].length;
bind->buffer = buf;
} else if (0 == strcasecmp(tableDes->cols[i].type, "nchar")) {
size_t bytessize;
void *bytesbuf = NULL;
avro_value_get_bytes(&field_value, (const void **)&bytesbuf, &bytessize);
debugPrint("%s | ", (char*)bytesbuf);
bind->buffer_type = TSDB_DATA_TYPE_NCHAR;
bind->buffer_length = tableDes->cols[i].length;
bind->buffer = bytesbuf;
} else if (0 == strcasecmp(tableDes->cols[i].type, "bool")) {
int32_t *bl = malloc(sizeof(int32_t));
assert(bl);
avro_value_get_boolean(&field_value, bl);
debugPrint("%s | ", (*bl)?"true":"false");
bind->buffer_type = TSDB_DATA_TYPE_BOOL;
bind->buffer_length = sizeof(int8_t);
bind->buffer = (int8_t*)bl;
}
bind->length = &bind->buffer_length;
}
}
debugPrint("%s", "\n");
if (0 != taos_stmt_bind_param(stmt, (TAOS_BIND *)bindArray)) {
errorPrint("%s() LN%d stmt_bind_param() failed! reason: %s\n",
__func__, __LINE__, taos_stmt_errstr(stmt));
freeBindArray(bindArray, onlyCol);
failed --;
continue;
}
if (0 != taos_stmt_add_batch(stmt)) {
errorPrint("%s() LN%d stmt_bind_param() failed! reason: %s\n",
__func__, __LINE__, taos_stmt_errstr(stmt));
freeBindArray(bindArray, onlyCol);
failed --;
continue;
}
freeBindArray(bindArray, onlyCol);
success ++;
continue;
}
if (0 != taos_stmt_execute(stmt)) {
errorPrint("%s() LN%d stmt_bind_param() failed! reason: %s\n",
__func__, __LINE__, taos_stmt_errstr(stmt));
failed = success;
}
avro_value_decref(&value);
avro_value_iface_decref(value_class);
tfree(bindArray);
tfree(stmtBuffer);
tfree(tableDes);
freeRecordSchema(recordSchema);
avro_schema_decref(schema);
avro_file_reader_close(reader);
avro_writer_free(jsonwriter);
tfree(jsonbuf);
taos_stmt_close(stmt);
taos_close(taos);
if (failed < 0)
return failed;
return success;
}
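// Worker thread entry: restore this thread's slice of .avro files from
// g_args.inpath by calling dumpInOneAvroFile() on each of them.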
static void* dumpInAvroWorkThreadFp(void *arg)
{
threadInfo *pThread = (threadInfo*)arg;
setThreadName("dumpInAvroWorkThrd");
verbosePrint("[%d] process %"PRId64" files from %"PRId64"\n",
pThread->threadIndex, pThread->count, pThread->from);
for (int64_t i = 0; i < pThread->count; i++) {
char avroFile[MAX_PATH_LEN];
sprintf(avroFile, "%s/%s", g_args.inpath,
g_tsDumpInAvroFiles[pThread->from + i]);
if (0 == dumpInOneAvroFile(g_tsCharset,
g_args.encode,
avroFile)) {
okPrint("[%d] Success dump in file: %s\n",
pThread->threadIndex, avroFile);
}
}
return NULL;
}
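// Count the .avro files under g_args.inpath and spread them evenly across
// g_args.thread_num worker threads running dumpInAvroWorkThreadFp().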
static int64_t dumpInAvroWorkThreads()
{
int64_t ret = 0;
int32_t threads = g_args.thread_num;
uint64_t avroFileCount = getFilesNum("avro");
if (0 == avroFileCount) {
debugPrint("No .avro file found in %s\n", g_args.inpath);
return 0;
}
createDumpinList("avro", avroFileCount);
threadInfo *pThread;
pthread_t *pids = calloc(1, threads * sizeof(pthread_t));
threadInfo *infos = (threadInfo *)calloc(
threads, sizeof(threadInfo));
assert(pids);
assert(infos);
int64_t a = avroFileCount / threads;
if (a < 1) {
threads = avroFileCount;
a = 1;
}
int64_t b = 0;
if (threads != 0) {
b = avroFileCount % threads;
}
int64_t from = 0;
for (int32_t t = 0; t < threads; ++t) {
pThread = infos + t;
pThread->threadIndex = t;
pThread->from = from;
pThread->count = t<b?a+1:a;
from += pThread->count;
verbosePrint(
"Thread[%d] takes care avro files total %"PRId64" files from %"PRId64"\n",
t, pThread->count, pThread->from);
if (pthread_create(pids + t, NULL,
dumpInAvroWorkThreadFp, (void*)pThread) != 0) {
errorPrint("%s() LN%d, thread[%d] failed to start\n",
__func__, __LINE__, pThread->threadIndex);
exit(EXIT_FAILURE);
}
}
for (int t = 0; t < threads; ++t) {
pthread_join(pids[t], NULL);
}
free(infos);
free(pids);
freeFileList(g_tsDumpInAvroFiles, avroFileCount);
return ret;
}
#endif /* AVRO_SUPPORT */
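// Convert a query result set into batched INSERT statements written to fp.
// A statement is flushed whenever data_batch rows are collected or the SQL
// buffer would exceed max_sql_len. Returns the number of rows dumped.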
static int64_t writeResultToSql(TAOS_RES *res, FILE *fp, char *dbName, char *tbName)
{
int64_t totalRows = 0;
int32_t sql_buf_len = g_args.max_sql_len;
char* tmpBuffer = (char *)calloc(1, sql_buf_len + 128);
assert(tmpBuffer);
char *pstr = tmpBuffer;
TAOS_ROW row = NULL;
int rowFlag = 0;
int64_t lastRowsPrint = 5000000;
int count = 0;
int numFields = taos_field_count(res);
assert(numFields > 0);
TAOS_FIELD *fields = taos_fetch_fields(res);
int32_t curr_sqlstr_len = 0;
int32_t total_sqlstr_len = 0;
while ((row = taos_fetch_row(res)) != NULL) {
curr_sqlstr_len = 0;
int32_t* length = taos_fetch_lengths(res); // act len
if (count == 0) {
total_sqlstr_len = 0;
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len,
"INSERT INTO %s.%s VALUES (", dbName, tbName);
} else {
if (g_args.mysqlFlag) {
if (0 == rowFlag) {
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, "(");
rowFlag++;
} else {
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, ", (");
}
} else {
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, "(");
}
}
for (int col = 0; col < numFields; col++) {
if (col != 0) curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, ", ");
if (row[col] == NULL) {
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, "NULL");
continue;
}
switch (fields[col].type) {
case TSDB_DATA_TYPE_BOOL:
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, "%d",
((((int32_t)(*((char *)row[col])))==1)?1:0));
break;
case TSDB_DATA_TYPE_TINYINT:
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, "%d",
*((int8_t *)row[col]));
break;
case TSDB_DATA_TYPE_SMALLINT:
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, "%d",
*((int16_t *)row[col]));
break;
case TSDB_DATA_TYPE_INT:
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, "%d",
*((int32_t *)row[col]));
break;
case TSDB_DATA_TYPE_BIGINT:
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len,
"%" PRId64 "",
*((int64_t *)row[col]));
break;
case TSDB_DATA_TYPE_FLOAT:
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, "%f",
GET_FLOAT_VAL(row[col]));
break;
case TSDB_DATA_TYPE_DOUBLE:
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, "%f",
GET_DOUBLE_VAL(row[col]));
break;
case TSDB_DATA_TYPE_BINARY:
{
char tbuf[COMMAND_SIZE] = {0};
converStringToReadable((char *)row[col], length[col],
tbuf, COMMAND_SIZE);
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len,
"\'%s\'", tbuf);
break;
}
case TSDB_DATA_TYPE_NCHAR:
{
char tbuf[COMMAND_SIZE] = {0};
convertNCharToReadable((char *)row[col], length[col],
tbuf, COMMAND_SIZE);
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len,
"\'%s\'", tbuf);
break;
}
case TSDB_DATA_TYPE_TIMESTAMP:
if (!g_args.mysqlFlag) {
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len,
"%" PRId64 "",
*(int64_t *)row[col]);
} else {
char buf[64] = "\0";
int64_t ts = *((int64_t *)row[col]);
time_t tt = (time_t)(ts / 1000);
struct tm *ptm = localtime(&tt);
strftime(buf, 64, "%y-%m-%d %H:%M:%S", ptm);
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len,
"\'%s.%03d\'",
buf, (int)(ts % 1000));
}
break;
default:
break;
}
}
curr_sqlstr_len += sprintf(pstr + curr_sqlstr_len, ")");
totalRows++;
count++;
fprintf(fp, "%s", tmpBuffer);
if (totalRows >= lastRowsPrint) {
printf(" %"PRId64 " rows already be dumpout from %s.%s\n",
totalRows, dbName, tbName);
lastRowsPrint += 5000000;
}
total_sqlstr_len += curr_sqlstr_len;
if ((count >= g_args.data_batch)
|| (sql_buf_len - total_sqlstr_len < TSDB_MAX_BYTES_PER_ROW)) {
fprintf(fp, ";\n");
count = 0;
}
}
debugPrint("total_sqlstr_len: %d\n", total_sqlstr_len);
fprintf(fp, "\n");
free(tmpBuffer);
return totalRows;
}
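// Query one table's rows within the configured time range and dump them
// either to an avro file (when avro output is enabled) or as SQL INSERT
// statements written to fp.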
static int64_t dumpTableData(FILE *fp, char *tbName,
char* dbName, int precision,
char *jsonSchema) {
int64_t totalRows = 0;
char sqlstr[1024] = {0};
int64_t start_time, end_time;
if (strlen(g_args.humanStartTime)) {
if (TSDB_CODE_SUCCESS != taosParseTime(
g_args.humanStartTime, &start_time,
strlen(g_args.humanStartTime),
precision, 0)) {
errorPrint("Input %s, time format error!\n",
g_args.humanStartTime);
return -1;
}
} else {
start_time = g_args.start_time;
}
if (strlen(g_args.humanEndTime)) {
if (TSDB_CODE_SUCCESS != taosParseTime(
g_args.humanEndTime, &end_time, strlen(g_args.humanEndTime),
precision, 0)) {
errorPrint("Input %s, time format error!\n", g_args.humanEndTime);
return -1;
}
} else {
end_time = g_args.end_time;
}
sprintf(sqlstr,
"select * from %s.%s where _c0 >= %" PRId64 " and _c0 <= %" PRId64 " order by _c0 asc;",
dbName, tbName, start_time, end_time);
TAOS *taos = taos_connect(g_args.host,
g_args.user, g_args.password, dbName, g_args.port);
if (NULL == taos) {
errorPrint(
"Failed to connect to server %s by specified database %s\n",
g_args.host, dbName);
return -1;
}
TAOS_RES* res = taos_query(taos, sqlstr);
int32_t code = taos_errno(res);
if (code != 0) {
errorPrint("failed to run command %s, reason: %s\n",
sqlstr, taos_errstr(res));
taos_free_result(res);
taos_close(taos);
return -1;
}
#ifdef AVRO_SUPPORT
if (g_args.avro) {
char avroFilename[MAX_PATH_LEN] = {0};
if (g_args.outpath[0] != 0) {
sprintf(avroFilename, "%s/%s.%s.avro",
g_args.outpath, dbName, tbName);
} else {
sprintf(avroFilename, "%s.%s.avro",
dbName, tbName);
}
totalRows = writeResultToAvro(avroFilename, jsonSchema, res);
} else
#endif
totalRows = writeResultToSql(res, fp, dbName, tbName);
taos_free_result(res);
taos_close(taos);
return totalRows;
}
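// Dump a single table: write its CREATE TABLE clause (as a child table when
// stable is given, otherwise as a normal table or super table), then its data
// unless schemaonly is set.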
static int64_t dumpNormalTable(
TAOS *taos,
char *dbName,
char *stable,
char *tbName,
int precision,
FILE *fp
) {
int colCount = 0;
TableDef *tableDes = (TableDef *)calloc(1, sizeof(TableDef)
+ sizeof(ColDes) * TSDB_MAX_COLUMNS);
if (stable != NULL && stable[0] != '\0') { // dump table schema which is created by using super table
colCount = getTableDes(taos, dbName, tbName, tableDes, false);
if (colCount < 0) {
errorPrint("%s() LN%d, failed to get table[%s] schema\n",
__func__,
__LINE__,
tbName);
free(tableDes);
return -1;
}
// create child-table using super-table
dumpCreateMTableClause(dbName, stable, tableDes, colCount, fp);
} else { // dump table definition
colCount = getTableDes(taos, dbName, tbName, tableDes, false);
if (colCount < 0) {
errorPrint("%s() LN%d, failed to get table[%s] schema\n",
__func__,
__LINE__,
tbName);
free(tableDes);
return -1;
}
// create normal-table or super-table
dumpCreateTableClause(tableDes, colCount, fp, dbName);
}
char *jsonSchema = NULL;
#ifdef AVRO_SUPPORT
if (g_args.avro) {
if (0 != convertTbDesToJson(
dbName, tbName, tableDes, colCount, &jsonSchema)) {
errorPrint("%s() LN%d, convertTbDesToJson failed\n",
__func__,
__LINE__);
freeTbDes(tableDes);
return -1;
}
}
#endif
int64_t totalRows = 0;
if (!g_args.schemaonly) {
totalRows = dumpTableData(fp, tbName, dbName, precision,
jsonSchema);
}
tfree(jsonSchema);
freeTbDes(tableDes);
return totalRows;
}
static int64_t dumpNormalTableWithoutStb(TAOS *taos, SDbInfo *dbInfo, char *ntbName)
{
int64_t count = 0;
char tmpBuf[MAX_PATH_LEN] = {0};
FILE *fp = NULL;
if (g_args.outpath[0] != 0) {
sprintf(tmpBuf, "%s/%s.%s.sql",
g_args.outpath, dbInfo->name, ntbName);
} else {
sprintf(tmpBuf, "%s.%s.sql",
dbInfo->name, ntbName);
}
fp = fopen(tmpBuf, "w");
if (fp == NULL) {
errorPrint("%s() LN%d, failed to open file %s\n",
__func__, __LINE__, tmpBuf);
return -1;
}
count = dumpNormalTable(
taos,
dbInfo->name,
NULL,
ntbName,
getPrecisionByString(dbInfo->precision),
fp);
if (count > 0) {
atomic_add_fetch_64(&g_totalDumpOutRows, count);
}
fclose(fp);
return count;
}
static int64_t dumpNormalTableBelongStb(
TAOS *taos,
SDbInfo *dbInfo, char *stbName, char *ntbName)
{
int64_t count = 0;
char tmpBuf[MAX_PATH_LEN] = {0};
FILE *fp = NULL;
if (g_args.outpath[0] != 0) {
sprintf(tmpBuf, "%s/%s.%s.sql",
g_args.outpath, dbInfo->name, ntbName);
} else {
sprintf(tmpBuf, "%s.%s.sql",
dbInfo->name, ntbName);
}
fp = fopen(tmpBuf, "w");
if (fp == NULL) {
errorPrint("%s() LN%d, failed to open file %s\n",
__func__, __LINE__, tmpBuf);
return -1;
}
count = dumpNormalTable(
taos,
dbInfo->name,
stbName,
ntbName,
getPrecisionByString(dbInfo->precision),
fp);
if (count > 0) {
atomic_add_fetch_64(&g_totalDumpOutRows, count);
}
fclose(fp);
return count;
}
static void *dumpNtbOfDb(void *arg) {
threadInfo *pThreadInfo = (threadInfo *)arg;
debugPrint("dump table from = \t%"PRId64"\n", pThreadInfo->from);
debugPrint("dump table count = \t%"PRId64"\n",
pThreadInfo->count);
FILE *fp = NULL;
char tmpBuf[MAX_PATH_LEN] = {0};
if (g_args.outpath[0] != 0) {
sprintf(tmpBuf, "%s/%s.%d.sql",
g_args.outpath, pThreadInfo->dbName, pThreadInfo->threadIndex);
} else {
sprintf(tmpBuf, "%s.%d.sql",
pThreadInfo->dbName, pThreadInfo->threadIndex);
}
fp = fopen(tmpBuf, "w");
if (fp == NULL) {
errorPrint("%s() LN%d, failed to open file %s\n",
__func__, __LINE__, tmpBuf);
return NULL;
}
int64_t count;
for (int64_t i = 0; i < pThreadInfo->count; i++) {
debugPrint("[%d] No.\t%"PRId64" table name: %s\n",
pThreadInfo->threadIndex, i,
((TableInfo *)(g_tablesList + pThreadInfo->from+i))->name);
count = dumpNormalTable(
pThreadInfo->taos,
pThreadInfo->dbName,
((TableInfo *)(g_tablesList + pThreadInfo->from+i))->stable,
((TableInfo *)(g_tablesList + pThreadInfo->from+i))->name,
pThreadInfo->precision,
fp);
if (count < 0) {
break;
} else {
atomic_add_fetch_64(&g_totalDumpOutRows, count);
}
}
fclose(fp);
return NULL;
}
static int checkParam() {
if (g_args.all_databases && g_args.databases) {
errorPrint("%s", "conflict option --all-databases and --databases\n");
return -1;
}
if (g_args.start_time > g_args.end_time) {
errorPrint("%s", "start time is larger than end time\n");
return -1;
}
if (g_args.arg_list_len == 0) {
if ((!g_args.all_databases) && (!g_args.databases) && (!g_args.isDumpIn)) {
errorPrint("%s", "taosdump requires parameters\n");
return -1;
}
}
/*
if (g_args.isDumpIn && (strcmp(g_args.outpath, DEFAULT_DUMP_FILE) != 0)) {
fprintf(stderr, "duplicate parameter input and output file path\n");
return -1;
}
*/
if (!g_args.isDumpIn && g_args.encode != NULL) {
fprintf(stderr, "invalid option in dump out\n");
return -1;
}
if (g_args.table_batch <= 0) {
fprintf(stderr, "invalid option in dump out\n");
return -1;
}
return 0;
}
/*
static bool isEmptyCommand(char *cmd) {
char *pchar = cmd;
while (*pchar != '\0') {
if (*pchar != ' ') return false;
pchar++;
}
return true;
}
static void taosReplaceCtrlChar(char *str) {
bool ctrlOn = false;
char *pstr = NULL;
for (pstr = str; *str != '\0'; ++str) {
if (ctrlOn) {
switch (*str) {
case 'n':
*pstr = '\n';
pstr++;
break;
case 'r':
*pstr = '\r';
pstr++;
break;
case 't':
*pstr = '\t';
pstr++;
break;
case '\\':
*pstr = '\\';
pstr++;
break;
case '\'':
*pstr = '\'';
pstr++;
break;
default:
break;
}
ctrlOn = false;
} else {
if (*str == '\\') {
ctrlOn = true;
} else {
*pstr = *str;
pstr++;
}
}
}
*pstr = '\0';
}
*/
char *ascii_literal_list[] = {
"\\x00", "\\x01", "\\x02", "\\x03", "\\x04", "\\x05", "\\x06", "\\x07", "\\x08", "\\t", "\\n", "\\x0b", "\\x0c",
"\\r", "\\x0e", "\\x0f", "\\x10", "\\x11", "\\x12", "\\x13", "\\x14", "\\x15", "\\x16", "\\x17", "\\x18", "\\x19",
"\\x1a", "\\x1b", "\\x1c", "\\x1d", "\\x1e", "\\x1f", " ", "!", "\\\"", "#", "$", "%", "&",
"\\'", "(", ")", "*", "+", ",", "-", ".", "/", "0", "1", "2", "3",
"4", "5", "6", "7", "8", "9", ":", ";", "<", "=", ">", "?", "@",
"A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M",
"N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z",
"[", "\\\\", "]", "^", "_", "`", "a", "b", "c", "d", "e", "f", "g",
"h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
"u", "v", "w", "x", "y", "z", "{", "|", "}", "~", "\\x7f", "\\x80", "\\x81",
"\\x82", "\\x83", "\\x84", "\\x85", "\\x86", "\\x87", "\\x88", "\\x89", "\\x8a", "\\x8b", "\\x8c", "\\x8d", "\\x8e",
"\\x8f", "\\x90", "\\x91", "\\x92", "\\x93", "\\x94", "\\x95", "\\x96", "\\x97", "\\x98", "\\x99", "\\x9a", "\\x9b",
"\\x9c", "\\x9d", "\\x9e", "\\x9f", "\\xa0", "\\xa1", "\\xa2", "\\xa3", "\\xa4", "\\xa5", "\\xa6", "\\xa7", "\\xa8",
"\\xa9", "\\xaa", "\\xab", "\\xac", "\\xad", "\\xae", "\\xaf", "\\xb0", "\\xb1", "\\xb2", "\\xb3", "\\xb4", "\\xb5",
"\\xb6", "\\xb7", "\\xb8", "\\xb9", "\\xba", "\\xbb", "\\xbc", "\\xbd", "\\xbe", "\\xbf", "\\xc0", "\\xc1", "\\xc2",
"\\xc3", "\\xc4", "\\xc5", "\\xc6", "\\xc7", "\\xc8", "\\xc9", "\\xca", "\\xcb", "\\xcc", "\\xcd", "\\xce", "\\xcf",
"\\xd0", "\\xd1", "\\xd2", "\\xd3", "\\xd4", "\\xd5", "\\xd6", "\\xd7", "\\xd8", "\\xd9", "\\xda", "\\xdb", "\\xdc",
"\\xdd", "\\xde", "\\xdf", "\\xe0", "\\xe1", "\\xe2", "\\xe3", "\\xe4", "\\xe5", "\\xe6", "\\xe7", "\\xe8", "\\xe9",
"\\xea", "\\xeb", "\\xec", "\\xed", "\\xee", "\\xef", "\\xf0", "\\xf1", "\\xf2", "\\xf3", "\\xf4", "\\xf5", "\\xf6",
"\\xf7", "\\xf8", "\\xf9", "\\xfa", "\\xfb", "\\xfc", "\\xfd", "\\xfe", "\\xff"};
static int converStringToReadable(char *str, int size, char *buf, int bufsize) {
char *pstr = str;
char *pbuf = buf;
while (size > 0) {
if (*pstr == '\0') break;
pbuf = stpcpy(pbuf, ascii_literal_list[((uint8_t)(*pstr))]);
pstr++;
size--;
}
*pbuf = '\0';
return 0;
}
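// Escape an nchar value: single-byte characters go through ascii_literal_list,
// wider characters are copied verbatim.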
static int convertNCharToReadable(char *str, int size, char *buf, int bufsize) {
char *pstr = str;
char *pbuf = buf;
wchar_t wc;
while (size > 0) {
if (*pstr == '\0') break;
int byte_width = mbtowc(&wc, pstr, MB_CUR_MAX);
if (byte_width < 0) {
errorPrint("%s() LN%d, mbtowc() return fail.\n", __func__, __LINE__);
exit(-1);
}
if ((int)wc < 256) {
pbuf = stpcpy(pbuf, ascii_literal_list[(int)wc]);
} else {
memcpy(pbuf, pstr, byte_width);
pbuf += byte_width;
}
pstr += byte_width;
}
*pbuf = '\0';
return 0;
}
static void dumpCharset(FILE *fp) {
char charsetline[256];
(void)fseek(fp, 0, SEEK_SET);
sprintf(charsetline, "#!%s\n", tsCharset);
(void)fwrite(charsetline, strlen(charsetline), 1, fp);
}
static void loadFileCharset(FILE *fp, char *fcharset) {
char * line = NULL;
size_t line_size = 0;
(void)fseek(fp, 0, SEEK_SET);
ssize_t size = getline(&line, &line_size, fp);
if (size <= 2) {
goto _exit_no_charset;
}
if (strncmp(line, "#!", 2) != 0) {
goto _exit_no_charset;
}
if (line[size - 1] == '\n') {
line[size - 1] = '\0';
size--;
}
strcpy(fcharset, line + 2);
tfree(line);
return;
_exit_no_charset:
(void)fseek(fp, 0, SEEK_SET);
*fcharset = '\0';
tfree(line);
return;
}
// ======== dumpIn support multi threads functions ================================//
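// Replay one .sql dump file line by line; lines ending with '\' are joined
// into a single statement before it is executed against the server.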
static int dumpInOneSqlFile(TAOS* taos, FILE* fp, char* fcharset,
char* encode, char* fileName) {
int read_len = 0;
char * cmd = NULL;
size_t cmd_len = 0;
char * line = NULL;
size_t line_len = 0;
cmd = (char *)malloc(TSDB_MAX_ALLOWED_SQL_LEN);
if (cmd == NULL) {
errorPrint("%s() LN%d, failed to allocate memory\n",
__func__, __LINE__);
return -1;
}
int lastRowsPrint = 5000000;
int lineNo = 0;
while ((read_len = getline(&line, &line_len, fp)) != -1) {
++lineNo;
if (read_len >= TSDB_MAX_ALLOWED_SQL_LEN) continue;
line[--read_len] = '\0';
//if (read_len == 0 || isCommentLine(line)) { // line starts with #
if (read_len == 0 ) {
continue;
}
if (line[read_len - 1] == '\\') {
line[read_len - 1] = ' ';
memcpy(cmd + cmd_len, line, read_len);
cmd_len += read_len;
continue;
}
memcpy(cmd + cmd_len, line, read_len);
cmd[read_len + cmd_len] = '\0';
if (queryDbImpl(taos, cmd)) {
errorPrint("%s() LN%d, error sql: lineno:%d, file:%s\n",
__func__, __LINE__, lineNo, fileName);
fprintf(g_fpOfResult, "error sql: lineno:%d, file:%s\n", lineNo, fileName);
}
memset(cmd, 0, TSDB_MAX_ALLOWED_SQL_LEN);
cmd_len = 0;
if (lineNo >= lastRowsPrint) {
printf(" %d lines already be executed from file %s\n", lineNo, fileName);
lastRowsPrint += 5000000;
}
}
tfree(cmd);
tfree(line);
return 0;
}
static void* dumpInSqlWorkThreadFp(void *arg)
{
threadInfo *pThread = (threadInfo*)arg;
setThreadName("dumpInSqlWorkThrd");
fprintf(stderr, "[%d] Start to process %"PRId64" files from %"PRId64"\n",
pThread->threadIndex, pThread->count, pThread->from);
for (int64_t i = 0; i < pThread->count; i++) {
char sqlFile[MAX_PATH_LEN];
sprintf(sqlFile, "%s/%s", g_args.inpath, g_tsDumpInSqlFiles[pThread->from + i]);
FILE* fp = openDumpInFile(sqlFile);
if (NULL == fp) {
errorPrint("[%d] Failed to open input file: %s\n",
pThread->threadIndex, sqlFile);
continue;
}
if (0 == dumpInOneSqlFile(pThread->taos, fp, g_tsCharset, g_args.encode,
sqlFile)) {
okPrint("[%d] Success dump in file: %s\n",
pThread->threadIndex, sqlFile);
}
fclose(fp);
}
return NULL;
}
static int dumpInSqlWorkThreads()
{
int32_t threads = g_args.thread_num;
uint64_t sqlFileCount = getFilesNum("sql");
if (0 == sqlFileCount) {
debugPrint("No .sql file found in %s\n", g_args.inpath);
return 0;
}
createDumpinList("sql", sqlFileCount);
threadInfo *pThread;
pthread_t *pids = calloc(1, threads * sizeof(pthread_t));
threadInfo *infos = (threadInfo *)calloc(
threads, sizeof(threadInfo));
assert(pids);
assert(infos);
int64_t a = sqlFileCount / threads;
if (a < 1) {
threads = sqlFileCount;
a = 1;
}
int64_t b = 0;
if (threads != 0) {
b = sqlFileCount % threads;
}
int64_t from = 0;
for (int32_t t = 0; t < threads; ++t) {
pThread = infos + t;
pThread->threadIndex = t;
pThread->from = from;
pThread->count = t<b?a+1:a;
from += pThread->count;
verbosePrint(
"Thread[%d] takes care sql files total %"PRId64" files from %"PRId64"\n",
t, pThread->count, pThread->from);
pThread->taos = taos_connect(g_args.host, g_args.user, g_args.password,
NULL, g_args.port);
if (pThread->taos == NULL) {
errorPrint("Failed to connect to server %s\n", g_args.host);
free(infos);
free(pids);
return -1;
}
if (pthread_create(pids + t, NULL,
dumpInSqlWorkThreadFp, (void*)pThread) != 0) {
errorPrint("%s() LN%d, thread[%d] failed to start\n",
__func__, __LINE__, pThread->threadIndex);
exit(EXIT_FAILURE);
}
}
for (int t = 0; t < threads; ++t) {
pthread_join(pids[t], NULL);
}
for (int t = 0; t < threads; ++t) {
taos_close(infos[t].taos);
}
free(infos);
free(pids);
freeFileList(g_tsDumpInSqlFiles, sqlFileCount);
return 0;
}
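// Restore database and table definitions from <inpath>/dbs.sql before any
// table data is loaded.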
static int dumpInDbs()
{
TAOS *taos = taos_connect(
g_args.host, g_args.user, g_args.password,
NULL, g_args.port);
if (taos == NULL) {
errorPrint("%s() LN%d, failed to connect to server\n",
__func__, __LINE__);
return -1;
}
char dbsSql[MAX_PATH_LEN];
sprintf(dbsSql, "%s/%s", g_args.inpath, "dbs.sql");
FILE *fp = openDumpInFile(dbsSql);
if (NULL == fp) {
errorPrint("%s() LN%d, failed to open input file %s\n",
__func__, __LINE__, dbsSql);
return -1;
}
debugPrint("Success Open input file: %s\n", dbsSql);
loadFileCharset(fp, g_tsCharset);
if(0 == dumpInOneSqlFile(taos, fp, g_tsCharset, g_args.encode, dbsSql)) {
okPrint("Success dump in file: %s !\n", dbsSql);
}
fclose(fp);
taos_close(taos);
return 0;
}
static int64_t dumpIn() {
assert(g_args.isDumpIn);
int64_t ret = 0;
if (dumpInDbs()) {
errorPrint("%s", "Failed to dump dbs in!\n");
exit(EXIT_FAILURE);
}
ret = dumpInSqlWorkThreads();
#ifdef AVRO_SUPPORT
if (0 == ret) {
ret = dumpInAvroWorkThreads();
}
#endif
return ret;
}
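// Worker thread: dump the child tables of one super table, selecting this
// thread's slice with "SELECT TBNAME ... LIMIT ... OFFSET ...".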
static void *dumpNormalTablesOfStb(void *arg) {
threadInfo *pThreadInfo = (threadInfo *)arg;
debugPrint("dump table from = \t%"PRId64"\n", pThreadInfo->from);
debugPrint("dump table count = \t%"PRId64"\n", pThreadInfo->count);
char command[COMMAND_SIZE];
sprintf(command, "SELECT TBNAME FROM %s.%s LIMIT %"PRId64" OFFSET %"PRId64"",
pThreadInfo->dbName, pThreadInfo->stbName,
pThreadInfo->count, pThreadInfo->from);
TAOS_RES *res = taos_query(pThreadInfo->taos, command);
int32_t code = taos_errno(res);
if (code) {
errorPrint("%s() LN%d, failed to run command <%s>. reason: %s\n",
__func__, __LINE__, command, taos_errstr(res));
taos_free_result(res);
return NULL;
}
FILE *fp = NULL;
char tmpBuf[MAX_PATH_LEN] = {0};
if (g_args.outpath[0] != 0) {
sprintf(tmpBuf, "%s/%s.%s.%d.sql",
g_args.outpath,
pThreadInfo->dbName,
pThreadInfo->stbName,
pThreadInfo->threadIndex);
} else {
sprintf(tmpBuf, "%s.%s.%d.sql",
pThreadInfo->dbName,
pThreadInfo->stbName,
pThreadInfo->threadIndex);
}
fp = fopen(tmpBuf, "w");
if (fp == NULL) {
errorPrint("%s() LN%d, failed to open file %s\n",
__func__, __LINE__, tmpBuf);
return NULL;
}
TAOS_ROW row = NULL;
int64_t i = 0;
int64_t count;
while((row = taos_fetch_row(res)) != NULL) {
debugPrint("[%d] sub table %"PRId64": name: %s\n",
pThreadInfo->threadIndex, i++, (char *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
count = dumpNormalTable(
pThreadInfo->taos,
pThreadInfo->dbName,
pThreadInfo->stbName,
(char *)row[TSDB_SHOW_TABLES_NAME_INDEX],
pThreadInfo->precision,
fp);
if (count < 0) {
break;
} else {
atomic_add_fetch_64(&g_totalDumpOutRows, count);
}
}
fclose(fp);
return NULL;
}
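// Split the tables collected in g_tablesList across g_args.thread_num threads
// running dumpNtbOfDb(), each with its own connection.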
static int64_t dumpNtbOfDbByThreads(
SDbInfo *dbInfo,
int64_t ntbCount)
{
if (ntbCount <= 0) {
return 0;
}
int threads = g_args.thread_num;
int64_t a = ntbCount / threads;
if (a < 1) {
threads = ntbCount;
a = 1;
}
assert(threads);
int64_t b = ntbCount % threads;
threadInfo *infos = calloc(1, threads * sizeof(threadInfo));
pthread_t *pids = calloc(1, threads * sizeof(pthread_t));
assert(pids);
assert(infos);
for (int64_t i = 0; i < threads; i++) {
threadInfo *pThreadInfo = infos + i;
pThreadInfo->taos = taos_connect(
g_args.host,
g_args.user,
g_args.password,
dbInfo->name,
g_args.port
);
if (NULL == pThreadInfo->taos) {
errorPrint("%s() LN%d, Failed to connect to server, reason: %s\n",
__func__,
__LINE__,
taos_errstr(NULL));
free(pids);
free(infos);
return -1;
}
pThreadInfo->threadIndex = i;
pThreadInfo->count = (i<b)?a+1:a;
pThreadInfo->from = (i==0)?0:
((threadInfo *)(infos + i - 1))->from +
((threadInfo *)(infos + i - 1))->count;
strcpy(pThreadInfo->dbName, dbInfo->name);
pThreadInfo->precision = getPrecisionByString(dbInfo->precision);
pthread_create(pids + i, NULL, dumpNtbOfDb, pThreadInfo);
}
for (int64_t i = 0; i < threads; i++) {
pthread_join(pids[i], NULL);
}
for (int64_t i = 0; i < threads; i++) {
threadInfo *pThreadInfo = infos + i;
taos_close(pThreadInfo->taos);
}
free(pids);
free(infos);
return 0;
}
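// List all tables of a database via SHOW TABLES, record which super table each
// one belongs to, then dump them with dumpNtbOfDbByThreads().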
static int64_t dumpNTablesOfDb(SDbInfo *dbInfo)
{
TAOS *taos = taos_connect(g_args.host,
g_args.user, g_args.password, dbInfo->name, g_args.port);
if (NULL == taos) {
errorPrint(
"Failed to connect to server %s by specified database %s\n",
g_args.host, dbInfo->name);
return 0;
}
char command[COMMAND_SIZE];
TAOS_RES *result;
int32_t code;
sprintf(command, "USE %s", dbInfo->name);
result = taos_query(taos, command);
code = taos_errno(result);
if (code != 0) {
errorPrint("invalid database %s, reason: %s\n",
dbInfo->name, taos_errstr(result));
taos_close(taos);
return 0;
}
sprintf(command, "SHOW TABLES");
result = taos_query(taos, command);
code = taos_errno(result);
if (code != 0) {
errorPrint("Failed to show %s\'s tables, reason: %s\n",
dbInfo->name, taos_errstr(result));
taos_close(taos);
return 0;
}
g_tablesList = calloc(1, dbInfo->ntables * sizeof(TableInfo));
assert(g_tablesList);
TAOS_ROW row;
int64_t count = 0;
while(NULL != (row = taos_fetch_row(result))) {
debugPrint("%s() LN%d, No.\t%"PRId64" table name: %s\n",
__func__, __LINE__,
count, (char *)row[TSDB_SHOW_TABLES_NAME_INDEX]);
tstrncpy(((TableInfo *)(g_tablesList + count))->name,
(char *)row[TSDB_SHOW_TABLES_NAME_INDEX], TSDB_TABLE_NAME_LEN);
char *stbName = (char *) row[TSDB_SHOW_TABLES_METRIC_INDEX];
if (stbName) {
tstrncpy(((TableInfo *)(g_tablesList + count))->stable,
(char *)row[TSDB_SHOW_TABLES_METRIC_INDEX], TSDB_TABLE_NAME_LEN);
((TableInfo *)(g_tablesList + count))->belongStb = true;
}
count ++;
}
taos_close(taos);
int64_t records = dumpNtbOfDbByThreads(dbInfo, count);
free(g_tablesList);
g_tablesList = NULL;
return records;
}
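// Count the child tables of one super table and dump them in parallel with
// dumpNormalTablesOfStb() worker threads.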
static int64_t dumpNtbOfStbByThreads(
SDbInfo *dbInfo, char *stbName)
{
int64_t ntbCount = getNtbCountOfStb(dbInfo->name, stbName);
if (ntbCount <= 0) {
return 0;
}
int threads = g_args.thread_num;
int64_t a = ntbCount / threads;
if (a < 1) {
threads = ntbCount;
a = 1;
}
assert(threads);
int64_t b = ntbCount % threads;
pthread_t *pids = calloc(1, threads * sizeof(pthread_t));
threadInfo *infos = calloc(1, threads * sizeof(threadInfo));
assert(pids);
assert(infos);
for (int64_t i = 0; i < threads; i++) {
threadInfo *pThreadInfo = infos + i;
pThreadInfo->taos = taos_connect(
g_args.host,
g_args.user,
g_args.password,
dbInfo->name,
g_args.port
);
if (NULL == pThreadInfo->taos) {
errorPrint("%s() LN%d, Failed to connect to server, reason: %s\n",
__func__,
__LINE__,
taos_errstr(NULL));
free(pids);
free(infos);
return -1;
}
pThreadInfo->threadIndex = i;
pThreadInfo->count = (i<b)?a+1:a;
pThreadInfo->from = (i==0)?0:
((threadInfo *)(infos + i - 1))->from +
((threadInfo *)(infos + i - 1))->count;
strcpy(pThreadInfo->dbName, dbInfo->name);
pThreadInfo->precision = getPrecisionByString(dbInfo->precision);
strcpy(pThreadInfo->stbName, stbName);
pthread_create(pids + i, NULL, dumpNormalTablesOfStb, pThreadInfo);
}
for (int64_t i = 0; i < threads; i++) {
pthread_join(pids[i], NULL);
}
int64_t records = 0;
for (int64_t i = 0; i < threads; i++) {
threadInfo *pThreadInfo = infos + i;
records += pThreadInfo->rowsOfDumpOut;
taos_close(pThreadInfo->taos);
}
free(pids);
free(infos);
return records;
}
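// Dump one database completely: its CREATE DATABASE clause, all super table
// definitions, and every table's data.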
static int64_t dumpWholeDatabase(SDbInfo *dbInfo, FILE *fp)
{
dumpCreateDbClause(dbInfo, g_args.with_property, fp);
fprintf(g_fpOfResult, "\n#### database: %s\n",
dbInfo->name);
g_resultStatistics.totalDatabasesOfDumpOut++;
dumpCreateSTableClauseOfDb(dbInfo, fp);
return dumpNTablesOfDb(dbInfo);
}
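// Dump out entry point: write the charset header and database definitions to
// dbs.sql, then dump the selected databases or tables.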
static int dumpOut() {
TAOS *taos = NULL;
TAOS_RES *result = NULL;
TAOS_ROW row;
FILE *fp = NULL;
int32_t count = 0;
char tmpBuf[MAX_PATH_LEN] = {0};
if (g_args.outpath[0] != 0) {
sprintf(tmpBuf, "%s/dbs.sql", g_args.outpath);
} else {
sprintf(tmpBuf, "dbs.sql");
}
fp = fopen(tmpBuf, "w");
if (fp == NULL) {
errorPrint("%s() LN%d, failed to open file %s\n",
__func__, __LINE__, tmpBuf);
return -1;
}
g_args.dumpDbCount = getDumpDbCount();
debugPrint("%s() LN%d, dump db count: %d\n",
__func__, __LINE__, g_args.dumpDbCount);
if (0 == g_args.dumpDbCount) {
errorPrint("%d databases valid to dump\n", g_args.dumpDbCount);
fclose(fp);
return -1;
}
g_dbInfos = (SDbInfo **)calloc(g_args.dumpDbCount, sizeof(SDbInfo *));
if (g_dbInfos == NULL) {
errorPrint("%s() LN%d, failed to allocate memory\n",
__func__, __LINE__);
goto _exit_failure;
}
char command[COMMAND_SIZE];
/* Connect to server */
taos = taos_connect(g_args.host, g_args.user, g_args.password,
NULL, g_args.port);
if (taos == NULL) {
errorPrint("Failed to connect to server %s\n", g_args.host);
goto _exit_failure;
}
/* --------------------------------- Main Code -------------------------------- */
/* if (g_args.databases || g_args.all_databases) { // dump part of databases or all databases */
/* */
dumpCharset(fp);
sprintf(command, "show databases");
result = taos_query(taos, command);
int32_t code = taos_errno(result);
if (code != 0) {
errorPrint("%s() LN%d, failed to run command <%s>, reason: %s\n",
__func__, __LINE__, command, taos_errstr(result));
goto _exit_failure;
}
TAOS_FIELD *fields = taos_fetch_fields(result);
while ((row = taos_fetch_row(result)) != NULL) {
// skip the system database 'log' unless dumping system databases is allowed
if ((strncasecmp(row[TSDB_SHOW_DB_NAME_INDEX], "log",
fields[TSDB_SHOW_DB_NAME_INDEX].bytes) == 0)
&& (!g_args.allow_sys)) {
continue;
}
if (g_args.databases) { // input multi dbs
if (inDatabasesSeq(
(char *)row[TSDB_SHOW_DB_NAME_INDEX],
fields[TSDB_SHOW_DB_NAME_INDEX].bytes) != 0) {
continue;
}
} else if (!g_args.all_databases) { // only input one db
if (strncasecmp(g_args.arg_list[0],
(char *)row[TSDB_SHOW_DB_NAME_INDEX],
fields[TSDB_SHOW_DB_NAME_INDEX].bytes) != 0)
continue;
}
g_dbInfos[count] = (SDbInfo *)calloc(1, sizeof(SDbInfo));
if (g_dbInfos[count] == NULL) {
errorPrint("%s() LN%d, failed to allocate %"PRIu64" memory\n",
__func__, __LINE__, (uint64_t)sizeof(SDbInfo));
goto _exit_failure;
}
okPrint("%s exists\n", (char *)row[TSDB_SHOW_DB_NAME_INDEX]);
tstrncpy(g_dbInfos[count]->name, (char *)row[TSDB_SHOW_DB_NAME_INDEX],
min(TSDB_DB_NAME_LEN,
fields[TSDB_SHOW_DB_NAME_INDEX].bytes + 1));
if (g_args.with_property) {
g_dbInfos[count]->ntables =
*((int32_t *)row[TSDB_SHOW_DB_NTABLES_INDEX]);
g_dbInfos[count]->vgroups =
*((int32_t *)row[TSDB_SHOW_DB_VGROUPS_INDEX]);
g_dbInfos[count]->replica =
*((int16_t *)row[TSDB_SHOW_DB_REPLICA_INDEX]);
g_dbInfos[count]->quorum =
*((int16_t *)row[TSDB_SHOW_DB_QUORUM_INDEX]);
g_dbInfos[count]->days =
*((int16_t *)row[TSDB_SHOW_DB_DAYS_INDEX]);
tstrncpy(g_dbInfos[count]->keeplist,
(char *)row[TSDB_SHOW_DB_KEEP_INDEX],
min(32, fields[TSDB_SHOW_DB_KEEP_INDEX].bytes + 1));
//g_dbInfos[count]->daysToKeep = *((int16_t *)row[TSDB_SHOW_DB_KEEP_INDEX]);
//g_dbInfos[count]->daysToKeep1;
//g_dbInfos[count]->daysToKeep2;
g_dbInfos[count]->cache =
*((int32_t *)row[TSDB_SHOW_DB_CACHE_INDEX]);
g_dbInfos[count]->blocks =
*((int32_t *)row[TSDB_SHOW_DB_BLOCKS_INDEX]);
g_dbInfos[count]->minrows =
*((int32_t *)row[TSDB_SHOW_DB_MINROWS_INDEX]);
g_dbInfos[count]->maxrows =
*((int32_t *)row[TSDB_SHOW_DB_MAXROWS_INDEX]);
g_dbInfos[count]->wallevel =
*((int8_t *)row[TSDB_SHOW_DB_WALLEVEL_INDEX]);
g_dbInfos[count]->fsync =
*((int32_t *)row[TSDB_SHOW_DB_FSYNC_INDEX]);
g_dbInfos[count]->comp =
(int8_t)(*((int8_t *)row[TSDB_SHOW_DB_COMP_INDEX]));
g_dbInfos[count]->cachelast =
(int8_t)(*((int8_t *)row[TSDB_SHOW_DB_CACHELAST_INDEX]));
tstrncpy(g_dbInfos[count]->precision,
(char *)row[TSDB_SHOW_DB_PRECISION_INDEX],
DB_PRECISION_LEN);
g_dbInfos[count]->update =
*((int8_t *)row[TSDB_SHOW_DB_UPDATE_INDEX]);
}
count++;
if (g_args.databases) {
if (count > g_args.dumpDbCount)
break;
} else if (!g_args.all_databases) {
if (count >= 1)
break;
}
}
if (count == 0) {
errorPrint("%d databases valid to dump\n", count);
goto _exit_failure;
}
if (g_args.databases || g_args.all_databases) { // case: taosdump --databases dbx,dby ... OR taosdump --all-databases
for (int i = 0; i < count; i++) {
int64_t records = 0;
records = dumpWholeDatabase(g_dbInfos[i], fp);
if (records >= 0) {
okPrint("Database %s dumped\n", g_dbInfos[i]->name);
g_totalDumpOutRows += records;
}
}
} else {
if (1 == g_args.arg_list_len) {
int64_t records = dumpWholeDatabase(g_dbInfos[0], fp);
if (records >= 0) {
okPrint("Database %s dumped\n", g_dbInfos[0]->name);
g_totalDumpOutRows += records;
}
} else {
dumpCreateDbClause(g_dbInfos[0], g_args.with_property, fp);
}
int superTblCnt = 0 ;
for (int i = 1; g_args.arg_list[i]; i++) {
TableRecordInfo tableRecordInfo;
if (getTableRecordInfo(g_dbInfos[0]->name,
g_args.arg_list[i],
&tableRecordInfo) < 0) {
errorPrint("input the invalid table %s\n",
g_args.arg_list[i]);
continue;
}
int64_t records = 0;
if (tableRecordInfo.isStb) { // dump all table of this stable
int ret = dumpStableClasuse(
taos,
g_dbInfos[0],
tableRecordInfo.tableRecord.stable,
fp);
if (ret >= 0) {
superTblCnt++;
records = dumpNtbOfStbByThreads(g_dbInfos[0], g_args.arg_list[i]);
}
} else if (tableRecordInfo.belongStb){
dumpStableClasuse(
taos,
g_dbInfos[0],
tableRecordInfo.tableRecord.stable,
fp);
records = dumpNormalTableBelongStb(
taos,
g_dbInfos[0],
tableRecordInfo.tableRecord.stable,
g_args.arg_list[i]);
} else {
records = dumpNormalTableWithoutStb(taos, g_dbInfos[0], g_args.arg_list[i]);
}
if (records >= 0) {
okPrint("table: %s dumped\n", g_args.arg_list[i]);
g_totalDumpOutRows += records;
}
}
}
taos_close(taos);
/* Close the handle and return */
fclose(fp);
taos_free_result(result);
freeDbInfos();
fprintf(stderr, "dump out rows: %" PRId64 "\n", g_totalDumpOutRows);
return 0;
_exit_failure:
fclose(fp);
taos_close(taos);
taos_free_result(result);
freeDbInfos();
errorPrint("dump out rows: %" PRId64 "\n", g_totalDumpOutRows);
return -1;
}
int main(int argc, char *argv[]) {
static char verType[32] = {0};
sprintf(verType, "version: %s\n", version);
argp_program_version = verType;
int ret = 0;
/* Parse our arguments; every option seen by parse_opt will be
reflected in arguments. */
if (argc > 1) {
// parse_precision_first(argc, argv, &g_args);
parse_timestamp(argc, argv, &g_args);
parse_args(argc, argv, &g_args);
}
argp_parse(&argp, argc, argv, 0, 0, &g_args);
if (g_args.abort) {
#ifndef _ALPINE
error(10, 0, "ABORTED");
#else
abort();
#endif
}
printf("====== arguments config ======\n");
printf("host: %s\n", g_args.host);
printf("user: %s\n", g_args.user);
printf("password: %s\n", g_args.password);
printf("port: %u\n", g_args.port);
printf("mysqlFlag: %d\n", g_args.mysqlFlag);
printf("outpath: %s\n", g_args.outpath);
printf("inpath: %s\n", g_args.inpath);
printf("resultFile: %s\n", g_args.resultFile);
printf("encode: %s\n", g_args.encode);
printf("all_databases: %s\n", g_args.all_databases?"true":"false");
printf("databases: %d\n", g_args.databases);
printf("databasesSeq: %s\n", g_args.databasesSeq);
printf("schemaonly: %s\n", g_args.schemaonly?"true":"false");
printf("with_property: %s\n", g_args.with_property?"true":"false");
#ifdef AVRO_SUPPORT
printf("avro format: %s\n", g_args.avro?"true":"false");
printf("avro codec: %s\n", g_avro_codec[g_args.avro_codec]);
#endif
printf("start_time: %" PRId64 "\n", g_args.start_time);
printf("human readable start time: %s \n", g_args.humanStartTime);
printf("end_time: %" PRId64 "\n", g_args.end_time);
printf("human readable end time: %s \n", g_args.humanEndTime);
printf("precision: %s\n", g_args.precision);
printf("data_batch: %d\n", g_args.data_batch);
printf("max_sql_len: %d\n", g_args.max_sql_len);
printf("table_batch: %d\n", g_args.table_batch);
printf("thread_num: %d\n", g_args.thread_num);
printf("allow_sys: %d\n", g_args.allow_sys);
printf("abort: %d\n", g_args.abort);
printf("isDumpIn: %d\n", g_args.isDumpIn);
printf("arg_list_len: %d\n", g_args.arg_list_len);
printf("debug_print: %d\n", g_args.debug_print);
for (int32_t i = 0; i < g_args.arg_list_len; i++) {
if (g_args.databases || g_args.all_databases) {
errorPrint("%s is an invalid input if database(s) be already specified.\n",
g_args.arg_list[i]);
exit(EXIT_FAILURE);
} else {
printf("arg_list[%d]: %s\n", i, g_args.arg_list[i]);
}
}
printf("==============================\n");
if (checkParam(&g_args) < 0) {
exit(EXIT_FAILURE);
}
g_fpOfResult = fopen(g_args.resultFile, "a");
if (NULL == g_fpOfResult) {
errorPrint("Failed to open %s for save result\n", g_args.resultFile);
exit(-1);
};
fprintf(g_fpOfResult, "#############################################################################\n");
fprintf(g_fpOfResult, "============================== arguments config =============================\n");
fprintf(g_fpOfResult, "host: %s\n", g_args.host);
fprintf(g_fpOfResult, "user: %s\n", g_args.user);
fprintf(g_fpOfResult, "password: %s\n", g_args.password);
fprintf(g_fpOfResult, "port: %u\n", g_args.port);
fprintf(g_fpOfResult, "mysqlFlag: %d\n", g_args.mysqlFlag);
fprintf(g_fpOfResult, "outpath: %s\n", g_args.outpath);
fprintf(g_fpOfResult, "inpath: %s\n", g_args.inpath);
fprintf(g_fpOfResult, "resultFile: %s\n", g_args.resultFile);
fprintf(g_fpOfResult, "encode: %s\n", g_args.encode);
fprintf(g_fpOfResult, "all_databases: %s\n", g_args.all_databases?"true":"false");
fprintf(g_fpOfResult, "databases: %d\n", g_args.databases);
fprintf(g_fpOfResult, "databasesSeq: %s\n", g_args.databasesSeq);
fprintf(g_fpOfResult, "schemaonly: %s\n", g_args.schemaonly?"true":"false");
fprintf(g_fpOfResult, "with_property: %s\n", g_args.with_property?"true":"false");
#ifdef AVRO_SUPPORT
fprintf(g_fpOfResult, "avro format: %s\n", g_args.avro?"true":"false");
fprintf(g_fpOfResult, "avro codec: %s\n", g_avro_codec[g_args.avro_codec]);
#endif
fprintf(g_fpOfResult, "start_time: %" PRId64 "\n", g_args.start_time);
fprintf(g_fpOfResult, "human readable start time: %s \n", g_args.humanStartTime);
fprintf(g_fpOfResult, "end_time: %" PRId64 "\n", g_args.end_time);
fprintf(g_fpOfResult, "human readable end time: %s \n", g_args.humanEndTime);
fprintf(g_fpOfResult, "precision: %s\n", g_args.precision);
fprintf(g_fpOfResult, "data_batch: %d\n", g_args.data_batch);
fprintf(g_fpOfResult, "max_sql_len: %d\n", g_args.max_sql_len);
fprintf(g_fpOfResult, "table_batch: %d\n", g_args.table_batch);
fprintf(g_fpOfResult, "thread_num: %d\n", g_args.thread_num);
fprintf(g_fpOfResult, "allow_sys: %d\n", g_args.allow_sys);
fprintf(g_fpOfResult, "abort: %d\n", g_args.abort);
fprintf(g_fpOfResult, "isDumpIn: %d\n", g_args.isDumpIn);
fprintf(g_fpOfResult, "arg_list_len: %d\n", g_args.arg_list_len);
for (int32_t i = 0; i < g_args.arg_list_len; i++) {
fprintf(g_fpOfResult, "arg_list[%d]: %s\n", i, g_args.arg_list[i]);
}
g_numOfCores = (int32_t)sysconf(_SC_NPROCESSORS_ONLN);
time_t tTime = time(NULL);
struct tm tm = *localtime(&tTime);
if (g_args.isDumpIn) {
fprintf(g_fpOfResult, "============================== DUMP IN ============================== \n");
fprintf(g_fpOfResult, "# DumpIn start time: %d-%02d-%02d %02d:%02d:%02d\n",
tm.tm_year + 1900, tm.tm_mon + 1,
tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec);
if (dumpIn() < 0) {
errorPrint("%s\n", "dumpIn() failed!");
ret = -1;
}
} else {
fprintf(g_fpOfResult, "============================== DUMP OUT ============================== \n");
fprintf(g_fpOfResult, "# DumpOut start time: %d-%02d-%02d %02d:%02d:%02d\n",
tm.tm_year + 1900, tm.tm_mon + 1,
tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec);
if (dumpOut() < 0) {
ret = -1;
} else {
fprintf(g_fpOfResult, "\n============================== TOTAL STATISTICS ============================== \n");
fprintf(g_fpOfResult, "# total database count: %d\n",
g_resultStatistics.totalDatabasesOfDumpOut);
fprintf(g_fpOfResult, "# total super table count: %d\n",
g_resultStatistics.totalSuperTblsOfDumpOut);
fprintf(g_fpOfResult, "# total child table count: %"PRId64"\n",
g_resultStatistics.totalChildTblsOfDumpOut);
fprintf(g_fpOfResult, "# total row count: %"PRId64"\n",
g_resultStatistics.totalRowsOfDumpOut);
}
}
fprintf(g_fpOfResult, "\n");
fclose(g_fpOfResult);
if (g_tablesList) {
free(g_tablesList);
}
return ret;
}
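# Migration test: dump tables from a TDengine 1.6 instance in batches of
# NTABLES_PER_DUMP using taosdump 1.6, then import each batch into a TDengine
# 2.0 instance with taosdump 2.0.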
taos1_6="/root/mnt/work/test/td1.6/build/bin/taos"
taosdump1_6="/root/mnt/work/test/td1.6/build/bin/taosdump"
taoscfg1_6="/root/mnt/work/test/td1.6/test/cfg"
taos2_0="/root/mnt/work/test/td2.0/build/bin/taos"
taosdump2_0="/root/mnt/work/test/td2.0/build/bin/taosdump"
taoscfg2_0="/root/mnt/work/test/td2.0/test/cfg"
data_dir="/root/mnt/work/test/td1.6/output"
table_list="/root/mnt/work/test/td1.6/tables"
DBNAME="test"
NTABLES=$(wc -l ${table_list} | awk '{print $1;}')
NTABLES_PER_DUMP=101
mkdir -p ${data_dir}
i=0
round=0
command="${taosdump1_6} -c ${taoscfg1_6} -o ${data_dir} -N 100 -T 20 ${DBNAME}"
while IFS= read -r line
do
i=$((i+1))
command="${command} ${line}"
if [[ "$i" -eq ${NTABLES_PER_DUMP} ]]; then
round=$((round+1))
echo "Starting round ${round} dump out..."
rm -f ${data_dir}/*
${command}
echo "Starting round ${round} dump in..."
${taosdump2_0} -c ${taoscfg2_0} -i ${data_dir}
# Reset variables
# command="${taosdump1_6} -c ${taoscfg1_6} -o ${data_dir} -N 100 ${DBNAME}"
command="${taosdump1_6} -c ${taoscfg1_6} -o ${data_dir} -N 100 -T 20 ${DBNAME}"
i=0
fi
done < "${table_list}"
if [[ ${i} -ne "0" ]]; then
round=$((round+1))
echo "Starting round ${round} dump out..."
rm -f ${data_dir}/*
${command}
echo "Starting round ${round} dump in..."
${taosdump2_0} -c ${taoscfg2_0} -i ${data_dir}
fi
......@@ -103,6 +103,8 @@ int32_t parseAbsoluteDuration(char* token, int32_t tokenlen, int64_t* ts, char*
int32_t parseNatualDuration(const char* token, int32_t tokenLen, int64_t* duration, char* unit, int32_t timePrecision);
int32_t taosParseTime(char* timestr, int64_t* time, int32_t len, int32_t timePrec, int8_t daylight);
int32_t taos_parse_time(char* timestr, int64_t* time, int32_t len, int32_t timePrec, int8_t daylight);
void deltaToUtcInitOnce();
int64_t convertTimePrecision(int64_t time, int32_t fromPrecision, int32_t toPrecision);
......
......@@ -121,6 +121,10 @@ bool checkTzPresent(char *str, int32_t len) {
}
inline int32_t taos_parse_time(char* timestr, int64_t* time, int32_t len, int32_t timePrec, int8_t day_light) {
return taosParseTime(timestr, time, len, timePrec, day_light);
}
char* forwardToTimeStringEnd(char* str) {
int32_t i = 0;
int32_t numOfSep = 0;
......
......@@ -8,11 +8,11 @@ INCLUDE_DIRECTORIES(inc)
AUX_SOURCE_DIRECTORY(src SRC)
ADD_LIBRARY(query ${SRC})
SET_SOURCE_FILES_PROPERTIES(src/sql.c PROPERTIES COMPILE_FLAGS -w)
TARGET_LINK_LIBRARIES(query tsdb tutil lua)
TARGET_LINK_LIBRARIES(query tsdb tutil ${LINK_LUA})
IF (TD_LINUX)
IF (TD_BUILD_LUA)
TARGET_LINK_LIBRARIES(query m rt lua)
TARGET_LINK_LIBRARIES(query m rt ${LINK_LUA})
ELSE ()
TARGET_LINK_LIBRARIES(query m rt)
ENDIF ()
......@@ -21,7 +21,7 @@ ENDIF ()
IF (TD_DARWIN)
IF (TD_BUILD_LUA)
TARGET_LINK_LIBRARIES(query m lua)
TARGET_LINK_LIBRARIES(query m ${LINK_LUA})
ELSE ()
TARGET_LINK_LIBRARIES(query m)
ENDIF ()
......
......@@ -5,7 +5,7 @@
#include "queryLog.h"
static int32_t getDataStartOffset();
static void TSBufUpdateGroupInfo(STSBuf* pTSBuf, int32_t index, STSGroupBlockInfo* pBlockInfo);
static void TSBufUpdateGroupInfo(STSBuf* pTSBuf, int32_t qry_index, STSGroupBlockInfo* pBlockInfo);
static STSBuf* allocResForTSBuf(STSBuf* pTSBuf);
static int32_t STSBufUpdateHeader(STSBuf* pTSBuf, STSBufFileHeader* pHeader);
......@@ -697,8 +697,8 @@ bool tsBufNextPos(STSBuf* pTSBuf) {
int32_t groupIndex = pTSBuf->numOfGroups - 1;
pCur->vgroupIndex = groupIndex;
int32_t id = pTSBuf->pData[pCur->vgroupIndex].info.id;
STSGroupBlockInfo* pBlockInfo = tsBufGetGroupBlockInfo(pTSBuf, id);
// get current vgroupIndex BlockInfo
STSGroupBlockInfo* pBlockInfo = &pTSBuf->pData[pCur->vgroupIndex].info;
int32_t blockIndex = pBlockInfo->numOfBlocks - 1;
tsBufGetBlock(pTSBuf, groupIndex, blockIndex);
......@@ -718,32 +718,43 @@ bool tsBufNextPos(STSBuf* pTSBuf) {
while (1) {
assert(pTSBuf->tsData.len == pTSBuf->block.numOfElem * TSDB_KEYSIZE);
// tsIndex is last
if ((pCur->order == TSDB_ORDER_ASC && pCur->tsIndex >= pTSBuf->block.numOfElem - 1) ||
(pCur->order == TSDB_ORDER_DESC && pCur->tsIndex <= 0)) {
int32_t id = pTSBuf->pData[pCur->vgroupIndex].info.id;
STSGroupBlockInfo* pBlockInfo = tsBufGetGroupBlockInfo(pTSBuf, id);
if (pBlockInfo == NULL || (pCur->blockIndex >= pBlockInfo->numOfBlocks - 1 && pCur->order == TSDB_ORDER_ASC) ||
// get current vgroupIndex BlockInfo
STSGroupBlockInfo* pBlockInfo = &pTSBuf->pData[pCur->vgroupIndex].info;
if (pBlockInfo == NULL) {
return false;
}
// blockIndex is last
if ((pCur->blockIndex >= pBlockInfo->numOfBlocks - 1 && pCur->order == TSDB_ORDER_ASC) ||
(pCur->blockIndex <= 0 && pCur->order == TSDB_ORDER_DESC)) {
// vgroupIndex is last
if ((pCur->vgroupIndex >= pTSBuf->numOfGroups - 1 && pCur->order == TSDB_ORDER_ASC) ||
(pCur->vgroupIndex <= 0 && pCur->order == TSDB_ORDER_DESC)) {
// this is end. both vgroupIndex and blockindex and tsIndex is last
pCur->vgroupIndex = -1;
return false;
}
if (pBlockInfo == NULL) {
return false;
}
// blockIndex must match the next group
int32_t nextGroupIdx = pCur->vgroupIndex + step;
pBlockInfo = &pTSBuf->pData[nextGroupIdx].info;
int32_t blockIndex = (pCur->order == TSDB_ORDER_ASC) ? 0 : (pBlockInfo->numOfBlocks - 1);
// move vgroupIndex to the next group; values are set in tsBufGetBlock()
tsBufGetBlock(pTSBuf, pCur->vgroupIndex + step, blockIndex);
break;
} else {
// move blockIndex to the next block; values are set in tsBufGetBlock()
tsBufGetBlock(pTSBuf, pCur->vgroupIndex, pCur->blockIndex + step);
break;
}
} else {
// move tsIndex to the next position
pCur->tsIndex += step;
break;
}
......@@ -767,7 +778,7 @@ STSElem tsBufGetElem(STSBuf* pTSBuf) {
}
STSCursor* pCur = &pTSBuf->cur;
if (pCur != NULL && pCur->vgroupIndex < 0) {
if (pCur->vgroupIndex < 0) {
return elem1;
}
......@@ -796,7 +807,7 @@ int32_t tsBufMerge(STSBuf* pDestBuf, const STSBuf* pSrcBuf) {
return -1;
}
// src can only have one vnode index
// src can only have one vnode qry_index
assert(pSrcBuf->numOfGroups == 1);
// there are data in buffer, flush to disk first
......@@ -819,7 +830,7 @@ int32_t tsBufMerge(STSBuf* pDestBuf, const STSBuf* pSrcBuf) {
pDestBuf->pData = tmp;
}
// directly copy the vnode index information
// directly copy the vnode qry_index information
memcpy(&pDestBuf->pData[oldSize], pSrcBuf->pData, (size_t)pSrcBuf->numOfGroups * sizeof(STSGroupBlockInfoEx));
// set the new offset value
......@@ -1012,8 +1023,8 @@ static int32_t getDataStartOffset() {
}
// update prev vnode length info in file
static void TSBufUpdateGroupInfo(STSBuf* pTSBuf, int32_t index, STSGroupBlockInfo* pBlockInfo) {
int32_t offset = sizeof(STSBufFileHeader) + index * sizeof(STSGroupBlockInfo);
static void TSBufUpdateGroupInfo(STSBuf* pTSBuf, int32_t qry_index, STSGroupBlockInfo* pBlockInfo) {
int32_t offset = sizeof(STSBufFileHeader) + qry_index * sizeof(STSGroupBlockInfo);
doUpdateGroupInfo(pTSBuf, offset, pBlockInfo);
}
......
......@@ -10,13 +10,13 @@ IF (TD_LINUX)
ADD_EXECUTABLE(subscribe subscribe.c)
TARGET_LINK_LIBRARIES(subscribe taos_static trpc tutil pthread )
ADD_EXECUTABLE(epoll epoll.c)
TARGET_LINK_LIBRARIES(epoll taos_static trpc tutil pthread lua)
TARGET_LINK_LIBRARIES(epoll taos_static trpc tutil pthread ${LINK_LUA})
ENDIF ()
IF (TD_DARWIN)
INCLUDE_DIRECTORIES(. ${TD_COMMUNITY_DIR}/src/inc ${TD_COMMUNITY_DIR}/src/client/inc ${TD_COMMUNITY_DIR}/inc)
AUX_SOURCE_DIRECTORY(. SRC)
ADD_EXECUTABLE(demo demo.c)
TARGET_LINK_LIBRARIES(demo taos_static trpc tutil pthread lua)
TARGET_LINK_LIBRARIES(demo taos_static trpc tutil pthread ${LINK_LUA})
ADD_EXECUTABLE(epoll epoll.c)
TARGET_LINK_LIBRARIES(epoll taos_static trpc tutil pthread lua)
TARGET_LINK_LIBRARIES(epoll taos_static trpc tutil pthread ${LINK_LUA})
ENDIF ()
<?xml version="1.0" encoding="UTF-8"?>
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.4.1">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.tearDown_on_shutdown">true</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<SetupThreadGroup guiclass="SetupThreadGroupGui" testclass="SetupThreadGroup" testname="setUp create db" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</SetupThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create db" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">create database if not exists test precision &apos;ms&apos;</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/rest/sql</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="opentsdb_json_mixTbRows" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">looptimes</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">100</stringProp>
<stringProp name="ThreadGroup.ramp_time"></stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="opentsdb_json_mixTbRows" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">{&quot;metric&quot;: &quot;cpu.usage_user.rows&quot;, &quot;timestamp&quot;:${ts_counter}, &quot;value&quot;:32.261068286779754, &quot;tags&quot;:{&quot;arch&quot;:&quot;x64&quot;,&quot;datacenter&quot;:&quot;us-west-1b&quot;,&quot;hostname&quot;:&quot;host_5&quot;,&quot;os&quot;:&quot;Ubuntu16&quot;,&quot;rack&quot;:&quot;13&quot;,&quot;region&quot;:&quot;us-west-1&quot;,&quot;service&quot;:&quot;10&quot;,&quot;service_environment&quot;:&quot;staging&quot;,&quot;service_version&quot;:&quot;${rows_counter}&quot;,&quot;team&quot;:&quot;NYC&quot;}}</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/opentsdb/v1/put/json/test</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
<CounterConfig guiclass="CounterConfigGui" testclass="CounterConfig" testname="ts_counter" enabled="true">
<stringProp name="CounterConfig.start">1614530008000</stringProp>
<stringProp name="CounterConfig.end"></stringProp>
<stringProp name="CounterConfig.incr">1</stringProp>
<stringProp name="CounterConfig.name">ts_counter</stringProp>
<stringProp name="CounterConfig.format"></stringProp>
<boolProp name="CounterConfig.per_user">false</boolProp>
</CounterConfig>
<hashTree/>
<CounterConfig guiclass="CounterConfigGui" testclass="CounterConfig" testname="rows_counter" enabled="true">
<stringProp name="CounterConfig.start">1</stringProp>
<stringProp name="CounterConfig.end">row_count</stringProp>
<stringProp name="CounterConfig.incr">1</stringProp>
<stringProp name="CounterConfig.name">rows_counter</stringProp>
<stringProp name="CounterConfig.format"></stringProp>
<boolProp name="CounterConfig.per_user">false</boolProp>
</CounterConfig>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>
<?xml version="1.0" encoding="UTF-8"?>
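<!-- Plan: opentsdb_createStb (JSON). The setUp group creates database "test" (ms precision) through
     /rest/sql using the default root:taosdata Basic auth header; the main group then runs 100 threads
     posting OpenTSDB JSON to /opentsdb/v1/put/json/test with a randomized metric name, so each record
     targets a new super table. "looptimes" appears to be a placeholder for the loop count. -->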
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.4.1">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.tearDown_on_shutdown">true</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<SetupThreadGroup guiclass="SetupThreadGroupGui" testclass="SetupThreadGroup" testname="setUp create db" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</SetupThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create db" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">create database if not exists test precision &apos;ms&apos;</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/rest/sql</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="opentsdb_createStb" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">looptimes</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">100</stringProp>
<stringProp name="ThreadGroup.ramp_time"></stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create stb line" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">{&quot;metric&quot;: &quot;cpu.usage_user_${__Random(1,100000,)}_${__Random(1,100000,)}_${__Random(1,100000,)}&quot;, &quot;timestamp&quot;:1626006833640, &quot;value&quot;:32.261068286779754, &quot;tags&quot;:{&quot;arch&quot;:&quot;x64&quot;,&quot;datacenter&quot;:&quot;us-west-1b&quot;,&quot;hostname&quot;:&quot;host_5&quot;,&quot;os&quot;:&quot;Ubuntu16&quot;,&quot;rack&quot;:&quot;13&quot;,&quot;region&quot;:&quot;us-west-1&quot;,&quot;service&quot;:&quot;10&quot;,&quot;service_environment&quot;:&quot;staging&quot;,&quot;service_version&quot;:&quot;0&quot;,&quot;team&quot;:&quot;NYC&quot;}}</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/opentsdb/v1/put/json/test</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>
<?xml version="1.0" encoding="UTF-8"?>
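<!-- Plan: opentsdb_createTb (JSON). After the usual create-database setUp, 100 threads post JSON for the
     fixed metric cpu.usage_user but with randomized rack, service and service_version tags, so each
     record maps to a new child table under a single super table. -->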
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.4.1">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.tearDown_on_shutdown">true</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<SetupThreadGroup guiclass="SetupThreadGroupGui" testclass="SetupThreadGroup" testname="setUp create db" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</SetupThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create db" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">create database if not exists test precision &apos;ms&apos;</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/rest/sql</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="opentsdb_createTb" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">looptimes</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">100</stringProp>
<stringProp name="ThreadGroup.ramp_time"></stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create tb line" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">{&quot;metric&quot;: &quot;cpu.usage_user&quot;, &quot;timestamp&quot;:1626006833640, &quot;value&quot;:32.261068286779754, &quot;tags&quot;:{&quot;arch&quot;:&quot;x64&quot;,&quot;datacenter&quot;:&quot;us-west-1b&quot;,&quot;hostname&quot;:&quot;host_5&quot;,&quot;os&quot;:&quot;Ubuntu16&quot;,&quot;rack&quot;:&quot;${__Random(1,100000,)}&quot;,&quot;region&quot;:&quot;us-west-1&quot;,&quot;service&quot;:&quot;${__Random(1,100000,)}&quot;,&quot;service_environment&quot;:&quot;staging&quot;,&quot;service_version&quot;:&quot;${__Random(1,100000,)}&quot;,&quot;team&quot;:&quot;NYC&quot;}}</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/opentsdb/v1/put/json/test</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>
<?xml version="1.0" encoding="UTF-8"?>
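<!-- Plan: opentsdb_insertRows (JSON). After the usual create-database setUp, 24 threads post JSON with a
     fixed tag set and a ${ts_counter} timestamp (counter starting at 1614530008000, step 1), i.e. plain
     row inserts into one child table of cpu.usage_user.rows. -->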
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.4.1">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.tearDown_on_shutdown">true</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<SetupThreadGroup guiclass="SetupThreadGroupGui" testclass="SetupThreadGroup" testname="setUp create db" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</SetupThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create db" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">create database if not exists test precision &apos;ms&apos;</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/rest/sql</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="opentsdb_insertRows" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">looptimes</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">24</stringProp>
<stringProp name="ThreadGroup.ramp_time"></stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="insert rows" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">{&quot;metric&quot;: &quot;cpu.usage_user.rows&quot;, &quot;timestamp&quot;:${ts_counter}, &quot;value&quot;:32.261068286779754, &quot;tags&quot;:{&quot;arch&quot;:&quot;x64&quot;,&quot;datacenter&quot;:&quot;us-west-1b&quot;,&quot;hostname&quot;:&quot;host_5&quot;,&quot;os&quot;:&quot;Ubuntu16&quot;,&quot;rack&quot;:&quot;13&quot;,&quot;region&quot;:&quot;us-west-1&quot;,&quot;service&quot;:&quot;10&quot;,&quot;service_environment&quot;:&quot;staging&quot;,&quot;service_version&quot;:&quot;0&quot;,&quot;team&quot;:&quot;NYC&quot;}}</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/opentsdb/v1/put/json/test</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
<CounterConfig guiclass="CounterConfigGui" testclass="CounterConfig" testname="ts_counter" enabled="true">
<stringProp name="CounterConfig.start">1614530008000</stringProp>
<stringProp name="CounterConfig.end"></stringProp>
<stringProp name="CounterConfig.incr">1</stringProp>
<stringProp name="CounterConfig.name">ts_counter</stringProp>
<stringProp name="CounterConfig.format"></stringProp>
<boolProp name="CounterConfig.per_user">false</boolProp>
</CounterConfig>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>
<?xml version="1.0" encoding="UTF-8"?>
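<!-- Plan: json_mix_tb_rows (JSON, CSV-driven). After the usual create-database setUp, 100 threads post
     JSON whose timestamp (${ts_csv_count}) and service_version tag (${row_csv_count}) are read from a
     CSV Data Set; "import_file_name" appears to be a placeholder for the actual CSV path. -->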
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.4.1">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.tearDown_on_shutdown">true</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<SetupThreadGroup guiclass="SetupThreadGroupGui" testclass="SetupThreadGroup" testname="setUp_create_db" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</SetupThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create db" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">create database if not exists test precision &apos;ms&apos;</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/rest/sql</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="json_mix_tb_rows" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">looptimes</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">100</stringProp>
<stringProp name="ThreadGroup.ramp_time"></stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="json_mix_tb_rows" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">{&quot;metric&quot;: &quot;cpu.usage_user.rows&quot;, &quot;timestamp&quot;:${ts_csv_count}, &quot;value&quot;:32.261068286779754, &quot;tags&quot;:{&quot;arch&quot;:&quot;x64&quot;,&quot;datacenter&quot;:&quot;us-west-1b&quot;,&quot;hostname&quot;:&quot;host_5&quot;,&quot;os&quot;:&quot;Ubuntu16&quot;,&quot;rack&quot;:&quot;13&quot;,&quot;region&quot;:&quot;us-west-1&quot;,&quot;service&quot;:&quot;10&quot;,&quot;service_environment&quot;:&quot;staging&quot;,&quot;service_version&quot;:&quot;${row_csv_count}&quot;,&quot;team&quot;:&quot;NYC&quot;}}</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/opentsdb/v1/put/json/test</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<CSVDataSet guiclass="TestBeanGUI" testclass="CSVDataSet" testname="CSV Data Set Config" enabled="true">
<stringProp name="delimiter">,</stringProp>
<stringProp name="fileEncoding">UTF-8</stringProp>
<stringProp name="filename">import_file_name</stringProp>
<boolProp name="ignoreFirstLine">false</boolProp>
<boolProp name="quotedData">false</boolProp>
<boolProp name="recycle">true</boolProp>
<stringProp name="shareMode">shareMode.all</stringProp>
<boolProp name="stopThread">false</boolProp>
<stringProp name="variableNames">ts_csv_count,row_csv_count</stringProp>
</CSVDataSet>
<hashTree/>
</hashTree>
</hashTree>
</jmeterTestPlan>
<?xml version="1.0" encoding="UTF-8"?>
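<!-- Plan: opentsdb_100WTb100Rows (telnet). After the usual create-database setUp, 100 threads post
     OpenTSDB telnet-format lines to /opentsdb/v1/put/telnet/test, with ${ts_counter} driving the
     timestamp and ${rows_counter} (1 .. row_count) varying the service_version tag. -->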
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.4.1">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.tearDown_on_shutdown">true</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<SetupThreadGroup guiclass="SetupThreadGroupGui" testclass="SetupThreadGroup" testname="setUp create db" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</SetupThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create db" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">create database if not exists test precision &apos;ms&apos;</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/rest/sql</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="opentsdb_100WTb100Rows" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">looptimes</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">100</stringProp>
<stringProp name="ThreadGroup.ramp_time"></stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="opentsdb_100WTb100Rows" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">cpu.usage_user.rows ${ts_counter} 22.345567 arch=x64 datacenter=us-west-1b hostname=host_5 os=Ubuntu16 rack=13 region=us-west-1 service=10 service_environment=staging service_version=${rows_counter} team=NYC</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/opentsdb/v1/put/telnet/test</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
<CounterConfig guiclass="CounterConfigGui" testclass="CounterConfig" testname="ts_counter" enabled="true">
<stringProp name="CounterConfig.start">1614530008000</stringProp>
<stringProp name="CounterConfig.end"></stringProp>
<stringProp name="CounterConfig.incr">1</stringProp>
<stringProp name="CounterConfig.name">ts_counter</stringProp>
<stringProp name="CounterConfig.format"></stringProp>
<boolProp name="CounterConfig.per_user">false</boolProp>
</CounterConfig>
<hashTree/>
<CounterConfig guiclass="CounterConfigGui" testclass="CounterConfig" testname="rows_counter" enabled="true">
<stringProp name="CounterConfig.start">1</stringProp>
<stringProp name="CounterConfig.end">row_count</stringProp>
<stringProp name="CounterConfig.incr">1</stringProp>
<stringProp name="CounterConfig.name">rows_counter</stringProp>
<stringProp name="CounterConfig.format"></stringProp>
<boolProp name="CounterConfig.per_user">false</boolProp>
</CounterConfig>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>
<?xml version="1.0" encoding="UTF-8"?>
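<!-- Plan: opentsdb_createStb (telnet). After the usual create-database setUp, 100 threads post
     telnet-format lines with randomized metric names to /opentsdb/v1/put/telnet/test, so each line
     creates a new super table. -->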
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.4.1">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.tearDown_on_shutdown">true</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<SetupThreadGroup guiclass="SetupThreadGroupGui" testclass="SetupThreadGroup" testname="setUp create db" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</SetupThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create db" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">create database if not exists test precision &apos;ms&apos;</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/rest/sql</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="opentsdb_createStb" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">looptimes</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">100</stringProp>
<stringProp name="ThreadGroup.ramp_time"></stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create stb line" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">cpu.usage_user_${__Random(1,100000,)}_${__Random(1,100000,)}_${__Random(1,100000,)} 1626006833640 32.261068286779754 arch=x64 datacenter=us-west-1b hostname=host_5 os=Ubuntu16 rack=13 region=us-west-1 service=10 service_environment=staging service_version=0 team=NYC</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/opentsdb/v1/put/telnet/test</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>
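Each of the concatenated JMeter plans in this file starts with the same "create db" setUp sampler, which posts a raw SQL statement to taosAdapter's REST endpoint. As a reference, the sketch below issues the equivalent request outside JMeter; it assumes the `requests` package and the same host, port and credentials that are hard-coded in the plans.
```python
import requests

# Equivalent of the "create db" sampler: POST raw SQL to the REST endpoint.
resp = requests.post(
    "http://192.168.1.85:6041/rest/sql",
    data="create database if not exists test precision 'ms'",
    auth=("root", "taosdata"),  # same credentials as the Basic auth header in the plans
)
print(resp.status_code, resp.text)
```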
<?xml version="1.0" encoding="UTF-8"?>
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.4.1">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.tearDown_on_shutdown">true</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<SetupThreadGroup guiclass="SetupThreadGroupGui" testclass="SetupThreadGroup" testname="setUp create db" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</SetupThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create db" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">create database if not exists test precision &apos;ms&apos;</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/rest/sql</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="opentsdb_createTb" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">looptimes</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">100</stringProp>
<stringProp name="ThreadGroup.ramp_time"></stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create tb line" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">cpu.usage_user 1626006833640 32.261068286779754 arch=x64 datacenter=us-west-1b hostname=host_5 os=Ubuntu16 rack=${__Random(1,100000,)} region=us-west-1 service=${__Random(1,100000,)} service_environment=staging service_version=${__Random(1,100000,)} team=NYC</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/opentsdb/v1/put/telnet/test</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>
<?xml version="1.0" encoding="UTF-8"?>
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.4.1">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.tearDown_on_shutdown">true</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<SetupThreadGroup guiclass="SetupThreadGroupGui" testclass="SetupThreadGroup" testname="setUp create db" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</SetupThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create db" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">create database if not exists test precision &apos;ms&apos;</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/rest/sql</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="opentsdb_insertRows" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">looptimes</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">24</stringProp>
<stringProp name="ThreadGroup.ramp_time"></stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="insert rows" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">cpu.usage_user.rows ${ts_counter} 22.345567 arch=x64 datacenter=us-west-1b hostname=host_5 os=Ubuntu16 rack=13 region=us-west-1 service=10 service_environment=staging service_version=0 team=NYC</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/opentsdb/v1/put/telnet/test</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
<CounterConfig guiclass="CounterConfigGui" testclass="CounterConfig" testname="ts_counter" enabled="true">
<stringProp name="CounterConfig.start">1614530008000</stringProp>
<stringProp name="CounterConfig.end"></stringProp>
<stringProp name="CounterConfig.incr">1</stringProp>
<stringProp name="CounterConfig.name">ts_counter</stringProp>
<stringProp name="CounterConfig.format"></stringProp>
<boolProp name="CounterConfig.per_user">false</boolProp>
</CounterConfig>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>
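The "insert rows" sampler above posts OpenTSDB telnet lines to taosAdapter's `/opentsdb/v1/put/telnet/test` endpoint, using the shared `ts_counter` (start 1614530008000, increment 1) as the timestamp. A minimal sketch of the same request outside JMeter, assuming the `requests` package and that the `test` database created by the setUp group already exists:
```python
import requests

# One telnet-format line, as produced by the sampler body with ts_counter expanded.
line = ("cpu.usage_user.rows 1614530008000 22.345567 arch=x64 "
        "datacenter=us-west-1b hostname=host_5 os=Ubuntu16 rack=13 "
        "region=us-west-1 service=10 service_environment=staging "
        "service_version=0 team=NYC")

resp = requests.post(
    "http://192.168.1.85:6041/opentsdb/v1/put/telnet/test",
    data=line,
    auth=("root", "taosdata"),  # matches the Basic cm9vdDp0YW9zZGF0YQ== header
)
print(resp.status_code, resp.text)
```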
<?xml version="1.0" encoding="UTF-8"?>
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.4.1">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.tearDown_on_shutdown">true</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<SetupThreadGroup guiclass="SetupThreadGroupGui" testclass="SetupThreadGroup" testname="setUp create db" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</SetupThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="create db" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">create database if not exists test precision &apos;ms&apos;</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/rest/sql</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="json_mix_tb_rows" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">looptimes</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">100</stringProp>
<stringProp name="ThreadGroup.ramp_time"></stringProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
<boolProp name="ThreadGroup.same_user_on_next_iteration">true</boolProp>
</ThreadGroup>
<hashTree>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="json_mix_tb_rows" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">cpu.usage_user.rows ${ts_csv_count} 22.345567 arch=x64 datacenter=us-west-1b hostname=host_5 os=Ubuntu16 rack=13 region=us-west-1 service=10 service_environment=staging service_version=${row_csv_count} team=NYC</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">http://192.168.1.85:6041/opentsdb/v1/put/telnet/test</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<ResultCollector guiclass="StatVisualizer" testclass="ResultCollector" testname="Aggregate Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>true</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>true</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
<sentBytes>true</sentBytes>
<url>true</url>
<threadCounts>true</threadCounts>
<idleTime>true</idleTime>
<connectTime>true</connectTime>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<HeaderManager guiclass="HeaderPanel" testclass="HeaderManager" testname="HTTP Header Manager" enabled="true">
<collectionProp name="HeaderManager.headers">
<elementProp name="" elementType="Header">
<stringProp name="Header.name">Authorization</stringProp>
<stringProp name="Header.value">Basic cm9vdDp0YW9zZGF0YQ==</stringProp>
</elementProp>
</collectionProp>
</HeaderManager>
<hashTree/>
</hashTree>
<CSVDataSet guiclass="TestBeanGUI" testclass="CSVDataSet" testname="CSV Data Set Config" enabled="true">
<stringProp name="delimiter">,</stringProp>
<stringProp name="fileEncoding">UTF-8</stringProp>
<stringProp name="filename">import_file_name</stringProp>
<boolProp name="ignoreFirstLine">false</boolProp>
<boolProp name="quotedData">false</boolProp>
<boolProp name="recycle">true</boolProp>
<stringProp name="shareMode">shareMode.all</stringProp>
<boolProp name="stopThread">false</boolProp>
<stringProp name="variableNames">ts_csv_count,row_csv_count</stringProp>
</CSVDataSet>
<hashTree/>
</hashTree>
</hashTree>
</jmeterTestPlan>
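The CSV Data Set Config above reads two comma-separated variables, `ts_csv_count` and `row_csv_count`, from the file substituted for `import_file_name`. The driver script below generates that file in `genMixTbRows`; here is a reduced-scale sketch of the same output format (the file name and counts are illustrative only):
```python
# Hypothetical, reduced-scale version of the import file consumed by the
# CSV Data Set Config above: each line is "<timestamp>,<table index>".
ts_start = 1614530008000
with open("import_opentsdb_telnet_2Tb3Rows.txt", "w", encoding="utf-8") as f:
    for table_index in range(2):      # 2 tables (illustrative)
        for _ in range(3):            # 3 rows per table (illustrative)
            f.write(f"{ts_start},{table_index}\n")
            ts_start += 1
```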
from fabric import Connection
from loguru import logger
import shutil
import os
import time
class TaosadapterPerftest():
def __init__(self):
self.ip = "192.168.1.85"
self.port = "22"
self.user = "root"
self.passwd = "tbase125!"
self.telnetCreateStbJmxFile = "opentsdb_telnet_createStb.jmx"
self.telnetCreateTbJmxFile = "opentsdb_telnet_createTb.jmx"
self.telnetInsertRowsFile = "opentsdb_telnet_insertRows.jmx"
# self.telnetMixJmxFile = "opentsdb_telnet_MixTbRows.jmx"
self.telnetMixJmxFile = "opentsdb_telnet_jmeter_csv_import.jmx"
self.jsonCreateStbJmxFile = "opentsdb_json_createStb.jmx"
self.jsonCreateTbJmxFile = "opentsdb_json_createTb.jmx"
self.jsonInsertRowsFile = "opentsdb_json_insertRows.jmx"
# self.jsonMixJmxFile = "opentsdb_json_MixTbRows.jmx"
self.jsonMixJmxFile = "opentsdb_json_jmeter_csv_import.jmx"
self.logfile = "taosadapter_perftest.log"
self.createStbThreads = 100
self.createTbThreads = 100
self.insertRowsThreads = 24
logger.add(self.logfile)
def exec_remote_cmd(self, cmd):
"""
remote exec shell cmd
"""
try:
c = Connection(self.ip, user=self.user, port=self.port, connect_timeout=120, connect_kwargs={"password": self.passwd})
result = c.run(cmd, pty=False, warn=True, hide=True).stdout
c.close()
return result
except Exception as e:
logger.error(f"exec cmd {cmd} failed:{e}");
def exec_local_cmd(self, shell_cmd):
'''
exec local shell cmd
'''
result = os.popen(shell_cmd).read().strip()
return result
def modifyJxmLooptimes(self, filename, looptimes, row_count=None, import_file_name=None):
'''
modify looptimes
'''
with open(filename, "r", encoding="utf-8") as f:
lines = f.readlines()
with open(filename, "w", encoding="utf-8") as f_w:
for line in lines:
if "looptimes" in line:
line = line.replace("looptimes", looptimes)
if row_count is not None:
if "row_count" in line:
line = line.replace("row_count", row_count)
if import_file_name is not None:
if "import_file_name" in line:
line = line.replace("import_file_name", import_file_name)
f_w.write(line)
def cleanAndRestartTaosd(self):
'''
restart taosd and clean env
'''
logger.info("---- restarting taosd and taosadapter ----")
self.exec_remote_cmd("systemctl stop taosd")
self.exec_remote_cmd("rm -rf /var/lib/taos/* /var/log/taos/*")
self.exec_remote_cmd("systemctl start taosd")
logger.info("---- finish restart ----")
time.sleep(60)
def recreateReportDir(self, path):
'''
recreate jmeter report path
'''
if os.path.exists(path):
self.exec_local_cmd(f'rm -rf {path}/*')
else:
os.makedirs(path)
def cleanLog(self):
'''
clean log
'''
with open(self.logfile, 'w') as f:
f.seek(0)
f.truncate()
def genMixTbRows(self, filename, table_count, row_count):
logger.info('generating import data file')
ts_start = 1614530008000
with open(filename, "w", encoding="utf-8") as f_w:
for i in range(table_count):
for j in range(row_count):
input_line = str(ts_start) + "," + str(i) + '\n'
ts_start += 1
f_w.write(input_line)
def outputParams(self, protocol, create_type):
'''
procotol is "telnet" or "json"
create_type is "stb" or "tb" or "rows"
'''
if protocol == "telnet":
if create_type == "stb":
return self.telnetCreateStbJmxFile, self.createStbThreads
elif create_type == "tb":
return self.telnetCreateTbJmxFile, self.createTbThreads
elif create_type == "rows":
return self.telnetInsertRowsFile, self.insertRowsThreads
else:
logger.error("create type error!")
else:
if create_type == "stb":
return self.jsonCreateStbJmxFile, self.createStbThreads
elif create_type == "tb":
return self.jsonCreateTbJmxFile, self.createTbThreads
elif create_type == "rows":
return self.jsonInsertRowsFile, self.insertRowsThreads
else:
logger.error("create type error!")
def insertTDengine(self, procotol, create_type, count):
'''
create stb/tb or insert rows
'''
self.cleanAndRestartTaosd()
jmxfile, threads = self.outputParams(procotol, create_type)
handle_file = str(count) + jmxfile
report_dir = f'testreport/{handle_file}'
self.recreateReportDir(report_dir)
shutil.copyfile(jmxfile, handle_file)
replace_count = int(count/threads)
self.modifyJxmLooptimes(handle_file, str(replace_count))
logger.info(f'jmeter running ----- jmeter -n -t {handle_file} -l {report_dir}/{handle_file}.txt -e -o {report_dir}')
result = self.exec_local_cmd(f"jmeter -n -t {handle_file} -l {report_dir}/{handle_file}.txt -e -o {report_dir}")
logger.info(result)
logger.info("----- sleep 120s and please record data -----")
time.sleep(120)
def insertMixTbRows(self, procotol, table_count, row_count):
self.cleanAndRestartTaosd()
local_path = os.getcwd()
jmxfile = f"opentsdb_{procotol}_{table_count}Tb{row_count}Rows.jmx"
import_file_name = f"import_opentsdb_{procotol}_{table_count}Tb{row_count}Rows.txt"
import_file_path = local_path + '/' + import_file_name
self.genMixTbRows(import_file_name, table_count, row_count)
report_dir = f'testreport/{jmxfile}'
self.recreateReportDir(report_dir)
if procotol == "telnet":
shutil.copyfile(self.telnetMixJmxFile, jmxfile)
else:
shutil.copyfile(self.jsonMixJmxFile, jmxfile)
self.modifyJxmLooptimes(jmxfile, str(int(table_count*row_count/100)), import_file_name=import_file_path)
logger.info(f'jmeter running ----- jmeter -n -t {jmxfile} -l {report_dir}/{jmxfile}.txt -e -o {report_dir}')
result = self.exec_local_cmd(f"jmeter -n -t {jmxfile} -l {report_dir}/{jmxfile}.txt -e -o {report_dir}")
logger.info(result)
logger.info("----- sleep 120s and please record data -----")
time.sleep(120)
# def insertMixTbRows(self, procotol, looptimes, row_count):
# self.cleanAndRestartTaosd()
# jmxfile = f"opentsdb_{procotol}_{looptimes}Tb100Rows.jmx"
# report_dir = f'testreport/{jmxfile}'
# self.recreateReportDir(report_dir)
# if procotol == "telnet":
# shutil.copyfile(self.telnetMixJmxFile, jmxfile)
# else:
# shutil.copyfile(self.jsonMixJmxFile, jmxfile)
# self.modifyJxmLooptimes(jmxfile, str(looptimes), str(row_count))
# result = self.exec_local_cmd(f"jmeter -n -t {jmxfile} -l {report_dir}/{jmxfile}.txt -e -o {report_dir}")
# logger.info(result)
# logger.info("----- sleep 120s and please record data -----")
# time.sleep(120)
if __name__ == '__main__':
taosadapterPerftest = TaosadapterPerftest()
taosadapterPerftest.cleanLog()
logger.info('------------ Start testing the scenarios in the report chapter 3.4.1 ------------')
for procotol in ["telnet", "json"]:
logger.info(f'----- {procotol} protocol ------- Creating 30W stable ------------')
taosadapterPerftest.insertTDengine(procotol, "stb", 300000)
logger.info(f'----- {procotol} protocol ------- Creating 100W table with stb "cpu.usage_user" ------------')
taosadapterPerftest.insertTDengine(procotol, "tb", 1000000)
logger.info(f'----- {procotol} protocol ------- inserting 100W rows ------------')
taosadapterPerftest.insertTDengine(procotol, "rows", 1000000)
logger.info(f'----- {procotol} protocol ------- Creating 50W stable ------------')
taosadapterPerftest.insertTDengine(procotol, "stb", 500000)
logger.info(f'----- {procotol} protocol ------- Creating 500W table with stb "cpu.usage_user" ------------')
taosadapterPerftest.insertTDengine(procotol, "tb", 5000000)
logger.info(f'----- {procotol} protocol ------- inserting 500W rows ------------')
taosadapterPerftest.insertTDengine(procotol, "rows", 5000000)
logger.info(f'----- {procotol} protocol ------- Creating 100W stable ------------')
taosadapterPerftest.insertTDengine(procotol, "stb", 1000000)
logger.info(f'----- {procotol} protocol ------- Creating 1000W table with stb "cpu.usage_user" ------------')
taosadapterPerftest.insertTDengine(procotol, "tb", 10000000)
logger.info(f'----- {procotol} protocol ------- inserting 1000W rows ------------')
taosadapterPerftest.insertTDengine(procotol, "rows", 10000000)
logger.info(f'----- {procotol} protocol ------- Creating 10W stable 1000Rows ------------')
taosadapterPerftest.insertMixTbRows(procotol, 100000, 1000)
logger.info(f'----- {procotol} protocol ------- Creating 100W stable 100Rows ------------')
taosadapterPerftest.insertMixTbRows(procotol, 1000000, 100)
logger.info(f'----- {procotol} protocol ------- Creating 500W stable 20Rows ------------')
taosadapterPerftest.insertMixTbRows(procotol, 5000000, 20)
logger.info(f'----- {procotol} protocol ------- Creating 1000W stable 10Rows ------------')
taosadapterPerftest.insertMixTbRows(procotol, 10000000, 10)
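For a quick functional check of the driver above, the same entry points can be reused at a much smaller scale. The counts below are illustrative and are not part of the original test matrix; the host, credentials and jmx templates are the ones configured in `__init__`.
```python
# Hypothetical reduced-scale smoke run of TaosadapterPerftest.
smoke = TaosadapterPerftest()
smoke.cleanLog()
smoke.insertTDengine("telnet", "stb", 1000)    # 1k super tables via telnet
smoke.insertTDengine("telnet", "rows", 1000)   # 1k rows via telnet
smoke.insertMixTbRows("telnet", 100, 10)       # 100 tables x 10 rows each
```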
......@@ -182,7 +182,7 @@ python3 test.py -f tools/taosdemoAllTest/NanoTestCase/taosdemoTestSupportNanoIns
python3 test.py -f tools/taosdemoAllTest/NanoTestCase/taosdemoTestSupportNanoQuery.py
python3 test.py -f tools/taosdemoAllTest/NanoTestCase/taosdemoTestSupportNanosubscribe.py
python3 test.py -f tools/taosdemoAllTest/NanoTestCase/taosdemoTestInsertTime_step.py
python3 test.py -f tools/taosdumpTestNanoSupport.py
# disabled because taosdump doesn't support sql format. python3 test.py -f tools/taosdumpTestNanoSupport.py
#
python3 ./test.py -f tsdb/tsdbComp.py
......@@ -305,7 +305,7 @@ python3 ./test.py -f client/client.py
python3 ./test.py -f client/version.py
python3 ./test.py -f client/alterDatabase.py
python3 ./test.py -f client/noConnectionErrorTest.py
python3 ./test.py -f client/taoshellCheckCase.py
# disabled because taosdump doesn't support sql format. python3 ./test.py -f client/taoshellCheckCase.py
# python3 test.py -f client/change_time_1_1.py
# python3 test.py -f client/change_time_1_2.py
......
......@@ -123,8 +123,33 @@ class TDTestCase:
tdSql.execute("insert into t1 values(now, 1, 'abc');")
tdLog.info("select stddev(k) from t1 where b <> 'abc' interval(1s);")
tdSql.query("select stddev(k) from t1 where b <> 'abc' interval(1s);")
tdSql.execute("create table stdtable(ts timestamp, col1 int) tags(loc nchar(64))")
tdSql.execute("create table std1 using stdtable tags('beijing')")
tdSql.execute("create table std2 using stdtable tags('shanghai')")
tdSql.execute("create table std3 using stdtable tags('河南')")
tdSql.execute("insert into std1 values(now + 1s, 1)")
tdSql.execute("insert into std1 values(now + 2s, 2);")
tdSql.execute("insert into std2 values(now + 3s, 1);")
tdSql.execute("insert into std2 values(now + 4s, 2);")
tdSql.execute("insert into std3 values(now + 5s, 4);")
tdSql.execute("insert into std3 values(now + 6s, 8);")
tdSql.query("select stddev(col1) from stdtable group by loc;")
tdSql.checkData(0, 0, 2.0)
tdSql.checkData(1, 0, 0.5)
tdSql.checkData(2, 0, 0.5)
tdSql.execute("create table stdtableint(ts timestamp, col1 int) tags(num int)")
tdSql.execute("create table stdint1 using stdtableint tags(1)")
tdSql.execute("create table stdint2 using stdtableint tags(2)")
tdSql.execute("insert into stdint1 values(now + 1s, 1)")
tdSql.execute("insert into stdint1 values(now + 2s, 2);")
tdSql.execute("insert into stdint2 values(now + 3s, 1);")
tdSql.execute("insert into stdint2 values(now + 4s, 2);")
tdSql.query("select stddev(col1) from stdtableint group by num")
tdSql.checkData(0, 0, 0.5)
tdSql.checkData(1, 0, 0.5)
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
......
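The expected values in the new stddev assertions are the population standard deviations of each tag group; a quick cross-check, assuming numpy is available (`np.std` defaults to ddof=0, i.e. population stddev):
```python
import numpy as np

# values inserted per group in the test above
print(np.std([4, 8]))  # 2.0 -> the group holding std3's values
print(np.std([1, 2]))  # 0.5 -> the groups holding std1's and std2's values
```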
......@@ -79,16 +79,22 @@ class TDTestCase:
tdSql.execute('insert into st1 values (now, 1, 2, 1.1, 2.2, "a", 1, 1, false, "bb");')
# select as cname with cname_list
sql_seq = f'select count(ts) as {cname_list[0]}, sum(pi1) as {cname_list[1]}, avg(pi2) as {cname_list[2]}, count(pf1) as {cname_list[3]}, count(pf2) as {cname_list[4]}, count(ps1) as {cname_list[5]}, min(pi3) as {cname_list[6]}, max(pi4) as {cname_list[7]}, count(pb1) as {cname_list[8]}, count(ps2) as {cname_list[9]}, count(si1) as {cname_list[10]}, count(si2) as {cname_list[11]}, count(sf1) as {cname_list[12]}, count(sf2) as {cname_list[13]}, count(ss1) as {cname_list[14]}, count(si3) as {cname_list[15]}, count(si4) as {cname_list[16]}, count(sb1) as {cname_list[17]}, count(ss2) as {cname_list[18]} from super_table_cname_check'
sql_seq_no_as = sql_seq.replace(' as ', ' ')
res = tdSql.getColNameList(sql_seq)
res_no_as = tdSql.getColNameList(sql_seq_no_as)
sql_seq1 = f'select count(ts) as {cname_list[0]}, sum(pi1) as {cname_list[1]}, avg(pi2) as {cname_list[2]}, count(pf1) as {cname_list[3]}, count(pf2) as {cname_list[4]}, count(ps1) as {cname_list[5]}, min(pi3) as {cname_list[6]}, max(pi4) as {cname_list[7]}, count(pb1) as {cname_list[8]}, count(ps2) as {cname_list[9]} from super_table_cname_check'
sql_seq2 = f'select count(si1) as {cname_list[10]}, count(si2) as {cname_list[11]}, count(sf1) as {cname_list[12]}, count(sf2) as {cname_list[13]}, count(ss1) as {cname_list[14]}, count(si3) as {cname_list[15]}, count(si4) as {cname_list[16]}, count(sb1) as {cname_list[17]}, count(ss2) as {cname_list[18]} from super_table_cname_check'
sql_seq_no_as1 = sql_seq1.replace(' as ', ' ')
sql_seq_no_as2 = sql_seq2.replace(' as ', ' ')
res1 = tdSql.getColNameList(sql_seq1)
res2 = tdSql.getColNameList(sql_seq2)
res_no_as1 = tdSql.getColNameList(sql_seq_no_as1)
res_no_as2 = tdSql.getColNameList(sql_seq_no_as2)
# len(cname_list[1]) > 64, so the returned column name is expected to be truncated to 64 characters
cname_list_1_expected = cname_list[1][:-1]
cname_list[1] = cname_list_1_expected
checkColNameList = tdSql.checkColNameList(res, cname_list)
checkColNameList = tdSql.checkColNameList(res_no_as, cname_list)
tdSql.checkColNameList(res1, cname_list[:10])
tdSql.checkColNameList(res2, cname_list[10:])
tdSql.checkColNameList(res_no_as1, cname_list[:10])
tdSql.checkColNameList(res_no_as2, cname_list[10:])
def run(self):
tdSql.prepare()
......
......@@ -97,7 +97,7 @@ class TDTestCase:
self.checkColumnSorted(0, "desc")
print("======= step 2: verify order for special column =========")
tdSql.query("select tbcol1 from st order by ts desc")
tdSql.query("select tbcol6 from st order by ts desc")
......@@ -122,6 +122,44 @@ class TDTestCase:
(i, i))
self.checkColumnSorted(1, "desc")
# order by rules: https://jira.taosdata.com:18090/pages/viewpage.action?pageId=123455481
tdSql.error("select tbcol1 from st order by 123")
tdSql.error("select tbcol1 from st order by tbname")
tdSql.error("select tbcol1 from st order by tagcol1")
tdSql.error("select tbcol1 from st order by ''")
tdSql.error("select top(tbcol1, 12) from st1 order by tbcol1,ts")
tdSql.error("select top(tbcol1, 12) from st order by tbcol1,ts,tbcol2")
tdSql.error("select top(tbcol1, 12) from st order by ts, tbcol1")
tdSql.error("select top(tbcol1, 2) from st1 group by tbcol1 order by tbcol2")
tdSql.query("select top(tbcol1, 2) from st1 group by tbcol2 order by tbcol2")
tdSql.query("select top(tbcol1, 12) from st order by tbcol1, ts")
tdSql.query("select avg(tbcol1) from st group by tbname order by tbname")
tdSql.checkData(1, 0, 5.5)
tdSql.checkData(5, 1, "st6")
tdSql.query("select top(tbcol1, 2) from st group by tbname order by tbname")
tdSql.checkData(1, 1, 10)
tdSql.checkData(2, 2, "st2")
tdSql.query("select top(tbcol1, 12) from st order by tbcol1")
tdSql.checkData(1, 1, 9)
tdSql.error("select top(tbcol1, 12) from st1 order by tbcol1,ts")
tdSql.error("select top(tbcol1, 12),tbname from st order by tbcol1,tbname")
tdSql.query("select top(tbcol1, 12) from st group by tbname order by tbname desc")
tdSql.checkData(1, 2, "st10")
tdSql.checkData(10, 2, "st9")
tdSql.query("select top(tbcol1, 2) from st group by tbname order by tbname desc,ts")
tdSql.checkData(1, 2, "st10")
tdSql.checkData(10, 2, "st5")
tdSql.checkData(0, 0, "2018-09-17 09:00:00.109")
tdSql.checkData(1, 0, "2018-09-17 09:00:00.110")
tdSql.checkData(2, 0, "2018-09-17 09:00:00.099")
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
......
......@@ -112,7 +112,7 @@ class TDTestCase:
tdSql.checkRows(11)
tdSql.query("show create table test.`%s1` ; " %self.tsdemo)
tdSql.checkData(0, 0, self.tsdemo+str(1))
tdSql.checkData(0, 1, "create table `%s1` (ts TIMESTAMP,c0 FLOAT,c1 INT,c2 INT,c3 INT,c4 INT,c5 INT,c6 INT,c7 INT,c8 INT,c9 INT)" %self.tsdemo)
tdSql.checkData(0, 1, "create table `%s1` (ts TIMESTAMP,c0 FLOAT,c1 INT,c2 FLOAT,c3 INT,c4 INT,c5 INT,c6 INT,c7 INT,c8 INT,c9 INT)" %self.tsdemo)
print("==============drop table\stable")
try:
......