Commit 600e6afe authored by Alex Duan

merge master and fix conflicts

Parent 0bef64a3
version: 1.0.{build}
image:
  - Visual Studio 2015
  - macos
environment:
  matrix:
    - ARCH: amd64
    - ARCH: x86
matrix:
  exclude:
    - image: macos
      ARCH: x86
for:
  -
    matrix:
      only:
        - image: Visual Studio 2015
    clone_folder: c:\dev\TDengine
    clone_depth: 1
    init:
      - call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" %ARCH%
    before_build:
      - cd c:\dev\TDengine
      - md build
    build_script:
      - cd build
      - cmake -G "NMake Makefiles" .. -DBUILD_JDBC=false
      - nmake install
  -
    matrix:
      only:
        - image: macos
    clone_depth: 1
    build_script:
      - mkdir debug
      - cd debug
      - cmake .. > /dev/null
      - make > /dev/null
notifications:
  - provider: Email
    to:
      - sangshuduo@gmail.com
    on_build_success: true
    on_build_failure: true
    on_build_status_changed: true
@@ -81,6 +81,10 @@ tests/comparisonTest/opentsdb/opentsdbtest/.settings/
tests/examples/JDBC/JDBCDemo/.classpath
tests/examples/JDBC/JDBCDemo/.project
tests/examples/JDBC/JDBCDemo/.settings/
tests/script/api/batchprepare
tests/script/api/stmt
tests/script/api/stmtBatchTest
tests/script/api/stmtTest
# Emacs
# -*- mode: gitignore; -*-
......
import hudson.model.Result
import hudson.model.*;
import jenkins.model.CauseOfInterruption
properties([pipelineTriggers([githubPush()])])
node {
git url: 'https://github.com/taosdata/TDengine.git'
}
def skipbuild=0
@@ -40,7 +38,8 @@ def pre_test(){
sudo rmtaos || echo "taosd has not installed"
'''
sh '''
-killall -9 taosd ||echo "no taosd running"
+kill -9 $(pidof taosd) ||echo "no taosd running"
kill -9 $(pidof taosadapter) ||echo "no taosadapter running"
killall -9 gdb || echo "no gdb running"
killall -9 python3.8 || echo "no python program running"
cd ${WKC}
@@ -72,6 +71,7 @@ def pre_test(){
git fetch origin +refs/pull/${CHANGE_ID}/merge
git checkout -qf FETCH_HEAD
git clean -dfx
git submodule update --init --recursive
cd ${WK}
git reset --hard HEAD~10
'''
@@ -112,8 +112,149 @@ def pre_test(){
'''
return 1
}
def pre_test_noinstall(){
sh'hostname'
sh'''
cd ${WKC}
git reset --hard HEAD~10 >/dev/null
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WKC}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WKC}
git checkout 2.0
'''
}
else{
sh '''
cd ${WKC}
git checkout develop
'''
}
}
sh'''
cd ${WKC}
git pull >/dev/null
git fetch origin +refs/pull/${CHANGE_ID}/merge
git checkout -qf FETCH_HEAD
git clean -dfx
git submodule update --init --recursive
cd ${WK}
git reset --hard HEAD~10
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WK}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WK}
git checkout 2.0
'''
}
else{
sh '''
cd ${WK}
git checkout develop
'''
}
}
sh '''
cd ${WK}
git pull >/dev/null
export TZ=Asia/Harbin
date
git clean -dfx
mkdir debug
cd debug
cmake .. > /dev/null
make
'''
return 1
}
def pre_test_ningsi(){
sh'hostname'
sh'''
cd ${WKC}
git reset --hard HEAD~10 >/dev/null
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WKC}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WKC}
git checkout 2.0
'''
}
else{
sh '''
cd ${WKC}
git checkout develop
'''
}
}
sh'''
cd ${WKC}
git pull >/dev/null
git fetch origin +refs/pull/${CHANGE_ID}/merge
git checkout -qf FETCH_HEAD
git clean -dfx
git submodule update --init --recursive
cd ${WK}
git reset --hard HEAD~10
'''
script {
if (env.CHANGE_TARGET == 'master') {
sh '''
cd ${WK}
git checkout master
'''
}
else if(env.CHANGE_TARGET == '2.0'){
sh '''
cd ${WK}
git checkout 2.0
'''
}
else{
sh '''
cd ${WK}
git checkout develop
'''
}
}
sh '''
cd ${WK}
git pull >/dev/null
export TZ=Asia/Harbin
date
git clean -dfx
mkdir debug
cd debug
cmake .. -DOSTYPE=Ningsi60 > /dev/null
make
'''
return 1
}
def pre_test_win(){
bat '''
taskkill /f /t /im python.exe
cd C:\\
rd /s /Q C:\\TDengine
cd C:\\workspace\\TDinternal
@@ -147,6 +288,7 @@ def pre_test_win(){
git fetch origin +refs/pull/%CHANGE_ID%/merge
git checkout -qf FETCH_HEAD
git clean -dfx
git submodule update --init --recursive
cd C:\\workspace\\TDinternal
git reset --hard HEAD~10
'''
@@ -171,7 +313,6 @@ def pre_test_win(){
}
}
bat '''
taskkill /f /t /im python.exe
cd C:\\workspace\\TDinternal
git pull
@@ -179,9 +320,9 @@ def pre_test_win(){
git clean -dfx
mkdir debug
cd debug
-call "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\vcvarsall.bat" amd64
+call "C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community\\VC\\Auxiliary\\Build\\vcvarsall.bat" amd64
cmake ../ -G "NMake Makefiles"
-nmake || exit 8
+set CL=/MP nmake nmake || exit 8
nmake install || exit 8
xcopy /e/y/i/f C:\\workspace\\TDinternal\\debug\\build\\lib\\taos.dll C:\\Windows\\System32 || exit 8
cd C:\\workspace\\TDinternal\\community\\src\\connector\\python
@@ -192,6 +333,7 @@ def pre_test_win(){
}
pipeline {
agent none
options { skipDefaultCheckout() }
environment{
WK = '/var/lib/jenkins/workspace/TDinternal'
WKC= '/var/lib/jenkins/workspace/TDinternal/community'
@@ -199,6 +341,7 @@ pipeline {
stages {
stage('pre_build'){
agent{label 'master'}
options { skipDefaultCheckout() }
when {
changeRequest()
}
@@ -207,52 +350,52 @@ pipeline {
abort_previous()
abortPreviousBuilds()
}
-sh'''
-rm -rf ${WORKSPACE}.tes
-cp -r ${WORKSPACE} ${WORKSPACE}.tes
-cd ${WORKSPACE}.tes
-git fetch
-'''
-script {
-if (env.CHANGE_TARGET == 'master') {
-sh '''
-git checkout master
-'''
-}
-else if(env.CHANGE_TARGET == '2.0'){
-sh '''
-git checkout 2.0
-'''
-}
-else{
-sh '''
-git checkout develop
-'''
-}
-}
-sh'''
-git fetch origin +refs/pull/${CHANGE_ID}/merge
-git checkout -qf FETCH_HEAD
-'''
-script{
-skipbuild='2'
-skipbuild=sh(script: "git log -2 --pretty=%B | fgrep -ie '[skip ci]' -e '[ci skip]' && echo 1 || echo 2", returnStdout:true)
-println skipbuild
-}
-sh'''
-rm -rf ${WORKSPACE}.tes
-'''
+// sh'''
+// rm -rf ${WORKSPACE}.tes
+// cp -r ${WORKSPACE} ${WORKSPACE}.tes
+// cd ${WORKSPACE}.tes
+// git fetch
+// '''
+// script {
+// if (env.CHANGE_TARGET == 'master') {
+// sh '''
+// git checkout master
+// '''
+// }
+// else if(env.CHANGE_TARGET == '2.0'){
+// sh '''
+// git checkout 2.0
+// '''
+// }
+// else{
+// sh '''
+// git checkout develop
+// '''
+// }
+// }
+// sh'''
+// git fetch origin +refs/pull/${CHANGE_ID}/merge
+// git checkout -qf FETCH_HEAD
+// '''
+// script{
+// skipbuild='2'
+// skipbuild=sh(script: "git log -2 --pretty=%B | fgrep -ie '[skip ci]' -e '[ci skip]' && echo 1 || echo 2", returnStdout:true)
+// println skipbuild
+// }
+// sh'''
+// rm -rf ${WORKSPACE}.tes
+// '''
+// }
}
}
stage('Parallel test stage') {
//only build pr
options { skipDefaultCheckout() }
when {
allOf{
changeRequest()
-expression{
-return skipbuild.trim() == '2'
-}
+not{ expression { env.CHANGE_BRANCH =~ /docs\// }}
}
}
parallel {
@@ -288,7 +431,7 @@ pipeline {
stage('python_3_s6') {
agent{label " slave6 || slave16 "}
steps {
-timeout(time: 55, unit: 'MINUTES'){
+timeout(time: 65, unit: 'MINUTES'){
pre_test()
sh '''
date
@@ -314,11 +457,13 @@ pipeline {
npm install td2.0-connector > /dev/null 2>&1
node nodejsChecker.js host=localhost
'''
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cd ${WKC}/tests/examples/C#/taosdemo
mcs -out:taosdemo *.cs > /dev/null 2>&1
-./taosdemo -c /etc/taos -y
+echo '' |./taosdemo -c /etc/taos
'''
}
sh '''
cd ${WKC}/tests/gotest
bash batchtest.sh
@@ -394,7 +539,7 @@ pipeline {
stage('test_b4_s7') {
agent{label " slave7 || slave17 "}
steps {
-timeout(time: 55, unit: 'MINUTES'){
+timeout(time: 105, unit: 'MINUTES'){
pre_test()
sh '''
date
@@ -426,7 +571,7 @@ pipeline {
stage('test_b6_s9') {
agent{label " slave9 || slave19 "}
steps {
-timeout(time: 55, unit: 'MINUTES'){
+timeout(time: 105, unit: 'MINUTES'){
pre_test()
sh '''
date
@@ -449,36 +594,97 @@ pipeline {
}
}
}
stage('arm64centos7') {
agent{label " arm64centos7 "}
steps {
pre_test_noinstall()
}
}
stage('arm64centos8') {
agent{label " arm64centos8 "}
steps {
pre_test_noinstall()
}
}
stage('arm32bionic') {
agent{label " arm32bionic "}
steps {
pre_test_noinstall()
}
}
stage('arm64bionic') {
agent{label " arm64bionic "}
steps {
pre_test_noinstall()
}
}
stage('arm64focal') {
agent{label " arm64focal "}
steps {
pre_test_noinstall()
}
}
stage('centos7') {
agent{label " centos7 "}
steps {
pre_test_noinstall()
}
}
stage('ubuntu:trusty') {
agent{label " trusty "}
steps {
pre_test_noinstall()
}
}
stage('ubuntu:xenial') {
agent{label " xenial "}
steps {
pre_test_noinstall()
}
}
stage('ubuntu:bionic') {
agent{label " bionic "}
steps {
pre_test_noinstall()
}
}
-// stage('build'){
-// agent{label " wintest "}
-// steps {
-// pre_test()
-// script{
-// while(win_stop == 0){
-// sleep(1)
-// }
-// }
-// }
-// }
-// stage('test'){
-// agent{label "win"}
-// steps{
-// catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
-// pre_test_win()
-// timeout(time: 20, unit: 'MINUTES'){
-// bat'''
-// cd C:\\workspace\\TDinternal\\community\\tests\\pytest
-// .\\test-all.bat Wintest
-// '''
-// }
-// }
-// script{
-// win_stop=1
-// }
-// }
-// }
+stage('ningsi') {
+agent{label "ningsi"}
+steps {
+pre_test_ningsi()
+}
+}
+stage('build'){
+agent{label " wintest "}
+steps {
+pre_test()
+script{
+while(win_stop == 0){
+sleep(1)
+}
+}
+}
+}
+stage('test'){
+agent{label "win"}
+steps{
+catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
+pre_test_win()
+timeout(time: 20, unit: 'MINUTES'){
+bat'''
+cd C:\\workspace\\TDinternal\\community\\tests\\pytest
+.\\test-all.bat wintest
+'''
+}
+}
+script{
+win_stop=1
+}
+}
+}
}
......
@@ -49,6 +49,14 @@ IF (TD_PRO)
ADD_DEFINITIONS(-D_TD_PRO_)
ENDIF ()
IF (TD_KH)
ADD_DEFINITIONS(-D_TD_KH_)
ENDIF ()
IF (TD_JH)
ADD_DEFINITIONS(-D_TD_JH_)
ENDIF ()
IF (TD_MEM_CHECK)
ADD_DEFINITIONS(-DTAOS_MEM_CHECK)
ENDIF ()
@@ -117,7 +125,7 @@ IF (TD_MIPS_32)
SET(COMMON_FLAGS "-Wall -Werror -fPIC -D_FILE_OFFSET_BITS=64 -D_LARGE_FILE")
ENDIF ()
-IF (TD_APLHINE)
+IF (TD_ALPINE)
SET(COMMON_FLAGS "${COMMON_FLAGS} -largp")
link_libraries(/usr/lib/libargp.a)
ADD_DEFINITIONS(-D_ALPINE)
@@ -136,7 +144,12 @@ IF (TD_LINUX)
ENDIF ()
IF (TD_MEMORY_SANITIZER)
IF (TD_ARCHLINUX)
SET(DEBUG_FLAGS "-fsanitize=address -fsanitize=undefined -fno-sanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=null -fno-sanitize=alignment -O0 -g3 -DDEBUG")
ELSE ()
SET(DEBUG_FLAGS "-fsanitize=address -fsanitize=undefined -fno-sanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=null -fno-sanitize=alignment -static-libasan -O0 -g3 -DDEBUG")
ENDIF ()
MESSAGE(STATUS "${BoldRed}Will compile with memory sanitizer! ${ColourReset}")
ELSE ()
SET(DEBUG_FLAGS "-O0 -g3 -DDEBUG")
ENDIF ()
......
@@ -52,6 +52,12 @@ ELSEIF (${DBNAME} MATCHES "tq")
ELSEIF (${DBNAME} MATCHES "pro")
SET(TD_PRO TRUE)
MESSAGE(STATUS "pro is true")
ELSEIF (${DBNAME} MATCHES "kh")
SET(TD_KH TRUE)
MESSAGE(STATUS "kh is true")
ELSEIF (${DBNAME} MATCHES "jh")
SET(TD_JH TRUE)
MESSAGE(STATUS "jh is true")
ENDIF ()
IF (${DLLTYPE} MATCHES "go")
......
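For context on how these new kh/jh branches are selected, here is a minimal configure sketch. It assumes DBNAME and VERNUMBER are passed on the cmake command line the same way the existing power/tq/pro builds pass them; the exact packaging wrapper scripts may differ.

```bash
# Hypothetical out-of-tree configure for a KingHistorian-flavoured build.
# -DDBNAME selects the product branch matched above; -DVERNUMBER overrides TD_VER_NUMBER.
mkdir -p debug && cd debug
cmake .. -DDBNAME=kh -DVERNUMBER=2.2.2.4
make
```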
IF (TD_LINUX)
SET(TD_MAKE_INSTALL_SH "${TD_COMMUNITY_DIR}/packaging/tools/make_install.sh")
INSTALL(CODE "MESSAGE(\"make install script: ${TD_MAKE_INSTALL_SH}\")")
-INSTALL(CODE "execute_process(COMMAND chmod 777 ${TD_MAKE_INSTALL_SH})")
-INSTALL(CODE "execute_process(COMMAND ${TD_MAKE_INSTALL_SH} ${TD_COMMUNITY_DIR} ${PROJECT_BINARY_DIR} Linux ${TD_VER_NUMBER})")
+INSTALL(CODE "execute_process(COMMAND bash ${TD_MAKE_INSTALL_SH} ${TD_COMMUNITY_DIR} ${PROJECT_BINARY_DIR} Linux ${TD_VER_NUMBER})")
ELSEIF (TD_WINDOWS)
IF (TD_POWER)
SET(CMAKE_INSTALL_PREFIX C:/PowerDB)
ELSEIF (TD_TQ)
SET(CMAKE_INSTALL_PREFIX C:/TQueue)
ELSEIF (TD_PRO)
SET(CMAKE_INSTALL_PREFIX C:/ProDB)
ELSEIF (TD_KH)
SET(CMAKE_INSTALL_PREFIX C:/KingHistorian)
ELSEIF (TD_JH)
SET(CMAKE_INSTALL_PREFIX C:/jh_iot)
ELSE ()
SET(CMAKE_INSTALL_PREFIX C:/TDengine)
ENDIF ()
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/src/connector/go DESTINATION connector)
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/src/connector/nodejs DESTINATION connector)
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/src/connector/python DESTINATION connector)
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/src/connector/C\# DESTINATION connector)
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/tests/examples DESTINATION .)
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/packaging/cfg DESTINATION .)
INSTALL(FILES ${TD_COMMUNITY_DIR}/src/inc/taos.h DESTINATION include)
INSTALL(FILES ${TD_COMMUNITY_DIR}/src/inc/taoserror.h DESTINATION include)
INSTALL(FILES ${LIBRARY_OUTPUT_PATH}/taos.lib DESTINATION driver)
@@ -25,10 +24,28 @@ ELSEIF (TD_WINDOWS)
INSTALL(FILES ${LIBRARY_OUTPUT_PATH}/taos.dll DESTINATION driver)
IF (TD_POWER)
INSTALL(FILES ${TD_COMMUNITY_DIR}/packaging/cfg/taos.cfg DESTINATION cfg)
INSTALL(FILES ${EXECUTABLE_OUTPUT_PATH}/power.exe DESTINATION .)
ELSEIF (TD_TQ)
INSTALL(FILES ${TD_COMMUNITY_DIR}/packaging/cfg/taos.cfg DESTINATION cfg)
INSTALL(FILES ${EXECUTABLE_OUTPUT_PATH}/tq.exe DESTINATION .)
ELSEIF (TD_PRO)
INSTALL(FILES ${TD_COMMUNITY_DIR}/packaging/cfg/taos.cfg DESTINATION cfg)
INSTALL(FILES ${EXECUTABLE_OUTPUT_PATH}/prodbc.exe DESTINATION .)
ELSEIF (TD_KH)
INSTALL(FILES ${TD_COMMUNITY_DIR}/packaging/cfg/kinghistorian.cfg DESTINATION cfg)
INSTALL(FILES ${EXECUTABLE_OUTPUT_PATH}/khclient.exe DESTINATION .)
ELSEIF (TD_JH)
INSTALL(FILES ${TD_COMMUNITY_DIR}/packaging/cfg/taos.cfg DESTINATION cfg)
INSTALL(FILES ${EXECUTABLE_OUTPUT_PATH}/jh_taos.exe DESTINATION .)
ELSE ()
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/src/connector/go DESTINATION connector)
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/src/connector/nodejs DESTINATION connector)
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/src/connector/python DESTINATION connector)
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/src/connector/C\# DESTINATION connector)
INSTALL(DIRECTORY ${TD_COMMUNITY_DIR}/tests/examples DESTINATION .)
INSTALL(FILES ${TD_COMMUNITY_DIR}/packaging/cfg/taos.cfg DESTINATION cfg)
INSTALL(FILES ${EXECUTABLE_OUTPUT_PATH}/taos.exe DESTINATION .)
INSTALL(FILES ${EXECUTABLE_OUTPUT_PATH}/taosdemo.exe DESTINATION .)
ENDIF ()
@@ -41,6 +58,5 @@ ELSEIF (TD_WINDOWS)
ELSEIF (TD_DARWIN)
SET(TD_MAKE_INSTALL_SH "${TD_COMMUNITY_DIR}/packaging/tools/make_install.sh")
INSTALL(CODE "MESSAGE(\"make install script: ${TD_MAKE_INSTALL_SH}\")")
-INSTALL(CODE "execute_process(COMMAND chmod 777 ${TD_MAKE_INSTALL_SH})")
-INSTALL(CODE "execute_process(COMMAND ${TD_MAKE_INSTALL_SH} ${TD_COMMUNITY_DIR} ${PROJECT_BINARY_DIR} Darwin ${TD_VER_NUMBER})")
+INSTALL(CODE "execute_process(COMMAND bash ${TD_MAKE_INSTALL_SH} ${TD_COMMUNITY_DIR} ${PROJECT_BINARY_DIR} Darwin ${TD_VER_NUMBER})")
ENDIF ()
@@ -21,7 +21,7 @@ SET(TD_LINUX FALSE)
SET(TD_ARM_32 FALSE)
SET(TD_MIPS_64 FALSE)
SET(TD_MIPS_32 FALSE)
-SET(TD_APLHINE FALSE)
+SET(TD_ALPINE FALSE)
SET(TD_NINGSI FALSE)
SET(TD_NINGSI_60 FALSE)
SET(TD_NINGSI_80 FALSE)
@@ -36,7 +36,7 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
# Get OS information and store in variable TD_OS_INFO.
#
execute_process(COMMAND chmod 777 ${TD_COMMUNITY_DIR}/packaging/tools/get_os.sh)
-execute_process(COMMAND ${TD_COMMUNITY_DIR}/packaging/tools/get_os.sh "" OUTPUT_VARIABLE TD_OS_INFO)
+execute_process(COMMAND sh ${TD_COMMUNITY_DIR}/packaging/tools/get_os.sh "" OUTPUT_VARIABLE TD_OS_INFO)
MESSAGE(STATUS "The current os is " ${TD_OS_INFO})
SET(TD_LINUX TRUE)
@@ -52,8 +52,13 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
ENDIF ()
IF (${TD_OS_INFO} MATCHES "Alpine")
-SET(TD_APLHINE TRUE)
+SET(TD_ALPINE TRUE)
-MESSAGE(STATUS "The current OS is Alpine, append extra flags")
+MESSAGE(STATUS "The current OS is Alpine Linux, append extra flags")
ELSEIF (${TD_OS_INFO} MATCHES "Arch")
SET(TD_ARCHLINUX TRUE)
MESSAGE(STATUS "The current OS is Arch Linux")
ELSE ()
MESSAGE(STATUS "Ths distro is " ${TD_OS_INFO})
ENDIF()
ELSEIF (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
SET(TD_DARWIN TRUE)
@@ -155,7 +160,7 @@ ELSEIF (${OSTYPE} MATCHES "Linux")
MESSAGE(STATUS "input osType: Linux")
ELSEIF (${OSTYPE} MATCHES "Alpine")
MESSAGE(STATUS "input osType: Alpine")
-SET(TD_APLHINE TRUE)
+SET(TD_ALPINE TRUE)
ELSE ()
MESSAGE(STATUS "The user specified osType is unknown: " ${OSTYPE})
ENDIF ()
@@ -4,7 +4,7 @@ PROJECT(TDengine)
IF (DEFINED VERNUMBER)
SET(TD_VER_NUMBER ${VERNUMBER})
ELSE ()
-SET(TD_VER_NUMBER "2.2.1.0")
+SET(TD_VER_NUMBER "2.2.2.4")
ENDIF ()
IF (DEFINED VERCOMPATIBLE)
......
@@ -29,7 +29,7 @@
A Unix time_t is the number of 1-second intervals since January 1, 1970.
time_ts are expressed in the GMT time zone. DOS times in the current local time.
*/
-time_t Filetime2Timet(uint16_t date, uint16_t time) {
+time_t Filetime2Timet(uint16_t date, uint16_t deps_time) {
unsigned int year, month, day, hour, minute, second;
struct tm stm;
@@ -37,9 +37,9 @@ time_t Filetime2Timet(uint16_t date, uint16_t time) {
year = 1980 + ((date & 0xFE00) >> 9);
month = (date & 0x1E0) >> 5;
day = date & 0x1F;
-hour = (time & 0xF800) >> 11;
+hour = (deps_time & 0xF800) >> 11;
-minute = (time & 0x7E0) >> 5;
+minute = (deps_time & 0x7E0) >> 5;
-second = 2 * (time & 0x1F);
+second = 2 * (deps_time & 0x1F);
stm.tm_year = (int)year - 1900;
stm.tm_mon = (int)month - 1;
@@ -55,7 +55,7 @@ time_t Filetime2Timet(uint16_t date, uint16_t time) {
#if 0
/* Older version of the same, trying to generate the time_t manually.
Did not handle DST well */
-time_t Filetime2Timet(uint16_t date, uint16_t time) {
+time_t Filetime2Timet(uint16_t date, uint16_t deps_time) {
unsigned int year, month, day, hour, minute, second;
unsigned int olympiads; /* 4-year periods */
unsigned long t = 0;
@@ -64,9 +64,9 @@ time_t Filetime2Timet(uint16_t date, uint16_t time) {
year = 1980 + ((date & 0xFE00) >> 9);
month = (date & 0x1E0) >> 5;
day = date & 0x1F;
-hour = (time & 0xF800) >> 11;
+hour = (deps_time & 0xF800) >> 11;
-minute = (time & 0x7E0) >> 5;
+minute = (deps_time & 0x7E0) >> 5;
-second = 2 * (time & 0x1F);
+second = 2 * (deps_time & 0x1F);
/* Count days */
year -= 1970; /* Start of Unix time_t epoch */
@@ -111,16 +111,16 @@ time_t Filetime2Timet(uint16_t date, uint16_t time) {
#endif
/* Generate a string with the local file time, in the ISO 8601 date/time format */
-char *Filetime2String(uint16_t date, uint16_t time, char *pBuf, size_t nBufSize) {
+char *Filetime2String(uint16_t date, uint16_t deps_time, char *pBuf, size_t nBufSize) {
unsigned int year, month, day, hour, minute, second;
/* Decode fields */
year = 1980 + ((date & 0xFE00) >> 9);
month = (date & 0x1E0) >> 5;
day = date & 0x1F;
-hour = (time & 0xF800) >> 11;
+hour = (deps_time & 0xF800) >> 11;
-minute = (time & 0x7E0) >> 5;
+minute = (deps_time & 0x7E0) >> 5;
-second = 2 * (time & 0x1F);
+second = 2 * (deps_time & 0x1F);
if (nBufSize >= 20) {
sprintf(pBuf, "%04d-%02d-%02d %02d:%02d:%02d", year, month, day, hour, minute, second);
......
@@ -49,8 +49,8 @@
typedef struct {
const unsigned char *json;
size_t position;
-} error;
+} deps_error;
-static error global_error = { NULL, 0 };
+static deps_error global_error = { NULL, 0 };
CJSON_PUBLIC(const char *) cJSON_GetErrorPtr(void)
{
@@ -210,8 +210,8 @@ typedef struct
#define can_read(buffer, size) ((buffer != NULL) && (((buffer)->offset + size) <= (buffer)->length))
#define cannot_read(buffer, size) (!can_read(buffer, size))
/* check if the buffer can be accessed at the given index (starting with 0) */
-#define can_access_at_index(buffer, index) ((buffer != NULL) && (((buffer)->offset + index) < (buffer)->length))
+#define can_access_at_index(buffer, deps_index) ((buffer != NULL) && (((buffer)->offset + deps_index) < (buffer)->length))
-#define cannot_access_at_index(buffer, index) (!can_access_at_index(buffer, index))
+#define cannot_access_at_index(buffer, deps_index) (!can_access_at_index(buffer, deps_index))
/* get a pointer to the buffer at the position */
#define buffer_at_offset(buffer) ((buffer)->content + (buffer)->offset)
@@ -992,7 +992,7 @@ fail:
if (value != NULL)
{
-error local_error;
+deps_error local_error;
local_error.json = (const unsigned char*)value;
local_error.position = 0;
@@ -1683,7 +1683,7 @@ CJSON_PUBLIC(int) cJSON_GetArraySize(const cJSON *array)
return (int)size;
}
-static cJSON* get_array_item(const cJSON *array, size_t index)
+static cJSON* get_array_item(const cJSON *array, size_t deps_index)
{
cJSON *current_child = NULL;
@@ -1693,23 +1693,23 @@ static cJSON* get_array_item(const cJSON *array, size_t index)
}
current_child = array->child;
-while ((current_child != NULL) && (index > 0))
+while ((current_child != NULL) && (deps_index > 0))
{
-index--;
+deps_index--;
current_child = current_child->next;
}
return current_child;
}
-CJSON_PUBLIC(cJSON *) cJSON_GetArrayItem(const cJSON *array, int index)
+CJSON_PUBLIC(cJSON *) cJSON_GetArrayItem(const cJSON *array, int deps_index)
{
-if (index < 0)
+if (deps_index < 0)
{
return NULL;
}
-return get_array_item(array, (size_t)index);
+return get_array_item(array, (size_t)deps_index);
}
static cJSON *get_object_item(const cJSON * const object, const char * const name, const cJSON_bool case_sensitive)
......
@@ -30,7 +30,7 @@ int main (int argc, char *argv[])
{
unsigned int data[0x100000];
int uni2index[0x110000];
-int index;
+int deps_index;
if (argc != 1)
exit(1);
@@ -64,7 +64,7 @@ int main (int argc, char *argv[])
int j;
for (j = 0; j < 0x110000; j++)
uni2index[j] = -1;
-index = 0;
+deps_index = 0;
for (;;) {
c = getc(stdin);
if (c == EOF)
@@ -86,8 +86,8 @@ int main (int argc, char *argv[])
if (c == '\t')
break;
if (uni2index[j] < 0) {
-uni2index[j] = index;
+uni2index[j] = deps_index;
-data[index++] = 0;
+data[deps_index++] = 0;
}
if (c >= 0x80) {
/* Finish reading an UTF-8 character. */
@@ -104,17 +104,17 @@ int main (int argc, char *argv[])
}
}
}
-data[index++] = (unsigned int) c;
+data[deps_index++] = (unsigned int) c;
}
if (uni2index[j] >= 0)
-data[uni2index[j]] = index - uni2index[j] - 1;
+data[uni2index[j]] = deps_index - uni2index[j] - 1;
do { c = getc(stdin); } while (!(c == EOF || c == '\n'));
}
}
-printf("static const unsigned int translit_data[%d] = {",index);
+printf("static const unsigned int translit_data[%d] = {",deps_index);
{
int i;
-for (i = 0; i < index; i++) {
+for (i = 0; i < deps_index; i++) {
if (data[i] < 32)
printf("\n %3d,",data[i]);
else if (data[i] == '\'')
......
@@ -491,7 +491,7 @@ const char * iconv_canonicalize (const char * name)
char* bp;
const struct alias * ap;
unsigned int count;
-unsigned int index;
+unsigned int deps_index;
const char* pool;
/* Before calling aliases_lookup, convert the input string to upper case,
@@ -555,23 +555,23 @@ const char * iconv_canonicalize (const char * name)
This is also the case on native Woe32 systems. */
#if __STDC_ISO_10646__ || ((defined _WIN32 || defined __WIN32__) && !defined __CYGWIN__)
if (sizeof(wchar_t) == 4) {
-index = ei_ucs4internal;
+deps_index = ei_ucs4internal;
break;
}
if (sizeof(wchar_t) == 2) {
-index = ei_ucs2internal;
+deps_index = ei_ucs2internal;
break;
}
if (sizeof(wchar_t) == 1) {
-index = ei_iso8859_1;
+deps_index = ei_iso8859_1;
break;
}
#endif
}
-index = ap->encoding_index;
+deps_index = ap->encoding_index;
break;
}
-return all_canonical[index] + pool;
+return all_canonical[deps_index] + pool;
invalid:
return name;
}
......
@@ -569,9 +569,9 @@ extern "C" {
return FALSE;
}
-AccessibleContext GetAccessibleChildFromContext(long vmID, AccessibleContext ac, jint index) {
+AccessibleContext GetAccessibleChildFromContext(long vmID, AccessibleContext ac, jint deps_index) {
if (theAccessBridgeInitializedFlag == TRUE) {
-return theAccessBridge.GetAccessibleChildFromContext(vmID, ac, index);
+return theAccessBridge.GetAccessibleChildFromContext(vmID, ac, deps_index);
}
return (AccessibleContext) 0;
}
@@ -711,9 +711,9 @@ extern "C" {
* return the row number for a cell at a given index
*/
jint
-getAccessibleTableRow(long vmID, AccessibleTable table, jint index) {
+getAccessibleTableRow(long vmID, AccessibleTable table, jint deps_index) {
if (theAccessBridgeInitializedFlag == TRUE) {
-return theAccessBridge.getAccessibleTableRow(vmID, table, index);
+return theAccessBridge.getAccessibleTableRow(vmID, table, deps_index);
}
return -1;
}
@@ -722,9 +722,9 @@ extern "C" {
* return the column number for a cell at a given index
*/
jint
-getAccessibleTableColumn(long vmID, AccessibleTable table, jint index) {
+getAccessibleTableColumn(long vmID, AccessibleTable table, jint deps_index) {
if (theAccessBridgeInitializedFlag == TRUE) {
-return theAccessBridge.getAccessibleTableColumn(vmID, table, index);
+return theAccessBridge.getAccessibleTableColumn(vmID, table, deps_index);
}
return -1;
}
@@ -753,9 +753,9 @@ extern "C" {
return FALSE;
}
-BOOL GetAccessibleTextItems(long vmID, AccessibleText at, AccessibleTextItemsInfo *textItems, jint index) {
+BOOL GetAccessibleTextItems(long vmID, AccessibleText at, AccessibleTextItemsInfo *textItems, jint deps_index) {
if (theAccessBridgeInitializedFlag == TRUE) {
-return theAccessBridge.GetAccessibleTextItems(vmID, at, textItems, index);
+return theAccessBridge.GetAccessibleTextItems(vmID, at, textItems, deps_index);
}
return FALSE;
}
@@ -767,23 +767,23 @@ extern "C" {
return FALSE;
}
-BOOL GetAccessibleTextAttributes(long vmID, AccessibleText at, jint index, AccessibleTextAttributesInfo *attributes) {
+BOOL GetAccessibleTextAttributes(long vmID, AccessibleText at, jint deps_index, AccessibleTextAttributesInfo *attributes) {
if (theAccessBridgeInitializedFlag == TRUE) {
-return theAccessBridge.GetAccessibleTextAttributes(vmID, at, index, attributes);
+return theAccessBridge.GetAccessibleTextAttributes(vmID, at, deps_index, attributes);
}
return FALSE;
}
-BOOL GetAccessibleTextRect(long vmID, AccessibleText at, AccessibleTextRectInfo *rectInfo, jint index) {
+BOOL GetAccessibleTextRect(long vmID, AccessibleText at, AccessibleTextRectInfo *rectInfo, jint deps_index) {
if (theAccessBridgeInitializedFlag == TRUE) {
-return theAccessBridge.GetAccessibleTextRect(vmID, at, rectInfo, index);
+return theAccessBridge.GetAccessibleTextRect(vmID, at, rectInfo, deps_index);
}
return FALSE;
}
-BOOL GetAccessibleTextLineBounds(long vmID, AccessibleText at, jint index, jint *startIndex, jint *endIndex) {
+BOOL GetAccessibleTextLineBounds(long vmID, AccessibleText at, jint deps_index, jint *startIndex, jint *endIndex) {
if (theAccessBridgeInitializedFlag == TRUE) {
-return theAccessBridge.GetAccessibleTextLineBounds(vmID, at, index, startIndex, endIndex);
+return theAccessBridge.GetAccessibleTextLineBounds(vmID, at, deps_index, startIndex, endIndex);
}
return FALSE;
}
@@ -844,7 +844,7 @@ extern "C" {
/*
* This method is used to iterate through the hyperlinks in a component. It
-* returns hypertext information for a component starting at hyperlink index
+* returns hypertext information for a component starting at hyperlink deps_index
* nStartIndex. No more than MAX_HYPERLINKS AccessibleHypertextInfo objects will
* be returned for each call to this method.
* returns FALSE on error.
@@ -1109,9 +1109,9 @@ extern "C" {
/**
* Gets the text caret location
*/
-BOOL getCaretLocation(long vmID, AccessibleContext ac, AccessibleTextRectInfo *rectInfo, jint index) {
+BOOL getCaretLocation(long vmID, AccessibleContext ac, AccessibleTextRectInfo *rectInfo, jint deps_index) {
if (theAccessBridgeInitializedFlag == TRUE) {
-return theAccessBridge.getCaretLocation(vmID, ac, rectInfo, index);
+return theAccessBridge.getCaretLocation(vmID, ac, rectInfo, deps_index);
}
return FALSE;
}
......
@@ -114,13 +114,13 @@ static void setprogdir (lua_State *L) {
static void pusherror (lua_State *L) {
-int error = GetLastError();
+int lua_error = GetLastError();
char buffer[128];
if (FormatMessage(FORMAT_MESSAGE_IGNORE_INSERTS | FORMAT_MESSAGE_FROM_SYSTEM,
-NULL, error, 0, buffer, sizeof(buffer), NULL))
+NULL, lua_error, 0, buffer, sizeof(buffer), NULL))
lua_pushstring(L, buffer);
else
-lua_pushfstring(L, "system error %d\n", error);
+lua_pushfstring(L, "system error %d\n", lua_error);
}
static void ll_unloadlib (void *lib) {
......
(Diffs for 2 more files are collapsed.)
@@ -120,7 +120,7 @@ static reg_errcode_t build_charclass (re_bitset_ptr_t sbcset,
static bin_tree_t *build_word_op (re_dfa_t *dfa, int not, reg_errcode_t *err);
static void free_bin_tree (bin_tree_t *tree);
static bin_tree_t *create_tree (bin_tree_t *left, bin_tree_t *right,
-re_token_type_t type, int index);
+re_token_type_t type, int deps_index);
static bin_tree_t *duplicate_tree (const bin_tree_t *src, re_dfa_t *dfa);
/* This table gives an error message for each of the error codes listed
@@ -3447,11 +3447,11 @@ free_charset (re_charset_t *cset)
Note: This function automatically free left and right if malloc fails. */
static bin_tree_t *
-create_tree (left, right, type, index)
+create_tree (left, right, type, deps_index)
bin_tree_t *left;
bin_tree_t *right;
re_token_type_t type;
-int index;
+int deps_index;
{
bin_tree_t *tree;
tree = re_malloc (bin_tree_t, 1);
@@ -3465,7 +3465,7 @@ create_tree (left, right, type, index)
tree->left = left;
tree->right = right;
tree->type = type;
-tree->node_idx = index;
+tree->node_idx = deps_index;
tree->first = -1;
tree->next = -1;
re_node_set_init_empty (&tree->eclosure);
......
@@ -3,7 +3,7 @@
## <a class="anchor" id="grafana"></a>Grafana
-TDengine integrates quickly with the open-source data visualization system [Grafana](https://www.grafana.com/) to build a data monitoring and alerting system. The whole process requires no code development, and the contents of TDengine data tables can be visualized on a dashboard (DashBoard).
+TDengine integrates quickly with the open-source data visualization system [Grafana](https://www.grafana.com/) to build a data monitoring and alerting system. The whole process requires no code development, and the contents of TDengine data tables can be visualized on a dashboard (DashBoard). For more on using the TDengine plugin, see [GitHub](https://github.com/taosdata/grafanaplugin/blob/master/README.md).
### Install Grafana
@@ -11,12 +11,24 @@ TDengine integrates quickly with the open-source data visualization system [Grafana](https://www.grafana.com/)
### Configure Grafana
-The TDengine Grafana plugin ships in the /usr/local/taos/connector/grafanaplugin directory of the installation package.
+Download the TDengine Grafana plugin from <https://github.com/taosdata/grafanaplugin/releases/latest>.
-Taking CentOS 7.2 as an example, copy the grafanaplugin directory to /var/lib/grafana/plugins and restart grafana.
+```bash
+GF_VERSION=3.1.1
+wget https://github.com/taosdata/grafanaplugin/releases/download/v$GF_VERSION/tdengine-datasource-$GF_VERSION.zip
+```
+Taking CentOS 7.2 as an example, unzip the plugin package into /var/lib/grafana/plugins and restart grafana.
```bash
-sudo cp -rf /usr/local/taos/connector/grafanaplugin /var/lib/grafana/plugins/tdengine
+sudo unzip tdengine-datasource-$GF_VERSION.zip -d /var/lib/grafana/plugins/
```
+Grafana 7.3+ / 8.x performs signature checks on plugins, so the following lines must also be added to grafana.ini for the plugin to be loaded correctly:
+```ini
+[plugins]
+allow_loading_unsigned_plugins = tdengine-datasource
``` ```
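After unzipping the plugin and adding the lines above to grafana.ini, Grafana has to be restarted before the unsigned plugin is loaded. A minimal sketch for a systemd-managed install (the same restart command the Telegraf walkthrough later in this commit uses):

```bash
# Assumes Grafana was installed from the official package and runs under systemd.
sudo systemctl restart grafana-server
# Optional sanity check: look for the tdengine-datasource plugin in the startup log.
sudo journalctl -u grafana-server -n 50 | grep -i tdengine
```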
### Use Grafana
@@ -55,7 +67,6 @@ sudo cp -rf /usr/local/taos/connector/grafanaplugin /var/lib/grafana/plugins/tde
* ALIAS BY: sets an alias for the current query.
* GENERATE SQL: click this button to substitute the corresponding variables automatically and generate the final statement to be executed.
Following the default prompts, a query of the average system memory usage over the specified interval on the server where TDengine is deployed looks like this:
![img](page://images/connections/create_dashboard2.jpg)
@@ -64,16 +75,15 @@ sudo cp -rf /usr/local/taos/connector/grafanaplugin /var/lib/grafana/plugins/tde
#### Import Dashboard
-An importable dashboard, `tdengine-grafana.json`, is provided under the Grafana plugin directory /usr/local/taos/connector/grafana/tdengine/dashboard/.
+We provide a TDengine Dashboard that can be used to monitor a TDengine cluster; see [Grafana Dashboard 15146](https://grafana.com/grafana/dashboards/15146).
-Click the `Import` button on the left and upload the `tdengine-grafana.json` file.
+Click the `Import` button on the left, select **Grafana.com Dashboard**, enter the id `15146`, and load it.
![img](page://images/connections/import_dashboard1.jpg)
After the import completes, you can see the following result:
-![img](page://images/connections/import_dashboard2.jpg)
+![img](../images/connections/dashboard-15146.png)
## <a class="anchor" id="matlab"></a>MATLAB
......
# Quickly Build an IT DevOps Monitoring System with TDengine + Telegraf + Grafana
## Background
TDengine is a big data platform designed and optimized by TAOS Data for IoT, connected vehicles, industrial internet, and IT operations. Since it was open-sourced in July 2019, its innovative data modeling design, quick installation, easy-to-use programming interfaces, and powerful data ingestion and query performance have won it a large following among time-series data developers.
IT operations monitoring data is usually highly time-sensitive, for example:
- System resource metrics: CPU, memory, IO, bandwidth, and so on.
- Software system metrics: liveness, number of connections, requests, timeouts and errors, response time, service type, and other business-related metrics.
A mainstream IT operations system today usually consists of a data collection module, a data storage module, and a visualization module. Telegraf and Grafana are among the most popular data collection and visualization tools, respectively. There are many choices for the data storage module, with OpenTSDB and InfluxDB being popular ones. As an emerging time-series big data platform, TDengine offers strong advantages in performance, reliability, and ease of management and maintenance.
This article shows how to quickly build an IT operations system based on TDengine + Telegraf + Grafana without writing a single line of code, just by changing a few lines of configuration. The architecture is shown in the figure below:
![IT-DevOps-Solutions-Telegraf.png](../../images/IT-DevOps-Solutions-Telegraf.png)
## Installation Steps
### Install Telegraf, Grafana, and TDengine
To install Telegraf, Grafana, and TDengine, refer to the relevant official documentation.
### Telegraf
See the [official documentation](https://portal.influxdata.com/downloads/).
### Grafana
See the [official documentation](https://grafana.com/grafana/download).
### Install TDengine
Download and install TDengine-server 2.3.0.0 or a later version from the TAOS Data [downloads](http://taosdata.com/cn/all-downloads/) page.
## Data Pipeline Setup
### Download the TDengine plugin into the Grafana plugin directory
```bash
wget -c https://github.com/taosdata/grafanaplugin/releases/download/v3.1.1/tdengine-datasource-3.1.1.zip
sudo unzip tdengine-datasource-3.1.1.zip -d /var/lib/grafana/plugins/
sudo chown grafana:grafana -R /var/lib/grafana/plugins/tdengine
echo -e "[plugins]\nallow_loading_unsigned_plugins = tdengine-datasource\n" | sudo tee -a /etc/grafana/grafana.ini
sudo systemctl restart grafana-server.service
```
### Modify /etc/telegraf/telegraf.conf
Add the following to /etc/telegraf/telegraf.conf, where database name is the database in TDengine that should store the Telegraf data, and TDengine server/cluster host, username, and password are the actual values of your TDengine deployment:
```
[[outputs.http]]
url = "http://<TDengine server/cluster host>:6041/influxdb/v1/write?db=<database name>"
method = "POST"
timeout = "5s"
username = "<TDengine's username>"
password = "<TDengine's password>"
data_format = "influx"
influx_max_line_bytes = 250
```
Then restart telegraf:
```
sudo systemctl start telegraf
```
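To sanity-check that Telegraf metrics are actually landing in TDengine, one option (not part of the original walkthrough) is to query the REST interface on port 6041. The sketch below assumes the default root/taosdata credentials and that the database name configured in telegraf.conf was `telegraf`; adjust both to your deployment.

```bash
# List databases; the one named in telegraf.conf should appear shortly after Telegraf starts.
curl -u root:taosdata -d "show databases;" http://localhost:6041/rest/sql
# Count rows in one of the measurements Telegraf writes, e.g. cpu.
curl -u root:taosdata -d "select count(*) from telegraf.cpu;" http://localhost:6041/rest/sql
```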
### Import Dashboard
Point a web browser at IP:3000 and log in to the Grafana UI; the initial username and password are admin/admin.
Click the gear icon on the left and select Plugins; you should be able to find the TDengine data source plugin icon.
Click the plus icon on the left and select Import, download the dashboard JSON file from https://github.com/taosdata/grafanaplugin/blob/master/examples/telegraf/grafana/dashboards/telegraf-dashboard-v0.1.0.json, and import it (a command-line sketch for fetching the file follows the screenshot below). You should then see a dashboard like the one below:
![IT-DevOps-Solutions-telegraf-dashboard.png](../../images/IT-DevOps-Solutions-telegraf-dashboard.png)
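If you prefer to fetch the dashboard JSON from the command line before importing it, a small sketch (the URL below is simply the raw-file form of the GitHub link above):

```bash
wget https://raw.githubusercontent.com/taosdata/grafanaplugin/master/examples/telegraf/grafana/dashboards/telegraf-dashboard-v0.1.0.json
```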
## Summary
The above shows how to quickly build a complete IT operations monitoring system. Thanks to the schemaless protocol parsing added in TDengine 2.3.0.0 and TDengine's strong ability to integrate with ecosystem software, users can set up an efficient and easy-to-use IT operations system in just a few minutes. For TDengine's powerful data ingestion and query performance and other rich features, see the official documentation and production case studies.
(Diffs for 221 more files are collapsed.)