Unverified commit e23a96a0, authored by S Shengliang Guan, committed by GitHub

Merge pull request #3207 from taosdata/develop

Merge from develop into master
[submodule "src/connector/go"]
path = src/connector/go
url = https://github.com/taosdata/driver-go
[submodule "src/connector/grafanaplugin"]
path = src/connector/grafanaplugin
url = https://github.com/taosdata/grafanaplugin
@@ -2,6 +2,7 @@
[![Build status](https://ci.appveyor.com/api/projects/status/kf3pwh2or5afsgl9/branch/master?svg=true)](https://ci.appveyor.com/project/sangshuduo/tdengine-2n8ge/branch/master)
[![Coverage Status](https://coveralls.io/repos/github/taosdata/TDengine/badge.svg?branch=develop)](https://coveralls.io/github/taosdata/TDengine?branch=develop)
[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/4201/badge)](https://bestpractices.coreinfrastructure.org/projects/4201)
![Docker Pulls](https://img.shields.io/docker/pulls/tdengine/tdengine)
[![TDengine](TDenginelogo.png)](https://www.taosdata.com)
......
@@ -115,7 +115,7 @@ IF (TD_WINDOWS)
ADD_DEFINITIONS(-D_MBCS -D_CRT_SECURE_NO_DEPRECATE -D_CRT_NONSTDC_NO_DEPRECATE)
SET(CMAKE_GENERATOR "NMake Makefiles" CACHE INTERNAL "" FORCE)
IF (NOT TD_GODLL)
SET(COMMON_FLAGS "/nologo /WX /wd4018 /wd2220 /Oi /Oy- /Gm- /EHsc /MT /GS /Gy /fp:precise /Zc:wchar_t /Zc:forScope /Gd /errorReport:prompt /analyze-")
SET(DEBUG_FLAGS "/Zi /W3 /GL")
SET(RELEASE_FLAGS "/W0 /GL")
ENDIF ()
......
@@ -59,7 +59,7 @@
/* Generate the OS-and-debug-mode-specific library name */
#define _MSVCLIBX_LIB "MsvcLibX" _MSVCLIBX_LIB_OS_SUFFIX _MSVCLIBX_LIB_DBG_SUFFIX ".lib"
//#pragma message("Adding pragma comment(lib, \"" _MSVCLIBX_LIB "\")")
//#pragma comment(lib, _MSVCLIBX_LIB)
/* Library-specific routine used internally by many standard routines */
#if defined(_WIN32)
......
@@ -15,6 +15,9 @@ One of TDengine's modules is a time-series database. Beyond that, to reduce development
Adopting TDengine can greatly reduce the total cost of ownership of typical IoT, Connected Vehicles, and Industrial IoT big data platforms. However, because it takes full advantage of the characteristics of IoT time-series data, it cannot be used for general-purpose data such as web crawlers, Weibo, WeChat, e-commerce, ERP, or CRM.
<center> <img src="../assets/EcoSystem.png"> </center>
<center>Figure 1. TDengine ecosystem</center>
## Overall Applicable Scenarios of TDengine
As an IoT big data platform, TDengine's typical applicable scenarios are in the IoT domain, with a meaningful volume of data. The rest of this document focuses on systems within this domain; systems outside it, such as CRM and ERP, are beyond the scope of this document.
......
@@ -9,9 +9,9 @@ TDengine adopts a relational data model and requires creating databases and tables. Therefore, for a
Different types of data collection points often have different data characteristics, such as the collection frequency, the retention period, the number of replicas, the size of data blocks, and so on. To let TDengine work at maximum efficiency in every scenario, TDengine recommends creating tables with different data characteristics in different databases, because each database can be configured with its own storage policy. When creating a database, in addition to the standard SQL options, the application can also specify parameters such as the retention period, the number of replicas, the number of memory blocks, the time precision, the maximum and minimum number of records per file block, whether to compress, the number of days covered by one data file, and more. For example:
```cmd
CREATE DATABASE power KEEP 365 DAYS 10 BLOCKS 4;
```
The statement above creates a database named power whose data is kept for 365 days (data older than 365 days is deleted automatically), with one data file every 10 days and 4 memory blocks. For detailed syntax and parameters, see <a href="https://www.taosdata.com/cn/documentation20/taos-sql/">TAOS SQL</a>.
After creating the database, use the SQL command USE to switch to it as the current database, for example:
@@ -27,13 +27,15 @@ USE power;
- Tables in two different databases cannot be JOINed.
## Creating a Super Table
An IoT system often contains multiple types of devices; for a power grid, for example, there are smart meters, transformers, buses, switches, and so on. To facilitate aggregation across multiple tables, TDengine requires creating one super table for each type of data collection point. Taking the smart meters of Table 1 as an example, the super table can be created with the following SQL command:
```cmd
CREATE TABLE meters (ts timestamp, current float, voltage int, phase float) TAGS (location binary(64), groupdId int);
```
As when creating an ordinary table, you need to provide the table name (meters in the example) and the table schema, i.e., the definition of the data columns. The first column must be a timestamp (ts in the example); the other columns are the collected metrics (current, voltage, phase in the example), whose data types can be integer, float, string, and so on. In addition, you need to provide the schema of the tags (location, groupId in the example), whose data types can also be integer, float, string, and so on. Static attributes of a collection point are usually good candidates for tags, for example the collection point's geographic location, device model, device group ID, administrator ID, and so on. The tag schema can be added to, deleted from, or modified afterwards. For the exact definition and details, see the <a href="https://www.taosdata.com/cn/documentation20/taos-sql/">TAOS SQL</a> section.
Each type of data collection point requires its own super table, so an IoT system often has multiple super tables. For a power grid, we would create one super table each for smart meters, transformers, buses, switches, and so on. In IoT, one device may have several data collection points (for example, a wind turbine may have one collection point for electrical parameters such as current and voltage, and another for environmental parameters such as temperature, humidity, and wind direction); in that case, multiple super tables need to be created for that type of device. All metrics contained in one super table must be collected at the same time (with identical timestamps).
A super table allows at most 1024 columns; if a collection point collects more than 1024 metrics, multiple super tables are needed to handle them. A system can have multiple DBs, and a DB can contain one or more super tables.
## Creating a Table
TDengine requires an independent table for each data collection point. As in a standard relational database, a table has a name and a schema; in addition, it can carry one or more tags. The table is created using the super table as a template, with the concrete tag values specified. Taking the smart meters of Table 1 as an example, the table can be created with the following SQL command:
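The concrete CREATE TABLE statement is collapsed in this diff; a minimal sketch consistent with the INSERT example further below (table d1001 built from the super table meters, with the same tag values) would be:
```cmd
CREATE TABLE d1001 USING meters TAGS ("Beijing.Chaoyang", 2);
```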
@@ -51,5 +53,7 @@ INSERT INTO d1001 USING METERS TAGS ("Beijng.Chaoyang", 2) VALUES (now, 10.2, 21
```
The SQL statement above inserts the record (now, 10.2, 219, 0.32) into table d1001. If table d1001 does not exist yet, it is created automatically using the super table meters as a template, and the tag values "Beijing.Chaoyang" and 2 are attached at the same time.
## Multi-Column Model vs. Single-Column Model
TDengine supports the multi-column model: as long as the metrics are collected simultaneously by the same data collection point (i.e., with identical timestamps), they can be stored as different columns in one super table. There is also an extreme design, the single-column model, in which a separate table is created for every collected metric, and therefore a separate super table is created for every type of metric; for current, voltage, and phase, for example, three super tables would be created.
TDengine recommends using the multi-column model whenever possible, because insertion and storage are more efficient. For some scenarios, however, the set of metrics collected by a point changes frequently; with the multi-column model this would require frequently modifying the super table's schema definition and make the application more complex, in which case the single-column model is simpler.
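A minimal sketch of the single-column design mentioned above, reusing the column and tag definitions of the meters example (one super table per metric):
```cmd
CREATE TABLE current (ts timestamp, current float) TAGS (location binary(64), groupdId int);
CREATE TABLE voltage (ts timestamp, voltage int) TAGS (location binary(64), groupdId int);
CREATE TABLE phase (ts timestamp, phase float) TAGS (location binary(64), groupdId int);
```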
@@ -12,7 +12,7 @@ TDengine uses SQL as its query language. Applications can, via C/C++, Java, G
- Various filter conditions on tags and values: \>, \<, =, \<>, like, etc.
- Grouping (Group by), sorting (Order by), and output limiting (Limit/Offset) of aggregation results
- Arithmetic operations on numeric columns and aggregation results
- Timestamp-aligned join queries (implicit joins)
- Many aggregation/computation functions: count, max, min, avg, sum, twa, stddev, leastsquares, top, bottom, first, last, percentile, apercentile, last_row, spread, diff, etc.
For example, in the TAOS Shell, retrieve from table d1001 the records with voltage > 215, sorted by timestamp in descending order, and output only 2 rows.
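Written in TAOS SQL, the query described above (a minimal sketch; the original example block is collapsed in this diff) is:
```
SELECT * FROM d1001 WHERE voltage > 215 ORDER BY ts DESC LIMIT 2;
```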
......
@@ -82,7 +82,7 @@ The TDengine backend service is provided by taosd and can be configured in the file taos.cfg
- firstEp: the end point of the first dnode in the cluster that taosd actively connects to at startup; default value is localhost:6030.
- secondEp: if firstEp cannot be reached at startup, taosd tries to connect to the end point of the second dnode in the cluster; default value is empty.
- fqdn: the FQDN of the data node; defaults to the first hostname configured by the operating system. If you prefer access by IP address, it can be set to this node's IP address.
- serverPort: the port taosd uses to serve external requests after startup; default value is 6030.
- httpPort: the port used by the RESTful service; all HTTP requests (TCP) for queries/writes must be sent to this port; default value is 6041.
- dataDir: the data file directory; all data files are written to this directory. Default: /var/lib/taos.
......
# TDengine Cluster Installation and Management
Multiple running instances of taosd can form a cluster, ensuring highly reliable operation of TDengine and providing horizontal scalability. To understand cluster management in TDengine 2.0, you need to be familiar with the basic cluster concepts; please read the chapter on the overall architecture of TDengine 2.0. Also, before setting up a cluster, please install a single node and try it out following the [Getting Started](https://www.taosdata.com/cn/getting-started20/) chapter.
Each node of the cluster is uniquely identified by its End Point, which is the FQDN (Fully Qualified Domain Name) plus the port, for example h1.taosdata.com:6030. The FQDN is usually the server's hostname and can be obtained with the Linux command `hostname -f`. The port is the port this node uses to serve external requests; it is 6030 by default and can be changed with the serverPort parameter in taos.cfg. A node may have several hostnames configured; TDengine automatically picks the first one, but it can also be specified explicitly with the fqdn parameter in taos.cfg. If you prefer direct access by IP address, set fqdn to this node's IP address.
Cluster management in TDengine is extremely simple: apart from adding and removing nodes, which require manual intervention, everything else is automatic, minimizing the operational workload. This chapter describes the cluster management operations in detail.
## Preparations
**Step 1**: If any node intended for the cluster still holds earlier test data, has had a 1.x version installed, or has had another version of TDengine installed, remove it first and wipe all of its data; for the concrete steps, see the blog post [Installing and Uninstalling the Various TDengine Packages](https://www.taosdata.com/blog/2019/08/09/566.html);
**Step 2**: It is recommended to turn off the firewall, or at least make sure that TCP and UDP ports 6030-6041 are open. It is **strongly recommended** to turn the firewall off first and configure the ports only after the cluster has been set up;
**Step 3**: Install TDengine on all nodes, with exactly the same version everywhere, **but do not start taosd yet**;
**Step 4**: Check and configure the FQDN of all nodes:
1. Run `hostname -f` on every node and confirm that the hostnames of all nodes are different from each other;
2. On every node run `ping host`, where host is the hostname of another node, to check that the other nodes are reachable; if a node cannot be pinged, check the network settings, the /etc/hosts file, or the DNS configuration. Nodes that cannot reach each other cannot form a cluster;
3. The End Point of each node is the printed hostname plus the port number, for example h1.taosdata.com:6030;
**Step 5**: Modify the TDengine configuration file (/etc/taos/taos.cfg must be modified on every node). Assuming the first node to be started has the End Point h1.taosdata.com:6030, the following parameters are cluster-related:
```
// firstEp is the first node that each node connects to after it starts
firstEp h1.taosdata.com:6030
// the FQDN of this node; no need to configure it if the host has only one hostname
fqdn h1.taosdata.com
// the port of this node; default is 6030
serverPort 6030
// required when the number of replicas is even; see the section "Using an Arbitrator"
arbitrator ha.taosdata.com:6030
```
The only parameter that must be modified is firstEp; the others can be left untouched unless you are very clear about why you are changing them.
## Starting the First Node
Following the instructions in [Getting Started](https://www.taosdata.com/cn/getting-started20/), start the first node h1.taosdata.com, then run taos to start the taos shell and execute the command "show dnodes;" in the shell, as shown below:
```
Welcome to the TDengine shell from Linux, Client Version:2.0.0.0
Copyright (c) 2017 by TAOS Data, Inc. All rights reserved.
@@ -25,71 +55,64 @@ taos>
```
In the output above, you can see that the End Point of the node that has just been started is h1.taos.com:6030.
## Starting Subsequent Nodes
To add subsequent nodes to the existing cluster, follow these steps:
1. Start taosd on each node following the [Getting Started](https://www.taosdata.com/cn/getting-started/) chapter.
2. On the first node, use the CLI program taos to log in to TDengine and execute the command:
```
CREATE DNODE "h2.taos.com:6030";
```
This adds the End Point of the new node (obtained in Step 4 of the preparations) to the cluster's EP list. **"fqdn:port" must be enclosed in double quotes**, otherwise an error occurs. Be sure to replace the example "h2.taos.com:6030" with the End Point of this new node.
3. Then execute the command
```
SHOW DNODES;
```
to check whether the new node has been added successfully. If the newly added node is offline, perform two checks:
- Check whether taosd on that node is running properly; if it is not, find out why first
- Check the first few lines of that node's taosd log file taosdlog.0 (usually under /var/log/taos) and see whether the fqdn and port printed there match the End Point that was just added. If they do not, add the correct End Point.
By repeating the steps above, new nodes can be added to the cluster one after another.
**Tips:**
- The firstEp parameter only takes effect the first time a node joins the cluster; once it has joined, the node keeps the latest list of mnode End Points and no longer relies on this parameter.
- Two dnodes started without the firstEp parameter configured will each run independently. In that case it is impossible to join one of them to the other to form a cluster. **Two independent clusters cannot be merged into a new cluster.**
## Node Management
### Adding a Node
Run the CLI program taos, log in to the system with the root account, and execute:
```
CREATE DNODE "fqdn:port";
```
This adds the new node's End Point to the cluster's EP list. **"fqdn:port" must be enclosed in double quotes**, otherwise an error occurs. The fqdn and port a node uses for external service can be configured in taos.cfg; by default they are obtained automatically.
### Removing a Node
Run the CLI program taos, log in to TDengine with the root account, and execute:
```
DROP DNODE "fqdn:port";
```
where fqdn is the FQDN of the node being removed and port is its external service port.
### Viewing Nodes
Run the CLI program taos, log in to TDengine with the root account, and execute:
```
SHOW DNODES;
```
This lists all dnodes in the cluster together with each dnode's fqdn:port, status (ready, offline, etc.), number of vnodes, number of still-unused vnodes, and other information. After adding or removing a node, you can run this command to verify the result.
### Viewing Virtual Node Groups
To make full use of multi-core hardware and provide scalability, data needs to be sharded. TDengine therefore splits the data of a DB into multiple shards and stores them in multiple vnodes. These vnodes may be distributed across multiple dnodes, which is how horizontal scaling is achieved. A vnode belongs to exactly one DB, but a DB can have multiple vnodes. vnodes are allocated automatically by the mnode according to the current system resources, without any manual intervention.
@@ -97,7 +120,7 @@ SHOW DNODES;
```
SHOW VGROUPS;
```
## High Availability of vnodes
TDengine provides system high availability through a multi-replica mechanism, covering both vnodes and mnodes.
The number of vnode replicas is associated with the DB. A cluster can contain multiple DBs, and each DB can be configured with its own replica count according to operational needs. When creating a database, the replica count is specified with the replica parameter (default 1). With a single replica, reliability cannot be guaranteed: as soon as the node holding the data goes down, the service becomes unavailable. The number of nodes in the cluster must be greater than or equal to the replica count, otherwise creating a table returns the error "more dnodes are needed". For example, the following command creates a database demo with 3 replicas:
@@ -111,7 +134,7 @@ CREATE DATABASE demo replica 3;
Because of the introduction of vnodes, one cannot simply conclude that "the cluster works as long as more than half of the dnodes are working". For simple cases, however, the conclusion is easy: with 3 replicas and only three dnodes, the cluster still works if just one node is down, but it stops working if two nodes are down.
## High Availability of mnodes
A TDengine cluster is managed by the mnode (a logical node, a module of taosd). To keep the mnode highly available, multiple mnode replicas can be configured; the replica count is set by the system parameter numOfMnodes, with a valid range of 1-3. To guarantee strong consistency of metadata, data replication between mnode replicas is synchronous.
A cluster has multiple dnodes, but a dnode runs at most one mnode instance. With multiple dnodes, which dnodes act as mnodes? That is decided entirely by the system, automatically, based on the overall resource situation. Users can run the following command in the TDengine console via the CLI program taos:
@@ -125,7 +148,7 @@ SHOW MNODES;
**Note:** In a highly available TDengine system, both vnodes and mnodes must be configured with multiple replicas.
## Load Balancing
Three situations trigger load balancing, and none of them requires manual intervention.
@@ -142,8 +165,9 @@ SHOW MNODES;
**Note:** If every node in a virtual node group (including an mnode group) is offline or in the unsynced state, a master can only be elected, and the group can only serve requests, after all nodes in the group are online and able to exchange status information. For example, with a cluster of 3 nodes and a replica count of 3, if all 3 nodes go down and then 2 of them restart, the cluster cannot work; it can serve requests only after all 3 nodes have restarted successfully.
## Using an Arbitrator
If the replica count is even, a master cannot be elected when half or more of the vnodes in a vnode group are not working; likewise, a master mnode cannot be elected when half or more of the mnodes are not working, because of the "split brain" problem. To solve this, TDengine introduces the concept of an arbitrator. An arbitrator simulates a working vnode or mnode but only maintains the network connection; it does not process any data insertion or access. As long as more than half of the vnodes or mnodes, counting the arbitrator, are working, the vnode group or mnode group can serve data insertion and query requests normally. For example, with 2 replicas, if node A is offline but node B is working and can connect to the arbitrator, node B can work normally.
The TDengine installation package ships with an executable called tarbitrator; simply run it on any Linux server. The program requires almost no system resources, only network connectivity. Its command-line option `-p` specifies the port it serves on, 6030 by default. When configuring each taosd instance, set the arbitrator parameter in taos.cfg to the arbitrator's End Point. Once this parameter is configured, the system automatically connects to the configured arbitrator whenever the replica count is even.
# FAQ
## 1. What should I pay attention to when upgrading from a pre-2.0 version of TDengine to 2.0 or later? ☆☆☆
Version 2.0 is a complete rewrite of the earlier versions, and configuration files and data files are not compatible. Before upgrading, be sure to do the following:
@@ -10,23 +10,23 @@
4. Install the latest stable version of TDengine
5. If data needs to be migrated or data files are damaged, contact the official TAOS Data technical support team for assistance
## 2. The JDBC driver cannot find the dynamic link library on Windows. What should I do?
Please see the <a href='blog/2019/12/03/jdbcdriver找不到动态链接库/'>technical blog post</a> written for this issue.
## 3. Creating a table reports "more dnodes are needed"
Please see the <a href='blog/2019/12/03/创建数据表时提示more-dnodes-are-needed/'>technical blog post</a> written for this issue.
## 4. How do I make TDengine generate a core file when it crashes?
Please see the <a href='blog/2019/12/06/tdengine-crash时生成core文件的方法/'>technical blog post</a> written for this issue.
## 5. What should I do when I get the error "Unable to establish connection"?
When the client runs into a connection failure, check the following:
1. Make sure the client and server versions are exactly the same; the open-source community edition and the enterprise edition cannot be mixed either
2. On the server, run `systemctl status taosd` to check whether *taosd* is running. If it is not, start *taosd*
3. Confirm that the client specified the correct server FQDN (Fully Qualified Domain Name, which can be obtained by running the Linux command `hostname -f` on the server) when connecting
4. Ping the server FQDN; if there is no response, check your network, the DNS settings, or the system hosts file of the machine the client runs on
5. Check the firewall settings and confirm that TCP/UDP ports 6030-6039 are open
6. For JDBC connections on Linux (ODBC, Python, Go, and other interfaces are similar), make sure *libtaos.so* is in */usr/local/lib/taos* and that */usr/local/lib/taos* is in the system library search path *LD_LIBRARY_PATH*
7. For JDBC, ODBC, Python, Go, and other connections on Windows, make sure *driver/c/taos.dll* is in your system search path (putting *taos.dll* in *C:\Windows\System32* is recommended)
@@ -36,39 +36,36 @@
Check whether the TCP port works from the client side: `nc {hostIP} {port}`
## 6. Why do I still get an "Invalid SQL" error even though the syntax is correct?
If you are sure the syntax is correct and you are on a version earlier than 2.0, check whether the SQL statement is longer than 64K. If it is, this error is also returned.
## 7. Are validation queries supported?
TDengine does not yet have a dedicated set of validation queries. However, it is recommended to use the system-monitoring database "log" for this purpose.
## 8. Can I delete or update a record?
No. TDengine is designed for data collected from connected devices and does not allow modification. However, TDengine provides a data retention policy: as soon as records exceed the retention period, they are deleted automatically.
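The retention period is controlled by the database's KEEP parameter. A brief sketch of how it can be set at creation time and adjusted later (names reuse the power example from the data-model chapter; the ALTER DATABASE form is assumed to be available in this version):
```
CREATE DATABASE power KEEP 365;
ALTER DATABASE power KEEP 730;
```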
## 9. How do I create a table with more than 250 columns?
With version 2.0 and above, 1024 columns are supported by default; versions before 2.0 allowed at most 250 columns per table. If you really exceed the limit, it is recommended to logically split the wide table into several smaller tables according to the characteristics of the data.
## 10. What is the most efficient way to write data?
Batch insertion. A single write statement can insert multiple records into one table at once, and can also insert records into multiple tables at once.
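For example, one statement can carry several records for a single table (a sketch reusing the values shown in the insertion chapter of this documentation); additional `table_name VALUES (...)` groups can be appended to the same statement to write several tables at once:
```
INSERT INTO d1001 VALUES (1538548685000, 10.3, 219, 0.31) (1538548695000, 12.6, 218, 0.33);
```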
## 11. Chinese characters in nchar data inserted on Windows are garbled. How do I fix this?
If nchar data inserted on Windows contains Chinese characters, first make sure the system region is set to China (this can be set in the Control Panel); the `taos` client in cmd should then work correctly. If you are developing a Java application in an IDE such as Eclipse or IntelliJ, make sure the file encoding in the IDE is set to GBK (Java's default encoding type), and then initialize the client configuration when creating the Connection, with the following statements:
```JAVA
Class.forName("com.taosdata.jdbc.TSDBDriver");
Properties properties = new Properties();
properties.setProperty(TSDBDriver.LOCALE_KEY, "UTF-8");
Connection connection = DriverManager.getConnection(url, properties);
```
## 12. How do I compile the TDengine Go driver on Windows?
Please see the <a href='blog/2020/01/06/tdengine-go-windows驱动的编译/'>technical blog post</a> written for this issue.
......
@@ -24,6 +24,7 @@ INSERT INTO d1001 VALUES (1538548685000, 10.3, 219, 0.31) (1538548695000, 12.6,
- To improve write efficiency, write in batches. The more records written in one batch, the higher the insertion efficiency. However, a single record may not exceed 16K, and the total length of a single SQL statement may not exceed 64K (configurable via the maxSQLLength parameter, up to a maximum of 8M).
- TDengine supports concurrent writes from multiple threads. To push write throughput further, a client should open 20 or more threads writing at the same time. Beyond a certain number of threads, however, throughput no longer improves and may even drop, because frequent thread switching brings extra overhead.
- For the same table, if the timestamp of a newly inserted record already exists, the new record is simply discarded; in other words, timestamps must be unique within a table. If the application generates records automatically, it is quite likely to produce identical timestamps, in which case the number of successfully inserted records will be smaller than the number of records the application tried to insert (see the sketch below).
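A small sketch of the duplicate-timestamp behavior described in the last point, reusing the timestamps and values from the example above; the second statement is silently discarded because a record with timestamp 1538548685000 already exists in d1001:
```
INSERT INTO d1001 VALUES (1538548685000, 10.3, 219, 0.31);
INSERT INTO d1001 VALUES (1538548685000, 12.6, 218, 0.33);
```
Only the first record is stored, so the number of rows actually inserted is smaller than the number the application issued.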
## Direct Writes from Prometheus
[Prometheus](https://www.prometheus.io/), a graduated project of the Cloud Native Computing Foundation, is very widely used for performance monitoring, including Kubernetes monitoring. TDengine provides a small tool, [Bailongma](https://github.com/taosdata/Bailongma); with only a simple Prometheus configuration and no code at all, data collected by Prometheus can be written directly into TDengine, with databases and the related tables created automatically according to rules. The blog post [Quickly Build a DevOps Monitoring Demo with Docker Containers](https://www.taosdata.com/blog/2020/02/03/1189.html), which uses Bailongma to write Prometheus and Telegraf data into TDengine, can be used as a reference.
......
@@ -54,7 +54,7 @@ cp ${compile_dir}/build/lib/${libfile} ${pkg_dir}${install_home_pat
cp ${compile_dir}/../src/inc/taos.h ${pkg_dir}${install_home_path}/include
cp ${compile_dir}/../src/inc/taoserror.h ${pkg_dir}${install_home_path}/include
cp -r ${top_dir}/tests/examples/* ${pkg_dir}${install_home_path}/examples
cp -r ${top_dir}/src/connector/grafanaplugin ${pkg_dir}${install_home_path}/connector
cp -r ${top_dir}/src/connector/python ${pkg_dir}${install_home_path}/connector
cp -r ${top_dir}/src/connector/go ${pkg_dir}${install_home_path}/connector
cp ${compile_dir}/build/lib/taos-jdbcdriver*dist.* ${pkg_dir}${install_home_path}/connector
......
@@ -61,7 +61,7 @@ cp %{_compiledir}/build/bin/taosdemo %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/lib/${libfile} %{buildroot}%{homepath}/driver
cp %{_compiledir}/../src/inc/taos.h %{buildroot}%{homepath}/include
cp %{_compiledir}/../src/inc/taoserror.h %{buildroot}%{homepath}/include
cp -r %{_compiledir}/../src/connector/grafanaplugin %{buildroot}%{homepath}/connector
cp -r %{_compiledir}/../src/connector/python %{buildroot}%{homepath}/connector
cp -r %{_compiledir}/../src/connector/go %{buildroot}%{homepath}/connector
cp %{_compiledir}/build/lib/taos-jdbcdriver*dist.* %{buildroot}%{homepath}/connector
......
@@ -237,7 +237,7 @@ function install_data() {
}
function install_connector() {
${csudo} cp -rf ${source_dir}/src/connector/grafanaplugin ${install_main_dir}/connector
${csudo} cp -rf ${source_dir}/src/connector/python ${install_main_dir}/connector
${csudo} cp -rf ${source_dir}/src/connector/go ${install_main_dir}/connector
......
@@ -110,7 +110,7 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
if [ "$osType" != "Darwin" ]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector
fi
cp -r ${connector_dir}/grafanaplugin ${install_dir}/connector/
cp -r ${connector_dir}/python ${install_dir}/connector/
cp -r ${connector_dir}/go ${install_dir}/connector
fi
......
@@ -123,7 +123,7 @@ connector_dir="${code_dir}/connector"
mkdir -p ${install_dir}/connector
if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
cp ${build_dir}/lib/*.jar ${install_dir}/connector
cp -r ${connector_dir}/grafanaplugin ${install_dir}/connector/
cp -r ${connector_dir}/python ${install_dir}/connector/
cp -r ${connector_dir}/go ${install_dir}/connector
fi
......
[TDengine logo SVG — full path data omitted]
#!/bin/sh
if [ ! -d /var/lib/taos ]; then
mkdir -p /var/lib/taos
fi
if [ ! -d /var/log/taos ]; then
mkdir -p -m777 /var/log/taos
fi
if [ ! -d /etc/taos ]; then
mkdir -p /etc/taos
fi
if [ ! -f /etc/taos/taos.cfg ]; then
cp $SNAP/etc/taos/taos.cfg /etc/taos/taos.cfg
fi
#!/bin/sh
# Wrapper that checks for a custom config in /etc/taos and otherwise falls
# back to the basic config bundled in the snap ($SNAP/etc/taos) so that
# taosd can at least run and do something.
#
# The choice between $SNAP_USER_COMMON and $SNAP_COMMON is made based on the
# user that runs the command: if the user is root, taosd is assumed to be
# running as a system daemon and SNAP_COMMON is used; for a non-root user,
# SNAP_USER_COMMON is used.
case "$SNAP_USER_COMMON" in
*/root/snap/tdengine/common*) COMMON=$SNAP_COMMON ;;
*) COMMON=$SNAP_USER_COMMON ;;
esac
if [ -d /etc/taos ]; then
CONFIG_FILE="/etc/taos"
else
CONFIG_FILE="$SNAP/etc/taos"
fi
# Launch the snap
$SNAP/usr/bin/taosd -c $CONFIG_FILE $@
#!/bin/sh
# Wrapper that checks for a custom config in /etc/taos and otherwise falls
# back to the basic config bundled in the snap ($SNAP/etc/taos) so that the
# taos client can at least run and do something.
#
# The choice between $SNAP_USER_COMMON and $SNAP_COMMON is made based on the
# user that runs the command: if the user is root, SNAP_COMMON is used
# (system-daemon style); for a non-root user, SNAP_USER_COMMON is used.
case "$SNAP_USER_COMMON" in
*/root/snap/tdengine/common*) COMMON=$SNAP_COMMON ;;
*) COMMON=$SNAP_USER_COMMON ;;
esac
if [ -d /etc/taos ]; then
CONFIG_FILE="/etc/taos"
else
CONFIG_FILE="$SNAP/etc/taos"
fi
# Launch the snap
$SNAP/usr/bin/taos -c $CONFIG_FILE $@
name: tdengine
base: core18 # the base snap is the execution environment for this snap
version: '2.0.0.6' # just for humans, typically '1.2+git' or '1.3.2'
icon: snap/gui/t-dengine.svg
summary: an open-source big data platform designed and optimized for IoT.
description: |
TDengine is an open-source big data platform designed and optimized for Internet of Things (IoT), Connected Vehicles, and Industrial IoT. Besides the 10x faster time-series database, it provides caching, stream computing, message queuing and other functionalities to reduce the complexity and costs of development and operations.
grade: stable
confinement: classic
apps:
tdengine:
command: launcher.sh
daemon: simple
restart-condition: always
plugs:
- network
- network-bind
- system-observe
- systemfiles
taos:
command: taoswrapper.sh
plugs:
- network
- systemfiles
taosdemo:
command: usr/bin/taosdemo
plugs:
- network
plugs:
systemfiles:
interface: system-files
read:
- /etc/taos
- /var/lib/taos
- /tmp
write:
- /var/log/taos
- /var/lib/taos
- /tmp
parts:
script:
plugin: dump
source: snap/local/
prime:
- launcher.sh
- taoswrapper.sh
tdengine:
source: .
source-type: local
plugin: cmake
build-packages:
- gcc
- g++
- make
- cmake
override-build: |
snapcraftctl build
if [ ! -d $SNAPCRAFT_STAGE/usr ]; then
mkdir $SNAPCRAFT_STAGE/usr
fi
if [ ! -d $SNAPCRAFT_STAGE/etc/taos ]; then
mkdir -p $SNAPCRAFT_STAGE/etc/taos
fi
cp $SNAPCRAFT_PART_BUILD/build/* -rf $SNAPCRAFT_STAGE/usr/
cp $SNAPCRAFT_PART_SRC/packaging/cfg/taos.cfg -rf $SNAPCRAFT_STAGE/etc/taos/
if [ ! -d $SNAPCRAFT_STAGE/var/lib/taos ]; then
mkdir -p $SNAPCRAFT_STAGE/var/lib/taos
fi
if [ ! -d $SNAPCRAFT_STAGE/var/log/taos ]; then
mkdir -p $SNAPCRAFT_STAGE/var/log/taos
fi
prime:
- etc/taos/taos.cfg
- usr/bin/taosd
- usr/bin/taos
- usr/bin/taosdemo
- usr/lib/libtaos.so.2.0.0.6
- usr/lib/libtaos.so.1
- usr/lib/libtaos.so
override-prime: |
snapcraftctl prime
if [ ! -d $SNAPCRAFT_STAGE/var/lib/taos ]; then
cp -rf $SNAPCRAFT_STAGE/var/lib/taos $SNAPCRAFT_PRIME
fi
if [ ! -d $SNAPCRAFT_STAGE/var/log/taos ]; then
cp -rf $SNAPCRAFT_STAGE/var/log/taos $SNAPCRAFT_PRIME
fi
layout:
/var/lib/taos:
bind: $SNAP_DATA/var/lib/taos
/var/log/taos:
bind: $SNAP_DATA/var/log/taos
/etc/taos/taos.cfg:
bind-file: $SNAP_DATA/etc/taos/taos.cfg
hooks:
install:
plugs: [systemfiles]
@@ -907,7 +907,7 @@ static void genFinalResWithoutFill(SSqlRes* pRes, SLocalReducer *pLocalReducer,
savePrevRecordAndSetupFillInfo(pLocalReducer, pQueryInfo, pLocalReducer->pFillInfo);
}
memcpy(pRes->data, pBeforeFillData->data, (size_t)(pRes->numOfRows * pLocalReducer->finalRowSize));
pRes->numOfClauseTotal += pRes->numOfRows;
pBeforeFillData->num = 0;
@@ -943,7 +943,7 @@ static void doFillResult(SSqlObj *pSql, SLocalReducer *pLocalReducer, bool doneO
for (int32_t i = 0; i < pQueryInfo->fieldsInfo.numOfOutput; ++i) {
TAOS_FIELD *pField = tscFieldInfoGetField(&pQueryInfo->fieldsInfo, i);
memmove(pResPages[i]->data, pResPages[i]->data + pField->bytes * pQueryInfo->limit.offset,
(size_t)(newRows * pField->bytes));
}
}
@@ -988,7 +988,7 @@ static void doFillResult(SSqlObj *pSql, SLocalReducer *pLocalReducer, bool doneO
for (int32_t i = 0; i < pQueryInfo->fieldsInfo.numOfOutput; ++i) {
TAOS_FIELD *pField = tscFieldInfoGetField(&pQueryInfo->fieldsInfo, i);
int16_t offset = getColumnModelOffset(pLocalReducer->resColModel, i);
memcpy(pRes->data + offset * pRes->numOfRows, pResPages[i]->data, (size_t)(pField->bytes * pRes->numOfRows));
}
} else { // todo bug??
reversedCopyFromInterpolationToDstBuf(pQueryInfo, pRes, pResPages, pLocalReducer);
......
@@ -904,6 +904,11 @@ static int32_t tscCheckIfCreateTable(char **sqlstr, SSqlObj *pSql) {
sToken = tStrGetToken(sql, &index, true, numOfIgnoreToken, &ignoreTokenTypes);
sql += index;
if (TK_ILLEGAL == sToken.type) {
tdDestroyKVRowBuilder(&kvRowBuilder);
return TSDB_CODE_TSC_INVALID_SQL;
}
if (sToken.n == 0 || sToken.type == TK_RP) {
break;
}
......
@@ -1158,8 +1158,9 @@ static int32_t handleArithmeticExpr(SSqlCmd* pCmd, int32_t clauseIndex, int32_t
int32_t ret = exprTreeFromSqlExpr(pCmd, &pNode, pItem->pNode, pQueryInfo->exprList, pQueryInfo, colList);
if (ret != TSDB_CODE_SUCCESS) {
taosTFree(arithmeticExprStr);
taosArrayDestroy(colList);
tExprTreeDestroy(&pNode, NULL);
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg2);
}
@@ -1168,6 +1169,8 @@ static int32_t handleArithmeticExpr(SSqlCmd* pCmd, int32_t clauseIndex, int32_t
SColIndex* pIndex = taosArrayGet(colList, k);
if (pIndex->flag == 1) {
taosTFree(arithmeticExprStr);
taosArrayDestroy(colList);
tExprTreeDestroy(&pNode, NULL);
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
}
}
@@ -1649,10 +1652,12 @@ int32_t addExprAndResultField(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, int32_t col
}
SColumnIndex index = COLUMN_INDEX_INITIALIZER;
if ((getColumnIndexByName(pCmd, &pParamElem->pNode->colInfo, pQueryInfo, &index) != TSDB_CODE_SUCCESS)) {
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
}
if (index.columnIndex == TSDB_TBNAME_COLUMN_INDEX) {
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg6);
}
// 2. check if sql function can be applied on this column data type
pTableMetaInfo = tscGetMetaInfo(pQueryInfo, index.tableIndex);
@@ -1861,7 +1866,10 @@ int32_t addExprAndResultField(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, int32_t col
if (getColumnIndexByName(pCmd, &pParamElem->pNode->colInfo, pQueryInfo, &index) != TSDB_CODE_SUCCESS) {
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
}
if (index.columnIndex == TSDB_TBNAME_COLUMN_INDEX) {
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg6);
}
pTableMetaInfo = tscGetMetaInfo(pQueryInfo, index.tableIndex);
SSchema* pSchema = tscGetTableSchema(pTableMetaInfo->pTableMeta);
......
@@ -43,6 +43,14 @@ void tscUpdateSubscriptionProgress(void* sub, int64_t uid, TSKEY ts);
void tscSaveSubscriptionProgress(void* sub);
static int32_t minMsgSize() { return tsRpcHeadSize + 100; }
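// waiting time (in ms) before the next retry: 0 for the first attempt, then 200, 400, 800, ... growing exponentially with the retry count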
static int32_t getWaitingTimeInterval(int32_t count) {
int32_t initial = 100; // 100 ms by default
if (count <= 1) {
return 0;
}
return initial * (2<<(count - 2));
}
static void tscSetDnodeEpSet(SSqlObj* pSql, SCMVgroupInfo* pVgroupInfo) {
assert(pSql != NULL && pVgroupInfo != NULL && pVgroupInfo->numOfEps > 0);
@@ -100,7 +108,7 @@ static void tscDumpEpSetFromVgroupInfo(SCMCorVgroupInfo *pVgroupInfo, SRpcEpSet
pEpSet->inUse = (inUse >= 0 && inUse < TSDB_MAX_REPLICA) ? inUse: 0;
pEpSet->numOfEps = pVgroupInfo->numOfEps;
for (int32_t i = 0; i < pVgroupInfo->numOfEps; ++i) {
tstrncpy(pEpSet->fqdn[i], pVgroupInfo->epAddr[i].fqdn, sizeof(pEpSet->fqdn[i]));
pEpSet->port[i] = pVgroupInfo->epAddr[i].port;
}
taosCorEndRead(&pVgroupInfo->version);
@@ -117,7 +125,7 @@ static void tscUpdateVgroupInfo(SSqlObj *pObj, SRpcEpSet *pEpSet) {
pVgroupInfo->inUse = pEpSet->inUse;
pVgroupInfo->numOfEps = pEpSet->numOfEps;
for (int32_t i = 0; i < pVgroupInfo->numOfEps; i++) {
tstrncpy(pVgroupInfo->epAddr[i].fqdn, pEpSet->fqdn[i], sizeof(pEpSet->fqdn[i]));
pVgroupInfo->epAddr[i].port = pEpSet->port[i];
}
tscDebug("after: EndPoint in use: %d", pVgroupInfo->inUse);
@@ -158,6 +166,7 @@ void tscProcessHeartBeatRsp(void *param, TAOS_RES *tres, int code) {
if (pRsp->killConnection) {
tscKillConnection(pObj);
return;
} else {
if (pRsp->queryId) tscKillQuery(pObj, htonl(pRsp->queryId));
if (pRsp->streamId) tscKillStream(pObj, htonl(pRsp->streamId));
@@ -274,6 +283,7 @@ void tscProcessMsgFromServer(SRpcMsg *rpcMsg, SRpcEpSet *pEpSet) {
(rpcMsg->code == TSDB_CODE_TDB_INVALID_TABLE_ID ||
rpcMsg->code == TSDB_CODE_VND_INVALID_VGROUP_ID ||
rpcMsg->code == TSDB_CODE_RPC_NETWORK_UNAVAIL ||
rpcMsg->code == TSDB_CODE_APP_NOT_READY ||
rpcMsg->code == TSDB_CODE_TDB_TABLE_RECONFIGURE)) {
tscWarn("%p it shall renew table meta, code:%s, retry:%d", pSql, tstrerror(rpcMsg->code), ++pSql->retry);
@@ -286,6 +296,12 @@ void tscProcessMsgFromServer(SRpcMsg *rpcMsg, SRpcEpSet *pEpSet) {
if (pSql->retry > pSql->maxRetry) {
tscError("%p max retry %d reached, give up", pSql, pSql->maxRetry);
} else {
// wait for a little bit moment and then retry
if (rpcMsg->code == TSDB_CODE_APP_NOT_READY || rpcMsg->code == TSDB_CODE_VND_INVALID_VGROUP_ID) {
int32_t duration = getWaitingTimeInterval(pSql->retry);
taosMsleep(duration);
}
rpcMsg->code = tscRenewTableMeta(pSql, pTableMetaInfo->name);
// if there is an error occurring, proceed to the following error handling procedure.
@@ -707,7 +723,7 @@ int tscBuildQueryMsg(SSqlObj *pSql, SSqlInfo *pInfo) {
if (pColFilter->filterstr) {
pFilterMsg->len = htobe64(pColFilter->len);
memcpy(pMsg, (void *)pColFilter->pz, (size_t)(pColFilter->len + 1));
pMsg += (pColFilter->len + 1); // append the additional filter binary info
} else {
pFilterMsg->lowerBndi = htobe64(pColFilter->lowerBndi);
......
@@ -625,7 +625,7 @@ static void tidTagRetrieveCallback(void* param, TAOS_RES* tres, int32_t numOfRow
// keep the results in memory
if (numOfRows > 0) {
size_t validLen = (size_t)(pSupporter->tagSize * pRes->numOfRows);
size_t length = pSupporter->totalLen + validLen;
// todo handle memory error
@@ -686,6 +686,8 @@ static void tidTagRetrieveCallback(void* param, TAOS_RES* tres, int32_t numOfRow
freeJoinSubqueryObj(pParentSql);
pParentSql->res.code = code;
tscQueueAsyncRes(pParentSql);
taosArrayDestroy(s1);
taosArrayDestroy(s2);
return; return;
} }
...@@ -748,7 +750,7 @@ static void tsCompRetrieveCallback(void* param, TAOS_RES* tres, int32_t numOfRow ...@@ -748,7 +750,7 @@ static void tsCompRetrieveCallback(void* param, TAOS_RES* tres, int32_t numOfRow
} }
if (numOfRows > 0) { // write the compressed timestamp to disk file if (numOfRows > 0) { // write the compressed timestamp to disk file
fwrite(pRes->data, pRes->numOfRows, 1, pSupporter->f); fwrite(pRes->data, (size_t)pRes->numOfRows, 1, pSupporter->f);
fclose(pSupporter->f); fclose(pSupporter->f);
pSupporter->f = NULL; pSupporter->f = NULL;
...@@ -1298,7 +1300,9 @@ int32_t tscHandleMasterJoinQuery(SSqlObj* pSql) { ...@@ -1298,7 +1300,9 @@ int32_t tscHandleMasterJoinQuery(SSqlObj* pSql) {
tscError("%p tableIndex:%d, failed to allocate join support object, abort further query", pSql, i); tscError("%p tableIndex:%d, failed to allocate join support object, abort further query", pSql, i);
pState->numOfRemain = i; pState->numOfRemain = i;
pSql->res.code = TSDB_CODE_TSC_OUT_OF_MEMORY; pSql->res.code = TSDB_CODE_TSC_OUT_OF_MEMORY;
if (0 == i) {
taosTFree(pState);
}
return pSql->res.code; return pSql->res.code;
} }
...@@ -1306,7 +1310,9 @@ int32_t tscHandleMasterJoinQuery(SSqlObj* pSql) { ...@@ -1306,7 +1310,9 @@ int32_t tscHandleMasterJoinQuery(SSqlObj* pSql) {
if (code != TSDB_CODE_SUCCESS) { // failed to create subquery object, quit query if (code != TSDB_CODE_SUCCESS) { // failed to create subquery object, quit query
tscDestroyJoinSupporter(pSupporter); tscDestroyJoinSupporter(pSupporter);
pSql->res.code = TSDB_CODE_TSC_OUT_OF_MEMORY; pSql->res.code = TSDB_CODE_TSC_OUT_OF_MEMORY;
if (0 == i) {
taosTFree(pState);
}
break; break;
} }
} }
...@@ -2093,17 +2099,17 @@ void tscBuildResFromSubqueries(SSqlObj *pSql) { ...@@ -2093,17 +2099,17 @@ void tscBuildResFromSubqueries(SSqlObj *pSql) {
// return; // return;
// } // }
tscFetchDatablockFromSubquery(pSql); // tscFetchDatablockFromSubquery(pSql);
if (pRes->code != TSDB_CODE_SUCCESS) { // if (pRes->code != TSDB_CODE_SUCCESS) {
return; // return;
} // }
} }
if (pSql->res.code == TSDB_CODE_SUCCESS) { // if (pSql->res.code == TSDB_CODE_SUCCESS) {
(*pSql->fp)(pSql->param, pSql, pRes->numOfRows); // (*pSql->fp)(pSql->param, pSql, pRes->numOfRows);
} else { // } else {
tscQueueAsyncRes(pSql); // tscQueueAsyncRes(pSql);
} // }
} }
static void transferNcharData(SSqlObj *pSql, int32_t columnIndex, TAOS_FIELD *pField) { static void transferNcharData(SSqlObj *pSql, int32_t columnIndex, TAOS_FIELD *pField) {
......
node_modules
npm-debug.log
coverage/
.aws-config.json
awsconfig
/emails/dist
/public_gen
/tmp
vendor/phantomjs/phantomjs
docs/AWS_S3_BUCKET
docs/GIT_BRANCH
docs/VERSION
docs/GITCOMMIT
docs/changed-files
# locally required config files
public/css/*.min.css
# Editor junk
*.sublime-workspace
*.swp
.idea/
*.iml
/data/*
/bin/*
conf/custom.ini
fig.yml
profile.cov
grafana
.notouch
# Test artifacts
/dist/test/
{
"esnext": true,
"disallowImplicitTypeConversion": ["string"],
"disallowKeywords": ["with"],
"disallowMultipleLineBreaks": true,
"disallowMixedSpacesAndTabs": true,
"disallowTrailingWhitespace": true,
"requireSpacesInFunctionExpression": {
"beforeOpeningCurlyBrace": true
},
"disallowSpacesInsideArrayBrackets": true,
"disallowSpacesInsideParentheses": true,
"validateIndentation": 2
}
module.exports = function(grunt) {
require('load-grunt-tasks')(grunt);
grunt.loadNpmTasks('grunt-execute');
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.initConfig({
clean: ["dist"],
copy: {
src_to_dist: {
cwd: 'src',
expand: true,
src: ['**/*', '!**/*.js', '!**/*.scss'],
dest: 'dist'
},
dashboard_to_dist: {
expand: true,
src: ['dashboard/*'],
dest: 'dist'
},
pluginDef: {
expand: true,
src: ['README.md'],
dest: 'dist'
}
},
watch: {
rebuild_all: {
files: ['src/**/*'],
tasks: ['default'],
options: {spawn: false}
}
},
babel: {
options: {
sourceMap: true,
presets: ['env'],
plugins: ['transform-object-rest-spread']
},
dist: {
files: [{
cwd: 'src',
expand: true,
src: ['**/*.js'],
dest: 'dist',
ext:'.js'
}]
},
distTestNoSystemJs: {
files: [{
cwd: 'src',
expand: true,
src: ['**/*.js'],
dest: 'dist/test',
ext:'.js'
}]
},
distTestsSpecsNoSystemJs: {
files: [{
expand: true,
cwd: 'spec',
src: ['**/*.js'],
dest: 'dist/test/spec',
ext:'.js'
}]
}
},
mochaTest: {
test: {
options: {
reporter: 'spec'
},
src: ['dist/test/spec/test-main.js', 'dist/test/spec/*_spec.js']
}
}
});
grunt.registerTask('default', ['clean', 'copy:src_to_dist', 'copy:dashboard_to_dist', 'copy:pluginDef', 'babel', 'mochaTest']);
};
TDengine Datasource - built by Taosdata Inc. www.taosdata.com
The TDengine backend server implements two URLs:
* `/heartbeat` returns 200 OK. Used for "Test connection" on the datasource config page.
* `/query` returns data based on the input SQL statements.
## Installation
To install this plugin:
Copy the data source directory into /var/lib/grafana/plugins/, then restart grafana-server. The new data source should now appear in the data source type dropdown of the Add Data Source view.
```
cp -r <tdengine-extract-dir>/connector/grafana/tdengine /var/lib/grafana/plugins/
sudo service grafana-server restart
```
### Query API
Example request
``` javascript
[{
"refId": "A",
"alias": "taosd-memory",
"sql": "select avg(mem_taosd) from sys.dn where ts > now-5m and ts < now interval(500a)"
},
{
"refId": "B",
"alias": "system-memory",
"sql": "select avg(mem_system) from sys.dn where ts > now-5m and ts < now interval(500a)"
}]
```
Example response
``` javascript
[{
"datapoints": [
[206.488281, 1538137825000],
[206.488281, 1538137855000],
[206.488281, 1538137885500],
[210.609375, 1538137915500],
[210.867188, 1538137945500]
],
"refId": "A",
"target": "taosd-memory"
},
{
"datapoints": [
[2910.218750, 1538137825000],
[2912.265625, 1538137855000],
[2912.437500, 1538137885500],
[2916.644531, 1538137915500],
[2917.066406, 1538137945500]
],
"refId": "B",
"target": "system-memory"
}]
```
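For reference, the same request can be issued outside Grafana. The plugin's `datasource.js` (shown elsewhere in this diff) sends it as an HTTP POST with Basic authentication; the sketch below reproduces that call in plain Node.js. The host `localhost:6041`, the `/grafana/query` path, and the `root`/`taosdata` account are assumptions taken from the plugin's defaults, not from this README.
``` javascript
// Minimal sketch: POST the example request above from plain Node.js.
// Host/port (localhost:6041), the /grafana/query path and the root/taosdata
// account are assumptions taken from the plugin defaults, not from this README.
var http = require('http');

var body = JSON.stringify([{
  refId: 'A',
  alias: 'taosd-memory',
  sql: 'select avg(mem_taosd) from sys.dn where ts > now-5m and ts < now interval(500a)'
}]);

var req = http.request({
  host: 'localhost',
  port: 6041,
  path: '/grafana/query',
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'Authorization': 'Basic ' + Buffer.from('root:taosdata').toString('base64'),
    'Content-Length': Buffer.byteLength(body)
  }
}, function (res) {
  var data = '';
  res.on('data', function (chunk) { data += chunk; });
  res.on('end', function () { console.log(JSON.parse(data)); });
});

req.on('error', function (err) { console.error(err); });
req.end(body);
```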
### Heartbeat API
Example request
``` javascript
Get request /heartbeat
```
Example response
``` javascript
{
"message": "Grafana server receive a quest from you!"
}
```
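A matching sketch of the heartbeat check behind "Test connection" (same assumptions as above; the real plugin issues this GET from `testDatasource()` in `datasource.js`):
``` javascript
// Minimal heartbeat sketch: expect HTTP 200 and the greeting shown above.
// Host, port and the /grafana/heartbeat path are assumptions from the plugin defaults.
var http = require('http');

http.get({ host: 'localhost', port: 6041, path: '/grafana/heartbeat' }, function (res) {
  var data = '';
  res.on('data', function (chunk) { data += chunk; });
  res.on('end', function () { console.log(res.statusCode, data); });
}).on('error', function (err) { console.error(err); });
```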
### Dev setup
This plugin requires Node.js 6.10.0.
``` javascript
npm install -g yarn
yarn install
npm run build
```
### Import Dashboard
After logging in at `http://localhost:3000`, you can import the TDengine demo dashboard to monitor the system metrics.
Import `dashboard/tdengine-grafana.json`:
![import_dashboard](dashboard/import_dashboard.png)
After the import finishes:
![import_dashboard](dashboard/tdengine_dashboard.png)
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"id": 3,
"links": [],
"panels": [
{
"cacheTimeout": null,
"colorBackground": false,
"colorValue": true,
"colors": [
"#299c46",
"rgba(237, 129, 40, 0.89)",
"#d44a3a"
],
"datasource": "TDengine",
"description": "total select request per minute last hour",
"format": "none",
"gauge": {
"maxValue": 100,
"minValue": 0,
"show": false,
"thresholdLabels": false,
"thresholdMarkers": true
},
"gridPos": {
"h": 6,
"w": 12,
"x": 0,
"y": 0
},
"id": 8,
"interval": null,
"links": [],
"mappingType": 1,
"mappingTypes": [
{
"name": "value to text",
"value": 1
},
{
"name": "range to text",
"value": 2
}
],
"maxDataPoints": 100,
"nullPointMode": "connected",
"nullText": null,
"options": {},
"postfix": "次数/min",
"postfixFontSize": "20%",
"prefix": "",
"prefixFontSize": "50%",
"rangeMaps": [
{
"from": "null",
"text": "N/A",
"to": "null"
}
],
"sparkline": {
"fillColor": "rgba(31, 118, 189, 0.18)",
"full": true,
"lineColor": "rgb(31, 120, 193)",
"show": true,
"ymax": null,
"ymin": null
},
"tableColumn": "",
"targets": [
{
"alias": "req_select",
"refId": "A",
"sql": "select sum(req_select) from log.dn where ts >= now-1h and ts < now interval(1m)",
"target": "select metric",
"type": "timeserie"
}
],
"thresholds": "120,240",
"timeFrom": null,
"timeShift": null,
"title": "req select",
"type": "singlestat",
"valueFontSize": "150%",
"valueMaps": [
{
"op": "=",
"text": "N/A",
"value": "null"
}
],
"valueName": "total"
},
{
"cacheTimeout": null,
"colorBackground": false,
"colorValue": true,
"colors": [
"#299c46",
"rgba(237, 129, 40, 0.89)",
"#d44a3a"
],
"datasource": "TDengine",
"description": "total insert request per minute for last hour",
"format": "none",
"gauge": {
"maxValue": 100,
"minValue": 0,
"show": false,
"thresholdLabels": false,
"thresholdMarkers": true
},
"gridPos": {
"h": 6,
"w": 12,
"x": 12,
"y": 0
},
"id": 6,
"interval": null,
"links": [],
"mappingType": 1,
"mappingTypes": [
{
"name": "value to text",
"value": 1
},
{
"name": "range to text",
"value": 2
}
],
"maxDataPoints": 100,
"nullPointMode": "connected",
"nullText": null,
"options": {},
"postfix": "次数/min",
"postfixFontSize": "20%",
"prefix": "",
"prefixFontSize": "50%",
"rangeMaps": [
{
"from": "null",
"text": "N/A",
"to": "null"
}
],
"sparkline": {
"fillColor": "rgba(31, 118, 189, 0.18)",
"full": false,
"lineColor": "rgb(31, 120, 193)",
"show": true,
"ymax": null,
"ymin": null
},
"tableColumn": "",
"targets": [
{
"alias": "req_insert",
"refId": "A",
"sql": "select sum(req_insert) from log.dn where ts >= now-1h and ts < now interval(1m)",
"target": "select metric",
"type": "timeserie"
}
],
"thresholds": "110,240",
"timeFrom": null,
"timeShift": null,
"title": "req insert",
"type": "singlestat",
"valueFontSize": "150%",
"valueMaps": [
{
"op": "=",
"text": "N/A",
"value": "null"
}
],
"valueName": "total"
},
{
"datasource": "TDengine",
"description": "taosd max memery last 10 minutes",
"gridPos": {
"h": 6,
"w": 8,
"x": 0,
"y": 6
},
"id": 12,
"options": {
"fieldOptions": {
"calcs": [
"mean"
],
"defaults": {
"mappings": [],
"max": 4096,
"min": 0,
"thresholds": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
},
{
"color": "#EAB839",
"value": 2048
}
],
"unit": "decmbytes"
},
"override": {},
"values": false
},
"orientation": "auto",
"showThresholdLabels": true,
"showThresholdMarkers": true
},
"pluginVersion": "6.4.3",
"targets": [
{
"alias": "mem_taosd",
"refId": "A",
"sql": "select max(mem_taosd) from log.dn where ts >= now -10m and ts < now",
"target": "select metric",
"type": "timeserie"
}
],
"timeFrom": null,
"timeShift": null,
"title": "taosd memery",
"type": "gauge"
},
{
"datasource": "TDengine",
"description": "max System Memory last 1 hour",
"gridPos": {
"h": 6,
"w": 8,
"x": 8,
"y": 6
},
"id": 10,
"options": {
"fieldOptions": {
"calcs": [
"last"
],
"defaults": {
"mappings": [],
"max": 4,
"min": 0,
"thresholds": [
{
"color": "green",
"value": null
},
{
"color": "semi-dark-orange",
"value": 60
},
{
"color": "dark-red",
"value": 80
}
],
"title": "",
"unit": "decmbytes"
},
"override": {},
"values": false
},
"orientation": "auto",
"showThresholdLabels": true,
"showThresholdMarkers": true
},
"pluginVersion": "6.4.3",
"targets": [
{
"alias": "mem_system",
"refId": "A",
"sql": "select max(mem_system) from log.dn where ts >= now -10h and ts < now",
"target": "select metric",
"type": "timeserie"
}
],
"timeFrom": null,
"timeShift": null,
"title": "system memory",
"type": "gauge"
},
{
"datasource": "TDengine",
"description": "avg band speed last one minute",
"gridPos": {
"h": 6,
"w": 8,
"x": 16,
"y": 6
},
"id": 14,
"options": {
"fieldOptions": {
"calcs": [
"last"
],
"defaults": {
"mappings": [],
"max": 8192,
"min": 0,
"thresholds": [
{
"color": "green",
"value": null
},
{
"color": "#EAB839",
"value": 4916
},
{
"color": "red",
"value": 6554
}
],
"unit": "Kbits"
},
"override": {},
"values": false
},
"orientation": "auto",
"showThresholdLabels": true,
"showThresholdMarkers": true
},
"pluginVersion": "6.4.3",
"targets": [
{
"alias": "band_speed",
"refId": "A",
"sql": "select avg(band_speed) from log.dn where ts >= now-1h and ts < now interval(1m)",
"target": "select metric",
"type": "timeserie"
}
],
"timeFrom": null,
"timeShift": null,
"title": "band speed",
"type": "gauge"
},
{
"aliasColors": {},
"bars": false,
"cacheTimeout": null,
"dashLength": 10,
"dashes": false,
"datasource": "TDengine",
"description": "monitor system cpu",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 11,
"w": 12,
"x": 0,
"y": 12
},
"hideTimeOverride": true,
"id": 2,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"percentage": false,
"pluginVersion": "6.4.3",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "cpu_system11",
"hide": false,
"refId": "A",
"sql": "select avg(cpu_system) from log.dn where ts >= now-1h and ts < now interval(1s)",
"target": "select metric",
"type": "timeserie"
},
{
"alias": "cpu_taosd",
"hide": false,
"refId": "B",
"sql": "select avg(cpu_taosd) from log.dn where ts >= now-1h and ts < now interval(1s)",
"target": "select metric",
"type": "timeserie"
}
],
"thresholds": [],
"timeFrom": "1h",
"timeRegions": [],
"timeShift": "30s",
"title": "cpu_system",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"decimals": null,
"format": "percent",
"label": "使用占比",
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": false
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "TDengine",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 11,
"w": 12,
"x": 12,
"y": 12
},
"id": 18,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"percentage": false,
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "",
"refId": "A",
"sql": "select avg(disk_used) disk_used from log.dn where ts >= $from and ts < $to interval(1s) group by ipaddr",
"target": "select metric",
"type": "timeserie"
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "avg_disk_used",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "decgbytes",
"label": "",
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
}
],
"refresh": "5s",
"schemaVersion": 20,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-1h",
"to": "now"
},
"timepicker": {
"refresh_intervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
]
},
"timezone": "",
"title": "TDengine",
"uid": "FE-vpe0Wk",
"version": 1
}
\ No newline at end of file
.generic-datasource-query-row .query-keyword {
width: 75px;
}
\ No newline at end of file
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.GenericDatasource = undefined;
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _lodash = require('lodash');
var _lodash2 = _interopRequireDefault(_lodash);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var GenericDatasource = exports.GenericDatasource = function () {
function GenericDatasource(instanceSettings, $q, backendSrv, templateSrv) {
_classCallCheck(this, GenericDatasource);
this.type = instanceSettings.type;
this.url = instanceSettings.url;
this.name = instanceSettings.name;
this.q = $q;
this.backendSrv = backendSrv;
this.templateSrv = templateSrv;
this.headers = { 'Content-Type': 'application/json' };
this.headers.Authorization = this.getAuthorization(instanceSettings.jsonData);
}
_createClass(GenericDatasource, [{
key: 'query',
value: function query(options) {
var targets = this.buildQueryParameters(options);
if (targets.length <= 0) {
return this.q.when({ data: [] });
}
return this.doRequest({
url: this.url + '/grafana/query',
data: targets,
method: 'POST'
});
}
}, {
key: 'testDatasource',
value: function testDatasource() {
return this.doRequest({
url: this.url + '/grafana/heartbeat',
method: 'GET'
}).then(function (response) {
if (response.status === 200) {
return { status: "success", message: "TDengine Data source is working", title: "Success" };
}
});
}
}, {
key: 'doRequest',
value: function doRequest(options) {
options.headers = this.headers;
return this.backendSrv.datasourceRequest(options);
}
}, {
key: 'buildQueryParameters',
value: function buildQueryParameters(options) {
var _this = this;
var targets = _lodash2.default.map(options.targets, function (target) {
return {
refId: target.refId,
alias: _this.generateAlias(options, target),
sql: _this.generateSql(options, target)
};
});
return targets;
}
}, {
key: 'encode',
value: function encode(input) {
var _keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
var output = "";
var chr1, chr2, chr3, enc1, enc2, enc3, enc4;
var i = 0;
while (i < input.length) {
chr1 = input.charCodeAt(i++);
chr2 = input.charCodeAt(i++);
chr3 = input.charCodeAt(i++);
enc1 = chr1 >> 2;
enc2 = (chr1 & 3) << 4 | chr2 >> 4;
enc3 = (chr2 & 15) << 2 | chr3 >> 6;
enc4 = chr3 & 63;
if (isNaN(chr2)) {
enc3 = enc4 = 64;
} else if (isNaN(chr3)) {
enc4 = 64;
}
output = output + _keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4);
}
return output;
}
}, {
key: 'getAuthorization',
value: function getAuthorization(jsonData) {
jsonData = jsonData || {};
var defaultUser = jsonData.user || "root";
var defaultPassword = jsonData.password || "taosdata";
return "Basic " + this.encode(defaultUser + ":" + defaultPassword);
}
}, {
key: 'generateAlias',
value: function generateAlias(options, target) {
var alias = target.alias || "";
alias = this.templateSrv.replace(alias, options.scopedVars, 'csv');
return alias;
}
}, {
key: 'generateSql',
value: function generateSql(options, target) {
var sql = target.sql;
if (sql == null || sql == "") {
return sql;
}
var queryStart = "now-1h";
if (options != null && options.range != null && options.range.from != null) {
queryStart = options.range.from.toISOString();
}
var queryEnd = "now";
if (options != null && options.range != null && options.range.to != null) {
queryEnd = options.range.to.toISOString();
}
var intervalMs = options.intervalMs || "20000";
intervalMs += "a";
sql = sql.replace(/^\s+|\s+$/gm, '');
sql = sql.replace("$from", "'" + queryStart + "'");
sql = sql.replace("$begin", "'" + queryStart + "'");
sql = sql.replace("$to", "'" + queryEnd + "'");
sql = sql.replace("$end", "'" + queryEnd + "'");
sql = sql.replace("$interval", intervalMs);
sql = this.templateSrv.replace(sql, options.scopedVars, 'csv');
return sql;
}
}]);
return GenericDatasource;
}();
//# sourceMappingURL=datasource.js.map
{"version":3,"sources":["../src/datasource.js"],"names":["GenericDatasource","instanceSettings","$q","backendSrv","templateSrv","type","url","name","q","headers","Authorization","getAuthorization","jsonData","options","targets","buildQueryParameters","length","when","data","doRequest","method","then","response","status","message","title","datasourceRequest","_","map","refId","target","alias","generateAlias","sql","generateSql","input","_keyStr","output","chr1","chr2","chr3","enc1","enc2","enc3","enc4","i","charCodeAt","isNaN","charAt","defaultUser","user","defaultPassword","password","encode","replace","scopedVars","queryStart","range","from","toISOString","queryEnd","to","intervalMs"],"mappings":";;;;;;;;;AAAA;;;;;;;;IAEaA,iB,WAAAA,iB;AAEX,6BAAYC,gBAAZ,EAA8BC,EAA9B,EAAkCC,UAAlC,EAA8CC,WAA9C,EAA2D;AAAA;;AACzD,SAAKC,IAAL,GAAYJ,iBAAiBI,IAA7B;AACA,SAAKC,GAAL,GAAWL,iBAAiBK,GAA5B;AACA,SAAKC,IAAL,GAAYN,iBAAiBM,IAA7B;AACA,SAAKC,CAAL,GAASN,EAAT;AACA,SAAKC,UAAL,GAAkBA,UAAlB;AACA,SAAKC,WAAL,GAAmBA,WAAnB;AACA,SAAKK,OAAL,GAAe,EAAC,gBAAgB,kBAAjB,EAAf;AACA,SAAKA,OAAL,CAAaC,aAAb,GAA6B,KAAKC,gBAAL,CAAsBV,iBAAiBW,QAAvC,CAA7B;AACD;;;;0BAEKC,O,EAAS;AACb,UAAIC,UAAU,KAAKC,oBAAL,CAA0BF,OAA1B,CAAd;;AAEA,UAAIC,QAAQE,MAAR,IAAkB,CAAtB,EAAyB;AACvB,eAAO,KAAKR,CAAL,CAAOS,IAAP,CAAY,EAACC,MAAM,EAAP,EAAZ,CAAP;AACD;;AAED,aAAO,KAAKC,SAAL,CAAe;AACpBb,aAAK,KAAKA,GAAL,GAAW,gBADI;AAEpBY,cAAMJ,OAFc;AAGpBM,gBAAQ;AAHY,OAAf,CAAP;AAKD;;;qCAEgB;AACf,aAAO,KAAKD,SAAL,CAAe;AACpBb,aAAK,KAAKA,GAAL,GAAW,oBADI;AAEpBc,gBAAQ;AAFY,OAAf,EAGJC,IAHI,CAGC,oBAAY;AAClB,YAAIC,SAASC,MAAT,KAAoB,GAAxB,EAA6B;AAC3B,iBAAO,EAAEA,QAAQ,SAAV,EAAqBC,SAAS,iCAA9B,EAAiEC,OAAO,SAAxE,EAAP;AACD;AACF,OAPM,CAAP;AAQD;;;8BAESZ,O,EAAS;AACjBA,cAAQJ,OAAR,GAAkB,KAAKA,OAAvB;;AAEA,aAAO,KAAKN,UAAL,CAAgBuB,iBAAhB,CAAkCb,OAAlC,CAAP;AACD;;;yCAEoBA,O,EAAS;AAAA;;AAE5B,UAAIC,UAAUa,iBAAEC,GAAF,CAAMf,QAAQC,OAAd,EAAuB,kBAAU;AAC7C,eAAO;AACLe,iBAAOC,OAAOD,KADT;AAELE,iBAAO,MAAKC,aAAL,CAAmBnB,OAAnB,EAA4BiB,MAA5B,CAFF;AAGLG,eAAK,MAAKC,WAAL,CAAiBrB,OAAjB,EAA0BiB,MAA1B;AAHA,SAAP;AAKD,OANa,CAAd;;AAQA,aAAOhB,OAAP;AACD;;;2BAEMqB,K,EAAO;AACZ,UAAIC,UAAU,mEAAd;AACA,UAAIC,SAAS,EAAb;AACA,UAAIC,IAAJ,EAAUC,IAAV,EAAgBC,IAAhB,EAAsBC,IAAtB,EAA4BC,IAA5B,EAAkCC,IAAlC,EAAwCC,IAAxC;AACA,UAAIC,IAAI,CAAR;AACA,aAAOA,IAAIV,MAAMnB,MAAjB,EAAyB;AACvBsB,eAAOH,MAAMW,UAAN,CAAiBD,GAAjB,CAAP;AACAN,eAAOJ,MAAMW,UAAN,CAAiBD,GAAjB,CAAP;AACAL,eAAOL,MAAMW,UAAN,CAAiBD,GAAjB,CAAP;AACAJ,eAAOH,QAAQ,CAAf;AACAI,eAAQ,CAACJ,OAAO,CAAR,KAAc,CAAf,GAAqBC,QAAQ,CAApC;AACAI,eAAQ,CAACJ,OAAO,EAAR,KAAe,CAAhB,GAAsBC,QAAQ,CAArC;AACAI,eAAOJ,OAAO,EAAd;AACA,YAAIO,MAAMR,IAAN,CAAJ,EAAiB;AACfI,iBAAOC,OAAO,EAAd;AACD,SAFD,MAEO,IAAIG,MAAMP,IAAN,CAAJ,EAAiB;AACtBI,iBAAO,EAAP;AACD;AACDP,iBAASA,SAASD,QAAQY,MAAR,CAAeP,IAAf,CAAT,GAAgCL,QAAQY,MAAR,CAAeN,IAAf,CAAhC,GAAuDN,QAAQY,MAAR,CAAeL,IAAf,CAAvD,GAA8EP,QAAQY,MAAR,CAAeJ,IAAf,CAAvF;AACD;;AAED,aAAOP,MAAP;AACD;;;qCAEgBzB,Q,EAAS;AACxBA,iBAAWA,YAAY,EAAvB;AACA,UAAIqC,cAAcrC,SAASsC,IAAT,IAAiB,MAAnC;AACA,UAAIC,kBAAkBvC,SAASwC,QAAT,IAAqB,UAA3C;;AAEA,aAAO,WAAW,KAAKC,MAAL,CAAYJ,cAAc,GAAd,GAAoBE,eAAhC,CAAlB;AACD;;;kCAEatC,O,EAASiB,M,EAAO;AAC5B,UAAIC,QAAQD,OAAOC,KAAP,IAAgB,EAA5B;AACAA,cAAQ,KAAK3B,WAAL,CAAiBkD,OAAjB,CAAyBvB,KAAzB,EAAgClB,QAAQ0C,UAAxC,EAAoD,KAApD,CAAR;AACA,aAAOxB,KAAP;AACD;;;gCAEWlB,O,EAASiB,M,EAAQ;AAC3B,UAAIG,MAAMH,OAAOG,GAAjB;AACA,UAAIA,OAAO,IAAP,IAAeA,OAAO,EAA1B,EAA6B;AAC3B,eAAOA,GAAP;AACD;;AAED,UAAIuB,aAAa,QAAjB;AACA,UAAI3C,WAAW,IAAX,IAAmBA,QAAQ4C,KAAR,IAAiB,IAApC,IAA4C5C,QAAQ4C,KAAR,CAAcC,IAAd,IAAsB,IAAtE,EAA2E;AACzEF,qBAAa3C,QAAQ4C,KAAR,CAAcC,IAAd,CAAmBC,WAAnB,EAAb;AACD;;AAED,UAAIC,WAAW,KAAf;AACA,UAAI/C
,WAAW,IAAX,IAAmBA,QAAQ4C,KAAR,IAAiB,IAApC,IAA4C5C,QAAQ4C,KAAR,CAAcI,EAAd,IAAoB,IAApE,EAAyE;AACvED,mBAAW/C,QAAQ4C,KAAR,CAAcI,EAAd,CAAiBF,WAAjB,EAAX;AACD;AACD,UAAIG,aAAajD,QAAQiD,UAAR,IAAsB,OAAvC;;AAEAA,oBAAc,GAAd;AACA7B,YAAMA,IAAIqB,OAAJ,CAAY,aAAZ,EAA2B,EAA3B,CAAN;AACArB,YAAMA,IAAIqB,OAAJ,CAAY,OAAZ,EAAqB,MAAME,UAAN,GAAmB,GAAxC,CAAN;AACAvB,YAAMA,IAAIqB,OAAJ,CAAY,QAAZ,EAAsB,MAAME,UAAN,GAAmB,GAAzC,CAAN;AACAvB,YAAMA,IAAIqB,OAAJ,CAAY,KAAZ,EAAmB,MAAMM,QAAN,GAAiB,GAApC,CAAN;AACA3B,YAAMA,IAAIqB,OAAJ,CAAY,MAAZ,EAAoB,MAAMM,QAAN,GAAiB,GAArC,CAAN;AACA3B,YAAMA,IAAIqB,OAAJ,CAAY,WAAZ,EAAyBQ,UAAzB,CAAN;;AAEA7B,YAAM,KAAK7B,WAAL,CAAiBkD,OAAjB,CAAyBrB,GAAzB,EAA8BpB,QAAQ0C,UAAtC,EAAkD,KAAlD,CAAN;AACA,aAAOtB,GAAP;AACD","file":"datasource.js","sourcesContent":["import _ from \"lodash\";\n\nexport class GenericDatasource {\n\n constructor(instanceSettings, $q, backendSrv, templateSrv) {\n this.type = instanceSettings.type;\n this.url = instanceSettings.url;\n this.name = instanceSettings.name;\n this.q = $q;\n this.backendSrv = backendSrv;\n this.templateSrv = templateSrv;\n this.headers = {'Content-Type': 'application/json'};\n this.headers.Authorization = this.getAuthorization(instanceSettings.jsonData);\n }\n\n query(options) {\n var targets = this.buildQueryParameters(options);\n\n if (targets.length <= 0) {\n return this.q.when({data: []});\n }\n\n return this.doRequest({\n url: this.url + '/grafana/query',\n data: targets,\n method: 'POST'\n });\n }\n\n testDatasource() {\n return this.doRequest({\n url: this.url + '/grafana/heartbeat',\n method: 'GET',\n }).then(response => {\n if (response.status === 200) {\n return { status: \"success\", message: \"TDengine Data source is working\", title: \"Success\" };\n }\n });\n }\n\n doRequest(options) {\n options.headers = this.headers;\n\n return this.backendSrv.datasourceRequest(options);\n }\n\n buildQueryParameters(options) {\n\n var targets = _.map(options.targets, target => {\n return {\n refId: target.refId,\n alias: this.generateAlias(options, target),\n sql: this.generateSql(options, target)\n };\n });\n\n return targets;\n }\n\n encode(input) {\n var _keyStr = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\";\n var output = \"\";\n var chr1, chr2, chr3, enc1, enc2, enc3, enc4;\n var i = 0;\n while (i < input.length) {\n chr1 = input.charCodeAt(i++);\n chr2 = input.charCodeAt(i++);\n chr3 = input.charCodeAt(i++);\n enc1 = chr1 >> 2;\n enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);\n enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);\n enc4 = chr3 & 63;\n if (isNaN(chr2)) {\n enc3 = enc4 = 64;\n } else if (isNaN(chr3)) {\n enc4 = 64;\n }\n output = output + _keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4);\n }\n\n return output;\n }\n\n getAuthorization(jsonData){\n jsonData = jsonData || {};\n var defaultUser = jsonData.user || \"root\";\n var defaultPassword = jsonData.password || \"taosdata\";\n\n return \"Basic \" + this.encode(defaultUser + \":\" + defaultPassword);\n }\n\n generateAlias(options, target){\n var alias = target.alias || \"\";\n alias = this.templateSrv.replace(alias, options.scopedVars, 'csv');\n return alias;\n }\n\n generateSql(options, target) {\n var sql = target.sql;\n if (sql == null || sql == \"\"){\n return sql;\n }\n\n var queryStart = \"now-1h\";\n if (options != null && options.range != null && options.range.from != null){\n queryStart = options.range.from.toISOString();\n }\n\n var queryEnd = \"now\";\n if (options != null && options.range != null && options.range.to != null){\n 
queryEnd = options.range.to.toISOString();\n }\n var intervalMs = options.intervalMs || \"20000\";\n\n intervalMs += \"a\";\n sql = sql.replace(/^\\s+|\\s+$/gm, '');\n sql = sql.replace(\"$from\", \"'\" + queryStart + \"'\");\n sql = sql.replace(\"$begin\", \"'\" + queryStart + \"'\");\n sql = sql.replace(\"$to\", \"'\" + queryEnd + \"'\");\n sql = sql.replace(\"$end\", \"'\" + queryEnd + \"'\");\n sql = sql.replace(\"$interval\", intervalMs);\n\n sql = this.templateSrv.replace(sql, options.scopedVars, 'csv');\n return sql;\n }\n\n}"]}
\ No newline at end of file
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.AnnotationsQueryCtrl = exports.QueryOptionsCtrl = exports.ConfigCtrl = exports.QueryCtrl = exports.Datasource = undefined;
var _datasource = require('./datasource');
var _query_ctrl = require('./query_ctrl');
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var GenericConfigCtrl = function GenericConfigCtrl() {
_classCallCheck(this, GenericConfigCtrl);
};
GenericConfigCtrl.templateUrl = 'partials/config.html';
var GenericQueryOptionsCtrl = function GenericQueryOptionsCtrl() {
_classCallCheck(this, GenericQueryOptionsCtrl);
};
GenericQueryOptionsCtrl.templateUrl = 'partials/query.options.html';
var GenericAnnotationsQueryCtrl = function GenericAnnotationsQueryCtrl() {
_classCallCheck(this, GenericAnnotationsQueryCtrl);
};
GenericAnnotationsQueryCtrl.templateUrl = 'partials/annotations.editor.html';
exports.Datasource = _datasource.GenericDatasource;
exports.QueryCtrl = _query_ctrl.GenericDatasourceQueryCtrl;
exports.ConfigCtrl = GenericConfigCtrl;
exports.QueryOptionsCtrl = GenericQueryOptionsCtrl;
exports.AnnotationsQueryCtrl = GenericAnnotationsQueryCtrl;
//# sourceMappingURL=module.js.map
{"version":3,"sources":["../src/module.js"],"names":["GenericConfigCtrl","templateUrl","GenericQueryOptionsCtrl","GenericAnnotationsQueryCtrl","Datasource","GenericDatasource","QueryCtrl","GenericDatasourceQueryCtrl","ConfigCtrl","QueryOptionsCtrl","AnnotationsQueryCtrl"],"mappings":";;;;;;;AAAA;;AACA;;;;IAEMA,iB;;;;AACNA,kBAAkBC,WAAlB,GAAgC,sBAAhC;;IAEMC,uB;;;;AACNA,wBAAwBD,WAAxB,GAAsC,6BAAtC;;IAEME,2B;;;;AACNA,4BAA4BF,WAA5B,GAA0C,kCAA1C;;QAGuBG,U,GAArBC,6B;QAC8BC,S,GAA9BC,sC;QACqBC,U,GAArBR,iB;QAC2BS,gB,GAA3BP,uB;QAC+BQ,oB,GAA/BP,2B","file":"module.js","sourcesContent":["import {GenericDatasource} from './datasource';\nimport {GenericDatasourceQueryCtrl} from './query_ctrl';\n\nclass GenericConfigCtrl {}\nGenericConfigCtrl.templateUrl = 'partials/config.html';\n\nclass GenericQueryOptionsCtrl {}\nGenericQueryOptionsCtrl.templateUrl = 'partials/query.options.html';\n\nclass GenericAnnotationsQueryCtrl {}\nGenericAnnotationsQueryCtrl.templateUrl = 'partials/annotations.editor.html'\n\nexport {\n GenericDatasource as Datasource,\n GenericDatasourceQueryCtrl as QueryCtrl,\n GenericConfigCtrl as ConfigCtrl,\n GenericQueryOptionsCtrl as QueryOptionsCtrl,\n GenericAnnotationsQueryCtrl as AnnotationsQueryCtrl\n};\n"]}
\ No newline at end of file
<h3 class="page-heading">TDengine Connection</h3>
<div class="gf-form-group">
<div class="gf-form max-width-30">
<span class="gf-form-label width-7">Host</span>
<input type="text" class="gf-form-input" ng-model='ctrl.current.url' placeholder="http://localhost:6041" bs-typeahead="{{['http://localhost:6041']}}" required></input>
</div>
<div class="gf-form-inline">
<div class="gf-form max-width-15">
<span class="gf-form-label width-7">User</span>
<input type="text" class="gf-form-input" ng-model='ctrl.current.jsonData.user' placeholder="root"></input>
</div>
<div class="gf-form max-width-15">
<span class="gf-form-label width-7">Password</span>
<input type="password" class="gf-form-input" ng-model='ctrl.current.jsonData.password' placeholder="taosdata"></input>
</div>
</div>
</div>
\ No newline at end of file
<query-editor-row query-ctrl="ctrl" can-collapse="true" >
<div class="gf-form-inline">
<div class="gf-form gf-form--grow">
<label class="gf-form-label query-keyword width-7">INPUT SQL</label>
<input type="text" class="gf-form-input" ng-model="ctrl.target.sql" spellcheck='false' placeholder="select avg(mem_system) from log.dn where ts >= $from and ts < $to interval($interval)" ng-blur="ctrl.panelCtrl.refresh()" data-mode="sql"></input>
</div>
</div>
<div class="gf-form-inline">
<div class="gf-form-inline" ng-hide="ctrl.target.resultFormat === 'table'">
<div class="gf-form max-width-30">
<label class="gf-form-label query-keyword width-7">ALIAS BY</label>
<input type="text" class="gf-form-input" ng-model="ctrl.target.alias" spellcheck='false' placeholder="Naming pattern" ng-blur="ctrl.panelCtrl.refresh()">
</div>
</div>
<div class="gf-form">
<label class="gf-form-label query-keyword" ng-click="ctrl.generateSQL()">
GENERATE SQL
<i class="fa fa-caret-down" ng-show="ctrl.showGenerateSQL"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showGenerateSQL"></i>
</label>
</div>
<div class="gf-form">
<label class="gf-form-label query-keyword" ng-click="ctrl.showHelp = !ctrl.showHelp">
SHOW HELP
<i class="fa fa-caret-down" ng-show="ctrl.showHelp"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showHelp"></i>
</label>
</div>
</div>
<div class="gf-form" ng-show="ctrl.showGenerateSQL">
<pre class="gf-form-pre">{{ctrl.lastGenerateSQL}}</pre>
</div>
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info">Use any SQL that can return Resultset such as:
- [[timestamp1, value1], [timestamp2, value2], ... ]
Macros:
- $from -&gt; start timestamp of panel
- $to -&gt; stop timestamp of panel
- $interval -&gt; interval of panel
Example of SQL:
&nbsp;&nbsp;SELECT count(*)
&nbsp;&nbsp;FROM db.table
&nbsp;&nbsp;WHERE ts > $from and ts < $to
&nbsp;&nbsp;INTERVAL ($interval)
</pre>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryError">
<pre class="gf-form-pre alert alert-error">{{ctrl.lastQueryError}}</pre>
</div>
</query-editor-row>
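The macro help shown in the editor above is implemented client-side by `generateSql` in `datasource.js` (elsewhere in this diff). A stand-alone sketch of that substitution, using hypothetical range values rather than a real panel's time range:
``` javascript
// Stand-alone sketch of the $from/$to/$interval expansion done by generateSql.
// The timestamps below are hypothetical; in the plugin they come from options.range.
var sql = "select avg(mem_system) from log.dn where ts >= $from and ts < $to interval($interval)";

var queryStart = "2020-08-06T02:00:00.000Z";  // options.range.from.toISOString()
var queryEnd   = "2020-08-06T03:00:00.000Z";  // options.range.to.toISOString()
var intervalMs = "20000" + "a";               // panel interval in ms; 'a' marks milliseconds in TDengine SQL

sql = sql.replace("$from", "'" + queryStart + "'");
sql = sql.replace("$to", "'" + queryEnd + "'");
sql = sql.replace("$interval", intervalMs);

console.log(sql);
// select avg(mem_system) from log.dn where ts >= '2020-08-06T02:00:00.000Z' and ts < '2020-08-06T03:00:00.000Z' interval(20000a)
```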
{
"name": "TDengine",
"id": "taosdata-tdengine-datasource",
"type": "datasource",
"partials": {
"config": "partials/config.html"
},
"metrics": true,
"annotations": false,
"info": {
"description": "grafana datasource plugin for tdengine",
"author": {
"name": "Taosdata Inc.",
"url": "https://www.taosdata.com"
},
"logos": {
"small": "img/taosdata_logo.png",
"large": "img/taosdata_logo.png"
},
"links": [
{"name": "GitHub", "url": "https://github.com/taosdata/TDengine/tree/develop/src/connector/grafana/tdengine"},
{"name": "AGPL 3.0", "url": "https://github.com/taosdata/TDengine/tree/develop/src/connector/grafana/tdengine/LICENSE"}
],
"version": "1.0.0",
"updated": "2020-01-13"
},
"dependencies": {
"grafanaVersion": "5.2.4",
"plugins": [ ]
}
}
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.GenericDatasourceQueryCtrl = undefined;
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _sdk = require('app/plugins/sdk');
require('./css/query-editor.css!');
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var GenericDatasourceQueryCtrl = exports.GenericDatasourceQueryCtrl = function (_QueryCtrl) {
_inherits(GenericDatasourceQueryCtrl, _QueryCtrl);
function GenericDatasourceQueryCtrl($scope, $injector) {
_classCallCheck(this, GenericDatasourceQueryCtrl);
var _this = _possibleConstructorReturn(this, (GenericDatasourceQueryCtrl.__proto__ || Object.getPrototypeOf(GenericDatasourceQueryCtrl)).call(this, $scope, $injector));
_this.scope = $scope;
_this.target.target = _this.target.target || 'select metric';
_this.target.type = _this.target.type || 'timeserie';
return _this;
}
_createClass(GenericDatasourceQueryCtrl, [{
key: 'onChangeInternal',
value: function onChangeInternal() {
this.panelCtrl.refresh(); // Asks the panel to refresh data.
}
}, {
key: 'generateSQL',
value: function generateSQL(query) {
this.lastGenerateSQL = this.datasource.generateSql(this.panelCtrl, this.target);
this.showGenerateSQL = !this.showGenerateSQL;
}
}]);
return GenericDatasourceQueryCtrl;
}(_sdk.QueryCtrl);
GenericDatasourceQueryCtrl.templateUrl = 'partials/query.editor.html';
//# sourceMappingURL=query_ctrl.js.map
{"version":3,"sources":["../src/query_ctrl.js"],"names":["GenericDatasourceQueryCtrl","$scope","$injector","scope","target","type","panelCtrl","refresh","query","lastGenerateSQL","datasource","generateSql","showGenerateSQL","QueryCtrl","templateUrl"],"mappings":";;;;;;;;;AAAA;;AACA;;;;;;;;IAEaA,0B,WAAAA,0B;;;AAEX,sCAAYC,MAAZ,EAAoBC,SAApB,EAAgC;AAAA;;AAAA,wJACxBD,MADwB,EAChBC,SADgB;;AAG9B,UAAKC,KAAL,GAAaF,MAAb;AACA,UAAKG,MAAL,CAAYA,MAAZ,GAAqB,MAAKA,MAAL,CAAYA,MAAZ,IAAsB,eAA3C;AACA,UAAKA,MAAL,CAAYC,IAAZ,GAAmB,MAAKD,MAAL,CAAYC,IAAZ,IAAoB,WAAvC;AAL8B;AAM/B;;;;uCAEkB;AACjB,WAAKC,SAAL,CAAeC,OAAf,GADiB,CACS;AAC3B;;;gCAEWC,K,EAAO;AACjB,WAAKC,eAAL,GAAuB,KAAKC,UAAL,CAAgBC,WAAhB,CAA6B,KAAKL,SAAlC,EAA6C,KAAKF,MAAlD,CAAvB;AACA,WAAKQ,eAAL,GAAuB,CAAC,KAAKA,eAA7B;AACD;;;;EAjB6CC,c;;AAqBhDb,2BAA2Bc,WAA3B,GAAyC,4BAAzC","file":"query_ctrl.js","sourcesContent":["import {QueryCtrl} from 'app/plugins/sdk';\nimport './css/query-editor.css!'\n\nexport class GenericDatasourceQueryCtrl extends QueryCtrl {\n\n constructor($scope, $injector) {\n super($scope, $injector);\n\n this.scope = $scope;\n this.target.target = this.target.target || 'select metric';\n this.target.type = this.target.type || 'timeserie';\n }\n\n onChangeInternal() {\n this.panelCtrl.refresh(); // Asks the panel to refresh data.\n }\n\n generateSQL(query) {\n this.lastGenerateSQL = this.datasource.generateSql( this.panelCtrl, this.target);\n this.showGenerateSQL = !this.showGenerateSQL;\n }\n\n}\n\nGenericDatasourceQueryCtrl.templateUrl = 'partials/query.editor.html';"]}
\ No newline at end of file
{
"name": "TDengine",
"private": false,
"version": "2.0.0",
"description": "grafana datasource plugin for tdengine",
"scripts": {
"build": "./node_modules/grunt-cli/bin/grunt",
"test": "./node_modules/grunt-cli/bin/grunt mochaTest"
},
"repository": {
"type": "git",
"url": "git+https://github.com/taosdata/TDengine.git"
},
"author": "https://www.taosdata.com",
"license": "AGPL 3.0",
"bugs": {
"url": "https://github.com/taosdata/TDengine/issues"
},
"engineStrict": true,
"devDependencies": {
"babel": "^6.23.0",
"babel-plugin-transform-object-rest-spread": "^6.26.0",
"babel-preset-env": "^1.7.0",
"chai": "~3.5.0",
"grunt": "^1.0.4",
"grunt-babel": "~6.0.0",
"grunt-cli": "^1.2.0",
"grunt-contrib-clean": "^1.1.0",
"grunt-contrib-copy": "^1.0.0",
"grunt-contrib-uglify": "^2.3.0",
"grunt-contrib-watch": "^1.0.0",
"grunt-mocha-test": "^0.13.2",
"grunt-systemjs-builder": "^1.0.0",
"jsdom": "~9.12.0",
"load-grunt-tasks": "^3.5.2",
"mocha": "^6.2.2",
"prunk": "^1.3.0",
"q": "^1.5.0"
},
"dependencies": {
"lodash": "^4.17.19",
"yarn": "^1.22.0"
},
"homepage": "https://github.com/taosdata/TDengine/tree/develop/src/connector/grafana/tdengine"
}
import {Datasource} from "../module";
import Q from "q";
describe('GenericDatasource', function() {
var ctx = {};
beforeEach(function() {
ctx.$q = Q;
ctx.backendSrv = {};
ctx.templateSrv = {};
ctx.ds = new Datasource({}, ctx.$q, ctx.backendSrv, ctx.templateSrv);
});
it('should return an empty array when no targets are set', function(done) {
ctx.ds.query({targets: []}).then(function(result) {
expect(result.data).to.have.length(0);
done();
});
});
});
import prunk from 'prunk';
import {jsdom} from 'jsdom';
import chai from 'chai';
// Mock Grafana modules that are not available outside of the core project
// Required for loading module.js
prunk.mock('./css/query-editor.css!', 'no css, dude.');
prunk.mock('app/plugins/sdk', {
QueryCtrl: null
});
// Setup jsdom
// Required for loading angularjs
global.document = jsdom('<html><head><script></script></head><body></body></html>');
global.window = global.document.parentWindow;
// Setup Chai
chai.should();
global.assert = chai.assert;
global.expect = chai.expect;
import _ from "lodash";
export class GenericDatasource {
constructor(instanceSettings, $q, backendSrv, templateSrv) {
this.type = instanceSettings.type;
this.url = instanceSettings.url;
this.name = instanceSettings.name;
this.q = $q;
this.backendSrv = backendSrv;
this.templateSrv = templateSrv;
this.headers = {'Content-Type': 'application/json'};
this.headers.Authorization = this.getAuthorization(instanceSettings.jsonData);
}
query(options) {
var targets = this.buildQueryParameters(options);
if (targets.length <= 0) {
return this.q.when({data: []});
}
return this.doRequest({
url: this.url + '/grafana/query',
data: targets,
method: 'POST'
});
}
testDatasource() {
return this.doRequest({
url: this.url + '/grafana/heartbeat',
method: 'GET',
}).then(response => {
if (response.status === 200) {
return { status: "success", message: "TDengine Data source is working", title: "Success" };
}
});
}
doRequest(options) {
options.headers = this.headers;
return this.backendSrv.datasourceRequest(options);
}
buildQueryParameters(options) {
var targets = _.map(options.targets, target => {
return {
refId: target.refId,
alias: this.generateAlias(options, target),
sql: this.generateSql(options, target)
};
});
return targets;
}
encode(input) {
var _keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
var output = "";
var chr1, chr2, chr3, enc1, enc2, enc3, enc4;
var i = 0;
while (i < input.length) {
chr1 = input.charCodeAt(i++);
chr2 = input.charCodeAt(i++);
chr3 = input.charCodeAt(i++);
enc1 = chr1 >> 2;
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
enc4 = chr3 & 63;
if (isNaN(chr2)) {
enc3 = enc4 = 64;
} else if (isNaN(chr3)) {
enc4 = 64;
}
output = output + _keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4);
}
return output;
}
getAuthorization(jsonData){
jsonData = jsonData || {};
var defaultUser = jsonData.user || "root";
var defaultPassword = jsonData.password || "taosdata";
return "Basic " + this.encode(defaultUser + ":" + defaultPassword);
}
generateAlias(options, target){
var alias = target.alias || "";
alias = this.templateSrv.replace(alias, options.scopedVars, 'csv');
return alias;
}
generateSql(options, target) {
var sql = target.sql;
if (sql == null || sql == ""){
return sql;
}
var queryStart = "now-1h";
if (options != null && options.range != null && options.range.from != null){
queryStart = options.range.from.toISOString();
}
var queryEnd = "now";
if (options != null && options.range != null && options.range.to != null){
queryEnd = options.range.to.toISOString();
}
var intervalMs = options.intervalMs || "20000";
intervalMs += "a";
sql = sql.replace(/^\s+|\s+$/gm, '');
sql = sql.replace("$from", "'" + queryStart + "'");
sql = sql.replace("$begin", "'" + queryStart + "'");
sql = sql.replace("$to", "'" + queryEnd + "'");
sql = sql.replace("$end", "'" + queryEnd + "'");
sql = sql.replace("$interval", intervalMs);
sql = this.templateSrv.replace(sql, options.scopedVars, 'csv');
return sql;
}
}
\ No newline at end of file
import {GenericDatasource} from './datasource';
import {GenericDatasourceQueryCtrl} from './query_ctrl';
class GenericConfigCtrl {}
GenericConfigCtrl.templateUrl = 'partials/config.html';
class GenericQueryOptionsCtrl {}
GenericQueryOptionsCtrl.templateUrl = 'partials/query.options.html';
class GenericAnnotationsQueryCtrl {}
GenericAnnotationsQueryCtrl.templateUrl = 'partials/annotations.editor.html'
export {
GenericDatasource as Datasource,
GenericDatasourceQueryCtrl as QueryCtrl,
GenericConfigCtrl as ConfigCtrl,
GenericQueryOptionsCtrl as QueryOptionsCtrl,
GenericAnnotationsQueryCtrl as AnnotationsQueryCtrl
};
<h3 class="page-heading">TDengine Connection</h3>
<div class="gf-form-group">
<div class="gf-form max-width-30">
<span class="gf-form-label width-7">Host</span>
<input type="text" class="gf-form-input" ng-model='ctrl.current.url' placeholder="http://localhost:6041" bs-typeahead="{{['http://localhost:6041']}}" required></input>
</div>
<div class="gf-form-inline">
<div class="gf-form max-width-15">
<span class="gf-form-label width-7">User</span>
<input type="text" class="gf-form-input" ng-model='ctrl.current.jsonData.user' placeholder="root"></input>
</div>
<div class="gf-form max-width-15">
<span class="gf-form-label width-7">Password</span>
<input type="password" class="gf-form-input" ng-model='ctrl.current.jsonData.password' placeholder="taosdata"></input>
</div>
</div>
</div>
\ No newline at end of file
{
"name": "TDengine",
"id": "taosdata-tdengine-datasource",
"type": "datasource",
"partials": {
"config": "partials/config.html"
},
"metrics": true,
"annotations": false,
"info": {
"description": "grafana datasource plugin for tdengine",
"author": {
"name": "Taosdata Inc.",
"url": "https://www.taosdata.com"
},
"logos": {
"small": "img/taosdata_logo.png",
"large": "img/taosdata_logo.png"
},
"links": [
{"name": "GitHub", "url": "https://github.com/taosdata/TDengine/tree/develop/src/connector/grafana/tdengine"},
{"name": "AGPL 3.0", "url": "https://github.com/taosdata/TDengine/tree/develop/src/connector/grafana/tdengine/LICENSE"}
],
"version": "1.0.0",
"updated": "2020-01-13"
},
"dependencies": {
"grafanaVersion": "5.2.4",
"plugins": [ ]
}
}
import {QueryCtrl} from 'app/plugins/sdk';
import './css/query-editor.css!'

export class GenericDatasourceQueryCtrl extends QueryCtrl {
  constructor($scope, $injector) {
    super($scope, $injector);
    this.scope = $scope;
    this.target.target = this.target.target || 'select metric';
    this.target.type = this.target.type || 'timeserie';
  }

  onChangeInternal() {
    this.panelCtrl.refresh(); // Asks the panel to refresh data.
  }

  generateSQL(query) {
    this.lastGenerateSQL = this.datasource.generateSql(this.panelCtrl, this.target);
    this.showGenerateSQL = !this.showGenerateSQL;
  }
}

GenericDatasourceQueryCtrl.templateUrl = 'partials/query.editor.html';
\ No newline at end of file
This diff is collapsed.
Subproject commit d598db167eb256fe67409b7bb3d0eb7fffc3ff8c
@@ -165,6 +165,13 @@ int32_t dnodeInitMgmtTimer() {
   return TSDB_CODE_SUCCESS;
 }
 
+void dnodeSendStatusMsgToMnode() {
+  if (tsDnodeTmr != NULL && tsStatusTimer != NULL) {
+    dInfo("force send status msg to mnode");
+    taosTmrReset(dnodeSendStatusMsg, 3, NULL, tsDnodeTmr, &tsStatusTimer);
+  }
+}
+
 void dnodeCleanupMgmtTimer() {
   if (tsStatusTimer != NULL) {
     taosTmrStopA(&tsStatusTimer);
@@ -93,7 +93,7 @@ static void dnodeProcessReqMsgFromDnode(SRpcMsg *pMsg, SRpcEpSet *pEpSet) {
   if (pMsg->pCont == NULL) return;
 
   if (dnodeGetRunStatus() != TSDB_DNODE_RUN_STATUS_RUNING) {
-    rspMsg.code = TSDB_CODE_RPC_NOT_READY;
+    rspMsg.code = TSDB_CODE_APP_NOT_READY;
     rpcSendResponse(&rspMsg);
     rpcFreeCont(pMsg->pCont);
     dDebug("RPC %p, msg:%s is ignored since dnode not running", pMsg->handle, taosMsg[pMsg->msgType]);
@@ -119,7 +119,7 @@ void dnodeProcessMsgFromShell(SRpcMsg *pMsg, SRpcEpSet *pEpSet) {
   if (dnodeGetRunStatus() != TSDB_DNODE_RUN_STATUS_RUNING) {
     dError("RPC %p, shell msg:%s is ignored since dnode not running", pMsg->handle, taosMsg[pMsg->msgType]);
-    rpcMsg.code = TSDB_CODE_RPC_NOT_READY;
+    rpcMsg.code = TSDB_CODE_APP_NOT_READY;
     rpcSendResponse(&rpcMsg);
     rpcFreeCont(pMsg->pCont);
     return;
@@ -144,7 +144,7 @@ void dnodeProcessMsgFromShell(SRpcMsg *pMsg, SRpcEpSet *pEpSet) {
 static int dnodeRetrieveUserAuthInfo(char *user, char *spi, char *encrypt, char *secret, char *ckey) {
   int code = mnodeRetriveAuth(user, spi, encrypt, secret, ckey);
-  if (code != TSDB_CODE_RPC_NOT_READY) return code;
+  if (code != TSDB_CODE_APP_NOT_READY) return code;
 
   SDMAuthMsg *pMsg = rpcMallocCont(sizeof(SDMAuthMsg));
   tstrncpy(pMsg->user, user, sizeof(pMsg->user));
@@ -65,6 +65,8 @@ void dnodeSendRpcMnodeWriteRsp(void *pMsg, int32_t code);
 void dnodeReprocessMnodeWriteMsg(void *pMsg);
 void dnodeDelayReprocessMnodeWriteMsg(void *pMsg);
 
+void dnodeSendStatusMsgToMnode();
+
 #ifdef __cplusplus
 }
 #endif
@@ -65,6 +65,7 @@ TAOS_DEFINE_ERROR(TSDB_CODE_RPC_INVALID_SESSION_ID, 0, 0x0010, "Invalid se
 TAOS_DEFINE_ERROR(TSDB_CODE_RPC_INVALID_MSG_TYPE, 0, 0x0011, "Invalid message type")
 TAOS_DEFINE_ERROR(TSDB_CODE_RPC_INVALID_RESPONSE_TYPE, 0, 0x0012, "Invalid response type")
 TAOS_DEFINE_ERROR(TSDB_CODE_RPC_INVALID_TIME_STAMP, 0, 0x0013, "Invalid timestamp")
+TAOS_DEFINE_ERROR(TSDB_CODE_APP_NOT_READY, 0, 0x0014, "Database not ready")
 
 //common & util
 TAOS_DEFINE_ERROR(TSDB_CODE_COM_OPS_NOT_SUPPORT, 0, 0x0100, "Operation not supported")
@@ -184,7 +185,6 @@ TAOS_DEFINE_ERROR(TSDB_CODE_VND_NO_DISK_PERMISSIONS, 0, 0x0506, "No write p
 TAOS_DEFINE_ERROR(TSDB_CODE_VND_NO_SUCH_FILE_OR_DIR, 0, 0x0507, "Missing data file")
 TAOS_DEFINE_ERROR(TSDB_CODE_VND_OUT_OF_MEMORY, 0, 0x0508, "Out of memory")
 TAOS_DEFINE_ERROR(TSDB_CODE_VND_APP_ERROR, 0, 0x0509, "Unexpected generic error in vnode")
-TAOS_DEFINE_ERROR(TSDB_CODE_VND_INVALID_STATUS, 0, 0x0510, "Database not ready")
 TAOS_DEFINE_ERROR(TSDB_CODE_VND_NOT_SYNCED, 0, 0x0511, "Database suspended")
 TAOS_DEFINE_ERROR(TSDB_CODE_VND_NO_WRITE_AUTH, 0, 0x0512, "Write operation denied")
@@ -41,7 +41,7 @@
 static void * tsDbSdb = NULL;
 static int32_t tsDbUpdateSize;
 
-static int32_t mnodeCreateDb(SAcctObj *pAcct, SCMCreateDbMsg *pCreate, void *pMsg);
+static int32_t mnodeCreateDb(SAcctObj *pAcct, SCMCreateDbMsg *pCreate, SMnodeMsg *pMsg);
 static int32_t mnodeDropDb(SMnodeMsg *newMsg);
 static int32_t mnodeSetDbDropping(SDbObj *pDb);
 static int32_t mnodeGetDbMeta(STableMetaMsg *pMeta, SShowObj *pShow, void *pConn);
@@ -343,7 +343,7 @@ static int32_t mnodeCreateDbCb(SMnodeMsg *pMsg, int32_t code) {
   return code;
 }
 
-static int32_t mnodeCreateDb(SAcctObj *pAcct, SCMCreateDbMsg *pCreate, void *pMsg) {
+static int32_t mnodeCreateDb(SAcctObj *pAcct, SCMCreateDbMsg *pCreate, SMnodeMsg *pMsg) {
   int32_t code = acctCheck(pAcct, ACCT_GRANT_DB);
   if (code != 0) return code;
@@ -393,6 +393,9 @@ static int32_t mnodeCreateDb(SAcctObj *pAcct, SCMCreateDbMsg *pCreate, void *pMs
     return code;
   }
 
+  pMsg->pDb = pDb;
+  mnodeIncDbRef(pDb);
+
   SSdbOper oper = {
     .type = SDB_OPER_GLOBAL,
     .table = tsDbSdb,
@@ -405,6 +408,7 @@ static int32_t mnodeCreateDb(SAcctObj *pAcct, SCMCreateDbMsg *pCreate, void *pMs
   code = sdbInsertRow(&oper);
   if (code != TSDB_CODE_SUCCESS && code != TSDB_CODE_MND_ACTION_IN_PROGRESS) {
     mError("db:%s, failed to create, reason:%s", pDb->name, tstrerror(code));
+    pMsg->pDb = NULL;
     mnodeDestroyDb(pDb);
   }
@@ -581,8 +581,8 @@ void mnodeDropAllUsers(SAcctObj *pAcct)  {
 int32_t mnodeRetriveAuth(char *user, char *spi, char *encrypt, char *secret, char *ckey) {
   if (!sdbIsMaster()) {
     *secret = 0;
-    mDebug("user:%s, failed to auth user, reason:%s", user, tstrerror(TSDB_CODE_RPC_NOT_READY));
-    return TSDB_CODE_RPC_NOT_READY;
+    mDebug("user:%s, failed to auth user, reason:%s", user, tstrerror(TSDB_CODE_APP_NOT_READY));
+    return TSDB_CODE_APP_NOT_READY;
   }
 
   SUserObj *pUser = mnodeGetUser(user);
@@ -230,8 +230,12 @@ void wordfree(wordexp_t *pwordexp);
 #define atomic_exchange_16(ptr, val) _InterlockedExchange16((short volatile*)(ptr), (short)(val))
 #define atomic_exchange_32(ptr, val) _InterlockedExchange((long volatile*)(ptr), (long)(val))
 #define atomic_exchange_64(ptr, val) _InterlockedExchange64((__int64 volatile*)(ptr), (__int64)(val))
-#define atomic_exchange_ptr(ptr, val) _InterlockedExchangePointer((void* volatile*)(ptr), (void*)(val))
+#ifdef _WIN64
+#define atomic_exchange_ptr(ptr, val) _InterlockedExchangePointer((void* volatile*)(ptr), (void*)(val))
+#else
+#define atomic_exchange_ptr(ptr, val) _InlineInterlockedExchangePointer((void* volatile*)(ptr), (void*)(val))
+#endif
 
 #ifdef _TD_GO_DLL_
 #define atomic_val_compare_exchange_8 __sync_val_compare_and_swap
 #else
@@ -247,162 +251,104 @@ void wordfree(wordexp_t *pwordexp);
 long interlocked_add_fetch_32(long volatile *ptr, long val);
 __int64 interlocked_add_fetch_64(__int64 volatile *ptr, __int64 val);
 
+char interlocked_and_fetch_8(char volatile* ptr, char val);
+short interlocked_and_fetch_16(short volatile* ptr, short val);
+long interlocked_and_fetch_32(long volatile* ptr, long val);
+__int64 interlocked_and_fetch_64(__int64 volatile* ptr, __int64 val);
+__int64 interlocked_fetch_and_64(__int64 volatile* ptr, __int64 val);
+char interlocked_or_fetch_8(char volatile* ptr, char val);
+short interlocked_or_fetch_16(short volatile* ptr, short val);
+long interlocked_or_fetch_32(long volatile* ptr, long val);
+__int64 interlocked_or_fetch_64(__int64 volatile* ptr, __int64 val);
+char interlocked_xor_fetch_8(char volatile* ptr, char val);
+short interlocked_xor_fetch_16(short volatile* ptr, short val);
+long interlocked_xor_fetch_32(long volatile* ptr, long val);
+__int64 interlocked_xor_fetch_64(__int64 volatile* ptr, __int64 val);
+__int64 interlocked_fetch_xor_64(__int64 volatile* ptr, __int64 val);
+
 #define atomic_add_fetch_8(ptr, val) interlocked_add_fetch_8((char volatile*)(ptr), (char)(val))
 #define atomic_add_fetch_16(ptr, val) interlocked_add_fetch_16((short volatile*)(ptr), (short)(val))
 #define atomic_add_fetch_32(ptr, val) interlocked_add_fetch_32((long volatile*)(ptr), (long)(val))
 #define atomic_add_fetch_64(ptr, val) interlocked_add_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _WIN64
-#define atomic_add_fetch_ptr atomic_add_fetch_64
-#else
-#define atomic_add_fetch_ptr atomic_add_fetch_32
-#endif
 
 #ifdef _TD_GO_DLL_
 #define atomic_fetch_add_8 __sync_fetch_and_ad
 #define atomic_fetch_add_16 __sync_fetch_and_add
 #else
 #define atomic_fetch_add_8(ptr, val) _InterlockedExchangeAdd8((char volatile*)(ptr), (char)(val))
 #define atomic_fetch_add_16(ptr, val) _InterlockedExchangeAdd16((short volatile*)(ptr), (short)(val))
 #endif
-#define atomic_fetch_add_8(ptr, val) _InterlockedExchangeAdd8((char volatile*)(ptr), (char)(val))
-#define atomic_fetch_add_16(ptr, val) _InterlockedExchangeAdd16((short volatile*)(ptr), (short)(val))
 #define atomic_fetch_add_32(ptr, val) _InterlockedExchangeAdd((long volatile*)(ptr), (long)(val))
 #define atomic_fetch_add_64(ptr, val) _InterlockedExchangeAdd64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _WIN64
-#define atomic_fetch_add_ptr atomic_fetch_add_64
-#else
-#define atomic_fetch_add_ptr atomic_fetch_add_32
-#endif
 
 #define atomic_sub_fetch_8(ptr, val) interlocked_add_fetch_8((char volatile*)(ptr), -(char)(val))
 #define atomic_sub_fetch_16(ptr, val) interlocked_add_fetch_16((short volatile*)(ptr), -(short)(val))
 #define atomic_sub_fetch_32(ptr, val) interlocked_add_fetch_32((long volatile*)(ptr), -(long)(val))
 #define atomic_sub_fetch_64(ptr, val) interlocked_add_fetch_64((__int64 volatile*)(ptr), -(__int64)(val))
-#ifdef _WIN64
-#define atomic_sub_fetch_ptr atomic_sub_fetch_64
-#else
-#define atomic_sub_fetch_ptr atomic_sub_fetch_32
-#endif
 
 #define atomic_fetch_sub_8(ptr, val) _InterlockedExchangeAdd8((char volatile*)(ptr), -(char)(val))
 #define atomic_fetch_sub_16(ptr, val) _InterlockedExchangeAdd16((short volatile*)(ptr), -(short)(val))
 #define atomic_fetch_sub_32(ptr, val) _InterlockedExchangeAdd((long volatile*)(ptr), -(long)(val))
 #define atomic_fetch_sub_64(ptr, val) _InterlockedExchangeAdd64((__int64 volatile*)(ptr), -(__int64)(val))
-#ifdef _WIN64
-#define atomic_fetch_sub_ptr atomic_fetch_sub_64
-#else
-#define atomic_fetch_sub_ptr atomic_fetch_sub_32
-#endif
 
-#ifndef _TD_GO_DLL_
-char interlocked_and_fetch_8(char volatile* ptr, char val);
-short interlocked_and_fetch_16(short volatile* ptr, short val);
-#endif
-long interlocked_and_fetch_32(long volatile* ptr, long val);
-__int64 interlocked_and_fetch_64(__int64 volatile* ptr, __int64 val);
-#ifndef _TD_GO_DLL_
-#define atomic_and_fetch_8(ptr, val) interlocked_and_fetch_8((char volatile*)(ptr), (char)(val))
-#define atomic_and_fetch_16(ptr, val) interlocked_and_fetch_16((short volatile*)(ptr), (short)(val))
-#endif
+#define atomic_and_fetch_8(ptr, val) interlocked_and_fetch_8((char volatile*)(ptr), (char)(val))
+#define atomic_and_fetch_16(ptr, val) interlocked_and_fetch_16((short volatile*)(ptr), (short)(val))
 #define atomic_and_fetch_32(ptr, val) interlocked_and_fetch_32((long volatile*)(ptr), (long)(val))
 #define atomic_and_fetch_64(ptr, val) interlocked_and_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _WIN64
-#define atomic_and_fetch_ptr atomic_and_fetch_64
-#else
-#define atomic_and_fetch_ptr atomic_and_fetch_32
-#endif
 
-#ifndef _TD_GO_DLL_
-#define atomic_fetch_and_8(ptr, val) _InterlockedAnd8((char volatile*)(ptr), (char)(val))
-#define atomic_fetch_and_16(ptr, val) _InterlockedAnd16((short volatile*)(ptr), (short)(val))
-#endif
-#define atomic_fetch_and_32(ptr, val) _InterlockedAnd((long volatile*)(ptr), (long)(val))
-#ifdef _M_IX86
-__int64 interlocked_fetch_and_64(__int64 volatile* ptr, __int64 val);
-#define atomic_fetch_and_64(ptr, val) interlocked_fetch_and_64((__int64 volatile*)(ptr), (__int64)(val))
-#else
-#define atomic_fetch_and_64(ptr, val) _InterlockedAnd64((__int64 volatile*)(ptr), (__int64)(val))
-#endif
-#ifdef _WIN64
-#define atomic_fetch_and_ptr atomic_fetch_and_64
-#else
-#define atomic_fetch_and_ptr atomic_fetch_and_32
-#endif
+#define atomic_fetch_and_8(ptr, val) _InterlockedAnd8((char volatile*)(ptr), (char)(val))
+#define atomic_fetch_and_16(ptr, val) _InterlockedAnd16((short volatile*)(ptr), (short)(val))
+#define atomic_fetch_and_32(ptr, val) _InterlockedAnd((long volatile*)(ptr), (long)(val))
+#define atomic_fetch_and_64(ptr, val) interlocked_fetch_and_64((__int64 volatile*)(ptr), (__int64)(val))
 
-#ifndef _TD_GO_DLL_
-char interlocked_or_fetch_8(char volatile* ptr, char val);
-short interlocked_or_fetch_16(short volatile* ptr, short val);
-#endif
-long interlocked_or_fetch_32(long volatile* ptr, long val);
-__int64 interlocked_or_fetch_64(__int64 volatile* ptr, __int64 val);
-#ifndef _TD_GO_DLL_
-#define atomic_or_fetch_8(ptr, val) interlocked_or_fetch_8((char volatile*)(ptr), (char)(val))
-#define atomic_or_fetch_16(ptr, val) interlocked_or_fetch_16((short volatile*)(ptr), (short)(val))
-#endif
+#define atomic_or_fetch_8(ptr, val) interlocked_or_fetch_8((char volatile*)(ptr), (char)(val))
+#define atomic_or_fetch_16(ptr, val) interlocked_or_fetch_16((short volatile*)(ptr), (short)(val))
 #define atomic_or_fetch_32(ptr, val) interlocked_or_fetch_32((long volatile*)(ptr), (long)(val))
 #define atomic_or_fetch_64(ptr, val) interlocked_or_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _WIN64
-#define atomic_or_fetch_ptr atomic_or_fetch_64
-#else
-#define atomic_or_fetch_ptr atomic_or_fetch_32
-#endif
 
-#ifndef _TD_GO_DLL_
-#define atomic_fetch_or_8(ptr, val) _InterlockedOr8((char volatile*)(ptr), (char)(val))
-#define atomic_fetch_or_16(ptr, val) _InterlockedOr16((short volatile*)(ptr), (short)(val))
-#endif
-#define atomic_fetch_or_32(ptr, val) _InterlockedOr((long volatile*)(ptr), (long)(val))
-#ifdef _M_IX86
-__int64 interlocked_fetch_or_64(__int64 volatile* ptr, __int64 val);
-#define atomic_fetch_or_64(ptr, val) interlocked_fetch_or_64((__int64 volatile*)(ptr), (__int64)(val))
-#else
-#define atomic_fetch_or_64(ptr, val) _InterlockedOr64((__int64 volatile*)(ptr), (__int64)(val))
-#endif
-#ifdef _WIN64
-#define atomic_fetch_or_ptr atomic_fetch_or_64
-#else
-#define atomic_fetch_or_ptr atomic_fetch_or_32
-#endif
+#define atomic_fetch_or_8(ptr, val) _InterlockedOr8((char volatile*)(ptr), (char)(val))
+#define atomic_fetch_or_16(ptr, val) _InterlockedOr16((short volatile*)(ptr), (short)(val))
+#define atomic_fetch_or_32(ptr, val) _InterlockedOr((long volatile*)(ptr), (long)(val))
+#define atomic_fetch_or_64(ptr, val) interlocked_fetch_or_64((__int64 volatile*)(ptr), (__int64)(val))
 
-#ifndef _TD_GO_DLL_
-char interlocked_xor_fetch_8(char volatile* ptr, char val);
-short interlocked_xor_fetch_16(short volatile* ptr, short val);
-#endif
-long interlocked_xor_fetch_32(long volatile* ptr, long val);
-__int64 interlocked_xor_fetch_64(__int64 volatile* ptr, __int64 val);
-#ifndef _TD_GO_DLL_
-#define atomic_xor_fetch_8(ptr, val) interlocked_xor_fetch_8((char volatile*)(ptr), (char)(val))
-#define atomic_xor_fetch_16(ptr, val) interlocked_xor_fetch_16((short volatile*)(ptr), (short)(val))
-#endif
+#define atomic_xor_fetch_8(ptr, val) interlocked_xor_fetch_8((char volatile*)(ptr), (char)(val))
+#define atomic_xor_fetch_16(ptr, val) interlocked_xor_fetch_16((short volatile*)(ptr), (short)(val))
 #define atomic_xor_fetch_32(ptr, val) interlocked_xor_fetch_32((long volatile*)(ptr), (long)(val))
 #define atomic_xor_fetch_64(ptr, val) interlocked_xor_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _WIN64
-#define atomic_xor_fetch_ptr atomic_xor_fetch_64
-#else
-#define atomic_xor_fetch_ptr atomic_xor_fetch_32
-#endif
 
-#ifndef _TD_GO_DLL_
-#define atomic_fetch_xor_8(ptr, val) _InterlockedXor8((char volatile*)(ptr), (char)(val))
-#define atomic_fetch_xor_16(ptr, val) _InterlockedXor16((short volatile*)(ptr), (short)(val))
-#endif
+#define atomic_fetch_xor_8(ptr, val) _InterlockedXor8((char volatile*)(ptr), (char)(val))
+#define atomic_fetch_xor_16(ptr, val) _InterlockedXor16((short volatile*)(ptr), (short)(val))
 #define atomic_fetch_xor_32(ptr, val) _InterlockedXor((long volatile*)(ptr), (long)(val))
-#ifdef _M_IX86
-__int64 interlocked_fetch_xor_64(__int64 volatile* ptr, __int64 val);
-#define atomic_fetch_xor_64(ptr, val) interlocked_fetch_xor_64((__int64 volatile*)(ptr), (__int64)(val))
-#else
-#define atomic_fetch_xor_64(ptr, val) _InterlockedXor64((__int64 volatile*)(ptr), (__int64)(val))
-#endif
+#define atomic_fetch_xor_64(ptr, val) interlocked_fetch_xor_64((__int64 volatile*)(ptr), (__int64)(val))
 
 #ifdef _WIN64
+#define atomic_add_fetch_ptr atomic_add_fetch_64
+#define atomic_fetch_add_ptr atomic_fetch_add_64
+#define atomic_sub_fetch_ptr atomic_sub_fetch_64
+#define atomic_fetch_sub_ptr atomic_fetch_sub_64
+#define atomic_and_fetch_ptr atomic_and_fetch_64
+#define atomic_fetch_and_ptr atomic_fetch_and_64
+#define atomic_or_fetch_ptr atomic_or_fetch_64
+#define atomic_fetch_or_ptr atomic_fetch_or_64
+#define atomic_xor_fetch_ptr atomic_xor_fetch_64
 #define atomic_fetch_xor_ptr atomic_fetch_xor_64
 #else
+#define atomic_add_fetch_ptr atomic_add_fetch_32
+#define atomic_fetch_add_ptr atomic_fetch_add_32
+#define atomic_sub_fetch_ptr atomic_sub_fetch_32
+#define atomic_fetch_sub_ptr atomic_fetch_sub_32
+#define atomic_and_fetch_ptr atomic_and_fetch_32
+#define atomic_fetch_and_ptr atomic_fetch_and_32
+#define atomic_or_fetch_ptr atomic_or_fetch_32
+#define atomic_fetch_or_ptr atomic_fetch_or_32
+#define atomic_xor_fetch_ptr atomic_xor_fetch_32
 #define atomic_fetch_xor_ptr atomic_fetch_xor_32
 #endif
 
 #ifdef __cplusplus
 }
 #endif
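For context on the header change above: it consolidates the interlocked_* wrapper declarations and resolves every pointer-width atomic_*_ptr alias in a single #ifdef _WIN64 block. Below is a minimal standalone sketch of that same width-selection idiom, not code from the TDengine header; demo_fetch_add_ptr and main are illustrative names, and it assumes an MSVC build where the documented <intrin.h> intrinsics _InterlockedExchangeAdd and _InterlockedExchangeAdd64 are available.

```c
/* Sketch: compile-time selection of a pointer-sized atomic add,
 * mirroring the consolidated #ifdef _WIN64 block above. */
#include <intrin.h>
#include <stdio.h>

#ifdef _WIN64
  /* 64-bit build: pointer-sized values are 64 bits wide */
  #define demo_fetch_add_ptr(p, v) _InterlockedExchangeAdd64((__int64 volatile *)(p), (__int64)(v))
#else
  /* 32-bit build: fall back to the 32-bit intrinsic */
  #define demo_fetch_add_ptr(p, v) _InterlockedExchangeAdd((long volatile *)(p), (long)(v))
#endif

int main(void) {
  volatile size_t counter = 0;
  demo_fetch_add_ptr(&counter, 16);  /* atomically adds 16, returns the old value */
  printf("%zu\n", (size_t)counter);  /* prints 16 */
  return 0;
}
```

Resolving the width once, at the alias level, is the design choice the diff makes: callers keep using atomic_*_ptr and never repeat the _WIN64 check at each call site.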
@@ -23,19 +23,19 @@
 // add
 char interlocked_add_fetch_8(char volatile* ptr, char val) {
 #ifdef _TD_GO_DLL_
   return __sync_fetch_and_add(ptr, val) + val;
 #else
   return _InterlockedExchangeAdd8(ptr, val) + val;
 #endif
 }
 
 short interlocked_add_fetch_16(short volatile* ptr, short val) {
 #ifdef _TD_GO_DLL_
   return __sync_fetch_and_add(ptr, val) + val;
 #else
   return _InterlockedExchangeAdd16(ptr, val) + val;
 #endif
 }
 
 long interlocked_add_fetch_32(long volatile* ptr, long val) {
@@ -43,15 +43,13 @@ long interlocked_add_fetch_32(long volatile* ptr, long val) {
 }
 
 __int64 interlocked_add_fetch_64(__int64 volatile* ptr, __int64 val) {
-#ifdef _WIN64
+//#ifdef _WIN64
   return _InterlockedExchangeAdd64(ptr, val) + val;
-#else
-  return _InterlockedExchangeAdd(ptr, val) + val;
-#endif
+//#else
+//  return _InterlockedExchangeAdd(ptr, val) + val;
+//#endif
 }
 
-// and
-#ifndef _TD_GO_DLL_
 char interlocked_and_fetch_8(char volatile* ptr, char val) {
   return _InterlockedAnd8(ptr, val) & val;
 }
@@ -59,41 +57,37 @@ char interlocked_and_fetch_8(char volatile* ptr, char val) {
 short interlocked_and_fetch_16(short volatile* ptr, short val) {
   return _InterlockedAnd16(ptr, val) & val;
 }
-#endif
 
 long interlocked_and_fetch_32(long volatile* ptr, long val) {
   return _InterlockedAnd(ptr, val) & val;
 }
 
-#ifndef _M_IX86
 __int64 interlocked_and_fetch_64(__int64 volatile* ptr, __int64 val) {
+#ifndef _M_IX86
   return _InterlockedAnd64(ptr, val) & val;
-}
 #else
-__int64 interlocked_and_fetch_64(__int64 volatile* ptr, __int64 val) {
   __int64 old, res;
   do {
     old = *ptr;
     res = old & val;
-  } while(_InterlockedCompareExchange64(ptr, res, old) != old);
+  } while (_InterlockedCompareExchange64(ptr, res, old) != old);
   return res;
+#endif
 }
 
 __int64 interlocked_fetch_and_64(__int64 volatile* ptr, __int64 val) {
+#ifdef _M_IX86
   __int64 old;
   do {
     old = *ptr;
-  } while(_InterlockedCompareExchange64(ptr, old & val, old) != old);
+  } while (_InterlockedCompareExchange64(ptr, old & val, old) != old);
   return old;
-}
+#else
+  return _InterlockedAnd64((__int64 volatile*)(ptr), (__int64)(val));
 #endif
+}
 
-// or
-#ifndef _TD_GO_DLL_
 char interlocked_or_fetch_8(char volatile* ptr, char val) {
   return _InterlockedOr8(ptr, val) | val;
 }
@@ -101,40 +95,36 @@ char interlocked_or_fetch_8(char volatile* ptr, char val) {
 short interlocked_or_fetch_16(short volatile* ptr, short val) {
   return _InterlockedOr16(ptr, val) | val;
 }
-#endif
 
 long interlocked_or_fetch_32(long volatile* ptr, long val) {
   return _InterlockedOr(ptr, val) | val;
 }
 
-#ifndef _M_IX86
-__int64 interlocked_or_fetch_64(__int64 volatile* ptr, __int64 val) {
-  return _InterlockedOr64(ptr, val) & val;
-}
-#else
 __int64 interlocked_or_fetch_64(__int64 volatile* ptr, __int64 val) {
+#ifdef _M_IX86
   __int64 old, res;
   do {
     old = *ptr;
     res = old | val;
   } while(_InterlockedCompareExchange64(ptr, res, old) != old);
   return res;
+#else
+  return _InterlockedOr64(ptr, val) & val;
+#endif
 }
 
 __int64 interlocked_fetch_or_64(__int64 volatile* ptr, __int64 val) {
+#ifdef _M_IX86
   __int64 old;
   do {
     old = *ptr;
   } while(_InterlockedCompareExchange64(ptr, old | val, old) != old);
   return old;
-}
+#else
+  return _InterlockedOr64((__int64 volatile*)(ptr), (__int64)(val));
 #endif
+}
 
-// xor
-#ifndef _TD_GO_DLL_
 char interlocked_xor_fetch_8(char volatile* ptr, char val) {
   return _InterlockedXor8(ptr, val) ^ val;
 }
@@ -142,35 +132,33 @@ char interlocked_xor_fetch_8(char volatile* ptr, char val) {
 short interlocked_xor_fetch_16(short volatile* ptr, short val) {
   return _InterlockedXor16(ptr, val) ^ val;
 }
-#endif
 
 long interlocked_xor_fetch_32(long volatile* ptr, long val) {
   return _InterlockedXor(ptr, val) ^ val;
 }
 
-#ifndef _M_IX86
-__int64 interlocked_xor_fetch_64(__int64 volatile* ptr, __int64 val) {
-  return _InterlockedXor64(ptr, val) ^ val;
-}
-#else
 __int64 interlocked_xor_fetch_64(__int64 volatile* ptr, __int64 val) {
+#ifdef _M_IX86
   __int64 old, res;
   do {
     old = *ptr;
     res = old ^ val;
   } while(_InterlockedCompareExchange64(ptr, res, old) != old);
   return res;
+#else
+  return _InterlockedXor64(ptr, val) ^ val;
+#endif
 }
 
 __int64 interlocked_fetch_xor_64(__int64 volatile* ptr, __int64 val) {
+#ifdef _M_IX86
   __int64 old;
   do {
     old = *ptr;
-  } while(_InterlockedCompareExchange64(ptr, old ^ val, old) != old);
+  } while (_InterlockedCompareExchange64(ptr, old ^ val, old) != old);
   return old;
-}
+#else
+  return _InterlockedXor64((__int64 volatile*)(ptr), (__int64)(val));
 #endif
+}
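The file above falls back, on 32-bit (_M_IX86) builds, to emulating 64-bit fetch-and/or/xor with a compare-and-swap retry loop around _InterlockedCompareExchange64. A self-contained sketch of that same pattern follows; demo_fetch_and_64 and main are illustrative names rather than TDengine symbols, and the example assumes an MSVC toolchain providing <intrin.h>.

```c
/* Sketch: 64-bit atomic fetch-and built from a CAS retry loop,
 * the emulation pattern used in the 32-bit branches above. */
#include <intrin.h>
#include <stdio.h>

static __int64 demo_fetch_and_64(__int64 volatile *ptr, __int64 val) {
  __int64 old;
  do {
    old = *ptr;                 /* snapshot the current value                 */
    /* retry until no other thread changed *ptr between the read and the CAS */
  } while (_InterlockedCompareExchange64(ptr, old & val, old) != old);
  return old;                   /* value observed before the AND was applied  */
}

int main(void) {
  __int64 flags = 0xFF;
  __int64 prev = demo_fetch_and_64(&flags, 0x0F);
  printf("prev=%lld now=%lld\n", prev, flags);  /* prev=255 now=15 */
  return 0;
}
```

The loop guarantees that the returned old value and the stored old & val come from the same snapshot: if another thread updates the location in between, the compare-exchange fails and the operation is retried.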
@@ -154,13 +154,15 @@ bool taosGetCpuUsage(float *sysCpuUsage, float *procCpuUsage) {
 bool taosGetProcMemory(float *memoryUsedMB) {
   unsigned bytes_used = 0;
 
+#if 0
 #if defined(_WIN32) && defined(_MSC_VER)
   PROCESS_MEMORY_COUNTERS pmc;
   HANDLE cur_proc = GetCurrentProcess();
   if (GetProcessMemoryInfo(cur_proc, &pmc, sizeof(pmc))) {
     bytes_used = (unsigned)(pmc.WorkingSetSize + pmc.PagefileUsage);
   }
 #endif
+#endif
 
   *memoryUsedMB = (float)bytes_used / 1024 / 1024;
This diff is collapsed.
@@ -990,7 +990,7 @@ void tColModelCompact(SColumnModel *pModel, tFilePage *inputBuffer, int32_t maxE
     SSchemaEx* pSchemaEx = &pModel->pFields[i];
     memmove(inputBuffer->data + pSchemaEx->offset * inputBuffer->num,
             inputBuffer->data + pSchemaEx->offset * maxElemsCapacity,
-            pSchemaEx->field.bytes * inputBuffer->num);
+            (size_t)(pSchemaEx->field.bytes * inputBuffer->num));
   }
 }
The diffs of 14 more files are collapsed and not shown.