Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
killuaz丶x
SkyWalking
提交
d4333ff7
S
SkyWalking
项目概览
killuaz丶x
/
SkyWalking
与 Fork 源项目一致
Fork自
apache / SkyWalking
通知
1
Star
0
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
S
SkyWalking
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
体验新版 GitCode,发现更多精彩内容 >>
提交
d4333ff7
编写于
10月 13, 2017
作者:
clevertension
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
add h2 support phase 2
上级
bc38a56d
变更
21
隐藏空白更改
内联
并排
Showing
21 changed file
with
725 addition
and
66 deletion
+725
-66
apm-collector/apm-collector-agentregister/src/main/java/org/skywalking/apm/collector/agentregister/worker/application/dao/ApplicationH2DAO.java
...gentregister/worker/application/dao/ApplicationH2DAO.java
+1
-1
apm-collector/apm-collector-agentregister/src/main/java/org/skywalking/apm/collector/agentregister/worker/instance/dao/InstanceH2DAO.java
...ctor/agentregister/worker/instance/dao/InstanceH2DAO.java
+6
-10
apm-collector/apm-collector-agentstream/src/main/java/org/skywalking/apm/collector/agentstream/worker/node/mapping/dao/NodeMappingH2DAO.java
...agentstream/worker/node/mapping/dao/NodeMappingH2DAO.java
+1
-0
apm-collector/apm-collector-agentstream/src/main/java/org/skywalking/apm/collector/agentstream/worker/noderef/dao/NodeReferenceH2DAO.java
...or/agentstream/worker/noderef/dao/NodeReferenceH2DAO.java
+71
-2
apm-collector/apm-collector-agentstream/src/main/java/org/skywalking/apm/collector/agentstream/worker/noderef/define/NodeReferenceH2TableDefine.java
...eam/worker/noderef/define/NodeReferenceH2TableDefine.java
+1
-0
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/ApplicationH2DAO.java
...org/skywalking/apm/collector/ui/dao/ApplicationH2DAO.java
+1
-1
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/CpuMetricH2DAO.java
...a/org/skywalking/apm/collector/ui/dao/CpuMetricH2DAO.java
+6
-13
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/GCMetricH2DAO.java
...va/org/skywalking/apm/collector/ui/dao/GCMetricH2DAO.java
+133
-4
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/GlobalTraceH2DAO.java
...org/skywalking/apm/collector/ui/dao/GlobalTraceH2DAO.java
+45
-4
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/InstPerformanceH2DAO.java
...skywalking/apm/collector/ui/dao/InstPerformanceH2DAO.java
+7
-8
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/InstanceH2DAO.java
...va/org/skywalking/apm/collector/ui/dao/InstanceH2DAO.java
+8
-11
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/MemoryMetricH2DAO.java
...rg/skywalking/apm/collector/ui/dao/MemoryMetricH2DAO.java
+89
-0
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/MemoryPoolMetricH2DAO.java
...kywalking/apm/collector/ui/dao/MemoryPoolMetricH2DAO.java
+89
-0
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/NodeComponentH2DAO.java
...g/skywalking/apm/collector/ui/dao/NodeComponentH2DAO.java
+1
-1
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/NodeMappingH2DAO.java
...org/skywalking/apm/collector/ui/dao/NodeMappingH2DAO.java
+1
-1
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/NodeReferenceH2DAO.java
...g/skywalking/apm/collector/ui/dao/NodeReferenceH2DAO.java
+1
-2
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/SegmentCostH2DAO.java
...org/skywalking/apm/collector/ui/dao/SegmentCostH2DAO.java
+124
-3
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/SegmentH2DAO.java
...ava/org/skywalking/apm/collector/ui/dao/SegmentH2DAO.java
+34
-1
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/ServiceEntryH2DAO.java
...rg/skywalking/apm/collector/ui/dao/ServiceEntryH2DAO.java
+65
-1
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/ServiceNameH2DAO.java
...org/skywalking/apm/collector/ui/dao/ServiceNameH2DAO.java
+38
-3
apm-collector/apm-collector-ui/src/main/resources/META-INF/defines/h2_dao.define
...ctor-ui/src/main/resources/META-INF/defines/h2_dao.define
+3
-0
未找到文件。
apm-collector/apm-collector-agentregister/src/main/java/org/skywalking/apm/collector/agentregister/worker/application/dao/ApplicationH2DAO.java
浏览文件 @
d4333ff7
...
...
@@ -16,7 +16,7 @@ import java.text.MessageFormat;
public
class
ApplicationH2DAO
extends
H2DAO
implements
IApplicationDAO
{
private
final
Logger
logger
=
LoggerFactory
.
getLogger
(
ApplicationH2DAO
.
class
);
private
static
final
String
INSERT_APPLICATION_SQL
=
"insert into {0}({1}, {2}) values(?, ?)"
;
;
@Override
@Override
public
int
getApplicationId
(
String
applicationCode
)
{
logger
.
info
(
"get the application id with application code = {}"
,
applicationCode
);
String
sql
=
"select "
+
ApplicationTable
.
COLUMN_APPLICATION_ID
+
" from "
+
...
...
apm-collector/apm-collector-agentregister/src/main/java/org/skywalking/apm/collector/agentregister/worker/instance/dao/InstanceH2DAO.java
浏览文件 @
d4333ff7
package
org.skywalking.apm.collector.agentregister.worker.instance.dao
;
import
java.sql.ResultSet
;
import
java.sql.SQLException
;
import
java.text.MessageFormat
;
import
java.util.HashMap
;
import
java.util.Map
;
import
org.skywalking.apm.collector.client.h2.H2Client
;
import
org.skywalking.apm.collector.client.h2.H2ClientException
;
import
org.skywalking.apm.collector.core.stream.Data
;
import
org.skywalking.apm.collector.storage.define.jvm.CpuMetricTable
;
import
org.skywalking.apm.collector.storage.define.register.ApplicationTable
;
import
org.skywalking.apm.collector.storage.define.register.InstanceDataDefine
;
import
org.skywalking.apm.collector.storage.define.register.InstanceTable
;
import
org.skywalking.apm.collector.storage.define.serviceref.ServiceReferenceTable
;
import
org.skywalking.apm.collector.storage.h2.dao.H2DAO
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
java.sql.ResultSet
;
import
java.sql.SQLException
;
import
java.text.MessageFormat
;
import
java.util.HashMap
;
import
java.util.Map
;
/**
* @author pengys5
*/
...
...
apm-collector/apm-collector-agentstream/src/main/java/org/skywalking/apm/collector/agentstream/worker/node/mapping/dao/NodeMappingH2DAO.java
浏览文件 @
d4333ff7
...
...
@@ -51,6 +51,7 @@ public class NodeMappingH2DAO extends H2DAO implements INodeMappingDAO, IPersist
source
.
put
(
NodeMappingTable
.
COLUMN_TIME_BUCKET
,
data
.
getDataLong
(
0
));
String
sql
=
getBatchInsertSql
(
NodeMappingTable
.
TABLE
,
source
.
keySet
());
entity
.
setSql
(
sql
);
entity
.
setParams
(
source
.
values
().
toArray
(
new
Object
[
0
]));
return
entity
;
}
...
...
apm-collector/apm-collector-agentstream/src/main/java/org/skywalking/apm/collector/agentstream/worker/noderef/dao/NodeReferenceH2DAO.java
浏览文件 @
d4333ff7
package
org.skywalking.apm.collector.agentstream.worker.noderef.dao
;
import
org.skywalking.apm.collector.client.h2.H2Client
;
import
org.skywalking.apm.collector.client.h2.H2ClientException
;
import
org.skywalking.apm.collector.core.stream.Data
;
import
org.skywalking.apm.collector.storage.define.DataDefine
;
import
org.skywalking.apm.collector.storage.define.node.NodeMappingTable
;
import
org.skywalking.apm.collector.storage.define.noderef.NodeReferenceTable
;
import
org.skywalking.apm.collector.storage.h2.dao.H2DAO
;
import
org.skywalking.apm.collector.storage.h2.define.H2SqlEntity
;
import
org.skywalking.apm.collector.stream.worker.impl.dao.IPersistenceDAO
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
java.sql.ResultSet
;
import
java.sql.SQLException
;
import
java.text.MessageFormat
;
import
java.util.ArrayList
;
import
java.util.HashMap
;
import
java.util.List
;
import
java.util.Map
;
/**
* @author pengys5
*/
public
class
NodeReferenceH2DAO
extends
H2DAO
implements
INodeReferenceDAO
,
IPersistenceDAO
<
H2SqlEntity
,
H2SqlEntity
>
{
private
final
Logger
logger
=
LoggerFactory
.
getLogger
(
NodeReferenceH2DAO
.
class
);
private
static
final
String
GET_SQL
=
"select * from {0} where {1} = ?"
;
@Override
public
Data
get
(
String
id
,
DataDefine
dataDefine
)
{
H2Client
client
=
getClient
();
String
sql
=
MessageFormat
.
format
(
GET_SQL
,
NodeReferenceTable
.
TABLE
,
"id"
);
Object
[]
params
=
new
Object
[]{
id
};
try
(
ResultSet
rs
=
client
.
executeQuery
(
sql
,
params
))
{
if
(
rs
.
next
())
{
Data
data
=
dataDefine
.
build
(
id
);
data
.
setDataInteger
(
0
,
rs
.
getInt
(
NodeReferenceTable
.
COLUMN_FRONT_APPLICATION_ID
));
data
.
setDataInteger
(
1
,
rs
.
getInt
(
NodeReferenceTable
.
COLUMN_BEHIND_APPLICATION_ID
));
data
.
setDataString
(
1
,
rs
.
getString
(
NodeReferenceTable
.
COLUMN_BEHIND_PEER
));
data
.
setDataInteger
(
2
,
rs
.
getInt
(
NodeReferenceTable
.
COLUMN_S1_LTE
));
data
.
setDataInteger
(
3
,
rs
.
getInt
(
NodeReferenceTable
.
COLUMN_S3_LTE
));
data
.
setDataInteger
(
4
,
rs
.
getInt
(
NodeReferenceTable
.
COLUMN_S5_LTE
));
data
.
setDataInteger
(
5
,
rs
.
getInt
(
NodeReferenceTable
.
COLUMN_S5_GT
));
data
.
setDataInteger
(
6
,
rs
.
getInt
(
NodeReferenceTable
.
COLUMN_SUMMARY
));
data
.
setDataInteger
(
7
,
rs
.
getInt
(
NodeReferenceTable
.
COLUMN_ERROR
));
data
.
setDataLong
(
0
,
rs
.
getLong
(
NodeReferenceTable
.
COLUMN_TIME_BUCKET
));
return
data
;
}
}
catch
(
SQLException
|
H2ClientException
e
)
{
logger
.
error
(
e
.
getMessage
(),
e
);
}
return
null
;
}
@Override
public
H2SqlEntity
prepareBatchInsert
(
Data
data
)
{
return
null
;
Map
<
String
,
Object
>
source
=
new
HashMap
<>();
H2SqlEntity
entity
=
new
H2SqlEntity
();
source
.
put
(
"id"
,
data
.
getDataString
(
0
));
source
.
put
(
NodeReferenceTable
.
COLUMN_FRONT_APPLICATION_ID
,
data
.
getDataInteger
(
0
));
source
.
put
(
NodeReferenceTable
.
COLUMN_BEHIND_APPLICATION_ID
,
data
.
getDataInteger
(
1
));
source
.
put
(
NodeReferenceTable
.
COLUMN_BEHIND_PEER
,
data
.
getDataString
(
1
));
source
.
put
(
NodeReferenceTable
.
COLUMN_S1_LTE
,
data
.
getDataInteger
(
2
));
source
.
put
(
NodeReferenceTable
.
COLUMN_S3_LTE
,
data
.
getDataInteger
(
3
));
source
.
put
(
NodeReferenceTable
.
COLUMN_S5_LTE
,
data
.
getDataInteger
(
4
));
source
.
put
(
NodeReferenceTable
.
COLUMN_S5_GT
,
data
.
getDataInteger
(
5
));
source
.
put
(
NodeReferenceTable
.
COLUMN_SUMMARY
,
data
.
getDataInteger
(
6
));
source
.
put
(
NodeReferenceTable
.
COLUMN_ERROR
,
data
.
getDataInteger
(
7
));
source
.
put
(
NodeReferenceTable
.
COLUMN_TIME_BUCKET
,
data
.
getDataLong
(
0
));
String
sql
=
getBatchInsertSql
(
NodeMappingTable
.
TABLE
,
source
.
keySet
());
entity
.
setSql
(
sql
);
entity
.
setParams
(
source
.
values
().
toArray
(
new
Object
[
0
]));
return
entity
;
}
@Override
public
H2SqlEntity
prepareBatchUpdate
(
Data
data
)
{
return
null
;
Map
<
String
,
Object
>
source
=
new
HashMap
<>();
H2SqlEntity
entity
=
new
H2SqlEntity
();
source
.
put
(
NodeReferenceTable
.
COLUMN_FRONT_APPLICATION_ID
,
data
.
getDataInteger
(
0
));
source
.
put
(
NodeReferenceTable
.
COLUMN_BEHIND_APPLICATION_ID
,
data
.
getDataInteger
(
1
));
source
.
put
(
NodeReferenceTable
.
COLUMN_BEHIND_PEER
,
data
.
getDataString
(
1
));
source
.
put
(
NodeReferenceTable
.
COLUMN_S1_LTE
,
data
.
getDataInteger
(
2
));
source
.
put
(
NodeReferenceTable
.
COLUMN_S3_LTE
,
data
.
getDataInteger
(
3
));
source
.
put
(
NodeReferenceTable
.
COLUMN_S5_LTE
,
data
.
getDataInteger
(
4
));
source
.
put
(
NodeReferenceTable
.
COLUMN_S5_GT
,
data
.
getDataInteger
(
5
));
source
.
put
(
NodeReferenceTable
.
COLUMN_SUMMARY
,
data
.
getDataInteger
(
6
));
source
.
put
(
NodeReferenceTable
.
COLUMN_ERROR
,
data
.
getDataInteger
(
7
));
source
.
put
(
NodeReferenceTable
.
COLUMN_TIME_BUCKET
,
data
.
getDataLong
(
0
));
String
id
=
data
.
getDataString
(
0
);
String
sql
=
getBatchUpdateSql
(
NodeMappingTable
.
TABLE
,
source
.
keySet
(),
"id"
);
entity
.
setSql
(
sql
);
List
<
Object
>
values
=
new
ArrayList
<>(
source
.
values
());
values
.
add
(
id
);
entity
.
setParams
(
values
.
toArray
(
new
Object
[
0
]));
return
entity
;
}
}
apm-collector/apm-collector-agentstream/src/main/java/org/skywalking/apm/collector/agentstream/worker/noderef/define/NodeReferenceH2TableDefine.java
浏览文件 @
d4333ff7
...
...
@@ -14,6 +14,7 @@ public class NodeReferenceH2TableDefine extends H2TableDefine {
}
@Override
public
void
initialize
()
{
addColumn
(
new
H2ColumnDefine
(
NodeReferenceTable
.
COLUMN_ID
,
H2ColumnDefine
.
Type
.
Varchar
.
name
()));
addColumn
(
new
H2ColumnDefine
(
NodeReferenceTable
.
COLUMN_FRONT_APPLICATION_ID
,
H2ColumnDefine
.
Type
.
Int
.
name
()));
addColumn
(
new
H2ColumnDefine
(
NodeReferenceTable
.
COLUMN_BEHIND_APPLICATION_ID
,
H2ColumnDefine
.
Type
.
Int
.
name
()));
addColumn
(
new
H2ColumnDefine
(
NodeReferenceTable
.
COLUMN_BEHIND_PEER
,
H2ColumnDefine
.
Type
.
Varchar
.
name
()));
...
...
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/ApplicationH2DAO.java
浏览文件 @
d4333ff7
...
...
@@ -13,7 +13,7 @@ import java.sql.SQLException;
import
java.text.MessageFormat
;
/**
* @author pengys5
* @author pengys5
, clevertension
*/
public
class
ApplicationH2DAO
extends
H2DAO
implements
IApplicationDAO
{
...
...
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/CpuMetricH2DAO.java
浏览文件 @
d4333ff7
package
org.skywalking.apm.collector.ui.dao
;
import
com.google.gson.JsonArray
;
import
org.elasticsearch.action.get.GetResponse
;
import
org.elasticsearch.action.get.MultiGetItemResponse
;
import
org.elasticsearch.action.get.MultiGetRequestBuilder
;
import
org.elasticsearch.action.get.MultiGetResponse
;
import
org.skywalking.apm.collector.client.h2.H2Client
;
import
org.skywalking.apm.collector.client.h2.H2ClientException
;
import
org.skywalking.apm.collector.core.util.Const
;
import
org.skywalking.apm.collector.core.util.TimeBucketUtils
;
import
org.skywalking.apm.collector.storage.define.jvm.CpuMetricTable
;
import
org.skywalking.apm.collector.storage.define.register.InstanceDataDefine
;
import
org.skywalking.apm.collector.storage.define.register.InstanceTable
;
import
org.skywalking.apm.collector.storage.elasticsearch.dao.EsDAO
;
import
org.skywalking.apm.collector.storage.h2.dao.H2DAO
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
...
...
@@ -24,16 +17,16 @@ import java.util.ArrayList;
import
java.util.List
;
/**
* @author pengys5
* @author pengys5
, clevertension
*/
public
class
CpuMetricH2DAO
extends
H2DAO
implements
ICpuMetricDAO
{
private
final
Logger
logger
=
LoggerFactory
.
getLogger
(
InstanceH2DAO
.
class
);
private
static
final
String
GET_METRIC_SQL
=
"select * from {0} where {1} = ?"
;
private
static
final
String
GET_METRICS_SQL
=
"select * from {0} where {1} in ("
;
private
static
final
String
GET_
CPU_
METRIC_SQL
=
"select * from {0} where {1} = ?"
;
private
static
final
String
GET_
CPU_
METRICS_SQL
=
"select * from {0} where {1} in ("
;
@Override
public
int
getMetric
(
int
instanceId
,
long
timeBucket
)
{
String
id
=
timeBucket
+
Const
.
ID_SPLIT
+
instanceId
;
H2Client
client
=
getClient
();
String
sql
=
MessageFormat
.
format
(
GET_METRIC_SQL
,
CpuMetricTable
.
TABLE
,
"id"
);
String
sql
=
MessageFormat
.
format
(
GET_
CPU_
METRIC_SQL
,
CpuMetricTable
.
TABLE
,
"id"
);
Object
[]
params
=
new
Object
[]{
id
};
try
(
ResultSet
rs
=
client
.
executeQuery
(
sql
,
params
))
{
if
(
rs
.
next
())
{
...
...
@@ -47,7 +40,7 @@ public class CpuMetricH2DAO extends H2DAO implements ICpuMetricDAO {
@Override
public
JsonArray
getMetric
(
int
instanceId
,
long
startTimeBucket
,
long
endTimeBucket
)
{
H2Client
client
=
getClient
();
String
sql
=
MessageFormat
.
format
(
GET_METRICS_SQL
,
CpuMetricTable
.
TABLE
,
"id"
);
String
sql
=
MessageFormat
.
format
(
GET_
CPU_
METRICS_SQL
,
CpuMetricTable
.
TABLE
,
"id"
);
long
timeBucket
=
startTimeBucket
;
List
<
String
>
idList
=
new
ArrayList
<>();
...
...
@@ -59,7 +52,7 @@ public class CpuMetricH2DAO extends H2DAO implements ICpuMetricDAO {
while
(
timeBucket
<=
endTimeBucket
);
StringBuilder
builder
=
new
StringBuilder
();
for
(
int
i
=
0
;
i
<
idList
.
size
();
i
++
)
{
for
(
int
i
=
0
;
i
<
idList
.
size
();
i
++
)
{
builder
.
append
(
"?,"
);
}
builder
.
delete
(
builder
.
length
()
-
1
,
builder
.
length
());
...
...
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/GCMetricH2DAO.java
浏览文件 @
d4333ff7
package
org.skywalking.apm.collector.ui.dao
;
import
com.google.gson.JsonArray
;
import
com.google.gson.JsonObject
;
import
org.skywalking.apm.collector.client.h2.H2Client
;
import
org.skywalking.apm.collector.client.h2.H2ClientException
;
import
org.skywalking.apm.collector.core.util.Const
;
import
org.skywalking.apm.collector.core.util.TimeBucketUtils
;
import
org.skywalking.apm.collector.storage.define.jvm.GCMetricTable
;
import
org.skywalking.apm.collector.storage.h2.dao.H2DAO
;
import
org.skywalking.apm.network.proto.GCPhrase
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
java.sql.ResultSet
;
import
java.sql.SQLException
;
import
java.text.MessageFormat
;
import
java.util.ArrayList
;
import
java.util.List
;
/**
* @author pengys5
* @author pengys5
, clevertension
*/
public
class
GCMetricH2DAO
extends
H2DAO
implements
IGCMetricDAO
{
private
final
Logger
logger
=
LoggerFactory
.
getLogger
(
GCMetricH2DAO
.
class
);
private
static
final
String
GET_GC_COUNT_SQL
=
"select sum({0}) as cnt, {1} from {2} where {3} > ? group by {1}"
;
private
static
final
String
GET_GC_COUNT_SQL
=
"select {1}, sum({0}) as cnt, {1} from {2} where {3} = ? and {4} in ("
;
private
static
final
String
GET_GC_METRIC_SQL
=
"select * from {0} where {1} = ?"
;
private
static
final
String
GET_GC_METRICS_SQL
=
"select * from {0} where {1} in ("
;
@Override
public
GCCount
getGCCount
(
long
[]
timeBuckets
,
int
instanceId
)
{
GCCount
gcCount
=
new
GCCount
();
H2Client
client
=
getClient
();
String
sql
=
GET_GC_COUNT_SQL
;
StringBuilder
builder
=
new
StringBuilder
();
for
(
int
i
=
0
;
i
<
timeBuckets
.
length
;
i
++)
{
builder
.
append
(
"?,"
);
}
builder
.
delete
(
builder
.
length
()
-
1
,
builder
.
length
());
builder
.
append
(
")"
);
sql
=
sql
+
builder
+
" group by {1}"
;
sql
=
MessageFormat
.
format
(
sql
,
GCMetricTable
.
COLUMN_COUNT
,
GCMetricTable
.
COLUMN_PHRASE
,
GCMetricTable
.
TABLE
,
GCMetricTable
.
COLUMN_INSTANCE_ID
,
"id"
);
Object
[]
params
=
new
Object
[
timeBuckets
.
length
+
1
];
for
(
int
i
=
0
;
i
<
timeBuckets
.
length
;
i
++)
{
params
[
i
+
1
]
=
timeBuckets
[
i
];
}
params
[
0
]
=
instanceId
;
try
(
ResultSet
rs
=
client
.
executeQuery
(
sql
,
params
))
{
if
(
rs
.
next
())
{
int
phrase
=
rs
.
getInt
(
GCMetricTable
.
COLUMN_PHRASE
);
int
count
=
rs
.
getInt
(
"cnt"
);
if
(
phrase
==
GCPhrase
.
NEW_VALUE
)
{
gcCount
.
setYoung
(
count
);
}
else
if
(
phrase
==
GCPhrase
.
OLD_VALUE
)
{
gcCount
.
setOld
(
count
);
}
}
}
catch
(
SQLException
|
H2ClientException
e
)
{
logger
.
error
(
e
.
getMessage
(),
e
);
}
return
gcCount
;
}
@Override
public
JsonObject
getMetric
(
int
instanceId
,
long
timeBucket
)
{
return
null
;
JsonObject
response
=
new
JsonObject
();
H2Client
client
=
getClient
();
String
sql
=
MessageFormat
.
format
(
GET_GC_METRIC_SQL
,
GCMetricTable
.
TABLE
,
"id"
);
String
youngId
=
timeBucket
+
Const
.
ID_SPLIT
+
GCPhrase
.
NEW_VALUE
+
instanceId
;
Object
[]
params
=
new
Object
[]{
youngId
};
try
(
ResultSet
rs
=
client
.
executeQuery
(
sql
,
params
))
{
if
(
rs
.
next
())
{
response
.
addProperty
(
"ygc"
,
rs
.
getInt
(
GCMetricTable
.
COLUMN_COUNT
));
}
}
catch
(
SQLException
|
H2ClientException
e
)
{
logger
.
error
(
e
.
getMessage
(),
e
);
}
String
oldId
=
timeBucket
+
Const
.
ID_SPLIT
+
GCPhrase
.
OLD_VALUE
+
instanceId
;
Object
[]
params1
=
new
Object
[]{
oldId
};
try
(
ResultSet
rs
=
client
.
executeQuery
(
sql
,
params1
))
{
if
(
rs
.
next
())
{
response
.
addProperty
(
"ogc"
,
rs
.
getInt
(
GCMetricTable
.
COLUMN_COUNT
));
}
}
catch
(
SQLException
|
H2ClientException
e
)
{
logger
.
error
(
e
.
getMessage
(),
e
);
}
return
response
;
}
@Override
public
JsonObject
getMetric
(
int
instanceId
,
long
startTimeBucket
,
long
endTimeBucket
)
{
return
null
;
JsonObject
response
=
new
JsonObject
();
H2Client
client
=
getClient
();
String
sql
=
MessageFormat
.
format
(
GET_GC_METRICS_SQL
,
GCMetricTable
.
TABLE
,
"id"
);
long
timeBucket
=
startTimeBucket
;
List
<
String
>
idList
=
new
ArrayList
<>();
do
{
timeBucket
=
TimeBucketUtils
.
INSTANCE
.
addSecondForSecondTimeBucket
(
TimeBucketUtils
.
TimeBucketType
.
SECOND
.
name
(),
timeBucket
,
1
);
String
youngId
=
timeBucket
+
Const
.
ID_SPLIT
+
instanceId
+
Const
.
ID_SPLIT
+
GCPhrase
.
NEW_VALUE
;
idList
.
add
(
youngId
);
}
while
(
timeBucket
<=
endTimeBucket
);
StringBuilder
builder
=
new
StringBuilder
();
for
(
int
i
=
0
;
i
<
idList
.
size
();
i
++)
{
builder
.
append
(
"?,"
);
}
builder
.
delete
(
builder
.
length
()
-
1
,
builder
.
length
());
builder
.
append
(
")"
);
sql
=
sql
+
builder
;
Object
[]
params
=
idList
.
toArray
(
new
String
[
0
]);
JsonArray
youngArray
=
new
JsonArray
();
try
(
ResultSet
rs
=
client
.
executeQuery
(
sql
,
params
))
{
while
(
rs
.
next
())
{
youngArray
.
add
(
rs
.
getInt
(
GCMetricTable
.
COLUMN_COUNT
));
}
if
(
youngArray
.
size
()
==
0
)
{
youngArray
.
add
(
0
);
}
}
catch
(
SQLException
|
H2ClientException
e
)
{
logger
.
error
(
e
.
getMessage
(),
e
);
}
response
.
add
(
"ygc"
,
youngArray
);
List
<
String
>
idList1
=
new
ArrayList
<>();
timeBucket
=
startTimeBucket
;
do
{
timeBucket
=
TimeBucketUtils
.
INSTANCE
.
addSecondForSecondTimeBucket
(
TimeBucketUtils
.
TimeBucketType
.
SECOND
.
name
(),
timeBucket
,
1
);
String
oldId
=
timeBucket
+
Const
.
ID_SPLIT
+
instanceId
+
Const
.
ID_SPLIT
+
GCPhrase
.
OLD_VALUE
;
idList1
.
add
(
oldId
);
}
while
(
timeBucket
<=
endTimeBucket
);
String
sql1
=
MessageFormat
.
format
(
GET_GC_METRICS_SQL
,
GCMetricTable
.
TABLE
,
"id"
);
StringBuilder
builder1
=
new
StringBuilder
();
for
(
int
i
=
0
;
i
<
idList1
.
size
();
i
++)
{
builder1
.
append
(
"?,"
);
}
builder1
.
delete
(
builder1
.
length
()
-
1
,
builder1
.
length
());
builder1
.
append
(
")"
);
sql1
=
sql1
+
builder1
;
Object
[]
params1
=
idList
.
toArray
(
new
String
[
0
]);
JsonArray
oldArray
=
new
JsonArray
();
try
(
ResultSet
rs
=
client
.
executeQuery
(
sql1
,
params1
))
{
while
(
rs
.
next
())
{
oldArray
.
add
(
rs
.
getInt
(
GCMetricTable
.
COLUMN_COUNT
));
}
if
(
oldArray
.
size
()
==
0
)
{
oldArray
.
add
(
0
);
}
}
catch
(
SQLException
|
H2ClientException
e
)
{
logger
.
error
(
e
.
getMessage
(),
e
);
}
response
.
add
(
"ogc"
,
oldArray
);
return
response
;
}
}
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/GlobalTraceH2DAO.java
浏览文件 @
d4333ff7
package
org.skywalking.apm.collector.ui.dao
;
import
java.util.List
;
import
org.skywalking.apm.collector.client.h2.H2Client
;
import
org.skywalking.apm.collector.client.h2.H2ClientException
;
import
org.skywalking.apm.collector.storage.define.global.GlobalTraceTable
;
import
org.skywalking.apm.collector.storage.h2.dao.H2DAO
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
java.sql.ResultSet
;
import
java.sql.SQLException
;
import
java.text.MessageFormat
;
import
java.util.ArrayList
;
import
java.util.List
;
/**
* @author pengys5
* @author pengys5
, clevertension
*/
public
class
GlobalTraceH2DAO
extends
H2DAO
implements
IGlobalTraceDAO
{
private
final
Logger
logger
=
LoggerFactory
.
getLogger
(
GlobalTraceH2DAO
.
class
);
private
static
final
String
GET_GLOBAL_TRACE_ID_SQL
=
"select {0} from {1} where {2} = ? limit 10"
;
private
static
final
String
GET_SEGMENT_IDS_SQL
=
"select {0} from {1} where {2} = ? limit 10"
;
@Override
public
List
<
String
>
getGlobalTraceId
(
String
segmentId
)
{
return
null
;
List
<
String
>
globalTraceIds
=
new
ArrayList
<>();
H2Client
client
=
getClient
();
String
sql
=
MessageFormat
.
format
(
GET_GLOBAL_TRACE_ID_SQL
,
GlobalTraceTable
.
COLUMN_GLOBAL_TRACE_ID
,
GlobalTraceTable
.
TABLE
,
GlobalTraceTable
.
COLUMN_SEGMENT_ID
);
Object
[]
params
=
new
Object
[]{
segmentId
};
try
(
ResultSet
rs
=
client
.
executeQuery
(
sql
,
params
))
{
while
(
rs
.
next
())
{
String
globalTraceId
=
rs
.
getString
(
GlobalTraceTable
.
COLUMN_GLOBAL_TRACE_ID
);
logger
.
debug
(
"segmentId: {}, global trace id: {}"
,
segmentId
,
globalTraceId
);
globalTraceIds
.
add
(
globalTraceId
);
}
}
catch
(
SQLException
|
H2ClientException
e
)
{
logger
.
error
(
e
.
getMessage
(),
e
);
}
return
globalTraceIds
;
}
@Override
public
List
<
String
>
getSegmentIds
(
String
globalTraceId
)
{
return
null
;
List
<
String
>
segmentIds
=
new
ArrayList
<>();
H2Client
client
=
getClient
();
String
sql
=
MessageFormat
.
format
(
GET_SEGMENT_IDS_SQL
,
GlobalTraceTable
.
COLUMN_SEGMENT_ID
,
GlobalTraceTable
.
TABLE
,
GlobalTraceTable
.
COLUMN_GLOBAL_TRACE_ID
);
Object
[]
params
=
new
Object
[]{
globalTraceId
};
try
(
ResultSet
rs
=
client
.
executeQuery
(
sql
,
params
))
{
while
(
rs
.
next
())
{
String
segmentId
=
rs
.
getString
(
GlobalTraceTable
.
COLUMN_SEGMENT_ID
);
logger
.
debug
(
"segmentId: {}, global trace id: {}"
,
segmentId
,
globalTraceId
);
segmentIds
.
add
(
globalTraceId
);
}
}
catch
(
SQLException
|
H2ClientException
e
)
{
logger
.
error
(
e
.
getMessage
(),
e
);
}
return
segmentIds
;
}
}
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/InstPerformanceH2DAO.java
浏览文件 @
d4333ff7
...
...
@@ -6,9 +6,6 @@ import org.skywalking.apm.collector.client.h2.H2ClientException;
import
org.skywalking.apm.collector.core.util.Const
;
import
org.skywalking.apm.collector.core.util.TimeBucketUtils
;
import
org.skywalking.apm.collector.storage.define.instance.InstPerformanceTable
;
import
org.skywalking.apm.collector.storage.define.jvm.CpuMetricTable
;
import
org.skywalking.apm.collector.storage.define.register.InstanceDataDefine
;
import
org.skywalking.apm.collector.storage.define.register.InstanceTable
;
import
org.skywalking.apm.collector.storage.h2.dao.H2DAO
;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
...
...
@@ -20,7 +17,7 @@ import java.util.ArrayList;
import
java.util.List
;
/**
* @author pengys5
* @author pengys5
, clevertension
*/
public
class
InstPerformanceH2DAO
extends
H2DAO
implements
IInstPerformanceDAO
{
private
final
Logger
logger
=
LoggerFactory
.
getLogger
(
InstPerformanceH2DAO
.
class
);
...
...
@@ -32,14 +29,14 @@ public class InstPerformanceH2DAO extends H2DAO implements IInstPerformanceDAO {
logger
.
info
(
"the inst performance inst id = {}"
,
instanceId
);
String
sql
=
MessageFormat
.
format
(
GET_INST_PERF_SQL
,
InstPerformanceTable
.
TABLE
,
InstPerformanceTable
.
COLUMN_INSTANCE_ID
,
InstPerformanceTable
.
COLUMN_TIME_BUCKET
);
StringBuilder
builder
=
new
StringBuilder
();
for
(
int
i
=
0
;
i
<
timeBuckets
.
length
;
i
++
)
{
for
(
int
i
=
0
;
i
<
timeBuckets
.
length
;
i
++
)
{
builder
.
append
(
"?,"
);
}
builder
.
delete
(
builder
.
length
()
-
1
,
builder
.
length
());
builder
.
append
(
")"
);
sql
=
sql
+
builder
;
Object
[]
params
=
new
Object
[
timeBuckets
.
length
+
1
];
for
(
int
i
=
0
;
i
<
timeBuckets
.
length
;
i
++)
{
for
(
int
i
=
0
;
i
<
timeBuckets
.
length
;
i
++)
{
params
[
i
+
1
]
=
timeBuckets
[
i
];
}
params
[
0
]
=
instanceId
;
...
...
@@ -56,6 +53,7 @@ public class InstPerformanceH2DAO extends H2DAO implements IInstPerformanceDAO {
}
@Override
public
int
getTpsMetric
(
int
instanceId
,
long
timeBucket
)
{
logger
.
info
(
"getTpMetric instanceId = {}, startTimeBucket = {}"
,
instanceId
,
timeBucket
);
H2Client
client
=
getClient
();
String
sql
=
MessageFormat
.
format
(
GET_TPS_METRIC_SQL
,
InstPerformanceTable
.
TABLE
,
"id"
);
Object
[]
params
=
new
Object
[]{
instanceId
};
...
...
@@ -70,6 +68,7 @@ public class InstPerformanceH2DAO extends H2DAO implements IInstPerformanceDAO {
}
@Override
public
JsonArray
getTpsMetric
(
int
instanceId
,
long
startTimeBucket
,
long
endTimeBucket
)
{
logger
.
info
(
"getTpsMetric instanceId = {}, startTimeBucket = {}, endTimeBucket = {}"
,
instanceId
,
startTimeBucket
,
endTimeBucket
);
H2Client
client
=
getClient
();
String
sql
=
MessageFormat
.
format
(
GET_TPS_METRICS_SQL
,
InstPerformanceTable
.
TABLE
,
"id"
);
...
...
@@ -83,7 +82,7 @@ public class InstPerformanceH2DAO extends H2DAO implements IInstPerformanceDAO {
while
(
timeBucket
<=
endTimeBucket
);
StringBuilder
builder
=
new
StringBuilder
();
for
(
int
i
=
0
;
i
<
idList
.
size
();
i
++
)
{
for
(
int
i
=
0
;
i
<
idList
.
size
();
i
++
)
{
builder
.
append
(
"?,"
);
}
builder
.
delete
(
builder
.
length
()
-
1
,
builder
.
length
());
...
...
@@ -133,7 +132,7 @@ public class InstPerformanceH2DAO extends H2DAO implements IInstPerformanceDAO {
while
(
timeBucket
<=
endTimeBucket
);
StringBuilder
builder
=
new
StringBuilder
();
for
(
int
i
=
0
;
i
<
idList
.
size
();
i
++
)
{
for
(
int
i
=
0
;
i
<
idList
.
size
();
i
++
)
{
builder
.
append
(
"?,"
);
}
builder
.
delete
(
builder
.
length
()
-
1
,
builder
.
length
());
...
...
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/InstanceH2DAO.java
浏览文件 @
d4333ff7
package
org.skywalking.apm.collector.ui.dao
;
import
com.google.gson.JsonArray
;
import
java.sql.ResultSet
;
import
java.sql.SQLException
;
import
java.text.MessageFormat
;
import
java.util.LinkedList
;
import
java.util.List
;
import
com.google.gson.JsonObject
;
import
org.skywalking.apm.collector.client.h2.H2Client
;
import
org.skywalking.apm.collector.client.h2.H2ClientException
;
import
org.skywalking.apm.collector.core.util.StringUtils
;
import
org.skywalking.apm.collector.core.util.TimeBucketUtils
;
import
org.skywalking.apm.collector.storage.define.node.NodeComponentTable
;
import
org.skywalking.apm.collector.storage.define.register.InstanceDataDefine
;
import
org.skywalking.apm.collector.storage.define.register.InstanceTable
;
import
org.skywalking.apm.collector.storage.h2.dao.H2DAO
;
...
...
@@ -21,8 +12,14 @@ import org.skywalking.apm.collector.ui.cache.ApplicationCache;
import
org.slf4j.Logger
;
import
org.slf4j.LoggerFactory
;
import
java.sql.ResultSet
;
import
java.sql.SQLException
;
import
java.text.MessageFormat
;
import
java.util.LinkedList
;
import
java.util.List
;
/**
* @author pengys5
* @author pengys5
, clevertension
*/
public
class
InstanceH2DAO
extends
H2DAO
implements
IInstanceDAO
{
private
final
Logger
logger
=
LoggerFactory
.
getLogger
(
InstanceH2DAO
.
class
);
...
...
@@ -55,7 +52,7 @@ public class InstanceH2DAO extends H2DAO implements IInstanceDAO {
long
fiveMinuteBefore
=
System
.
currentTimeMillis
()
-
5
*
60
*
1000
;
fiveMinuteBefore
=
TimeBucketUtils
.
INSTANCE
.
getSecondTimeBucket
(
fiveMinuteBefore
);
String
sql
=
MessageFormat
.
format
(
GET_INST_LAST_HEARTBEAT_TIME_SQL
,
InstanceTable
.
COLUMN_HEARTBEAT_TIME
,
InstanceTable
.
TABLE
,
InstanceTable
.
COLUMN_HEARTBEAT_TIME
,
InstanceTable
.
COLUMN_
APPLICATION
_ID
);
InstanceTable
.
COLUMN_HEARTBEAT_TIME
,
InstanceTable
.
COLUMN_
INSTANCE
_ID
);
Object
[]
params
=
new
Object
[]{
fiveMinuteBefore
,
applicationInstanceId
};
try
(
ResultSet
rs
=
client
.
executeQuery
(
sql
,
params
))
{
if
(
rs
.
next
())
{
...
...
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/MemoryMetricH2DAO.java
0 → 100644
浏览文件 @
d4333ff7
package org.skywalking.apm.collector.ui.dao;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.skywalking.apm.collector.client.h2.H2Client;
import org.skywalking.apm.collector.client.h2.H2ClientException;
import org.skywalking.apm.collector.core.util.Const;
import org.skywalking.apm.collector.core.util.TimeBucketUtils;
import org.skywalking.apm.collector.storage.define.jvm.MemoryMetricTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;

/**
 * H2 DAO that reads JVM memory metrics (heap / non-heap) for the UI.
 *
 * @author clevertension
 */
public class MemoryMetricH2DAO extends H2DAO implements IMemoryMetricDAO {
    // FIX: logger was created with InstanceH2DAO.class, mislabeling every log line of this DAO.
    private final Logger logger = LoggerFactory.getLogger(MemoryMetricH2DAO.class);

    private static final String GET_MEMORY_METRIC_SQL = "select * from {0} where {1} =?";
    private static final String GET_MEMORY_METRICS_SQL = "select * from {0} where {1} in (";

    /**
     * Loads a single memory-metric row identified by (timeBucket, instanceId, isHeap).
     *
     * @param instanceId instance register id
     * @param timeBucket second-precision time bucket
     * @param isHeap true for heap memory, false for non-heap
     * @return JSON object with "max", "init" and "used"; all zero when the row is absent
     */
    @Override
    public JsonObject getMetric(int instanceId, long timeBucket, boolean isHeap) {
        H2Client client = getClient();
        // Row id layout: timeBucket + ID_SPLIT + instanceId + ID_SPLIT + isHeap.
        String id = timeBucket + Const.ID_SPLIT + instanceId + Const.ID_SPLIT + isHeap;
        String sql = MessageFormat.format(GET_MEMORY_METRIC_SQL, MemoryMetricTable.TABLE, "id");
        Object[] params = new Object[]{id};
        JsonObject metric = new JsonObject();
        try (ResultSet rs = client.executeQuery(sql, params)) {
            if (rs.next()) {
                metric.addProperty("max", rs.getInt(MemoryMetricTable.COLUMN_MAX));
                metric.addProperty("init", rs.getInt(MemoryMetricTable.COLUMN_INIT));
                metric.addProperty("used", rs.getInt(MemoryMetricTable.COLUMN_USED));
            } else {
                // Keep the response shape stable even when no data exists for the bucket.
                metric.addProperty("max", 0);
                metric.addProperty("init", 0);
                metric.addProperty("used", 0);
            }
        } catch (SQLException | H2ClientException e) {
            logger.error(e.getMessage(), e);
        }
        return metric;
    }

    /**
     * Loads memory metrics for every second-bucket in the requested range and returns
     * the "used" series plus the last row's "max"/"init".
     *
     * @param instanceId instance register id
     * @param startTimeBucket range start (second-precision bucket)
     * @param endTimeBucket range end (second-precision bucket, inclusive)
     * @param isHeap true for heap memory, false for non-heap
     * @return JSON object with scalar "max"/"init" and an array "used"
     */
    @Override
    public JsonObject getMetric(int instanceId, long startTimeBucket, long endTimeBucket, boolean isHeap) {
        H2Client client = getClient();
        String sql = MessageFormat.format(GET_MEMORY_METRICS_SQL, MemoryMetricTable.TABLE, "id");
        List<String> idList = new ArrayList<>();
        long timeBucket = startTimeBucket;
        do {
            // NOTE(review): the bucket is advanced before the first id is built, so
            // startTimeBucket itself is excluded from the range — confirm intended.
            timeBucket = TimeBucketUtils.INSTANCE.addSecondForSecondTimeBucket(
                TimeBucketUtils.TimeBucketType.SECOND.name(), timeBucket, 1);
            String id = timeBucket + Const.ID_SPLIT + instanceId + Const.ID_SPLIT + isHeap;
            idList.add(id);
        }
        while (timeBucket <= endTimeBucket);

        // Build the "?,?,...,?)" tail of the IN clause, one placeholder per id.
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < idList.size(); i++) {
            builder.append("?,");
        }
        builder.delete(builder.length() - 1, builder.length());
        builder.append(")");
        sql = sql + builder;

        Object[] params = idList.toArray(new String[0]);
        JsonObject metric = new JsonObject();
        JsonArray usedMetric = new JsonArray();
        try (ResultSet rs = client.executeQuery(sql, params)) {
            while (rs.next()) {
                // "max"/"init" are overwritten each row; they end up holding the last row's values.
                metric.addProperty("max", rs.getLong(MemoryMetricTable.COLUMN_MAX));
                metric.addProperty("init", rs.getLong(MemoryMetricTable.COLUMN_INIT));
                usedMetric.add(rs.getLong(MemoryMetricTable.COLUMN_USED));
            }
            if (usedMetric.size() == 0) {
                metric.addProperty("max", 0);
                metric.addProperty("init", 0);
                usedMetric.add(0);
            }
        } catch (SQLException | H2ClientException e) {
            logger.error(e.getMessage(), e);
        }
        metric.add("used", usedMetric);
        return metric;
    }
}
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/MemoryPoolMetricH2DAO.java
0 → 100644
浏览文件 @
d4333ff7
package org.skywalking.apm.collector.ui.dao;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.skywalking.apm.collector.client.h2.H2Client;
import org.skywalking.apm.collector.client.h2.H2ClientException;
import org.skywalking.apm.collector.core.util.Const;
import org.skywalking.apm.collector.core.util.TimeBucketUtils;
import org.skywalking.apm.collector.storage.define.jvm.MemoryPoolMetricTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;

/**
 * H2 DAO that reads JVM memory-pool metrics (per pool type) for the UI.
 *
 * @author clevertension
 */
public class MemoryPoolMetricH2DAO extends H2DAO implements IMemoryPoolMetricDAO {
    // FIX: logger was created with InstanceH2DAO.class, mislabeling every log line of this DAO.
    private final Logger logger = LoggerFactory.getLogger(MemoryPoolMetricH2DAO.class);

    private static final String GET_MEMORY_POOL_METRIC_SQL = "select * from {0} where {1} =?";
    private static final String GET_MEMORY_POOL_METRICS_SQL = "select * from {0} where {1} in (";

    /**
     * Loads a single memory-pool metric row identified by (timeBucket, instanceId, poolType).
     *
     * @param instanceId instance register id
     * @param timeBucket second-precision time bucket
     * @param poolType memory pool type ordinal
     * @return JSON object with "max", "init" and "used"; all zero when the row is absent
     */
    @Override
    public JsonObject getMetric(int instanceId, long timeBucket, int poolType) {
        H2Client client = getClient();
        // Row id layout: timeBucket + ID_SPLIT + instanceId + ID_SPLIT + poolType.
        String id = timeBucket + Const.ID_SPLIT + instanceId + Const.ID_SPLIT + poolType;
        String sql = MessageFormat.format(GET_MEMORY_POOL_METRIC_SQL, MemoryPoolMetricTable.TABLE, "id");
        Object[] params = new Object[]{id};
        JsonObject metric = new JsonObject();
        try (ResultSet rs = client.executeQuery(sql, params)) {
            if (rs.next()) {
                metric.addProperty("max", rs.getInt(MemoryPoolMetricTable.COLUMN_MAX));
                metric.addProperty("init", rs.getInt(MemoryPoolMetricTable.COLUMN_INIT));
                metric.addProperty("used", rs.getInt(MemoryPoolMetricTable.COLUMN_USED));
            } else {
                // Keep the response shape stable even when no data exists for the bucket.
                metric.addProperty("max", 0);
                metric.addProperty("init", 0);
                metric.addProperty("used", 0);
            }
        } catch (SQLException | H2ClientException e) {
            logger.error(e.getMessage(), e);
        }
        return metric;
    }

    /**
     * Loads memory-pool metrics for every second-bucket in the requested range and returns
     * the "used" series plus the last row's "max"/"init".
     *
     * @param instanceId instance register id
     * @param startTimeBucket range start (second-precision bucket)
     * @param endTimeBucket range end (second-precision bucket, inclusive)
     * @param poolType memory pool type ordinal
     * @return JSON object with scalar "max"/"init" and an array "used"
     */
    @Override
    public JsonObject getMetric(int instanceId, long startTimeBucket, long endTimeBucket, int poolType) {
        H2Client client = getClient();
        String sql = MessageFormat.format(GET_MEMORY_POOL_METRICS_SQL, MemoryPoolMetricTable.TABLE, "id");
        List<String> idList = new ArrayList<>();
        long timeBucket = startTimeBucket;
        do {
            // NOTE(review): the bucket is advanced before the first id is built, so
            // startTimeBucket itself is excluded from the range — confirm intended.
            timeBucket = TimeBucketUtils.INSTANCE.addSecondForSecondTimeBucket(
                TimeBucketUtils.TimeBucketType.SECOND.name(), timeBucket, 1);
            String id = timeBucket + Const.ID_SPLIT + instanceId + Const.ID_SPLIT + poolType;
            idList.add(id);
        }
        while (timeBucket <= endTimeBucket);

        // Build the "?,?,...,?)" tail of the IN clause, one placeholder per id.
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < idList.size(); i++) {
            builder.append("?,");
        }
        builder.delete(builder.length() - 1, builder.length());
        builder.append(")");
        sql = sql + builder;

        Object[] params = idList.toArray(new String[0]);
        JsonObject metric = new JsonObject();
        JsonArray usedMetric = new JsonArray();
        try (ResultSet rs = client.executeQuery(sql, params)) {
            while (rs.next()) {
                // "max"/"init" are overwritten each row; they end up holding the last row's values.
                metric.addProperty("max", rs.getLong(MemoryPoolMetricTable.COLUMN_MAX));
                metric.addProperty("init", rs.getLong(MemoryPoolMetricTable.COLUMN_INIT));
                usedMetric.add(rs.getLong(MemoryPoolMetricTable.COLUMN_USED));
            }
            if (usedMetric.size() == 0) {
                metric.addProperty("max", 0);
                metric.addProperty("init", 0);
                usedMetric.add(0);
            }
        } catch (SQLException | H2ClientException e) {
            logger.error(e.getMessage(), e);
        }
        metric.add("used", usedMetric);
        return metric;
    }
}
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/NodeComponentH2DAO.java
浏览文件 @
d4333ff7
...
...
@@ -16,7 +16,7 @@ import java.sql.SQLException;
import
java.text.MessageFormat
;
/**
* @author pengys5
* @author pengys5
, clevertension
*/
public
class
NodeComponentH2DAO
extends
H2DAO
implements
INodeComponentDAO
{
private
final
Logger
logger
=
LoggerFactory
.
getLogger
(
NodeComponentH2DAO
.
class
);
...
...
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/NodeMappingH2DAO.java
浏览文件 @
d4333ff7
...
...
@@ -16,7 +16,7 @@ import java.sql.SQLException;
import
java.text.MessageFormat
;
/**
* @author pengys5
* @author pengys5
, clevertension
*/
public
class
NodeMappingH2DAO
extends
H2DAO
implements
INodeMappingDAO
{
private
final
Logger
logger
=
LoggerFactory
.
getLogger
(
NodeMappingH2DAO
.
class
);
...
...
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/NodeReferenceH2DAO.java
浏览文件 @
d4333ff7
...
...
@@ -5,7 +5,6 @@ import com.google.gson.JsonObject;
import
org.skywalking.apm.collector.client.h2.H2Client
;
import
org.skywalking.apm.collector.client.h2.H2ClientException
;
import
org.skywalking.apm.collector.core.util.StringUtils
;
import
org.skywalking.apm.collector.storage.define.node.NodeMappingTable
;
import
org.skywalking.apm.collector.storage.define.noderef.NodeReferenceTable
;
import
org.skywalking.apm.collector.storage.h2.dao.H2DAO
;
import
org.skywalking.apm.collector.ui.cache.ApplicationCache
;
...
...
@@ -17,7 +16,7 @@ import java.sql.SQLException;
import
java.text.MessageFormat
;
/**
* @author pengys5
* @author pengys5
, clevertension
*/
public
class
NodeReferenceH2DAO
extends
H2DAO
implements
INodeReferenceDAO
{
private
final
Logger
logger
=
LoggerFactory
.
getLogger
(
NodeReferenceH2DAO
.
class
);
...
...
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/SegmentCostH2DAO.java
浏览文件 @
d4333ff7
package org.skywalking.apm.collector.ui.dao;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.elasticsearch.search.sort.SortOrder;
import org.skywalking.apm.collector.client.h2.H2Client;
import org.skywalking.apm.collector.client.h2.H2ClientException;
import org.skywalking.apm.collector.core.util.CollectionUtils;
import org.skywalking.apm.collector.core.util.StringUtils;
import org.skywalking.apm.collector.storage.dao.DAOContainer;
import org.skywalking.apm.collector.storage.define.global.GlobalTraceTable;
import org.skywalking.apm.collector.storage.define.segment.SegmentCostTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;

/**
 * H2 DAO that pages the most costly trace segments for the UI, applying the
 * optional cost / operation-name / segment-id / error / application filters.
 *
 * @author pengys5, clevertension
 */
public class SegmentCostH2DAO extends H2DAO implements ISegmentCostDAO {
    private final Logger logger = LoggerFactory.getLogger(SegmentCostH2DAO.class);

    private static final String GET_SEGMENT_COST_SQL = "select * from {0} where {1} >= ? and {1} <= ?";

    /**
     * Queries segment-cost rows between startTime and endTime, with optional filters,
     * ordered by cost or start time, paged via from/limit.
     *
     * @param startTime lower bound on the time-bucket column (inclusive)
     * @param endTime upper bound on the time-bucket column (inclusive)
     * @param minCost minimum cost filter, -1 to disable
     * @param maxCost maximum cost filter, -1 to disable
     * @param operationName exact service-name filter, empty to disable
     * @param error error-state filter (True / False / either)
     * @param applicationId application filter, 0 to disable
     * @param segmentIds restrict to these segment ids, empty to disable
     * @param limit page size
     * @param from page offset
     * @param sort order by Cost or Time (descending)
     * @return JSON paging object: "data" array of rows plus "recordsTotal"
     */
    @Override
    public JsonObject loadTop(long startTime, long endTime, long minCost, long maxCost, String operationName,
        Error error, int applicationId, List<String> segmentIds, int limit, int from, Sort sort) {
        // FIX: a leftover "return null;" at the top of this method made the whole
        // implementation unreachable; removed so the query actually runs.
        H2Client client = getClient();
        String sql = GET_SEGMENT_COST_SQL;
        List<Object> params = new ArrayList<>();
        List<String> columns = new ArrayList<>();
        params.add(startTime);
        params.add(endTime);
        // {0} = table, {1} = time-bucket column; dynamic filters start at {2}.
        int paramIndex = 1;
        if (minCost != -1 || maxCost != -1) {
            if (minCost != -1) {
                paramIndex++;
                sql = sql + " and {" + paramIndex + "} >= ?";
                params.add(minCost);
                columns.add(SegmentCostTable.COLUMN_COST);
            }
            if (maxCost != -1) {
                paramIndex++;
                sql = sql + " and {" + paramIndex + "} <= ?";
                params.add(maxCost);
                columns.add(SegmentCostTable.COLUMN_COST);
            }
        }
        if (StringUtils.isNotEmpty(operationName)) {
            paramIndex++;
            sql = sql + " and {" + paramIndex + "} = ?";
            params.add(operationName);
            columns.add(SegmentCostTable.COLUMN_SERVICE_NAME);
        }
        if (CollectionUtils.isNotEmpty(segmentIds)) {
            paramIndex++;
            sql = sql + " and {" + paramIndex + "} in (";
            columns.add(SegmentCostTable.COLUMN_SEGMENT_ID);
            StringBuilder builder = new StringBuilder();
            for (int i = 0; i < segmentIds.size(); i++) {
                builder.append("?,");
            }
            builder.delete(builder.length() - 1, builder.length());
            builder.append(")");
            sql = sql + builder;
            for (String segmentId : segmentIds) {
                params.add(segmentId);
            }
        }
        if (Error.True.equals(error)) {
            paramIndex++;
            sql = sql + " and {" + paramIndex + "} = ?";
            params.add(true);
            columns.add(SegmentCostTable.COLUMN_IS_ERROR);
        } else if (Error.False.equals(error)) {
            paramIndex++;
            sql = sql + " and {" + paramIndex + "} = ?";
            params.add(false);
            columns.add(SegmentCostTable.COLUMN_IS_ERROR);
        }
        if (applicationId != 0) {
            paramIndex++;
            sql = sql + " and {" + paramIndex + "} = ?";
            params.add(applicationId);
            columns.add(SegmentCostTable.COLUMN_APPLICATION_ID);
        }
        if (Sort.Cost.equals(sort)) {
            sql = sql + " order by " + SegmentCostTable.COLUMN_COST + " " + SortOrder.DESC;
        } else if (Sort.Time.equals(sort)) {
            sql = sql + " order by " + SegmentCostTable.COLUMN_START_TIME + " " + SortOrder.DESC;
        }
        // NOTE(review): MySQL-style "limit offset,count" — confirm the H2 compatibility
        // mode in use accepts this syntax.
        sql = sql + " limit " + from + "," + limit;

        // FIX: the columns list was previously passed to MessageFormat as a single
        // argument, so placeholders {3} and beyond never resolved. Flatten all
        // arguments into one array so each {n} maps to its column name.
        List<Object> formatArgs = new ArrayList<>();
        formatArgs.add(SegmentCostTable.TABLE);
        formatArgs.add(SegmentCostTable.COLUMN_TIME_BUCKET);
        formatArgs.addAll(columns);
        sql = MessageFormat.format(sql, formatArgs.toArray());

        Object[] p = params.toArray(new Object[0]);
        JsonObject topSegPaging = new JsonObject();
        JsonArray topSegArray = new JsonArray();
        topSegPaging.add("data", topSegArray);
        int cnt = 0;
        int num = from;
        // Hoisted out of the row loop: the DAO lookup does not depend on the row.
        IGlobalTraceDAO globalTraceDAO = (IGlobalTraceDAO)DAOContainer.INSTANCE.get(IGlobalTraceDAO.class.getName());
        try (ResultSet rs = client.executeQuery(sql, p)) {
            // FIX: rows were read without advancing the cursor; iterate the result set.
            while (rs.next()) {
                JsonObject topSegmentJson = new JsonObject();
                topSegmentJson.addProperty("num", num);
                String segmentId = rs.getString(SegmentCostTable.COLUMN_SEGMENT_ID);
                topSegmentJson.addProperty(SegmentCostTable.COLUMN_SEGMENT_ID, segmentId);
                topSegmentJson.addProperty(SegmentCostTable.COLUMN_START_TIME, rs.getLong(SegmentCostTable.COLUMN_START_TIME));
                topSegmentJson.addProperty(SegmentCostTable.COLUMN_END_TIME, rs.getLong(SegmentCostTable.COLUMN_END_TIME));
                List<String> globalTraces = globalTraceDAO.getGlobalTraceId(segmentId);
                if (CollectionUtils.isNotEmpty(globalTraces)) {
                    topSegmentJson.addProperty(GlobalTraceTable.COLUMN_GLOBAL_TRACE_ID, globalTraces.get(0));
                }
                topSegmentJson.addProperty(SegmentCostTable.COLUMN_APPLICATION_ID, rs.getInt(SegmentCostTable.COLUMN_APPLICATION_ID));
                topSegmentJson.addProperty(SegmentCostTable.COLUMN_SERVICE_NAME, rs.getString(SegmentCostTable.COLUMN_SERVICE_NAME));
                topSegmentJson.addProperty(SegmentCostTable.COLUMN_COST, rs.getLong(SegmentCostTable.COLUMN_COST));
                topSegmentJson.addProperty(SegmentCostTable.COLUMN_IS_ERROR, rs.getBoolean(SegmentCostTable.COLUMN_IS_ERROR));
                num++;
                topSegArray.add(topSegmentJson);
                cnt++;
            }
        } catch (SQLException | H2ClientException e) {
            logger.error(e.getMessage(), e);
        }
        topSegPaging.addProperty("recordsTotal", cnt);
        return topSegPaging;
    }
}
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/SegmentH2DAO.java
浏览文件 @
d4333ff7
package org.skywalking.apm.collector.ui.dao;

import com.google.protobuf.InvalidProtocolBufferException;
import org.skywalking.apm.collector.client.h2.H2Client;
import org.skywalking.apm.collector.client.h2.H2ClientException;
import org.skywalking.apm.collector.core.util.StringUtils;
import org.skywalking.apm.collector.storage.define.segment.SegmentTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.skywalking.apm.network.proto.TraceSegmentObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.Base64;

/**
 * H2 DAO that loads a trace segment by id and decodes its protobuf payload.
 *
 * @author pengys5, clevertension
 */
public class SegmentH2DAO extends H2DAO implements ISegmentDAO {
    private final Logger logger = LoggerFactory.getLogger(SegmentH2DAO.class);

    private static final String GET_SEGMENT_SQL = "select {0} from {1} where {2} = ?";

    /**
     * Fetches the segment row whose primary key equals {@code segmentId}, then
     * Base64-decodes the stored binary column and parses it as a
     * {@link TraceSegmentObject}.
     *
     * @param segmentId primary key of the segment row
     * @return the parsed segment, or {@code null} when the row is missing, the
     *         column is empty, the query fails, or the payload cannot be parsed
     */
    @Override
    public TraceSegmentObject load(String segmentId) {
        H2Client h2Client = getClient();
        String query = MessageFormat.format(GET_SEGMENT_SQL,
            SegmentTable.COLUMN_DATA_BINARY, SegmentTable.TABLE, "id");
        Object[] queryArgs = {segmentId};
        try (ResultSet resultSet = h2Client.executeQuery(query, queryArgs)) {
            if (!resultSet.next()) {
                return null;
            }
            String encodedPayload = resultSet.getString(SegmentTable.COLUMN_DATA_BINARY);
            if (StringUtils.isNotEmpty(encodedPayload)) {
                // Payload is persisted Base64-encoded; decode before protobuf parsing.
                byte[] rawPayload = Base64.getDecoder().decode(encodedPayload);
                try {
                    return TraceSegmentObject.parseFrom(rawPayload);
                } catch (InvalidProtocolBufferException parseFailure) {
                    logger.error(parseFailure.getMessage(), parseFailure);
                }
            }
        } catch (SQLException | H2ClientException queryFailure) {
            logger.error(queryFailure.getMessage(), queryFailure);
        }
        return null;
    }
}
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/ServiceEntryH2DAO.java
浏览文件 @
d4333ff7
package org.skywalking.apm.collector.ui.dao;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.skywalking.apm.collector.client.h2.H2Client;
import org.skywalking.apm.collector.client.h2.H2ClientException;
import org.skywalking.apm.collector.core.util.ColumnNameUtils;
import org.skywalking.apm.collector.core.util.StringUtils;
import org.skywalking.apm.collector.storage.define.service.ServiceEntryTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.skywalking.apm.collector.ui.cache.ApplicationCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;

/**
 * H2 DAO that pages service entry points for the UI, optionally filtered by
 * application id and entry service name.
 *
 * @author pengys5
 */
public class ServiceEntryH2DAO extends H2DAO implements IServiceEntryDAO {
    // FIX: logger was created with SegmentH2DAO.class, mislabeling every log line of this DAO.
    private final Logger logger = LoggerFactory.getLogger(ServiceEntryH2DAO.class);

    private static final String GET_SERVICE_ENTRY_SQL = "select * from {0} where {1} >= ? and {2} <= ?";

    /**
     * Queries service-entry rows whose newest time is at least {@code startTime}
     * and register time is at most {@code endTime}, applying the optional filters.
     *
     * @param applicationId application filter, 0 to disable
     * @param entryServiceName exact entry-service-name filter, empty to disable
     * @param startTime lower bound on the newest-time column
     * @param endTime upper bound on the register-time column
     * @param from page offset
     * @param size page size
     * @return JSON object: "array" of rows plus "total" row count
     */
    @Override
    public JsonObject load(int applicationId, String entryServiceName, long startTime, long endTime,
        int from, int size) {
        // FIX: a leftover "return null;" at the top of this method made the whole
        // implementation unreachable; removed so the query actually runs.
        H2Client client = getClient();
        String sql = GET_SERVICE_ENTRY_SQL;
        List<Object> params = new ArrayList<>();
        List<String> columns = new ArrayList<>();
        params.add(startTime);
        params.add(endTime);
        // {0} = table, {1}/{2} = time columns; dynamic filters start at {3}.
        int paramIndex = 2;
        if (applicationId != 0) {
            paramIndex++;
            sql = sql + " and {" + paramIndex + "} = ?";
            params.add(applicationId);
            columns.add(ServiceEntryTable.COLUMN_APPLICATION_ID);
        }
        if (StringUtils.isNotEmpty(entryServiceName)) {
            paramIndex++;
            sql = sql + " and {" + paramIndex + "} = ?";
            params.add(entryServiceName);
            columns.add(ServiceEntryTable.COLUMN_ENTRY_SERVICE_NAME);
        }
        // NOTE(review): MySQL-style "limit offset,count" — confirm the H2 compatibility
        // mode in use accepts this syntax.
        sql = sql + " limit " + from + "," + size;

        // FIX: the columns list was previously passed to MessageFormat as a single
        // argument, so placeholders {3} and beyond never resolved. Flatten all
        // arguments into one array so each {n} maps to its column name.
        List<Object> formatArgs = new ArrayList<>();
        formatArgs.add(ServiceEntryTable.TABLE);
        formatArgs.add(ServiceEntryTable.COLUMN_NEWEST_TIME);
        formatArgs.add(ServiceEntryTable.COLUMN_REGISTER_TIME);
        formatArgs.addAll(columns);
        sql = MessageFormat.format(sql, formatArgs.toArray());

        Object[] p = params.toArray(new Object[0]);
        JsonArray serviceArray = new JsonArray();
        JsonObject response = new JsonObject();
        int index = 0;
        try (ResultSet rs = client.executeQuery(sql, p)) {
            while (rs.next()) {
                int appId = rs.getInt(ServiceEntryTable.COLUMN_APPLICATION_ID);
                int entryServiceId = rs.getInt(ServiceEntryTable.COLUMN_ENTRY_SERVICE_ID);
                // FIX: previously resolved the code for the filter parameter
                // "applicationId" (which may be 0) instead of the row's own id.
                String applicationCode = ApplicationCache.getForUI(appId);
                String rowEntryServiceName = rs.getString(ServiceEntryTable.COLUMN_ENTRY_SERVICE_NAME);

                JsonObject row = new JsonObject();
                row.addProperty(ColumnNameUtils.INSTANCE.rename(ServiceEntryTable.COLUMN_ENTRY_SERVICE_ID), entryServiceId);
                row.addProperty(ColumnNameUtils.INSTANCE.rename(ServiceEntryTable.COLUMN_ENTRY_SERVICE_NAME), rowEntryServiceName);
                row.addProperty(ColumnNameUtils.INSTANCE.rename(ServiceEntryTable.COLUMN_APPLICATION_ID), appId);
                row.addProperty("applicationCode", applicationCode);
                serviceArray.add(row);
                index++;
            }
        } catch (SQLException | H2ClientException e) {
            logger.error(e.getMessage(), e);
        }
        response.addProperty("total", index);
        response.add("array", serviceArray);
        return response;
    }
}
apm-collector/apm-collector-ui/src/main/java/org/skywalking/apm/collector/ui/dao/ServiceNameH2DAO.java
浏览文件 @
d4333ff7
package org.skywalking.apm.collector.ui.dao;

import org.skywalking.apm.collector.client.h2.H2Client;
import org.skywalking.apm.collector.client.h2.H2ClientException;
import org.skywalking.apm.collector.core.util.Const;
import org.skywalking.apm.collector.storage.define.register.ServiceNameTable;
import org.skywalking.apm.collector.storage.h2.dao.H2DAO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.MessageFormat;

/**
 * H2 DAO over the service-name register table: resolves service id to name and
 * (applicationId, serviceName) to service id.
 *
 * @author pengys5, clevertension
 */
public class ServiceNameH2DAO extends H2DAO implements IServiceNameDAO {
    private final Logger logger = LoggerFactory.getLogger(ServiceNameH2DAO.class);

    private static final String GET_SERVICE_NAME_SQL = "select {0} from {1} where {2} = ?";
    private static final String GET_SERVICE_ID_SQL = "select {0} from {1} where {2} = ? and {3} = ? limit 1";

    /**
     * Resolves a registered service id to its service name.
     *
     * @param serviceId service register id
     * @return the service name, or {@code Const.UNKNOWN} when not found or on query failure
     */
    @Override
    public String getServiceName(int serviceId) {
        // FIX: a leftover "return null;" at the top of this method made the query
        // unreachable (and violated the Const.UNKNOWN fallback contract); removed.
        H2Client client = getClient();
        String sql = MessageFormat.format(GET_SERVICE_NAME_SQL, ServiceNameTable.COLUMN_SERVICE_NAME,
            ServiceNameTable.TABLE, ServiceNameTable.COLUMN_SERVICE_ID);
        Object[] params = new Object[]{serviceId};
        try (ResultSet rs = client.executeQuery(sql, params)) {
            if (rs.next()) {
                return rs.getString(ServiceNameTable.COLUMN_SERVICE_NAME);
            }
        } catch (SQLException | H2ClientException e) {
            logger.error(e.getMessage(), e);
        }
        return Const.UNKNOWN;
    }

    /**
     * Resolves the service id registered for (applicationId, serviceName).
     *
     * @param applicationId application register id
     * @param serviceName service name to look up
     * @return the service id, or 0 when not found or on query failure
     */
    @Override
    public int getServiceId(int applicationId, String serviceName) {
        H2Client client = getClient();
        String sql = MessageFormat.format(GET_SERVICE_ID_SQL, ServiceNameTable.COLUMN_SERVICE_ID,
            ServiceNameTable.TABLE, ServiceNameTable.COLUMN_APPLICATION_ID, ServiceNameTable.COLUMN_SERVICE_NAME);
        Object[] params = new Object[]{applicationId, serviceName};
        try (ResultSet rs = client.executeQuery(sql, params)) {
            if (rs.next()) {
                return rs.getInt(ServiceNameTable.COLUMN_SERVICE_ID);
            }
        } catch (SQLException | H2ClientException e) {
            logger.error(e.getMessage(), e);
        }
        return 0;
    }
}
apm-collector/apm-collector-ui/src/main/resources/META-INF/defines/h2_dao.define
浏览文件 @
d4333ff7
...
...
@@ -8,6 +8,9 @@ org.skywalking.apm.collector.ui.dao.ApplicationH2DAO
org.skywalking.apm.collector.ui.dao.ServiceNameH2DAO
org.skywalking.apm.collector.ui.dao.InstanceH2DAO
org.skywalking.apm.collector.ui.dao.InstPerformanceH2DAO
org.skywalking.apm.collector.ui.dao.CpuMetricH2DAO
org.skywalking.apm.collector.ui.dao.GCMetricH2DAO
org.skywalking.apm.collector.ui.dao.MemoryMetricH2DAO
org.skywalking.apm.collector.ui.dao.MemoryPoolMetricH2DAO
org.skywalking.apm.collector.ui.dao.ServiceEntryH2DAO
org.skywalking.apm.collector.ui.dao.ServiceReferenceH2DAO
\ No newline at end of file
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录