Unverified commit 33616bc5 authored by wu-sheng, committed by GitHub

Merge branch 'master' into ci-from-cache

......@@ -22,6 +22,7 @@ import org.apache.skywalking.apm.collector.analysis.jvm.define.graph.GraphIdDefi
import org.apache.skywalking.apm.collector.analysis.jvm.define.service.IMemoryMetricService;
import org.apache.skywalking.apm.collector.core.graph.Graph;
import org.apache.skywalking.apm.collector.core.graph.GraphManager;
import org.apache.skywalking.apm.collector.core.util.BooleanUtils;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.core.util.ObjectUtils;
import org.apache.skywalking.apm.collector.storage.table.jvm.MemoryMetric;
......@@ -53,7 +54,7 @@ public class MemoryMetricService implements IMemoryMetricService {
memoryMetric.setId(id);
memoryMetric.setMetricId(metricId);
memoryMetric.setInstanceId(instanceId);
memoryMetric.setIsHeap(isHeap);
memoryMetric.setIsHeap(BooleanUtils.booleanToValue(isHeap));
memoryMetric.setInit(init);
memoryMetric.setMax(max);
memoryMetric.setUsed(used);
......
......@@ -25,6 +25,7 @@ import org.apache.skywalking.apm.collector.analysis.worker.model.base.WorkerExce
import org.apache.skywalking.apm.collector.cache.CacheModule;
import org.apache.skywalking.apm.collector.cache.service.ApplicationCacheService;
import org.apache.skywalking.apm.collector.core.module.ModuleManager;
import org.apache.skywalking.apm.collector.core.util.BooleanUtils;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.StorageModule;
import org.apache.skywalking.apm.collector.storage.dao.register.IApplicationRegisterDAO;
......@@ -65,7 +66,7 @@ public class ApplicationRegisterSerialWorker extends AbstractLocalAsyncWorker<Ap
userApplication.setApplicationCode(Const.USER_CODE);
userApplication.setApplicationId(Const.NONE_APPLICATION_ID);
userApplication.setAddressId(Const.NONE);
userApplication.setIsAddress(false);
userApplication.setIsAddress(BooleanUtils.FALSE);
applicationRegisterDAO.save(userApplication);
newApplication = new Application();
......
......@@ -25,6 +25,7 @@ import org.apache.skywalking.apm.collector.analysis.worker.model.base.WorkerExce
import org.apache.skywalking.apm.collector.cache.CacheModule;
import org.apache.skywalking.apm.collector.cache.service.InstanceCacheService;
import org.apache.skywalking.apm.collector.core.module.ModuleManager;
import org.apache.skywalking.apm.collector.core.util.BooleanUtils;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.StorageModule;
import org.apache.skywalking.apm.collector.storage.dao.register.IInstanceRegisterDAO;
......@@ -70,7 +71,7 @@ public class InstanceRegisterSerialWorker extends AbstractLocalAsyncWorker<Insta
userInstance.setOsInfo(Const.EMPTY_STRING);
userInstance.setRegisterTime(System.currentTimeMillis());
userInstance.setAddressId(Const.NONE);
userInstance.setIsAddress(false);
userInstance.setIsAddress(BooleanUtils.FALSE);
instanceRegisterDAO.save(userInstance);
newInstance = new Instance();
......
......@@ -26,19 +26,16 @@ import org.apache.skywalking.apm.collector.cache.service.NetworkAddressCacheServ
import org.apache.skywalking.apm.collector.core.graph.Graph;
import org.apache.skywalking.apm.collector.core.graph.GraphManager;
import org.apache.skywalking.apm.collector.core.module.ModuleManager;
import org.apache.skywalking.apm.collector.core.util.BooleanUtils;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.core.util.ObjectUtils;
import org.apache.skywalking.apm.collector.storage.table.register.Application;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng
*/
public class ApplicationIDService implements IApplicationIDService {
private final Logger logger = LoggerFactory.getLogger(ApplicationIDService.class);
private final ModuleManager moduleManager;
private ApplicationCacheService applicationCacheService;
private NetworkAddressCacheService networkAddressCacheService;
......@@ -78,7 +75,7 @@ public class ApplicationIDService implements IApplicationIDService {
application.setApplicationCode(applicationCode);
application.setApplicationId(0);
application.setAddressId(Const.NONE);
application.setIsAddress(false);
application.setIsAddress(BooleanUtils.FALSE);
getApplicationRegisterGraph().start(application);
}
......@@ -94,7 +91,7 @@ public class ApplicationIDService implements IApplicationIDService {
application.setApplicationCode(networkAddress);
application.setApplicationId(0);
application.setAddressId(addressId);
application.setIsAddress(true);
application.setIsAddress(BooleanUtils.TRUE);
getApplicationRegisterGraph().start(application);
}
......
......@@ -25,6 +25,7 @@ import org.apache.skywalking.apm.collector.cache.service.InstanceCacheService;
import org.apache.skywalking.apm.collector.core.graph.Graph;
import org.apache.skywalking.apm.collector.core.graph.GraphManager;
import org.apache.skywalking.apm.collector.core.module.ModuleManager;
import org.apache.skywalking.apm.collector.core.util.BooleanUtils;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.core.util.ObjectUtils;
import org.apache.skywalking.apm.collector.storage.StorageModule;
......@@ -83,7 +84,7 @@ public class InstanceIDService implements IInstanceIDService {
instance.setHeartBeatTime(registerTime);
instance.setInstanceId(0);
instance.setOsInfo(osInfo);
instance.setIsAddress(false);
instance.setIsAddress(BooleanUtils.FALSE);
instance.setAddressId(Const.NONE);
getInstanceRegisterGraph().start(instance);
......@@ -104,7 +105,7 @@ public class InstanceIDService implements IInstanceIDService {
instance.setHeartBeatTime(registerTime);
instance.setInstanceId(0);
instance.setOsInfo(Const.EMPTY_STRING);
instance.setIsAddress(true);
instance.setIsAddress(BooleanUtils.TRUE);
instance.setAddressId(addressId);
getInstanceRegisterGraph().start(instance);
......
......@@ -34,14 +34,18 @@ ui:
host: localhost
port: 12800
context_path: /
#storage:
# elasticsearch:
# cluster_name: CollectorDBCluster
# cluster_transport_sniffer: true
# cluster_nodes: localhost:9300
# index_shards_number: 2
# index_replicas_number: 0
# ttl: 7
storage:
elasticsearch:
cluster_name: CollectorDBCluster
cluster_transport_sniffer: true
cluster_nodes: localhost:9300
index_shards_number: 2
index_replicas_number: 0
ttl: 7
h2:
url: jdbc:h2:tcp://localhost/~/test
user_name: sa
configuration:
default:
application_apdex_threshold: 2000
......
......@@ -26,28 +26,24 @@ public abstract class AbstractData {
private Long[] dataLongs;
private Double[] dataDoubles;
private Integer[] dataIntegers;
private Boolean[] dataBooleans;
private byte[][] dataBytes;
private final Column[] stringColumns;
private final Column[] longColumns;
private final Column[] doubleColumns;
private final Column[] integerColumns;
private final Column[] booleanColumns;
private final Column[] byteColumns;
public AbstractData(Column[] stringColumns, Column[] longColumns, Column[] doubleColumns,
Column[] integerColumns, Column[] booleanColumns, Column[] byteColumns) {
Column[] integerColumns, Column[] byteColumns) {
this.dataStrings = new String[stringColumns.length];
this.dataLongs = new Long[longColumns.length];
this.dataDoubles = new Double[doubleColumns.length];
this.dataIntegers = new Integer[integerColumns.length];
this.dataBooleans = new Boolean[booleanColumns.length];
this.dataBytes = new byte[byteColumns.length][];
this.stringColumns = stringColumns;
this.longColumns = longColumns;
this.doubleColumns = doubleColumns;
this.integerColumns = integerColumns;
this.booleanColumns = booleanColumns;
this.byteColumns = byteColumns;
}
......@@ -67,10 +63,6 @@ public abstract class AbstractData {
return dataIntegers.length;
}
public final int getDataBooleansCount() {
return dataBooleans.length;
}
public final int getDataBytesCount() {
return dataBytes.length;
}
......@@ -91,10 +83,6 @@ public abstract class AbstractData {
dataIntegers[position] = value;
}
public final void setDataBoolean(int position, Boolean value) {
dataBooleans[position] = value;
}
public final void setDataBytes(int position, byte[] dataBytes) {
this.dataBytes[position] = dataBytes;
}
......@@ -133,10 +121,6 @@ public abstract class AbstractData {
}
}
public final Boolean getDataBoolean(int position) {
return dataBooleans[position];
}
public final byte[] getDataBytes(int position) {
return dataBytes[position];
}
......@@ -158,10 +142,6 @@ public abstract class AbstractData {
Integer integerData = integerColumns[i].getOperation().operate(newData.getDataInteger(i), this.getDataInteger(i));
this.dataIntegers[i] = integerData;
}
for (int i = 0; i < booleanColumns.length; i++) {
Boolean booleanData = booleanColumns[i].getOperation().operate(newData.getDataBoolean(i), this.getDataBoolean(i));
this.dataBooleans[i] = booleanData;
}
for (int i = 0; i < byteColumns.length; i++) {
byte[] byteData = byteColumns[i].getOperation().operate(newData.getDataBytes(i), this.getDataBytes(i));
this.dataBytes[i] = byteData;
......@@ -186,10 +166,6 @@ public abstract class AbstractData {
for (Integer dataInteger : dataIntegers) {
dataStr.append(dataInteger).append(",");
}
dataStr.append("], boolean: [");
for (Boolean dataBoolean : dataBooleans) {
dataStr.append(dataBoolean).append(",");
}
dataStr.append("]");
return dataStr.toString();
}
......
......@@ -30,8 +30,6 @@ public interface Data {
int getDataIntegersCount();
int getDataBooleansCount();
int getDataBytesCount();
void setDataString(int position, String value);
......@@ -42,8 +40,6 @@ public interface Data {
void setDataInteger(int position, Integer value);
void setDataBoolean(int position, Boolean value);
void setDataBytes(int position, byte[] dataBytes);
String getDataString(int position);
......@@ -54,7 +50,5 @@ public interface Data {
Integer getDataInteger(int position);
Boolean getDataBoolean(int position);
byte[] getDataBytes(int position);
}
......@@ -36,8 +36,8 @@ public abstract class StreamData extends AbstractData implements RemoteData, Que
}
public StreamData(Column[] stringColumns, Column[] longColumns, Column[] doubleColumns,
Column[] integerColumns, Column[] booleanColumns, Column[] byteColumns) {
super(stringColumns, longColumns, doubleColumns, integerColumns, booleanColumns, byteColumns);
Column[] integerColumns, Column[] byteColumns) {
super(stringColumns, longColumns, doubleColumns, integerColumns, byteColumns);
}
@Override public final String selectKey() {
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.core.util;
import org.apache.skywalking.apm.collector.core.UnexpectedException;
/**
* @author peng-yongsheng
*/
public class BooleanUtils {
public static final Integer TRUE = 1;
public static final Integer FALSE = 0;
public static boolean valueToBoolean(Integer value) {
if (TRUE.equals(value)) {
return true;
} else if (FALSE.equals(value)) {
return false;
} else {
throw new UnexpectedException("Boolean value error, must be 0 or 1");
}
}
public static Integer booleanToValue(Boolean booleanValue) {
if (booleanValue) {
return TRUE;
} else {
return FALSE;
}
}
}
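For reference, a minimal usage sketch of the round trip this utility enables. The memoryMetric variable below is assumed to be a MemoryMetric instance like the ones changed elsewhere in this commit; the snippet is illustrative, not part of the commit.

// Writing: a Boolean coming from the agent is persisted as 0/1 in an integer column.
memoryMetric.setIsHeap(BooleanUtils.booleanToValue(true)); // stores 1

// Reading: the stored integer is mapped back to a boolean.
boolean isHeap = BooleanUtils.valueToBoolean(memoryMetric.getIsHeap());

// Anything other than 0 or 1 fails fast.
BooleanUtils.valueToBoolean(2); // throws UnexpectedException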
......@@ -37,9 +37,6 @@ public class GRPCRemoteDeserializeService implements RemoteDeserializeService<Re
for (int i = 0; i < remoteData.getDataLongsCount(); i++) {
data.setDataLong(i, remoteData.getDataLongs(i));
}
for (int i = 0; i < remoteData.getDataBooleansCount(); i++) {
data.setDataBoolean(i, remoteData.getDataBooleans(i));
}
for (int i = 0; i < remoteData.getDataDoublesCount(); i++) {
data.setDataDouble(i, remoteData.getDataDoubles(i));
}
......
......@@ -37,15 +37,9 @@ public class GRPCRemoteSerializeService implements RemoteSerializeService<Remote
for (int i = 0; i < data.getDataLongsCount(); i++) {
builder.addDataLongs(data.getDataLong(i));
}
for (int i = 0; i < data.getDataBooleansCount(); i++) {
builder.addDataBooleans(data.getDataBoolean(i));
}
for (int i = 0; i < data.getDataDoublesCount(); i++) {
builder.addDataDoubles(data.getDataDouble(i));
}
for (int i = 0; i < data.getDataBytesCount(); i++) {
// builder.addDataBytes(ByteString.copyFrom(data.getDataBytes(i)));
}
return builder;
}
}
......@@ -195,6 +195,9 @@ public class StorageModule extends Module {
classes.add(IInstanceMappingMonthPersistenceDAO.class);
classes.add(IGlobalTracePersistenceDAO.class);
classes.add(ISegmentCostPersistenceDAO.class);
classes.add(ISegmentPersistenceDAO.class);
classes.add(IInstanceHeartBeatPersistenceDAO.class);
classes.add(IApplicationMinuteMetricPersistenceDAO.class);
classes.add(IApplicationHourMetricPersistenceDAO.class);
......@@ -206,9 +209,6 @@ public class StorageModule extends Module {
classes.add(IApplicationReferenceDayMetricPersistenceDAO.class);
classes.add(IApplicationReferenceMonthMetricPersistenceDAO.class);
classes.add(ISegmentCostPersistenceDAO.class);
classes.add(ISegmentPersistenceDAO.class);
classes.add(IServiceMinuteMetricPersistenceDAO.class);
classes.add(IServiceHourMetricPersistenceDAO.class);
classes.add(IServiceDayMetricPersistenceDAO.class);
......@@ -228,8 +228,6 @@ public class StorageModule extends Module {
classes.add(IInstanceReferenceHourMetricPersistenceDAO.class);
classes.add(IInstanceReferenceDayMetricPersistenceDAO.class);
classes.add(IInstanceReferenceMonthMetricPersistenceDAO.class);
classes.add(IInstanceHeartBeatPersistenceDAO.class);
}
private void addUiDAO(List<Class> classes) {
......
......@@ -16,13 +16,15 @@
*
*/
package org.apache.skywalking.apm.collector.storage.base.sql;
import java.text.MessageFormat;
import java.util.List;
import java.util.Set;
/**
* @author peng-yongsheng, clevertension
*/
public class SqlBuilder {
public static String buildSql(String sql, Object... args) {
return MessageFormat.format(sql, args);
......
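Since buildSql simply delegates to MessageFormat.format, here is a worked expansion of the GET_SQL template used by the DAOs later in this diff; the table and column names are hypothetical, chosen only for illustration.

// "select * from {0} where {1} = ?" with a table name and key column substituted:
String sql = SqlBuilder.buildSql("select * from {0} where {1} = ?", "segment_cost", "id");
// sql == "select * from segment_cost where id = ?"; the trailing ? remains a JDBC placeholder.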
......@@ -45,12 +45,10 @@ public class ApplicationAlarm extends StreamData implements Alarm {
new Column(ApplicationAlarmTable.COLUMN_APPLICATION_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ApplicationAlarm() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -45,12 +45,10 @@ public class ApplicationAlarmList extends StreamData {
new Column(ApplicationAlarmListTable.COLUMN_APPLICATION_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ApplicationAlarmList() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -46,12 +46,10 @@ public class ApplicationReferenceAlarm extends StreamData implements Alarm {
new Column(ApplicationReferenceAlarmTable.COLUMN_BEHIND_APPLICATION_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ApplicationReferenceAlarm() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -46,12 +46,10 @@ public class ApplicationReferenceAlarmList extends StreamData {
new Column(ApplicationReferenceAlarmListTable.COLUMN_BEHIND_APPLICATION_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ApplicationReferenceAlarmList() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -46,12 +46,10 @@ public class InstanceAlarm extends StreamData implements Alarm {
new Column(InstanceAlarmTable.COLUMN_INSTANCE_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public InstanceAlarm() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -46,12 +46,10 @@ public class InstanceAlarmList extends StreamData {
new Column(InstanceAlarmListTable.COLUMN_INSTANCE_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public InstanceAlarmList() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -48,12 +48,10 @@ public class InstanceReferenceAlarm extends StreamData implements Alarm {
new Column(InstanceReferenceAlarmTable.COLUMN_BEHIND_INSTANCE_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public InstanceReferenceAlarm() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -48,12 +48,10 @@ public class InstanceReferenceAlarmList extends StreamData {
new Column(InstanceReferenceAlarmListTable.COLUMN_BEHIND_INSTANCE_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public InstanceReferenceAlarmList() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -47,12 +47,10 @@ public class ServiceAlarm extends StreamData implements Alarm {
new Column(ServiceAlarmTable.COLUMN_SERVICE_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ServiceAlarm() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -47,12 +47,10 @@ public class ServiceAlarmList extends StreamData {
new Column(ServiceAlarmListTable.COLUMN_SERVICE_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ServiceAlarmList() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -50,12 +50,10 @@ public class ServiceReferenceAlarm extends StreamData implements Alarm {
new Column(ServiceReferenceAlarmTable.COLUMN_BEHIND_SERVICE_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ServiceReferenceAlarm() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -50,12 +50,10 @@ public class ServiceReferenceAlarmList extends StreamData {
new Column(ServiceReferenceAlarmListTable.COLUMN_BEHIND_SERVICE_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ServiceReferenceAlarmList() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -44,12 +44,10 @@ public class ApplicationComponent extends StreamData {
new Column(ApplicationComponentTable.COLUMN_PEER_ID, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ApplicationComponent() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -44,12 +44,10 @@ public class ApplicationMapping extends StreamData {
new Column(ApplicationMappingTable.COLUMN_ADDRESS_ID, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ApplicationMapping() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -61,12 +61,10 @@ public class ApplicationMetric extends StreamData implements Metric {
new Column(ApplicationMetricTable.COLUMN_APPLICATION_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ApplicationMetric() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -62,12 +62,10 @@ public class ApplicationReferenceMetric extends StreamData implements Metric {
new Column(ApplicationReferenceMetricTable.COLUMN_BEHIND_APPLICATION_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ApplicationReferenceMetric() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -37,15 +37,16 @@ public class GlobalTrace extends StreamData {
private static final Column[] LONG_COLUMNS = {
new Column(GlobalTraceTable.COLUMN_TIME_BUCKET, new CoverOperation()),
};
private static final Column[] DOUBLE_COLUMNS = {};
private static final Column[] INTEGER_COLUMNS = {
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public GlobalTrace() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -45,12 +45,10 @@ public class InstanceMapping extends StreamData {
new Column(InstanceMappingTable.COLUMN_ADDRESS_ID, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public InstanceMapping() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -60,12 +60,10 @@ public class InstanceMetric extends StreamData implements Metric {
new Column(InstanceMetricTable.COLUMN_INSTANCE_ID, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public InstanceMetric() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -61,12 +61,10 @@ public class InstanceReferenceMetric extends StreamData implements Metric {
new Column(InstanceReferenceMetricTable.COLUMN_BEHIND_INSTANCE_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public InstanceReferenceMetric() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -47,11 +47,10 @@ public class CpuMetric extends StreamData {
new Column(CpuMetricTable.COLUMN_INSTANCE_ID, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public CpuMetric() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -48,11 +48,10 @@ public class GCMetric extends StreamData {
new Column(GCMetricTable.COLUMN_PHRASE, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public GCMetric() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -50,15 +50,13 @@ public class MemoryMetric extends StreamData {
private static final Column[] INTEGER_COLUMNS = {
new Column(MemoryMetricTable.COLUMN_INSTANCE_ID, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {
new Column(MemoryMetricTable.COLUMN_IS_HEAP, new CoverOperation()),
};
private static final Column[] BYTE_COLUMNS = {};
public MemoryMetric() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......@@ -125,14 +123,6 @@ public class MemoryMetric extends StreamData {
setDataLong(5, timeBucket);
}
public Boolean getIsHeap() {
return getDataBoolean(0);
}
public void setIsHeap(Boolean isHeap) {
setDataBoolean(0, isHeap);
}
public Integer getInstanceId() {
return getDataInteger(0);
}
......@@ -140,4 +130,12 @@ public class MemoryMetric extends StreamData {
public void setInstanceId(Integer instanceId) {
setDataInteger(0, instanceId);
}
public Integer getIsHeap() {
return getDataInteger(1);
}
public void setIsHeap(Integer isHeap) {
setDataInteger(1, isHeap);
}
}
......@@ -53,12 +53,10 @@ public class MemoryPoolMetric extends StreamData {
new Column(MemoryPoolMetricTable.COLUMN_POOL_TYPE, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public MemoryPoolMetric() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -40,16 +40,13 @@ public class Application extends StreamData {
private static final Column[] INTEGER_COLUMNS = {
new Column(ApplicationTable.COLUMN_APPLICATION_ID, new CoverOperation()),
new Column(ApplicationTable.COLUMN_ADDRESS_ID, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {
new Column(ApplicationTable.COLUMN_IS_ADDRESS, new CoverOperation()),
};
private static final Column[] BYTE_COLUMNS = {};
public Application() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......@@ -92,11 +89,11 @@ public class Application extends StreamData {
setDataInteger(1, addressId);
}
public boolean getIsAddress() {
return getDataBoolean(0);
public int getIsAddress() {
return getDataInteger(2);
}
public void setIsAddress(boolean isAddress) {
setDataBoolean(0, isAddress);
public void setIsAddress(int isAddress) {
setDataInteger(2, isAddress);
}
}
......@@ -45,16 +45,13 @@ public class Instance extends StreamData {
new Column(InstanceTable.COLUMN_APPLICATION_ID, new CoverOperation()),
new Column(InstanceTable.COLUMN_INSTANCE_ID, new CoverOperation()),
new Column(InstanceTable.COLUMN_ADDRESS_ID, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {
new Column(InstanceTable.COLUMN_IS_ADDRESS, new CoverOperation()),
};
private static final Column[] BYTE_COLUMNS = {};
public Instance() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......@@ -129,11 +126,11 @@ public class Instance extends StreamData {
setDataInteger(2, addressId);
}
public boolean getIsAddress() {
return getDataBoolean(0);
public int getIsAddress() {
return getDataInteger(3);
}
public void setIsAddress(boolean isAddress) {
setDataBoolean(0, isAddress);
public void setIsAddress(int isAddress) {
setDataInteger(3, isAddress);
}
}
......@@ -41,12 +41,10 @@ public class NetworkAddress extends StreamData {
new Column(NetworkAddressTable.COLUMN_ADDRESS_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public NetworkAddress() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -34,17 +34,18 @@ public class ServiceName extends StreamData {
};
private static final Column[] LONG_COLUMNS = {};
private static final Column[] DOUBLE_COLUMNS = {};
private static final Column[] INTEGER_COLUMNS = {
new Column(ServiceNameTable.COLUMN_APPLICATION_ID, new CoverOperation()),
new Column(ServiceNameTable.COLUMN_SERVICE_ID, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ServiceName() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -35,17 +35,18 @@ public class Segment extends StreamData {
private static final Column[] LONG_COLUMNS = {
new Column(SegmentTable.COLUMN_TIME_BUCKET, new NonOperation()),
};
private static final Column[] DOUBLE_COLUMNS = {};
private static final Column[] INTEGER_COLUMNS = {
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {
new Column(SegmentTable.COLUMN_DATA_BINARY, new CoverOperation()),
};
public Segment() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -22,6 +22,7 @@ import org.apache.skywalking.apm.collector.core.data.Column;
import org.apache.skywalking.apm.collector.core.data.StreamData;
import org.apache.skywalking.apm.collector.core.data.operator.CoverOperation;
import org.apache.skywalking.apm.collector.core.data.operator.NonOperation;
import org.apache.skywalking.apm.collector.core.util.BooleanUtils;
/**
* @author peng-yongsheng
......@@ -40,18 +41,18 @@ public class SegmentCost extends StreamData {
new Column(SegmentCostTable.COLUMN_END_TIME, new CoverOperation()),
new Column(SegmentCostTable.COLUMN_TIME_BUCKET, new CoverOperation()),
};
private static final Column[] DOUBLE_COLUMNS = {};
private static final Column[] INTEGER_COLUMNS = {
new Column(SegmentCostTable.COLUMN_APPLICATION_ID, new CoverOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {
new Column(SegmentCostTable.COLUMN_IS_ERROR, new CoverOperation()),
};
private static final Column[] BYTE_COLUMNS = {};
public SegmentCost() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......@@ -127,10 +128,10 @@ public class SegmentCost extends StreamData {
}
public Boolean getIsError() {
return getDataBoolean(0);
return BooleanUtils.valueToBoolean(getDataInteger(1));
}
public void setIsError(Boolean isError) {
setDataBoolean(0, isError);
setDataInteger(1, BooleanUtils.booleanToValue(isError));
}
}
......@@ -60,12 +60,10 @@ public class ServiceMetric extends StreamData implements Metric {
new Column(ServiceMetricTable.COLUMN_SERVICE_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ServiceMetric() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -66,12 +66,10 @@ public class ServiceReferenceMetric extends StreamData implements Metric {
new Column(ServiceReferenceMetricTable.COLUMN_BEHIND_APPLICATION_ID, new NonOperation()),
};
private static final Column[] BOOLEAN_COLUMNS = {};
private static final Column[] BYTE_COLUMNS = {};
public ServiceReferenceMetric() {
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BOOLEAN_COLUMNS, BYTE_COLUMNS);
super(STRING_COLUMNS, LONG_COLUMNS, DOUBLE_COLUMNS, INTEGER_COLUMNS, BYTE_COLUMNS);
}
@Override public String getId() {
......
......@@ -207,10 +207,10 @@ import org.apache.skywalking.apm.collector.storage.es.dao.mpoolmp.MemoryPoolHour
import org.apache.skywalking.apm.collector.storage.es.dao.mpoolmp.MemoryPoolMinuteMetricEsPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.mpoolmp.MemoryPoolMonthMetricEsPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.mpoolmp.MemoryPoolSecondMetricEsPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.register.ApplicationEsRegisterDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.register.InstanceEsRegisterDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.register.ApplicationRegisterEsDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.register.InstanceRegisterEsDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.register.NetworkAddressRegisterEsDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.register.ServiceNameEsRegisterDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.register.ServiceNameRegisterEsDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.smp.ServiceDayMetricEsPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.smp.ServiceHourMetricEsPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.es.dao.smp.ServiceMinuteMetricEsPersistenceDAO;
......@@ -303,9 +303,9 @@ public class StorageModuleEsProvider extends ModuleProvider {
private void registerRegisterDAO() throws ServiceNotProvidedException {
this.registerServiceImplementation(INetworkAddressRegisterDAO.class, new NetworkAddressRegisterEsDAO(elasticSearchClient));
this.registerServiceImplementation(IApplicationRegisterDAO.class, new ApplicationEsRegisterDAO(elasticSearchClient));
this.registerServiceImplementation(IInstanceRegisterDAO.class, new InstanceEsRegisterDAO(elasticSearchClient));
this.registerServiceImplementation(IServiceNameRegisterDAO.class, new ServiceNameEsRegisterDAO(elasticSearchClient));
this.registerServiceImplementation(IApplicationRegisterDAO.class, new ApplicationRegisterEsDAO(elasticSearchClient));
this.registerServiceImplementation(IInstanceRegisterDAO.class, new InstanceRegisterEsDAO(elasticSearchClient));
this.registerServiceImplementation(IServiceNameRegisterDAO.class, new ServiceNameRegisterEsDAO(elasticSearchClient));
}
private void registerPersistenceDAO() throws ServiceNotProvidedException {
......
......@@ -16,7 +16,6 @@
*
*/
package org.apache.skywalking.apm.collector.storage.es.base.define;
import org.apache.skywalking.apm.collector.core.data.ColumnDefine;
......@@ -30,6 +29,6 @@ public class ElasticSearchColumnDefine extends ColumnDefine {
}
public enum Type {
Binary, Boolean, Keyword, Long, Integer, Double, Text
Binary, Keyword, Long, Integer, Double, Text
}
}
......@@ -19,6 +19,7 @@
package org.apache.skywalking.apm.collector.storage.es.dao.cache;
import org.apache.skywalking.apm.collector.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.apm.collector.core.util.BooleanUtils;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.cache.IApplicationCacheDAO;
import org.apache.skywalking.apm.collector.storage.es.base.dao.EsDAO;
......@@ -54,7 +55,7 @@ public class ApplicationEsCacheDAO extends EsDAO implements IApplicationCacheDAO
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
boolQueryBuilder.must().add(QueryBuilders.termQuery(ApplicationTable.COLUMN_APPLICATION_CODE, applicationCode));
boolQueryBuilder.must().add(QueryBuilders.termQuery(ApplicationTable.COLUMN_IS_ADDRESS, false));
boolQueryBuilder.must().add(QueryBuilders.termQuery(ApplicationTable.COLUMN_IS_ADDRESS, BooleanUtils.FALSE));
searchRequestBuilder.setQuery(boolQueryBuilder);
searchRequestBuilder.setSize(1);
......@@ -88,7 +89,7 @@ public class ApplicationEsCacheDAO extends EsDAO implements IApplicationCacheDAO
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
boolQueryBuilder.must().add(QueryBuilders.termQuery(ApplicationTable.COLUMN_ADDRESS_ID, addressId));
boolQueryBuilder.must().add(QueryBuilders.termQuery(ApplicationTable.COLUMN_IS_ADDRESS, true));
boolQueryBuilder.must().add(QueryBuilders.termQuery(ApplicationTable.COLUMN_IS_ADDRESS, BooleanUtils.TRUE));
searchRequestBuilder.setQuery(boolQueryBuilder);
searchRequestBuilder.setSize(1);
......
......@@ -19,6 +19,7 @@
package org.apache.skywalking.apm.collector.storage.es.dao.cache;
import org.apache.skywalking.apm.collector.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.apm.collector.core.util.BooleanUtils;
import org.apache.skywalking.apm.collector.storage.dao.cache.IInstanceCacheDAO;
import org.apache.skywalking.apm.collector.storage.es.base.dao.EsDAO;
import org.apache.skywalking.apm.collector.storage.table.register.InstanceTable;
......@@ -29,16 +30,12 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng
*/
public class InstanceEsCacheDAO extends EsDAO implements IInstanceCacheDAO {
private final Logger logger = LoggerFactory.getLogger(InstanceEsCacheDAO.class);
public InstanceEsCacheDAO(ElasticSearchClient client) {
super(client);
}
......@@ -61,7 +58,7 @@ public class InstanceEsCacheDAO extends EsDAO implements IInstanceCacheDAO {
BoolQueryBuilder builder = QueryBuilders.boolQuery();
builder.must().add(QueryBuilders.termQuery(InstanceTable.COLUMN_APPLICATION_ID, applicationId));
builder.must().add(QueryBuilders.termQuery(InstanceTable.COLUMN_AGENT_UUID, agentUUID));
builder.must().add(QueryBuilders.termQuery(InstanceTable.COLUMN_IS_ADDRESS, false));
builder.must().add(QueryBuilders.termQuery(InstanceTable.COLUMN_IS_ADDRESS, BooleanUtils.FALSE));
searchRequestBuilder.setQuery(builder);
searchRequestBuilder.setSize(1);
......@@ -82,7 +79,7 @@ public class InstanceEsCacheDAO extends EsDAO implements IInstanceCacheDAO {
BoolQueryBuilder builder = QueryBuilders.boolQuery();
builder.must().add(QueryBuilders.termQuery(InstanceTable.COLUMN_APPLICATION_ID, applicationId));
builder.must().add(QueryBuilders.termQuery(InstanceTable.COLUMN_ADDRESS_ID, addressId));
builder.must().add(QueryBuilders.termQuery(InstanceTable.COLUMN_IS_ADDRESS, true));
builder.must().add(QueryBuilders.termQuery(InstanceTable.COLUMN_IS_ADDRESS, BooleanUtils.TRUE));
searchRequestBuilder.setQuery(builder);
searchRequestBuilder.setSize(1);
......
......@@ -48,7 +48,7 @@ public class NetworkAddressEsCacheDAO extends EsDAO implements INetworkAddressCa
ElasticSearchClient client = getClient();
SearchRequestBuilder searchRequestBuilder = client.prepareSearch(NetworkAddressTable.TABLE);
searchRequestBuilder.setTypes("type");
searchRequestBuilder.setTypes(NetworkAddressTable.TABLE_TYPE);
searchRequestBuilder.setSearchType(SearchType.QUERY_THEN_FETCH);
searchRequestBuilder.setQuery(QueryBuilders.termQuery(NetworkAddressTable.COLUMN_NETWORK_ADDRESS, networkAddress));
searchRequestBuilder.setSize(1);
......
......@@ -44,7 +44,7 @@ public abstract class AbstractMemoryMetricEsPersistenceDAO extends AbstractPersi
memoryMetric.setMetricId((String)source.get(MemoryMetricTable.COLUMN_METRIC_ID));
memoryMetric.setInstanceId(((Number)source.get(MemoryMetricTable.COLUMN_INSTANCE_ID)).intValue());
memoryMetric.setIsHeap((Boolean)source.get(MemoryMetricTable.COLUMN_IS_HEAP));
memoryMetric.setIsHeap(((Number)source.get(MemoryMetricTable.COLUMN_IS_HEAP)).intValue());
memoryMetric.setInit(((Number)source.get(MemoryMetricTable.COLUMN_INIT)).longValue());
memoryMetric.setMax(((Number)source.get(MemoryMetricTable.COLUMN_MAX)).longValue());
......
......@@ -33,11 +33,11 @@ import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng
*/
public class ApplicationEsRegisterDAO extends EsDAO implements IApplicationRegisterDAO {
public class ApplicationRegisterEsDAO extends EsDAO implements IApplicationRegisterDAO {
private final Logger logger = LoggerFactory.getLogger(ApplicationEsRegisterDAO.class);
private final Logger logger = LoggerFactory.getLogger(ApplicationRegisterEsDAO.class);
public ApplicationEsRegisterDAO(ElasticSearchClient client) {
public ApplicationRegisterEsDAO(ElasticSearchClient client) {
super(client);
}
......
......@@ -34,11 +34,11 @@ import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng
*/
public class InstanceEsRegisterDAO extends EsDAO implements IInstanceRegisterDAO {
public class InstanceRegisterEsDAO extends EsDAO implements IInstanceRegisterDAO {
private final Logger logger = LoggerFactory.getLogger(InstanceEsRegisterDAO.class);
private final Logger logger = LoggerFactory.getLogger(InstanceRegisterEsDAO.class);
public InstanceEsRegisterDAO(ElasticSearchClient client) {
public InstanceRegisterEsDAO(ElasticSearchClient client) {
super(client);
}
......@@ -71,7 +71,7 @@ public class InstanceEsRegisterDAO extends EsDAO implements IInstanceRegisterDAO
ElasticSearchClient client = getClient();
UpdateRequest updateRequest = new UpdateRequest();
updateRequest.index(InstanceTable.TABLE);
updateRequest.type("type");
updateRequest.type(InstanceTable.TABLE_TYPE);
updateRequest.id(String.valueOf(instanceId));
updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
......
......@@ -33,11 +33,11 @@ import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng
*/
public class ServiceNameEsRegisterDAO extends EsDAO implements IServiceNameRegisterDAO {
public class ServiceNameRegisterEsDAO extends EsDAO implements IServiceNameRegisterDAO {
private final Logger logger = LoggerFactory.getLogger(ServiceNameEsRegisterDAO.class);
private final Logger logger = LoggerFactory.getLogger(ServiceNameRegisterEsDAO.class);
public ServiceNameEsRegisterDAO(ElasticSearchClient client) {
public ServiceNameRegisterEsDAO(ElasticSearchClient client) {
super(client);
}
......
......@@ -43,7 +43,7 @@ public class SegmentCostEsTableDefine extends ElasticSearchTableDefine {
addColumn(new ElasticSearchColumnDefine(SegmentCostTable.COLUMN_COST, ElasticSearchColumnDefine.Type.Long.name()));
addColumn(new ElasticSearchColumnDefine(SegmentCostTable.COLUMN_START_TIME, ElasticSearchColumnDefine.Type.Long.name()));
addColumn(new ElasticSearchColumnDefine(SegmentCostTable.COLUMN_END_TIME, ElasticSearchColumnDefine.Type.Long.name()));
addColumn(new ElasticSearchColumnDefine(SegmentCostTable.COLUMN_IS_ERROR, ElasticSearchColumnDefine.Type.Boolean.name()));
addColumn(new ElasticSearchColumnDefine(SegmentCostTable.COLUMN_IS_ERROR, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(SegmentCostTable.COLUMN_TIME_BUCKET, ElasticSearchColumnDefine.Type.Long.name()));
}
}
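With COLUMN_IS_ERROR now mapped as an Integer, queries against it follow the same term-query pattern the cache DAOs in this commit use for COLUMN_IS_ADDRESS. A hypothetical filter for error segments, not part of the commit, could look like:

// Sketch only: restrict a segment-cost search to error segments using the 0/1 encoding.
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
boolQueryBuilder.must().add(QueryBuilders.termQuery(SegmentCostTable.COLUMN_IS_ERROR, BooleanUtils.TRUE));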
......@@ -34,7 +34,9 @@ public abstract class AbstractInstanceReferenceMetricEsTableDefine extends Elast
@Override public final void initialize() {
addColumn(new ElasticSearchColumnDefine(InstanceReferenceMetricTable.COLUMN_ID, ElasticSearchColumnDefine.Type.Keyword.name()));
addColumn(new ElasticSearchColumnDefine(InstanceReferenceMetricTable.COLUMN_METRIC_ID, ElasticSearchColumnDefine.Type.Keyword.name()));
addColumn(new ElasticSearchColumnDefine(InstanceReferenceMetricTable.COLUMN_FRONT_APPLICATION_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(InstanceReferenceMetricTable.COLUMN_FRONT_INSTANCE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(InstanceReferenceMetricTable.COLUMN_BEHIND_APPLICATION_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(InstanceReferenceMetricTable.COLUMN_BEHIND_INSTANCE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(InstanceReferenceMetricTable.COLUMN_SOURCE_VALUE, ElasticSearchColumnDefine.Type.Integer.name()));
......
......@@ -35,8 +35,7 @@ public abstract class AbstractMemoryMetricEsTableDefine extends ElasticSearchTab
addColumn(new ElasticSearchColumnDefine(MemoryMetricTable.COLUMN_ID, ElasticSearchColumnDefine.Type.Keyword.name()));
addColumn(new ElasticSearchColumnDefine(MemoryMetricTable.COLUMN_METRIC_ID, ElasticSearchColumnDefine.Type.Keyword.name()));
addColumn(new ElasticSearchColumnDefine(MemoryMetricTable.COLUMN_INSTANCE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(MemoryMetricTable.COLUMN_INSTANCE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(MemoryMetricTable.COLUMN_IS_HEAP, ElasticSearchColumnDefine.Type.Boolean.name()));
addColumn(new ElasticSearchColumnDefine(MemoryMetricTable.COLUMN_IS_HEAP, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(MemoryMetricTable.COLUMN_INIT, ElasticSearchColumnDefine.Type.Long.name()));
addColumn(new ElasticSearchColumnDefine(MemoryMetricTable.COLUMN_MAX, ElasticSearchColumnDefine.Type.Long.name()));
addColumn(new ElasticSearchColumnDefine(MemoryMetricTable.COLUMN_USED, ElasticSearchColumnDefine.Type.Long.name()));
......
......@@ -39,6 +39,6 @@ public class ApplicationEsTableDefine extends ElasticSearchTableDefine {
addColumn(new ElasticSearchColumnDefine(ApplicationTable.COLUMN_APPLICATION_CODE, ElasticSearchColumnDefine.Type.Keyword.name()));
addColumn(new ElasticSearchColumnDefine(ApplicationTable.COLUMN_APPLICATION_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ApplicationTable.COLUMN_ADDRESS_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ApplicationTable.COLUMN_IS_ADDRESS, ElasticSearchColumnDefine.Type.Boolean.name()));
addColumn(new ElasticSearchColumnDefine(ApplicationTable.COLUMN_IS_ADDRESS, ElasticSearchColumnDefine.Type.Integer.name()));
}
}
......@@ -43,6 +43,6 @@ public class InstanceEsTableDefine extends ElasticSearchTableDefine {
addColumn(new ElasticSearchColumnDefine(InstanceTable.COLUMN_HEARTBEAT_TIME, ElasticSearchColumnDefine.Type.Long.name()));
addColumn(new ElasticSearchColumnDefine(InstanceTable.COLUMN_OS_INFO, ElasticSearchColumnDefine.Type.Keyword.name()));
addColumn(new ElasticSearchColumnDefine(InstanceTable.COLUMN_ADDRESS_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(InstanceTable.COLUMN_IS_ADDRESS, ElasticSearchColumnDefine.Type.Boolean.name()));
addColumn(new ElasticSearchColumnDefine(InstanceTable.COLUMN_IS_ADDRESS, ElasticSearchColumnDefine.Type.Integer.name()));
}
}
......@@ -34,6 +34,8 @@ public abstract class AbstractServiceMetricEsTableDefine extends ElasticSearchTa
@Override public final void initialize() {
addColumn(new ElasticSearchColumnDefine(ServiceMetricTable.COLUMN_ID, ElasticSearchColumnDefine.Type.Keyword.name()));
addColumn(new ElasticSearchColumnDefine(ServiceMetricTable.COLUMN_METRIC_ID, ElasticSearchColumnDefine.Type.Keyword.name()));
addColumn(new ElasticSearchColumnDefine(ServiceMetricTable.COLUMN_APPLICATION_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ServiceMetricTable.COLUMN_INSTANCE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ServiceMetricTable.COLUMN_SERVICE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ServiceMetricTable.COLUMN_SOURCE_VALUE, ElasticSearchColumnDefine.Type.Integer.name()));
......
......@@ -34,7 +34,11 @@ public abstract class AbstractServiceReferenceMetricEsTableDefine extends Elasti
@Override public final void initialize() {
addColumn(new ElasticSearchColumnDefine(ServiceReferenceMetricTable.COLUMN_METRIC_ID, ElasticSearchColumnDefine.Type.Keyword.name()));
addColumn(new ElasticSearchColumnDefine(ServiceReferenceMetricTable.COLUMN_FRONT_APPLICATION_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ServiceReferenceMetricTable.COLUMN_FRONT_INSTANCE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ServiceReferenceMetricTable.COLUMN_FRONT_SERVICE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ServiceReferenceMetricTable.COLUMN_BEHIND_APPLICATION_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ServiceReferenceMetricTable.COLUMN_BEHIND_INSTANCE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ServiceReferenceMetricTable.COLUMN_BEHIND_SERVICE_ID, ElasticSearchColumnDefine.Type.Integer.name()));
addColumn(new ElasticSearchColumnDefine(ServiceReferenceMetricTable.COLUMN_SOURCE_VALUE, ElasticSearchColumnDefine.Type.Integer.name()));
......
......@@ -16,49 +16,47 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
package org.apache.skywalking.apm.collector.storage.h2.base.dao;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.client.h2.H2ClientException;
import org.apache.skywalking.apm.collector.core.data.CommonTable;
import org.apache.skywalking.apm.collector.core.data.StreamData;
import org.apache.skywalking.apm.collector.storage.base.dao.IPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.base.sql.SqlBuilder;
import org.apache.skywalking.apm.collector.storage.dao.acp.IApplicationComponentMinutePersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponent;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponentTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng, clevertension
* @author peng-yongsheng
*/
public class ApplicationComponentH2MinutePersistenceDAO extends H2DAO implements IApplicationComponentMinutePersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationComponent> {
private final Logger logger = LoggerFactory.getLogger(ApplicationComponentH2MinutePersistenceDAO.class);
private static final String GET_SQL = "select * from {0} where {1} = ?";
public abstract class AbstractPersistenceH2DAO<STREAM_DATA extends StreamData> extends H2DAO implements IPersistenceDAO<H2SqlEntity, H2SqlEntity, STREAM_DATA> {
    private final Logger logger = LoggerFactory.getLogger(AbstractPersistenceH2DAO.class);

    private static final String GET_SQL = "select * from {0} where {1} = ?";

    public AbstractPersistenceH2DAO(H2Client client) {
        super(client);
    }

    protected abstract STREAM_DATA h2DataToStreamData(ResultSet resultSet) throws SQLException;

    protected abstract String tableName();

    @Override public final STREAM_DATA get(String id) {
        String sql = SqlBuilder.buildSql(GET_SQL, tableName(), CommonTable.COLUMN_ID);
        Object[] params = new Object[] {id};
        try (ResultSet resultSet = getClient().executeQuery(sql, params)) {
            if (resultSet.next()) {
                return h2DataToStreamData(resultSet);
            }
        } catch (SQLException | H2ClientException e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    protected abstract Map<String, Object> streamDataToH2Data(STREAM_DATA streamData);

    @Override public final H2SqlEntity prepareBatchInsert(STREAM_DATA streamData) {
        Map<String, Object> source = streamDataToH2Data(streamData);
        source.put(CommonTable.COLUMN_ID, streamData.getId());
        H2SqlEntity entity = new H2SqlEntity();
        String sql = SqlBuilder.buildBatchInsertSql(tableName(), source.keySet());
        entity.setSql(sql);
        entity.setParams(source.values().toArray(new Object[0]));
        return entity;
    }

    @Override public final H2SqlEntity prepareBatchUpdate(STREAM_DATA streamData) {
        Map<String, Object> source = streamDataToH2Data(streamData);
        H2SqlEntity entity = new H2SqlEntity();
        String sql = SqlBuilder.buildBatchUpdateSql(tableName(), source.keySet(), CommonTable.COLUMN_ID);
        entity.setSql(sql);
        List<Object> values = new ArrayList<>(source.values());
        values.add(streamData.getId());
        entity.setParams(values.toArray(new Object[0]));
        return entity;
    }

    @Override public final void deleteHistory(Long startTimestamp, Long endTimestamp) {
    }
}
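// Sketch (illustrative only, not part of this commit): the abstract DAO above is a template method.
// get(), prepareBatchInsert() and prepareBatchUpdate() are final, so a concrete DAO only supplies
// tableName(), h2DataToStreamData() and streamDataToH2Data(). "Widget", its accessors and the
// "widget"/"time_bucket" names below are hypothetical; any real subclass would use the project's
// own table constants and stream-data type.
public class ExampleWidgetH2PersistenceDAO extends AbstractPersistenceH2DAO<Widget> {

    public ExampleWidgetH2PersistenceDAO(H2Client client) {
        super(client);
    }

    @Override protected String tableName() {
        return "widget";
    }

    @Override protected Widget h2DataToStreamData(ResultSet resultSet) throws SQLException {
        // map one result-set row back into the stream-data object
        Widget widget = new Widget();
        widget.setId(resultSet.getString(CommonTable.COLUMN_ID));
        widget.setTimeBucket(resultSet.getLong("time_bucket"));
        return widget;
    }

    @Override protected Map<String, Object> streamDataToH2Data(Widget streamData) {
        // only the non-id columns; the parent adds COLUMN_ID itself for inserts
        Map<String, Object> source = new HashMap<>();
        source.put("time_bucket", streamData.getTimeBucket());
        return source;
    }
}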
......@@ -16,7 +16,6 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.base.define;
import org.apache.skywalking.apm.collector.core.data.ColumnDefine;
......@@ -31,6 +30,6 @@ public class H2ColumnDefine extends ColumnDefine {
}
public enum Type {
Varchar, Int, Bigint, BINARY, Double
}
}
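// Sketch (illustrative only): with Boolean removed from the supported H2 column types above,
// boolean fields presumably have to be persisted through one of the remaining types, most likely
// Int. The helper below is a hypothetical mapping, not the project's own utility class.
public final class BooleanColumnMapping {
    public static final int TRUE = 1;
    public static final int FALSE = 0;

    private BooleanColumnMapping() {
    }

    // store a boolean flag as an Int column value
    public static int toColumnValue(boolean flag) {
        return flag ? TRUE : FALSE;
    }

    // read an Int column value back as a boolean flag
    public static boolean fromColumnValue(int value) {
        return value == TRUE;
    }
}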
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.client.h2.H2ClientException;
import org.apache.skywalking.apm.collector.storage.base.sql.SqlBuilder;
import org.apache.skywalking.apm.collector.storage.dao.ampp.IApplicationMappingMinutePersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMapping;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMappingTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationMappingH2MinutePersistenceDAO extends H2DAO implements IApplicationMappingMinutePersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationMapping> {
private final Logger logger = LoggerFactory.getLogger(ApplicationMappingH2MinutePersistenceDAO.class);
private static final String GET_SQL = "select * from {0} where {1} = ?";
public ApplicationMappingH2MinutePersistenceDAO(H2Client client) {
super(client);
}
@Override public ApplicationMapping get(String id) {
H2Client client = getClient();
String sql = SqlBuilder.buildSql(GET_SQL, ApplicationMappingTable.TABLE, ApplicationMappingTable.COLUMN_ID);
Object[] params = new Object[] {id};
try (ResultSet rs = client.executeQuery(sql, params)) {
if (rs.next()) {
ApplicationMapping applicationMapping = new ApplicationMapping();
applicationMapping.setId(id);
applicationMapping.setApplicationId(rs.getInt(ApplicationMappingTable.COLUMN_APPLICATION_ID));
applicationMapping.setAddressId(rs.getInt(ApplicationMappingTable.COLUMN_ADDRESS_ID));
applicationMapping.setTimeBucket(rs.getLong(ApplicationMappingTable.COLUMN_TIME_BUCKET));
return applicationMapping;
}
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
return null;
}
@Override public H2SqlEntity prepareBatchInsert(ApplicationMapping applicationMapping) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(ApplicationMappingTable.COLUMN_ID, applicationMapping.getId());
source.put(ApplicationMappingTable.COLUMN_APPLICATION_ID, applicationMapping.getApplicationId());
source.put(ApplicationMappingTable.COLUMN_ADDRESS_ID, applicationMapping.getAddressId());
source.put(ApplicationMappingTable.COLUMN_TIME_BUCKET, applicationMapping.getTimeBucket());
String sql = SqlBuilder.buildBatchInsertSql(ApplicationMappingTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
return entity;
}
@Override public H2SqlEntity prepareBatchUpdate(ApplicationMapping applicationMapping) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(ApplicationMappingTable.COLUMN_APPLICATION_ID, applicationMapping.getApplicationId());
source.put(ApplicationMappingTable.COLUMN_ADDRESS_ID, applicationMapping.getAddressId());
source.put(ApplicationMappingTable.COLUMN_TIME_BUCKET, applicationMapping.getTimeBucket());
String sql = SqlBuilder.buildBatchUpdateSql(ApplicationMappingTable.TABLE, source.keySet(), ApplicationMappingTable.COLUMN_ID);
entity.setSql(sql);
List<Object> values = new ArrayList<>(source.values());
values.add(applicationMapping.getId());
entity.setParams(values.toArray(new Object[0]));
return entity;
}
@Override public void deleteHistory(Long startTimestamp, Long endTimestamp) {
}
}
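// Sketch (illustrative only): the GET_SQL template and the SqlBuilder calls in the DAO above
// suggest MessageFormat-style placeholder substitution plus comma-joined column lists. The class
// below is an assumption about the shape of the generated SQL, not the project's SqlBuilder; the
// literal table and column names ("application_mapping", "application_id", ...) are guesses at
// what the table constants resolve to.
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class SqlBuilderSketch {

    // mirrors GET_SQL: "select * from {0} where {1} = ?"
    static String buildSelectById(String table, String idColumn) {
        return MessageFormat.format("select * from {0} where {1} = ?", table, idColumn);
    }

    // one '?' per column, in the same order as the parameter array built from source.values()
    static String buildBatchInsert(String table, List<String> columns) {
        String columnList = String.join(",", columns);
        String placeholders = columns.stream().map(c -> "?").collect(Collectors.joining(","));
        return "insert into " + table + "(" + columnList + ") values(" + placeholders + ")";
    }

    // the id is bound last, matching values.add(applicationMapping.getId()) in the DAO above
    static String buildBatchUpdate(String table, List<String> columns, String idColumn) {
        String assignments = columns.stream().map(c -> c + "=?").collect(Collectors.joining(","));
        return "update " + table + " set " + assignments + " where " + idColumn + "=?";
    }

    public static void main(String[] args) {
        System.out.println(buildSelectById("application_mapping", "id"));
        System.out.println(buildBatchInsert("application_mapping",
            Arrays.asList("id", "application_id", "address_id", "time_bucket")));
        System.out.println(buildBatchUpdate("application_mapping",
            Arrays.asList("application_id", "address_id", "time_bucket"), "id"));
    }
}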
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.client.h2.H2ClientException;
import org.apache.skywalking.apm.collector.storage.base.sql.SqlBuilder;
import org.apache.skywalking.apm.collector.storage.dao.amp.IApplicationMinuteMetricPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetric;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetricTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationMinuteMetricH2PersistenceDAO extends H2DAO implements IApplicationMinuteMetricPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationMetric> {
private final Logger logger = LoggerFactory.getLogger(ApplicationMinuteMetricH2PersistenceDAO.class);
private static final String GET_SQL = "select * from {0} where {1} = ?";
public ApplicationMinuteMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override public ApplicationMetric get(String id) {
H2Client client = getClient();
String sql = SqlBuilder.buildSql(GET_SQL, ApplicationMetricTable.TABLE, ApplicationMetricTable.COLUMN_ID);
Object[] params = new Object[] {id};
try (ResultSet rs = client.executeQuery(sql, params)) {
if (rs.next()) {
ApplicationMetric applicationMetric = new ApplicationMetric();
applicationMetric.setId(id);
applicationMetric.setApplicationId(rs.getInt(ApplicationMetricTable.COLUMN_APPLICATION_ID));
applicationMetric.setTransactionCalls(rs.getLong(ApplicationMetricTable.COLUMN_TRANSACTION_CALLS));
applicationMetric.setTransactionErrorCalls(rs.getLong(ApplicationMetricTable.COLUMN_TRANSACTION_ERROR_CALLS));
applicationMetric.setTransactionDurationSum(rs.getLong(ApplicationMetricTable.COLUMN_TRANSACTION_DURATION_SUM));
applicationMetric.setTransactionErrorDurationSum(rs.getLong(ApplicationMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM));
applicationMetric.setBusinessTransactionCalls(rs.getLong(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS));
applicationMetric.setBusinessTransactionErrorCalls(rs.getLong(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS));
applicationMetric.setBusinessTransactionDurationSum(rs.getLong(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM));
applicationMetric.setBusinessTransactionErrorDurationSum(rs.getLong(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM));
applicationMetric.setMqTransactionCalls(rs.getLong(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_CALLS));
applicationMetric.setMqTransactionErrorCalls(rs.getLong(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS));
applicationMetric.setMqTransactionDurationSum(rs.getLong(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM));
applicationMetric.setMqTransactionErrorDurationSum(rs.getLong(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM));
applicationMetric.setSatisfiedCount(rs.getLong(ApplicationMetricTable.COLUMN_SATISFIED_COUNT));
applicationMetric.setToleratingCount(rs.getLong(ApplicationMetricTable.COLUMN_TOLERATING_COUNT));
applicationMetric.setFrustratedCount(rs.getLong(ApplicationMetricTable.COLUMN_FRUSTRATED_COUNT));
applicationMetric.setTimeBucket(rs.getLong(ApplicationMetricTable.COLUMN_TIME_BUCKET));
return applicationMetric;
}
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
return null;
}
@Override public H2SqlEntity prepareBatchInsert(ApplicationMetric data) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(ApplicationMetricTable.COLUMN_ID, data.getId());
source.put(ApplicationMetricTable.COLUMN_APPLICATION_ID, data.getApplicationId());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(ApplicationMetricTable.COLUMN_SATISFIED_COUNT, data.getSatisfiedCount());
source.put(ApplicationMetricTable.COLUMN_TOLERATING_COUNT, data.getToleratingCount());
source.put(ApplicationMetricTable.COLUMN_FRUSTRATED_COUNT, data.getFrustratedCount());
source.put(ApplicationMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchInsertSql(ApplicationMetricTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
return entity;
}
@Override public H2SqlEntity prepareBatchUpdate(ApplicationMetric data) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(ApplicationMetricTable.COLUMN_APPLICATION_ID, data.getApplicationId());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(ApplicationMetricTable.COLUMN_SATISFIED_COUNT, data.getSatisfiedCount());
source.put(ApplicationMetricTable.COLUMN_TOLERATING_COUNT, data.getToleratingCount());
source.put(ApplicationMetricTable.COLUMN_FRUSTRATED_COUNT, data.getFrustratedCount());
source.put(ApplicationMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchUpdateSql(ApplicationMetricTable.TABLE, source.keySet(), ApplicationMetricTable.COLUMN_ID);
entity.setSql(sql);
List<Object> values = new ArrayList<>(source.values());
values.add(data.getId());
entity.setParams(values.toArray(new Object[0]));
return entity;
}
@Override public void deleteHistory(Long startTimestamp, Long endTimestamp) {
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.client.h2.H2ClientException;
import org.apache.skywalking.apm.collector.storage.base.sql.SqlBuilder;
import org.apache.skywalking.apm.collector.storage.dao.armp.IApplicationReferenceMinuteMetricPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationReferenceMetric;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationReferenceMetricTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationReferenceMinuteMetricH2PersistenceDAO extends H2DAO implements IApplicationReferenceMinuteMetricPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationReferenceMetric> {
private final Logger logger = LoggerFactory.getLogger(ApplicationReferenceMinuteMetricH2PersistenceDAO.class);
private static final String GET_SQL = "select * from {0} where {1} = ?";
public ApplicationReferenceMinuteMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override public ApplicationReferenceMetric get(String id) {
H2Client client = getClient();
String sql = SqlBuilder.buildSql(GET_SQL, ApplicationReferenceMetricTable.TABLE, ApplicationReferenceMetricTable.COLUMN_ID);
Object[] params = new Object[] {id};
try (ResultSet rs = client.executeQuery(sql, params)) {
if (rs.next()) {
ApplicationReferenceMetric applicationReferenceMetric = new ApplicationReferenceMetric();
applicationReferenceMetric.setId(id);
applicationReferenceMetric.setFrontApplicationId(rs.getInt(ApplicationReferenceMetricTable.COLUMN_FRONT_APPLICATION_ID));
applicationReferenceMetric.setBehindApplicationId(rs.getInt(ApplicationReferenceMetricTable.COLUMN_BEHIND_APPLICATION_ID));
applicationReferenceMetric.setTransactionCalls(rs.getLong(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_CALLS));
applicationReferenceMetric.setTransactionErrorCalls(rs.getLong(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS));
applicationReferenceMetric.setTransactionDurationSum(rs.getLong(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_DURATION_SUM));
applicationReferenceMetric.setTransactionErrorDurationSum(rs.getLong(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM));
applicationReferenceMetric.setBusinessTransactionCalls(rs.getLong(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS));
applicationReferenceMetric.setBusinessTransactionErrorCalls(rs.getLong(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS));
applicationReferenceMetric.setBusinessTransactionDurationSum(rs.getLong(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM));
applicationReferenceMetric.setBusinessTransactionErrorDurationSum(rs.getLong(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM));
applicationReferenceMetric.setMqTransactionCalls(rs.getLong(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_CALLS));
applicationReferenceMetric.setMqTransactionErrorCalls(rs.getLong(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS));
applicationReferenceMetric.setMqTransactionDurationSum(rs.getLong(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM));
applicationReferenceMetric.setMqTransactionErrorDurationSum(rs.getLong(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM));
applicationReferenceMetric.setSatisfiedCount(rs.getLong(ApplicationReferenceMetricTable.COLUMN_SATISFIED_COUNT));
applicationReferenceMetric.setToleratingCount(rs.getLong(ApplicationReferenceMetricTable.COLUMN_TOLERATING_COUNT));
applicationReferenceMetric.setFrustratedCount(rs.getLong(ApplicationReferenceMetricTable.COLUMN_FRUSTRATED_COUNT));
applicationReferenceMetric.setTimeBucket(rs.getLong(ApplicationReferenceMetricTable.COLUMN_TIME_BUCKET));
return applicationReferenceMetric;
}
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
return null;
}
@Override public H2SqlEntity prepareBatchInsert(ApplicationReferenceMetric data) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(ApplicationReferenceMetricTable.COLUMN_ID, data.getId());
source.put(ApplicationReferenceMetricTable.COLUMN_FRONT_APPLICATION_ID, data.getFrontApplicationId());
source.put(ApplicationReferenceMetricTable.COLUMN_BEHIND_APPLICATION_ID, data.getBehindApplicationId());
source.put(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_SATISFIED_COUNT, data.getSatisfiedCount());
source.put(ApplicationReferenceMetricTable.COLUMN_TOLERATING_COUNT, data.getToleratingCount());
source.put(ApplicationReferenceMetricTable.COLUMN_FRUSTRATED_COUNT, data.getFrustratedCount());
source.put(ApplicationReferenceMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchInsertSql(ApplicationReferenceMetricTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
return entity;
}
@Override public H2SqlEntity prepareBatchUpdate(ApplicationReferenceMetric data) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(ApplicationReferenceMetricTable.COLUMN_FRONT_APPLICATION_ID, data.getFrontApplicationId());
source.put(ApplicationReferenceMetricTable.COLUMN_BEHIND_APPLICATION_ID, data.getBehindApplicationId());
source.put(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(ApplicationReferenceMetricTable.COLUMN_SATISFIED_COUNT, data.getSatisfiedCount());
source.put(ApplicationReferenceMetricTable.COLUMN_TOLERATING_COUNT, data.getToleratingCount());
source.put(ApplicationReferenceMetricTable.COLUMN_FRUSTRATED_COUNT, data.getFrustratedCount());
source.put(ApplicationReferenceMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchUpdateSql(ApplicationReferenceMetricTable.TABLE, source.keySet(), ApplicationReferenceMetricTable.COLUMN_ID);
entity.setSql(sql);
List<Object> values = new ArrayList<>(source.values());
values.add(data.getId());
entity.setParams(values.toArray(new Object[0]));
return entity;
}
@Override public void deleteHistory(Long startTimestamp, Long endTimestamp) {
}
}
......@@ -16,17 +16,16 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.UnexpectedException;
import org.apache.skywalking.apm.collector.storage.base.sql.SqlBuilder;
import org.apache.skywalking.apm.collector.storage.dao.IGlobalTracePersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.global.GlobalTrace;
import org.apache.skywalking.apm.collector.storage.table.global.GlobalTraceTable;
import org.slf4j.Logger;
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.client.h2.H2ClientException;
import org.apache.skywalking.apm.collector.storage.base.sql.SqlBuilder;
import org.apache.skywalking.apm.collector.storage.dao.impp.IInstanceMappingMinutePersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.instance.InstanceMapping;
import org.apache.skywalking.apm.collector.storage.table.instance.InstanceMappingTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng, clevertension
*/
public class InstanceMappingH2MinutePersistenceDAO extends H2DAO implements IInstanceMappingMinutePersistenceDAO<H2SqlEntity, H2SqlEntity, InstanceMapping> {
private final Logger logger = LoggerFactory.getLogger(InstanceMappingH2MinutePersistenceDAO.class);
private static final String GET_SQL = "select * from {0} where {1} = ?";
public InstanceMappingH2MinutePersistenceDAO(H2Client client) {
super(client);
}
@Override public InstanceMapping get(String id) {
H2Client client = getClient();
String sql = SqlBuilder.buildSql(GET_SQL, InstanceMappingTable.TABLE, InstanceMappingTable.COLUMN_ID);
Object[] params = new Object[] {id};
try (ResultSet rs = client.executeQuery(sql, params)) {
if (rs.next()) {
InstanceMapping instanceMapping = new InstanceMapping();
instanceMapping.setId(id);
instanceMapping.setApplicationId(rs.getInt(InstanceMappingTable.COLUMN_APPLICATION_ID));
instanceMapping.setInstanceId(rs.getInt(InstanceMappingTable.COLUMN_INSTANCE_ID));
instanceMapping.setAddressId(rs.getInt(InstanceMappingTable.COLUMN_ADDRESS_ID));
instanceMapping.setTimeBucket(rs.getLong(InstanceMappingTable.COLUMN_TIME_BUCKET));
return instanceMapping;
}
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
return null;
}
@Override public H2SqlEntity prepareBatchInsert(InstanceMapping instanceMapping) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(InstanceMappingTable.COLUMN_ID, instanceMapping.getId());
source.put(InstanceMappingTable.COLUMN_APPLICATION_ID, instanceMapping.getApplicationId());
source.put(InstanceMappingTable.COLUMN_INSTANCE_ID, instanceMapping.getInstanceId());
source.put(InstanceMappingTable.COLUMN_ADDRESS_ID, instanceMapping.getAddressId());
source.put(InstanceMappingTable.COLUMN_TIME_BUCKET, instanceMapping.getTimeBucket());
String sql = SqlBuilder.buildBatchInsertSql(InstanceMappingTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
return entity;
}
@Override public H2SqlEntity prepareBatchUpdate(InstanceMapping instanceMapping) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(InstanceMappingTable.COLUMN_APPLICATION_ID, instanceMapping.getApplicationId());
source.put(InstanceMappingTable.COLUMN_INSTANCE_ID, instanceMapping.getInstanceId());
source.put(InstanceMappingTable.COLUMN_ADDRESS_ID, instanceMapping.getAddressId());
source.put(InstanceMappingTable.COLUMN_TIME_BUCKET, instanceMapping.getTimeBucket());
String sql = SqlBuilder.buildBatchUpdateSql(InstanceMappingTable.TABLE, source.keySet(), InstanceMappingTable.COLUMN_ID);
entity.setSql(sql);
List<Object> values = new ArrayList<>(source.values());
values.add(instanceMapping.getId());
entity.setParams(values.toArray(new Object[0]));
return entity;
}
@Override public void deleteHistory(Long startTimestamp, Long endTimestamp) {
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.client.h2.H2ClientException;
import org.apache.skywalking.apm.collector.storage.base.sql.SqlBuilder;
import org.apache.skywalking.apm.collector.storage.dao.imp.IInstanceMinuteMetricPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.instance.InstanceMetric;
import org.apache.skywalking.apm.collector.storage.table.instance.InstanceMetricTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng, clevertension
*/
public class InstanceMinuteMetricH2PersistenceDAO extends H2DAO implements IInstanceMinuteMetricPersistenceDAO<H2SqlEntity, H2SqlEntity, InstanceMetric> {
private final Logger logger = LoggerFactory.getLogger(InstanceMinuteMetricH2PersistenceDAO.class);
private static final String GET_SQL = "select * from {0} where {1} = ?";
public InstanceMinuteMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override public InstanceMetric get(String id) {
H2Client client = getClient();
String sql = SqlBuilder.buildSql(GET_SQL, InstanceMetricTable.TABLE, InstanceMetricTable.COLUMN_ID);
Object[] params = new Object[] {id};
try (ResultSet rs = client.executeQuery(sql, params)) {
if (rs.next()) {
InstanceMetric instanceMetric = new InstanceMetric();
instanceMetric.setId(id);
instanceMetric.setApplicationId(rs.getInt(InstanceMetricTable.COLUMN_APPLICATION_ID));
instanceMetric.setInstanceId(rs.getInt(InstanceMetricTable.COLUMN_INSTANCE_ID));
instanceMetric.setTransactionCalls(rs.getLong(InstanceMetricTable.COLUMN_TRANSACTION_CALLS));
instanceMetric.setTransactionErrorCalls(rs.getLong(InstanceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS));
instanceMetric.setTransactionDurationSum(rs.getLong(InstanceMetricTable.COLUMN_TRANSACTION_DURATION_SUM));
instanceMetric.setTransactionErrorDurationSum(rs.getLong(InstanceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM));
instanceMetric.setBusinessTransactionCalls(rs.getLong(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS));
instanceMetric.setBusinessTransactionErrorCalls(rs.getLong(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS));
instanceMetric.setBusinessTransactionDurationSum(rs.getLong(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM));
instanceMetric.setBusinessTransactionErrorDurationSum(rs.getLong(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM));
instanceMetric.setMqTransactionCalls(rs.getLong(InstanceMetricTable.COLUMN_MQ_TRANSACTION_CALLS));
instanceMetric.setMqTransactionErrorCalls(rs.getLong(InstanceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS));
instanceMetric.setMqTransactionDurationSum(rs.getLong(InstanceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM));
instanceMetric.setMqTransactionErrorDurationSum(rs.getLong(InstanceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM));
instanceMetric.setTimeBucket(rs.getLong(InstanceMetricTable.COLUMN_TIME_BUCKET));
return instanceMetric;
}
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
return null;
}
@Override public H2SqlEntity prepareBatchInsert(InstanceMetric data) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(InstanceMetricTable.COLUMN_ID, data.getId());
source.put(InstanceMetricTable.COLUMN_APPLICATION_ID, data.getApplicationId());
source.put(InstanceMetricTable.COLUMN_INSTANCE_ID, data.getInstanceId());
source.put(InstanceMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(InstanceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(InstanceMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(InstanceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(InstanceMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(InstanceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(InstanceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(InstanceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(InstanceMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchInsertSql(InstanceMetricTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
return entity;
}
@Override public H2SqlEntity prepareBatchUpdate(InstanceMetric data) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(InstanceMetricTable.COLUMN_APPLICATION_ID, data.getApplicationId());
source.put(InstanceMetricTable.COLUMN_INSTANCE_ID, data.getInstanceId());
source.put(InstanceMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(InstanceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(InstanceMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(InstanceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(InstanceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(InstanceMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(InstanceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(InstanceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(InstanceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(InstanceMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchUpdateSql(InstanceMetricTable.TABLE, source.keySet(), InstanceMetricTable.COLUMN_ID);
entity.setSql(sql);
List<Object> values = new ArrayList<>(source.values());
values.add(data.getId());
entity.setParams(values.toArray(new Object[0]));
return entity;
}
@Override public void deleteHistory(Long startTimestamp, Long endTimestamp) {
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IInstanceReferenceAlarmPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.InstanceReferenceAlarm;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng
*/
public class InstanceReferenceAlarmH2PersistenceDAO extends H2DAO implements IInstanceReferenceAlarmPersistenceDAO<H2SqlEntity, H2SqlEntity, InstanceReferenceAlarm> {
private final Logger logger = LoggerFactory.getLogger(InstanceReferenceAlarmH2PersistenceDAO.class);
public InstanceReferenceAlarmH2PersistenceDAO(H2Client client) {
super(client);
}
@Override public InstanceReferenceAlarm get(String id) {
return null;
}
@Override public H2SqlEntity prepareBatchInsert(InstanceReferenceAlarm data) {
return null;
}
@Override public H2SqlEntity prepareBatchUpdate(InstanceReferenceAlarm data) {
return null;
}
@Override public void deleteHistory(Long startTimestamp, Long endTimestamp) {
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.client.h2.H2ClientException;
import org.apache.skywalking.apm.collector.storage.base.sql.SqlBuilder;
import org.apache.skywalking.apm.collector.storage.dao.irmp.IInstanceReferenceMinuteMetricPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.instance.InstanceReferenceMetric;
import org.apache.skywalking.apm.collector.storage.table.instance.InstanceReferenceMetricTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng, clevertension
*/
public class InstanceReferenceMetricH2PersistenceDAO extends H2DAO implements IInstanceReferenceMinuteMetricPersistenceDAO<H2SqlEntity, H2SqlEntity, InstanceReferenceMetric> {
private final Logger logger = LoggerFactory.getLogger(InstanceReferenceMetricH2PersistenceDAO.class);
private static final String GET_SQL = "select * from {0} where {1} = ?";
public InstanceReferenceMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override public InstanceReferenceMetric get(String id) {
H2Client client = getClient();
String sql = SqlBuilder.buildSql(GET_SQL, InstanceReferenceMetricTable.TABLE, InstanceReferenceMetricTable.COLUMN_ID);
Object[] params = new Object[] {id};
try (ResultSet rs = client.executeQuery(sql, params)) {
if (rs.next()) {
InstanceReferenceMetric instanceReferenceMetric = new InstanceReferenceMetric();
instanceReferenceMetric.setId(id);
instanceReferenceMetric.setFrontInstanceId(rs.getInt(InstanceReferenceMetricTable.COLUMN_FRONT_INSTANCE_ID));
instanceReferenceMetric.setBehindInstanceId(rs.getInt(InstanceReferenceMetricTable.COLUMN_BEHIND_INSTANCE_ID));
instanceReferenceMetric.setTransactionCalls(rs.getLong(InstanceReferenceMetricTable.COLUMN_TRANSACTION_CALLS));
instanceReferenceMetric.setTransactionErrorCalls(rs.getLong(InstanceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS));
instanceReferenceMetric.setTransactionDurationSum(rs.getLong(InstanceReferenceMetricTable.COLUMN_TRANSACTION_DURATION_SUM));
instanceReferenceMetric.setTransactionErrorDurationSum(rs.getLong(InstanceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM));
instanceReferenceMetric.setBusinessTransactionCalls(rs.getLong(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS));
instanceReferenceMetric.setBusinessTransactionErrorCalls(rs.getLong(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS));
instanceReferenceMetric.setBusinessTransactionDurationSum(rs.getLong(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM));
instanceReferenceMetric.setBusinessTransactionErrorDurationSum(rs.getLong(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM));
instanceReferenceMetric.setMqTransactionCalls(rs.getLong(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_CALLS));
instanceReferenceMetric.setMqTransactionErrorCalls(rs.getLong(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS));
instanceReferenceMetric.setMqTransactionDurationSum(rs.getLong(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM));
instanceReferenceMetric.setMqTransactionErrorDurationSum(rs.getLong(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM));
instanceReferenceMetric.setTimeBucket(rs.getLong(InstanceReferenceMetricTable.COLUMN_TIME_BUCKET));
return instanceReferenceMetric;
}
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
return null;
}
@Override public H2SqlEntity prepareBatchInsert(InstanceReferenceMetric data) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(InstanceReferenceMetricTable.COLUMN_ID, data.getId());
source.put(InstanceReferenceMetricTable.COLUMN_FRONT_INSTANCE_ID, data.getFrontInstanceId());
source.put(InstanceReferenceMetricTable.COLUMN_BEHIND_INSTANCE_ID, data.getBehindInstanceId());
source.put(InstanceReferenceMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(InstanceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(InstanceReferenceMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchInsertSql(InstanceReferenceMetricTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
return entity;
}
@Override public H2SqlEntity prepareBatchUpdate(InstanceReferenceMetric data) {
Map<String, Object> source = new HashMap<>();
H2SqlEntity entity = new H2SqlEntity();
source.put(InstanceReferenceMetricTable.COLUMN_FRONT_INSTANCE_ID, data.getFrontInstanceId());
source.put(InstanceReferenceMetricTable.COLUMN_BEHIND_INSTANCE_ID, data.getBehindInstanceId());
source.put(InstanceReferenceMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(InstanceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(InstanceReferenceMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(InstanceReferenceMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchUpdateSql(InstanceReferenceMetricTable.TABLE, source.keySet(), InstanceReferenceMetricTable.COLUMN_ID);
entity.setSql(sql);
List<Object> values = new ArrayList<>(source.values());
values.add(data.getId());
entity.setParams(values.toArray(new Object[0]));
return entity;
}
@Override public void deleteHistory(Long startTimestamp, Long endTimestamp) {
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.client.h2.H2ClientException;
import org.apache.skywalking.apm.collector.storage.base.sql.SqlBuilder;
import org.apache.skywalking.apm.collector.storage.dao.smp.IServiceMinuteMetricPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.service.ServiceMetric;
import org.apache.skywalking.apm.collector.storage.table.service.ServiceMetricTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng, clevertension
*/
public class ServiceMinuteMetricH2PersistenceDAO extends H2DAO implements IServiceMinuteMetricPersistenceDAO<H2SqlEntity, H2SqlEntity, ServiceMetric> {
private final Logger logger = LoggerFactory.getLogger(ServiceMinuteMetricH2PersistenceDAO.class);
private static final String GET_SQL = "select * from {0} where {1} = ?";
public ServiceMinuteMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override
public ServiceMetric get(String id) {
H2Client client = getClient();
String sql = SqlBuilder.buildSql(GET_SQL, ServiceMetricTable.TABLE, ServiceMetricTable.COLUMN_ID);
Object[] params = new Object[] {id};
try (ResultSet rs = client.executeQuery(sql, params)) {
if (rs.next()) {
ServiceMetric serviceMetric = new ServiceMetric();
serviceMetric.setId(id);
serviceMetric.setServiceId(rs.getInt(ServiceMetricTable.COLUMN_SERVICE_ID));
serviceMetric.setTransactionCalls(rs.getLong(ServiceMetricTable.COLUMN_TRANSACTION_CALLS));
serviceMetric.setTransactionErrorCalls(rs.getLong(ServiceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS));
serviceMetric.setTransactionDurationSum(rs.getLong(ServiceMetricTable.COLUMN_TRANSACTION_DURATION_SUM));
serviceMetric.setTransactionErrorDurationSum(rs.getLong(ServiceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM));
serviceMetric.setBusinessTransactionCalls(rs.getLong(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS));
serviceMetric.setBusinessTransactionErrorCalls(rs.getLong(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS));
serviceMetric.setBusinessTransactionDurationSum(rs.getLong(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM));
serviceMetric.setBusinessTransactionErrorDurationSum(rs.getLong(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM));
serviceMetric.setMqTransactionCalls(rs.getLong(ServiceMetricTable.COLUMN_MQ_TRANSACTION_CALLS));
serviceMetric.setMqTransactionErrorCalls(rs.getLong(ServiceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS));
serviceMetric.setMqTransactionDurationSum(rs.getLong(ServiceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM));
serviceMetric.setMqTransactionErrorDurationSum(rs.getLong(ServiceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM));
serviceMetric.setTimeBucket(rs.getLong(ServiceMetricTable.COLUMN_TIME_BUCKET));
return serviceMetric;
}
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
return null;
}
@Override
public H2SqlEntity prepareBatchInsert(ServiceMetric data) {
H2SqlEntity entity = new H2SqlEntity();
Map<String, Object> source = new HashMap<>();
source.put(ServiceMetricTable.COLUMN_ID, data.getId());
source.put(ServiceMetricTable.COLUMN_SERVICE_ID, data.getServiceId());
source.put(ServiceMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(ServiceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(ServiceMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(ServiceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(ServiceMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(ServiceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(ServiceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(ServiceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(ServiceMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchInsertSql(ServiceMetricTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
return entity;
}
@Override
public H2SqlEntity prepareBatchUpdate(ServiceMetric data) {
H2SqlEntity entity = new H2SqlEntity();
Map<String, Object> source = new HashMap<>();
source.put(ServiceMetricTable.COLUMN_SERVICE_ID, data.getServiceId());
source.put(ServiceMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(ServiceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(ServiceMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(ServiceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(ServiceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(ServiceMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(ServiceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(ServiceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(ServiceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(ServiceMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchUpdateSql(ServiceMetricTable.TABLE, source.keySet(), ServiceMetricTable.COLUMN_ID);
entity.setSql(sql);
List<Object> values = new ArrayList<>(source.values());
values.add(data.getId());
entity.setParams(values.toArray(new Object[0]));
return entity;
}
@Override public void deleteHistory(Long startTimestamp, Long endTimestamp) {
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.client.h2.H2ClientException;
import org.apache.skywalking.apm.collector.storage.base.sql.SqlBuilder;
import org.apache.skywalking.apm.collector.storage.dao.srmp.IServiceReferenceMinuteMetricPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.H2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.service.ServiceReferenceMetric;
import org.apache.skywalking.apm.collector.storage.table.service.ServiceReferenceMetricTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng, clevertension
*/
public class ServiceReferenceMetricH2PersistenceDAO extends H2DAO implements IServiceReferenceMinuteMetricPersistenceDAO<H2SqlEntity, H2SqlEntity, ServiceReferenceMetric> {
private final Logger logger = LoggerFactory.getLogger(ServiceReferenceMetricH2PersistenceDAO.class);
private static final String GET_SQL = "select * from {0} where {1} = ?";
public ServiceReferenceMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override
public ServiceReferenceMetric get(String id) {
H2Client client = getClient();
String sql = SqlBuilder.buildSql(GET_SQL, ServiceReferenceMetricTable.TABLE, ServiceReferenceMetricTable.COLUMN_ID);
Object[] params = new Object[] {id};
try (ResultSet rs = client.executeQuery(sql, params)) {
if (rs.next()) {
ServiceReferenceMetric serviceReferenceMetric = new ServiceReferenceMetric();
serviceReferenceMetric.setId(id);
serviceReferenceMetric.setFrontServiceId(rs.getInt(ServiceReferenceMetricTable.COLUMN_FRONT_SERVICE_ID));
serviceReferenceMetric.setBehindServiceId(rs.getInt(ServiceReferenceMetricTable.COLUMN_BEHIND_SERVICE_ID));
serviceReferenceMetric.setSourceValue(rs.getInt(ServiceReferenceMetricTable.COLUMN_SOURCE_VALUE));
serviceReferenceMetric.setTransactionCalls(rs.getLong(ServiceReferenceMetricTable.COLUMN_TRANSACTION_CALLS));
serviceReferenceMetric.setTransactionErrorCalls(rs.getLong(ServiceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS));
serviceReferenceMetric.setTransactionDurationSum(rs.getLong(ServiceReferenceMetricTable.COLUMN_TRANSACTION_DURATION_SUM));
serviceReferenceMetric.setTransactionErrorDurationSum(rs.getLong(ServiceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM));
serviceReferenceMetric.setBusinessTransactionCalls(rs.getLong(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS));
serviceReferenceMetric.setBusinessTransactionErrorCalls(rs.getLong(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS));
serviceReferenceMetric.setBusinessTransactionDurationSum(rs.getLong(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM));
serviceReferenceMetric.setBusinessTransactionErrorDurationSum(rs.getLong(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM));
serviceReferenceMetric.setMqTransactionCalls(rs.getLong(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_CALLS));
serviceReferenceMetric.setMqTransactionErrorCalls(rs.getLong(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS));
serviceReferenceMetric.setMqTransactionDurationSum(rs.getLong(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM));
serviceReferenceMetric.setMqTransactionErrorDurationSum(rs.getLong(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM));
serviceReferenceMetric.setTimeBucket(rs.getLong(ServiceReferenceMetricTable.COLUMN_TIME_BUCKET));
return serviceReferenceMetric;
}
} catch (SQLException | H2ClientException e) {
logger.error(e.getMessage(), e);
}
return null;
}
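// get(id) returns null both when no row matches the id and when the query fails; the failure is only logged.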
@Override
public H2SqlEntity prepareBatchInsert(ServiceReferenceMetric data) {
H2SqlEntity entity = new H2SqlEntity();
Map<String, Object> source = new HashMap<>();
source.put(ServiceReferenceMetricTable.COLUMN_ID, data.getId());
source.put(ServiceReferenceMetricTable.COLUMN_FRONT_SERVICE_ID, data.getFrontServiceId());
source.put(ServiceReferenceMetricTable.COLUMN_BEHIND_SERVICE_ID, data.getBehindServiceId());
source.put(ServiceReferenceMetricTable.COLUMN_SOURCE_VALUE, data.getSourceValue());
source.put(ServiceReferenceMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(ServiceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(ServiceReferenceMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchInsertSql(ServiceReferenceMetricTable.TABLE, source.keySet());
entity.setSql(sql);
entity.setParams(source.values().toArray(new Object[0]));
return entity;
}
@Override
public H2SqlEntity prepareBatchUpdate(ServiceReferenceMetric data) {
H2SqlEntity entity = new H2SqlEntity();
Map<String, Object> source = new HashMap<>();
source.put(ServiceReferenceMetricTable.COLUMN_FRONT_SERVICE_ID, data.getFrontServiceId());
source.put(ServiceReferenceMetricTable.COLUMN_BEHIND_SERVICE_ID, data.getBehindServiceId());
source.put(ServiceReferenceMetricTable.COLUMN_SOURCE_VALUE, data.getSourceValue());
source.put(ServiceReferenceMetricTable.COLUMN_TRANSACTION_CALLS, data.getTransactionCalls());
source.put(ServiceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, data.getTransactionErrorCalls());
source.put(ServiceReferenceMetricTable.COLUMN_TRANSACTION_DURATION_SUM, data.getTransactionDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, data.getTransactionErrorDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, data.getBusinessTransactionCalls());
source.put(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, data.getBusinessTransactionErrorCalls());
source.put(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, data.getBusinessTransactionDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, data.getBusinessTransactionErrorDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_CALLS, data.getMqTransactionCalls());
source.put(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, data.getMqTransactionErrorCalls());
source.put(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, data.getMqTransactionDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, data.getMqTransactionErrorDurationSum());
source.put(ServiceReferenceMetricTable.COLUMN_TIME_BUCKET, data.getTimeBucket());
String sql = SqlBuilder.buildBatchUpdateSql(ServiceReferenceMetricTable.TABLE, source.keySet(), ServiceReferenceMetricTable.COLUMN_ID);
entity.setSql(sql);
List<Object> values = new ArrayList<>(source.values());
values.add(data.getId());
entity.setParams(values.toArray(new Object[0]));
return entity;
}
@Override public void deleteHistory(Long startTimestamp, Long endTimestamp) {
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.acp;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponent;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponentTable;
/**
* @author peng-yongsheng
*/
public abstract class AbstractApplicationComponentH2PersistenceDAO extends AbstractPersistenceH2DAO<ApplicationComponent> {
AbstractApplicationComponentH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected final ApplicationComponent h2DataToStreamData(ResultSet resultSet) throws SQLException {
ApplicationComponent applicationComponent = new ApplicationComponent();
applicationComponent.setId(resultSet.getString(ApplicationComponentTable.COLUMN_ID));
applicationComponent.setMetricId(resultSet.getString(ApplicationComponentTable.COLUMN_METRIC_ID));
applicationComponent.setComponentId(resultSet.getInt(ApplicationComponentTable.COLUMN_COMPONENT_ID));
applicationComponent.setPeerId(resultSet.getInt(ApplicationComponentTable.COLUMN_PEER_ID));
applicationComponent.setTimeBucket(resultSet.getLong(ApplicationComponentTable.COLUMN_TIME_BUCKET));
return applicationComponent;
}
@Override protected final Map<String, Object> streamDataToH2Data(ApplicationComponent streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ApplicationComponentTable.COLUMN_METRIC_ID, streamData.getMetricId());
source.put(ApplicationComponentTable.COLUMN_COMPONENT_ID, streamData.getComponentId());
source.put(ApplicationComponentTable.COLUMN_PEER_ID, streamData.getPeerId());
source.put(ApplicationComponentTable.COLUMN_TIME_BUCKET, streamData.getTimeBucket());
return source;
}
}
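// --- Editor's illustrative sketch (not part of the SkyWalking source) ---
// The abstract DAO above only supplies the mapping hooks (tableName, h2DataToStreamData,
// streamDataToH2Data); the shared query/insert/update plumbing lives in AbstractPersistenceH2DAO,
// which is not shown in this diff. The hypothetical base class below shows how such hooks are
// typically combined; all names and details here are assumptions, not the actual implementation.
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;
public abstract class PersistenceDaoSketch<STREAM> {
    protected abstract String tableName();
    protected abstract Map<String, Object> streamDataToRow(STREAM data);
    public String selectByIdSql(String idColumn) {
        return "SELECT * FROM " + tableName() + " WHERE " + idColumn + " = ?";
    }
    public String insertSql(STREAM data) {
        Map<String, Object> row = streamDataToRow(data);
        String cols = String.join(", ", row.keySet());
        String marks = row.keySet().stream().map(c -> "?").collect(Collectors.joining(", "));
        return "INSERT INTO " + tableName() + " (" + cols + ") VALUES (" + marks + ")";
    }
    public static void main(String[] args) {
        PersistenceDaoSketch<Map<String, Object>> demo = new PersistenceDaoSketch<Map<String, Object>>() {
            @Override protected String tableName() { return "application_component"; }
            @Override protected Map<String, Object> streamDataToRow(Map<String, Object> data) { return data; }
        };
        Map<String, Object> row = new LinkedHashMap<>();
        row.put("component_id", 9);
        row.put("peer_id", 3);
        System.out.println(demo.selectByIdSql("id"));  // SELECT * FROM application_component WHERE id = ?
        System.out.println(demo.insertSql(row));       // INSERT INTO application_component (component_id, peer_id) VALUES (?, ?)
    }
}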
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.acp;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.storage.TimePyramid;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.acp.IApplicationComponentDayPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponent;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponentTable;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationComponentDayH2PersistenceDAO extends AbstractApplicationComponentH2PersistenceDAO implements IApplicationComponentDayPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationComponent> {
public ApplicationComponentDayH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationComponentTable.TABLE + Const.ID_SPLIT + TimePyramid.Day.getName();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.acp;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.storage.TimePyramid;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.acp.IApplicationComponentHourPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponent;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponentTable;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationComponentHourH2PersistenceDAO extends AbstractApplicationComponentH2PersistenceDAO implements IApplicationComponentHourPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationComponent> {
public ApplicationComponentHourH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationComponentTable.TABLE + Const.ID_SPLIT + TimePyramid.Hour.getName();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.acp;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.storage.TimePyramid;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.acp.IApplicationComponentMinutePersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponent;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponentTable;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationComponentMinuteH2PersistenceDAO extends AbstractApplicationComponentH2PersistenceDAO implements IApplicationComponentMinutePersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationComponent> {
public ApplicationComponentMinuteH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationComponentTable.TABLE + Const.ID_SPLIT + TimePyramid.Minute.getName();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.acp;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.storage.TimePyramid;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.acp.IApplicationComponentMonthPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponent;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationComponentTable;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationComponentMonthH2PersistenceDAO extends AbstractApplicationComponentH2PersistenceDAO implements IApplicationComponentMonthPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationComponent> {
public ApplicationComponentMonthH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationComponentTable.TABLE + Const.ID_SPLIT + TimePyramid.Month.getName();
}
}
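// The Day/Hour/Minute/Month DAOs above differ only in the TimePyramid suffix appended to the base
// table name; all row mapping is inherited from AbstractApplicationComponentH2PersistenceDAO.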
......@@ -16,40 +16,58 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IApplicationAlarmPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.ApplicationAlarm;
import org.apache.skywalking.apm.collector.storage.table.alarm.ApplicationAlarmTable;
/**
* @author peng-yongsheng
*/
public class ApplicationAlarmH2PersistenceDAO extends AbstractPersistenceH2DAO<ApplicationAlarm> implements IApplicationAlarmPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationAlarm> {
public ApplicationAlarmH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationAlarmTable.TABLE;
}
@Override protected ApplicationAlarm h2DataToStreamData(ResultSet resultSet) throws SQLException {
ApplicationAlarm applicationAlarm = new ApplicationAlarm();
applicationAlarm.setId(resultSet.getString(ApplicationAlarmTable.COLUMN_ID));
applicationAlarm.setSourceValue(resultSet.getInt(ApplicationAlarmTable.COLUMN_SOURCE_VALUE));
applicationAlarm.setAlarmType(resultSet.getInt(ApplicationAlarmTable.COLUMN_ALARM_TYPE));
applicationAlarm.setApplicationId(resultSet.getInt(ApplicationAlarmTable.COLUMN_APPLICATION_ID));
applicationAlarm.setLastTimeBucket(resultSet.getLong(ApplicationAlarmTable.COLUMN_LAST_TIME_BUCKET));
applicationAlarm.setAlarmContent(resultSet.getString(ApplicationAlarmTable.COLUMN_ALARM_CONTENT));
return applicationAlarm;
}
@Override protected Map<String, Object> streamDataToH2Data(ApplicationAlarm streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ApplicationAlarmTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(ApplicationAlarmTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(ApplicationAlarmTable.COLUMN_APPLICATION_ID, streamData.getApplicationId());
source.put(ApplicationAlarmTable.COLUMN_LAST_TIME_BUCKET, streamData.getLastTimeBucket());
source.put(ApplicationAlarmTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
......@@ -16,40 +16,58 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IApplicationAlarmListPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.ApplicationAlarmList;
import org.apache.skywalking.apm.collector.storage.table.alarm.ApplicationAlarmListTable;
/**
* @author peng-yongsheng
*/
public class ApplicationAlarmListH2PersistenceDAO extends AbstractPersistenceH2DAO<ApplicationAlarmList> implements IApplicationAlarmListPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationAlarmList> {
public ApplicationAlarmListH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationAlarmListTable.TABLE;
}
@Override protected ApplicationAlarmList h2DataToStreamData(ResultSet resultSet) throws SQLException {
ApplicationAlarmList applicationAlarmList = new ApplicationAlarmList();
applicationAlarmList.setId(resultSet.getString(ApplicationAlarmListTable.COLUMN_ID));
applicationAlarmList.setSourceValue(resultSet.getInt(ApplicationAlarmListTable.COLUMN_SOURCE_VALUE));
applicationAlarmList.setAlarmType(resultSet.getInt(ApplicationAlarmListTable.COLUMN_ALARM_TYPE));
applicationAlarmList.setApplicationId(resultSet.getInt(ApplicationAlarmListTable.COLUMN_APPLICATION_ID));
applicationAlarmList.setTimeBucket(resultSet.getLong(ApplicationAlarmListTable.COLUMN_TIME_BUCKET));
applicationAlarmList.setAlarmContent(resultSet.getString(ApplicationAlarmListTable.COLUMN_ALARM_CONTENT));
return applicationAlarmList;
}
@Override protected Map<String, Object> streamDataToH2Data(ApplicationAlarmList streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ApplicationAlarmListTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(ApplicationAlarmListTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(ApplicationAlarmListTable.COLUMN_APPLICATION_ID, streamData.getApplicationId());
source.put(ApplicationAlarmListTable.COLUMN_TIME_BUCKET, streamData.getTimeBucket());
source.put(ApplicationAlarmListTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
......@@ -16,40 +16,60 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IApplicationReferenceAlarmPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.ApplicationReferenceAlarm;
import org.apache.skywalking.apm.collector.storage.table.alarm.ApplicationReferenceAlarmTable;
/**
* @author peng-yongsheng
*/
public class ApplicationReferenceAlarmH2PersistenceDAO extends AbstractPersistenceH2DAO<ApplicationReferenceAlarm> implements IApplicationReferenceAlarmPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationReferenceAlarm> {
public ApplicationReferenceAlarmH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationReferenceAlarmTable.TABLE;
}
@Override protected ApplicationReferenceAlarm h2DataToStreamData(ResultSet resultSet) throws SQLException {
ApplicationReferenceAlarm applicationReferenceAlarm = new ApplicationReferenceAlarm();
applicationReferenceAlarm.setId(resultSet.getString(ApplicationReferenceAlarmTable.COLUMN_ID));
applicationReferenceAlarm.setSourceValue(resultSet.getInt(ApplicationReferenceAlarmTable.COLUMN_SOURCE_VALUE));
applicationReferenceAlarm.setAlarmType(resultSet.getInt(ApplicationReferenceAlarmTable.COLUMN_ALARM_TYPE));
applicationReferenceAlarm.setFrontApplicationId(resultSet.getInt(ApplicationReferenceAlarmTable.COLUMN_FRONT_APPLICATION_ID));
applicationReferenceAlarm.setBehindApplicationId(resultSet.getInt(ApplicationReferenceAlarmTable.COLUMN_BEHIND_APPLICATION_ID));
applicationReferenceAlarm.setLastTimeBucket(resultSet.getLong(ApplicationReferenceAlarmTable.COLUMN_LAST_TIME_BUCKET));
applicationReferenceAlarm.setAlarmContent(resultSet.getString(ApplicationReferenceAlarmTable.COLUMN_ALARM_CONTENT));
return applicationReferenceAlarm;
}
@Override protected Map<String, Object> streamDataToH2Data(ApplicationReferenceAlarm streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ApplicationReferenceAlarmTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(ApplicationReferenceAlarmTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(ApplicationReferenceAlarmTable.COLUMN_FRONT_APPLICATION_ID, streamData.getFrontApplicationId());
source.put(ApplicationReferenceAlarmTable.COLUMN_BEHIND_APPLICATION_ID, streamData.getBehindApplicationId());
source.put(ApplicationReferenceAlarmTable.COLUMN_LAST_TIME_BUCKET, streamData.getLastTimeBucket());
source.put(ApplicationReferenceAlarmTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IApplicationReferenceAlarmListPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.ApplicationReferenceAlarmList;
import org.apache.skywalking.apm.collector.storage.table.alarm.ApplicationReferenceAlarmListTable;
/**
* @author peng-yongsheng
*/
public class ApplicationReferenceAlarmListH2PersistenceDAO extends AbstractPersistenceH2DAO<ApplicationReferenceAlarmList> implements IApplicationReferenceAlarmListPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationReferenceAlarmList> {
public ApplicationReferenceAlarmListH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationReferenceAlarmListTable.TABLE;
}
@Override protected ApplicationReferenceAlarmList h2DataToStreamData(ResultSet resultSet) throws SQLException {
ApplicationReferenceAlarmList applicationReferenceAlarmList = new ApplicationReferenceAlarmList();
applicationReferenceAlarmList.setId(resultSet.getString(ApplicationReferenceAlarmListTable.COLUMN_ID));
applicationReferenceAlarmList.setSourceValue(resultSet.getInt(ApplicationReferenceAlarmListTable.COLUMN_SOURCE_VALUE));
applicationReferenceAlarmList.setAlarmType(resultSet.getInt(ApplicationReferenceAlarmListTable.COLUMN_ALARM_TYPE));
applicationReferenceAlarmList.setFrontApplicationId(resultSet.getInt(ApplicationReferenceAlarmListTable.COLUMN_FRONT_APPLICATION_ID));
applicationReferenceAlarmList.setBehindApplicationId(resultSet.getInt(ApplicationReferenceAlarmListTable.COLUMN_BEHIND_APPLICATION_ID));
applicationReferenceAlarmList.setTimeBucket(resultSet.getLong(ApplicationReferenceAlarmListTable.COLUMN_TIME_BUCKET));
applicationReferenceAlarmList.setAlarmContent(resultSet.getString(ApplicationReferenceAlarmListTable.COLUMN_ALARM_CONTENT));
return applicationReferenceAlarmList;
}
@Override protected Map<String, Object> streamDataToH2Data(ApplicationReferenceAlarmList streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ApplicationReferenceAlarmListTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(ApplicationReferenceAlarmListTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(ApplicationReferenceAlarmListTable.COLUMN_FRONT_APPLICATION_ID, streamData.getFrontApplicationId());
source.put(ApplicationReferenceAlarmListTable.COLUMN_BEHIND_APPLICATION_ID, streamData.getBehindApplicationId());
source.put(ApplicationReferenceAlarmListTable.COLUMN_TIME_BUCKET, streamData.getTimeBucket());
source.put(ApplicationReferenceAlarmListTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
......@@ -16,40 +16,60 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IInstanceAlarmPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.InstanceAlarm;
import org.apache.skywalking.apm.collector.storage.table.alarm.InstanceAlarmTable;
/**
* @author peng-yongsheng
*/
public class InstanceAlarmH2PersistenceDAO extends AbstractPersistenceH2DAO<InstanceAlarm> implements IInstanceAlarmPersistenceDAO<H2SqlEntity, H2SqlEntity, InstanceAlarm> {
public InstanceAlarmH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return InstanceAlarmTable.TABLE;
}
@Override protected InstanceAlarm h2DataToStreamData(ResultSet resultSet) throws SQLException {
InstanceAlarm instanceAlarm = new InstanceAlarm();
instanceAlarm.setId(resultSet.getString(InstanceAlarmTable.COLUMN_ID));
instanceAlarm.setSourceValue(resultSet.getInt(InstanceAlarmTable.COLUMN_SOURCE_VALUE));
instanceAlarm.setAlarmType(resultSet.getInt(InstanceAlarmTable.COLUMN_ALARM_TYPE));
instanceAlarm.setApplicationId(resultSet.getInt(InstanceAlarmTable.COLUMN_APPLICATION_ID));
instanceAlarm.setInstanceId(resultSet.getInt(InstanceAlarmTable.COLUMN_INSTANCE_ID));
instanceAlarm.setLastTimeBucket(resultSet.getLong(InstanceAlarmTable.COLUMN_LAST_TIME_BUCKET));
instanceAlarm.setAlarmContent(resultSet.getString(InstanceAlarmTable.COLUMN_ALARM_CONTENT));
return instanceAlarm;
}
@Override protected Map<String, Object> streamDataToH2Data(InstanceAlarm streamData) {
Map<String, Object> source = new HashMap<>();
source.put(InstanceAlarmTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(InstanceAlarmTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(InstanceAlarmTable.COLUMN_APPLICATION_ID, streamData.getApplicationId());
source.put(InstanceAlarmTable.COLUMN_INSTANCE_ID, streamData.getInstanceId());
source.put(InstanceAlarmTable.COLUMN_LAST_TIME_BUCKET, streamData.getLastTimeBucket());
source.put(InstanceAlarmTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
......@@ -16,40 +16,60 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IInstanceAlarmListPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.InstanceAlarmList;
import org.apache.skywalking.apm.collector.storage.table.alarm.InstanceAlarmListTable;
/**
* @author peng-yongsheng
*/
public class InstanceAlarmListH2PersistenceDAO extends AbstractPersistenceH2DAO<InstanceAlarmList> implements IInstanceAlarmListPersistenceDAO<H2SqlEntity, H2SqlEntity, InstanceAlarmList> {
public InstanceAlarmListH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return InstanceAlarmListTable.TABLE;
}
@Override protected InstanceAlarmList h2DataToStreamData(ResultSet resultSet) throws SQLException {
InstanceAlarmList instanceAlarmList = new InstanceAlarmList();
instanceAlarmList.setId(resultSet.getString(InstanceAlarmListTable.COLUMN_ID));
instanceAlarmList.setSourceValue(resultSet.getInt(InstanceAlarmListTable.COLUMN_SOURCE_VALUE));
instanceAlarmList.setAlarmType(resultSet.getInt(InstanceAlarmListTable.COLUMN_ALARM_TYPE));
instanceAlarmList.setApplicationId(resultSet.getInt(InstanceAlarmListTable.COLUMN_APPLICATION_ID));
instanceAlarmList.setInstanceId(resultSet.getInt(InstanceAlarmListTable.COLUMN_INSTANCE_ID));
instanceAlarmList.setTimeBucket(resultSet.getLong(InstanceAlarmListTable.COLUMN_TIME_BUCKET));
instanceAlarmList.setAlarmContent(resultSet.getString(InstanceAlarmListTable.COLUMN_ALARM_CONTENT));
return instanceAlarmList;
}
@Override protected Map<String, Object> streamDataToH2Data(InstanceAlarmList streamData) {
Map<String, Object> source = new HashMap<>();
source.put(InstanceAlarmListTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(InstanceAlarmListTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(InstanceAlarmListTable.COLUMN_APPLICATION_ID, streamData.getApplicationId());
source.put(InstanceAlarmListTable.COLUMN_INSTANCE_ID, streamData.getInstanceId());
source.put(InstanceAlarmListTable.COLUMN_TIME_BUCKET, streamData.getTimeBucket());
source.put(InstanceAlarmListTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IInstanceReferenceAlarmPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.InstanceReferenceAlarm;
import org.apache.skywalking.apm.collector.storage.table.alarm.InstanceReferenceAlarmTable;
/**
* @author peng-yongsheng
*/
public class InstanceReferenceAlarmH2PersistenceDAO extends AbstractPersistenceH2DAO<InstanceReferenceAlarm> implements IInstanceReferenceAlarmPersistenceDAO<H2SqlEntity, H2SqlEntity, InstanceReferenceAlarm> {
public InstanceReferenceAlarmH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return InstanceReferenceAlarmTable.TABLE;
}
@Override protected InstanceReferenceAlarm h2DataToStreamData(ResultSet resultSet) throws SQLException {
InstanceReferenceAlarm instanceReferenceAlarm = new InstanceReferenceAlarm();
instanceReferenceAlarm.setId(resultSet.getString(InstanceReferenceAlarmTable.COLUMN_ID));
instanceReferenceAlarm.setSourceValue(resultSet.getInt(InstanceReferenceAlarmTable.COLUMN_SOURCE_VALUE));
instanceReferenceAlarm.setAlarmType(resultSet.getInt(InstanceReferenceAlarmTable.COLUMN_ALARM_TYPE));
instanceReferenceAlarm.setFrontApplicationId(resultSet.getInt(InstanceReferenceAlarmTable.COLUMN_FRONT_APPLICATION_ID));
instanceReferenceAlarm.setFrontInstanceId(resultSet.getInt(InstanceReferenceAlarmTable.COLUMN_FRONT_INSTANCE_ID));
instanceReferenceAlarm.setBehindApplicationId(resultSet.getInt(InstanceReferenceAlarmTable.COLUMN_BEHIND_APPLICATION_ID));
instanceReferenceAlarm.setBehindInstanceId(resultSet.getInt(InstanceReferenceAlarmTable.COLUMN_BEHIND_INSTANCE_ID));
instanceReferenceAlarm.setLastTimeBucket(resultSet.getLong(InstanceReferenceAlarmTable.COLUMN_LAST_TIME_BUCKET));
instanceReferenceAlarm.setAlarmContent(resultSet.getString(InstanceReferenceAlarmTable.COLUMN_ALARM_CONTENT));
return instanceReferenceAlarm;
}
@Override protected Map<String, Object> streamDataToH2Data(InstanceReferenceAlarm streamData) {
Map<String, Object> source = new HashMap<>();
source.put(InstanceReferenceAlarmTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(InstanceReferenceAlarmTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(InstanceReferenceAlarmTable.COLUMN_FRONT_APPLICATION_ID, streamData.getFrontApplicationId());
source.put(InstanceReferenceAlarmTable.COLUMN_FRONT_INSTANCE_ID, streamData.getFrontInstanceId());
source.put(InstanceReferenceAlarmTable.COLUMN_BEHIND_APPLICATION_ID, streamData.getBehindApplicationId());
source.put(InstanceReferenceAlarmTable.COLUMN_BEHIND_INSTANCE_ID, streamData.getBehindInstanceId());
source.put(InstanceReferenceAlarmTable.COLUMN_LAST_TIME_BUCKET, streamData.getLastTimeBucket());
source.put(InstanceReferenceAlarmTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IInstanceReferenceAlarmListPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.InstanceReferenceAlarmList;
import org.apache.skywalking.apm.collector.storage.table.alarm.InstanceReferenceAlarmListTable;
/**
* @author peng-yongsheng
*/
public class InstanceReferenceAlarmListH2PersistenceDAO extends AbstractPersistenceH2DAO<InstanceReferenceAlarmList> implements IInstanceReferenceAlarmListPersistenceDAO<H2SqlEntity, H2SqlEntity, InstanceReferenceAlarmList> {
public InstanceReferenceAlarmListH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return InstanceReferenceAlarmListTable.TABLE;
}
@Override protected InstanceReferenceAlarmList h2DataToStreamData(ResultSet resultSet) throws SQLException {
InstanceReferenceAlarmList instanceReferenceAlarmList = new InstanceReferenceAlarmList();
instanceReferenceAlarmList.setId(resultSet.getString(InstanceReferenceAlarmListTable.COLUMN_ID));
instanceReferenceAlarmList.setSourceValue(resultSet.getInt(InstanceReferenceAlarmListTable.COLUMN_SOURCE_VALUE));
instanceReferenceAlarmList.setAlarmType(resultSet.getInt(InstanceReferenceAlarmListTable.COLUMN_ALARM_TYPE));
instanceReferenceAlarmList.setFrontApplicationId(resultSet.getInt(InstanceReferenceAlarmListTable.COLUMN_FRONT_APPLICATION_ID));
instanceReferenceAlarmList.setFrontInstanceId(resultSet.getInt(InstanceReferenceAlarmListTable.COLUMN_FRONT_INSTANCE_ID));
instanceReferenceAlarmList.setBehindApplicationId(resultSet.getInt(InstanceReferenceAlarmListTable.COLUMN_BEHIND_APPLICATION_ID));
instanceReferenceAlarmList.setBehindInstanceId(resultSet.getInt(InstanceReferenceAlarmListTable.COLUMN_BEHIND_INSTANCE_ID));
instanceReferenceAlarmList.setTimeBucket(resultSet.getLong(InstanceReferenceAlarmListTable.COLUMN_TIME_BUCKET));
instanceReferenceAlarmList.setAlarmContent(resultSet.getString(InstanceReferenceAlarmListTable.COLUMN_ALARM_CONTENT));
return instanceReferenceAlarmList;
}
@Override protected Map<String, Object> streamDataToH2Data(InstanceReferenceAlarmList streamData) {
Map<String, Object> source = new HashMap<>();
source.put(InstanceReferenceAlarmListTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(InstanceReferenceAlarmListTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(InstanceReferenceAlarmListTable.COLUMN_FRONT_APPLICATION_ID, streamData.getFrontApplicationId());
source.put(InstanceReferenceAlarmListTable.COLUMN_FRONT_INSTANCE_ID, streamData.getFrontInstanceId());
source.put(InstanceReferenceAlarmListTable.COLUMN_BEHIND_APPLICATION_ID, streamData.getBehindApplicationId());
source.put(InstanceReferenceAlarmListTable.COLUMN_BEHIND_INSTANCE_ID, streamData.getBehindInstanceId());
source.put(InstanceReferenceAlarmListTable.COLUMN_TIME_BUCKET, streamData.getTimeBucket());
source.put(InstanceReferenceAlarmListTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
......@@ -16,40 +16,62 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IServiceAlarmPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.ServiceAlarm;
import org.apache.skywalking.apm.collector.storage.table.alarm.ServiceAlarmTable;
/**
* @author peng-yongsheng
*/
public class ServiceAlarmH2PersistenceDAO extends AbstractPersistenceH2DAO<ServiceAlarm> implements IServiceAlarmPersistenceDAO<H2SqlEntity, H2SqlEntity, ServiceAlarm> {
public ServiceAlarmH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ServiceAlarmTable.TABLE;
}
@Override protected ServiceAlarm h2DataToStreamData(ResultSet resultSet) throws SQLException {
ServiceAlarm serviceAlarm = new ServiceAlarm();
serviceAlarm.setId(resultSet.getString(ServiceAlarmTable.COLUMN_ID));
serviceAlarm.setSourceValue(resultSet.getInt(ServiceAlarmTable.COLUMN_SOURCE_VALUE));
serviceAlarm.setAlarmType(resultSet.getInt(ServiceAlarmTable.COLUMN_ALARM_TYPE));
serviceAlarm.setApplicationId(resultSet.getInt(ServiceAlarmTable.COLUMN_APPLICATION_ID));
serviceAlarm.setInstanceId(resultSet.getInt(ServiceAlarmTable.COLUMN_INSTANCE_ID));
serviceAlarm.setServiceId(resultSet.getInt(ServiceAlarmTable.COLUMN_SERVICE_ID));
serviceAlarm.setLastTimeBucket(resultSet.getLong(ServiceAlarmTable.COLUMN_LAST_TIME_BUCKET));
serviceAlarm.setAlarmContent(resultSet.getString(ServiceAlarmTable.COLUMN_ALARM_CONTENT));
return serviceAlarm;
}
@Override protected Map<String, Object> streamDataToH2Data(ServiceAlarm streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ServiceAlarmTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(ServiceAlarmTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(ServiceAlarmTable.COLUMN_APPLICATION_ID, streamData.getApplicationId());
source.put(ServiceAlarmTable.COLUMN_INSTANCE_ID, streamData.getInstanceId());
source.put(ServiceAlarmTable.COLUMN_SERVICE_ID, streamData.getServiceId());
source.put(ServiceAlarmTable.COLUMN_LAST_TIME_BUCKET, streamData.getLastTimeBucket());
source.put(ServiceAlarmTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
......@@ -16,40 +16,62 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IServiceAlarmListPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.ServiceAlarmList;
import org.apache.skywalking.apm.collector.storage.table.alarm.ServiceAlarmListTable;
/**
* @author peng-yongsheng
*/
public class ServiceAlarmListH2PersistenceDAO extends AbstractPersistenceH2DAO<ServiceAlarmList> implements IServiceAlarmListPersistenceDAO<H2SqlEntity, H2SqlEntity, ServiceAlarmList> {
public ServiceAlarmListH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ServiceAlarmListTable.TABLE;
}
@Override protected ServiceAlarmList h2DataToStreamData(ResultSet resultSet) throws SQLException {
ServiceAlarmList serviceAlarmList = new ServiceAlarmList();
serviceAlarmList.setId(resultSet.getString(ServiceAlarmListTable.COLUMN_ID));
serviceAlarmList.setSourceValue(resultSet.getInt(ServiceAlarmListTable.COLUMN_SOURCE_VALUE));
serviceAlarmList.setAlarmType(resultSet.getInt(ServiceAlarmListTable.COLUMN_ALARM_TYPE));
serviceAlarmList.setApplicationId(resultSet.getInt(ServiceAlarmListTable.COLUMN_APPLICATION_ID));
serviceAlarmList.setInstanceId(resultSet.getInt(ServiceAlarmListTable.COLUMN_INSTANCE_ID));
serviceAlarmList.setServiceId(resultSet.getInt(ServiceAlarmListTable.COLUMN_SERVICE_ID));
serviceAlarmList.setTimeBucket(resultSet.getLong(ServiceAlarmListTable.COLUMN_TIME_BUCKET));
serviceAlarmList.setAlarmContent(resultSet.getString(ServiceAlarmListTable.COLUMN_ALARM_CONTENT));
return serviceAlarmList;
}
@Override protected Map<String, Object> streamDataToH2Data(ServiceAlarmList streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ServiceAlarmListTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(ServiceAlarmListTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(ServiceAlarmListTable.COLUMN_APPLICATION_ID, streamData.getApplicationId());
source.put(ServiceAlarmListTable.COLUMN_INSTANCE_ID, streamData.getInstanceId());
source.put(ServiceAlarmListTable.COLUMN_SERVICE_ID, streamData.getServiceId());
source.put(ServiceAlarmListTable.COLUMN_TIME_BUCKET, streamData.getTimeBucket());
source.put(ServiceAlarmListTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IServiceReferenceAlarmPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.ServiceReferenceAlarm;
import org.apache.skywalking.apm.collector.storage.table.alarm.ServiceReferenceAlarmTable;
/**
* @author peng-yongsheng
*/
public class ServiceReferenceAlarmH2PersistenceDAO extends AbstractPersistenceH2DAO<ServiceReferenceAlarm> implements IServiceReferenceAlarmPersistenceDAO<H2SqlEntity, H2SqlEntity, ServiceReferenceAlarm> {
public ServiceReferenceAlarmH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ServiceReferenceAlarmTable.TABLE;
}
@Override protected ServiceReferenceAlarm h2DataToStreamData(ResultSet resultSet) throws SQLException {
ServiceReferenceAlarm serviceReferenceAlarm = new ServiceReferenceAlarm();
serviceReferenceAlarm.setId(resultSet.getString(ServiceReferenceAlarmTable.COLUMN_ID));
serviceReferenceAlarm.setSourceValue(resultSet.getInt(ServiceReferenceAlarmTable.COLUMN_SOURCE_VALUE));
serviceReferenceAlarm.setAlarmType(resultSet.getInt(ServiceReferenceAlarmTable.COLUMN_ALARM_TYPE));
serviceReferenceAlarm.setFrontApplicationId(resultSet.getInt(ServiceReferenceAlarmTable.COLUMN_FRONT_APPLICATION_ID));
serviceReferenceAlarm.setFrontInstanceId(resultSet.getInt(ServiceReferenceAlarmTable.COLUMN_FRONT_INSTANCE_ID));
serviceReferenceAlarm.setFrontServiceId(resultSet.getInt(ServiceReferenceAlarmTable.COLUMN_FRONT_SERVICE_ID));
serviceReferenceAlarm.setBehindApplicationId(resultSet.getInt(ServiceReferenceAlarmTable.COLUMN_BEHIND_APPLICATION_ID));
serviceReferenceAlarm.setBehindInstanceId(resultSet.getInt(ServiceReferenceAlarmTable.COLUMN_BEHIND_INSTANCE_ID));
serviceReferenceAlarm.setBehindServiceId(resultSet.getInt(ServiceReferenceAlarmTable.COLUMN_BEHIND_SERVICE_ID));
serviceReferenceAlarm.setLastTimeBucket(resultSet.getLong(ServiceReferenceAlarmTable.COLUMN_LAST_TIME_BUCKET));
serviceReferenceAlarm.setAlarmContent(resultSet.getString(ServiceReferenceAlarmTable.COLUMN_ALARM_CONTENT));
return serviceReferenceAlarm;
}
@Override protected Map<String, Object> streamDataToH2Data(ServiceReferenceAlarm streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ServiceReferenceAlarmTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(ServiceReferenceAlarmTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(ServiceReferenceAlarmTable.COLUMN_FRONT_APPLICATION_ID, streamData.getFrontApplicationId());
source.put(ServiceReferenceAlarmTable.COLUMN_FRONT_INSTANCE_ID, streamData.getFrontInstanceId());
source.put(ServiceReferenceAlarmTable.COLUMN_FRONT_SERVICE_ID, streamData.getFrontServiceId());
source.put(ServiceReferenceAlarmTable.COLUMN_BEHIND_APPLICATION_ID, streamData.getBehindApplicationId());
source.put(ServiceReferenceAlarmTable.COLUMN_BEHIND_INSTANCE_ID, streamData.getBehindInstanceId());
source.put(ServiceReferenceAlarmTable.COLUMN_BEHIND_SERVICE_ID, streamData.getBehindServiceId());
source.put(ServiceReferenceAlarmTable.COLUMN_LAST_TIME_BUCKET, streamData.getLastTimeBucket());
source.put(ServiceReferenceAlarmTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.alarm;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.dao.alarm.IServiceReferenceAlarmListPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.alarm.ServiceReferenceAlarmList;
import org.apache.skywalking.apm.collector.storage.table.alarm.ServiceReferenceAlarmListTable;
/**
* @author peng-yongsheng
*/
public class ServiceReferenceAlarmListH2PersistenceDAO extends AbstractPersistenceH2DAO<ServiceReferenceAlarmList> implements IServiceReferenceAlarmListPersistenceDAO<H2SqlEntity, H2SqlEntity, ServiceReferenceAlarmList> {
public ServiceReferenceAlarmListH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ServiceReferenceAlarmListTable.TABLE;
}
@Override protected ServiceReferenceAlarmList h2DataToStreamData(ResultSet resultSet) throws SQLException {
ServiceReferenceAlarmList serviceReferenceAlarmList = new ServiceReferenceAlarmList();
serviceReferenceAlarmList.setId(resultSet.getString(ServiceReferenceAlarmListTable.COLUMN_ID));
serviceReferenceAlarmList.setSourceValue(resultSet.getInt(ServiceReferenceAlarmListTable.COLUMN_SOURCE_VALUE));
serviceReferenceAlarmList.setAlarmType(resultSet.getInt(ServiceReferenceAlarmListTable.COLUMN_ALARM_TYPE));
serviceReferenceAlarmList.setFrontApplicationId(resultSet.getInt(ServiceReferenceAlarmListTable.COLUMN_FRONT_APPLICATION_ID));
serviceReferenceAlarmList.setFrontInstanceId(resultSet.getInt(ServiceReferenceAlarmListTable.COLUMN_FRONT_INSTANCE_ID));
serviceReferenceAlarmList.setFrontServiceId(resultSet.getInt(ServiceReferenceAlarmListTable.COLUMN_FRONT_SERVICE_ID));
serviceReferenceAlarmList.setBehindApplicationId(resultSet.getInt(ServiceReferenceAlarmListTable.COLUMN_BEHIND_APPLICATION_ID));
serviceReferenceAlarmList.setBehindInstanceId(resultSet.getInt(ServiceReferenceAlarmListTable.COLUMN_BEHIND_INSTANCE_ID));
serviceReferenceAlarmList.setBehindServiceId(resultSet.getInt(ServiceReferenceAlarmListTable.COLUMN_BEHIND_SERVICE_ID));
serviceReferenceAlarmList.setTimeBucket(resultSet.getLong(ServiceReferenceAlarmListTable.COLUMN_TIME_BUCKET));
serviceReferenceAlarmList.setAlarmContent(resultSet.getString(ServiceReferenceAlarmListTable.COLUMN_ALARM_CONTENT));
return serviceReferenceAlarmList;
}
@Override protected Map<String, Object> streamDataToH2Data(ServiceReferenceAlarmList streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ServiceReferenceAlarmListTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(ServiceReferenceAlarmListTable.COLUMN_ALARM_TYPE, streamData.getAlarmType());
source.put(ServiceReferenceAlarmListTable.COLUMN_FRONT_APPLICATION_ID, streamData.getFrontApplicationId());
source.put(ServiceReferenceAlarmListTable.COLUMN_FRONT_INSTANCE_ID, streamData.getFrontInstanceId());
source.put(ServiceReferenceAlarmListTable.COLUMN_FRONT_SERVICE_ID, streamData.getFrontServiceId());
source.put(ServiceReferenceAlarmListTable.COLUMN_BEHIND_APPLICATION_ID, streamData.getBehindApplicationId());
source.put(ServiceReferenceAlarmListTable.COLUMN_BEHIND_INSTANCE_ID, streamData.getBehindInstanceId());
source.put(ServiceReferenceAlarmListTable.COLUMN_BEHIND_SERVICE_ID, streamData.getBehindServiceId());
source.put(ServiceReferenceAlarmListTable.COLUMN_TIME_BUCKET, streamData.getTimeBucket());
source.put(ServiceReferenceAlarmListTable.COLUMN_ALARM_CONTENT, streamData.getAlarmContent());
return source;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.amp;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetric;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetricTable;
/**
* @author peng-yongsheng
*/
public abstract class AbstractApplicationMetricH2PersistenceDAO extends AbstractPersistenceH2DAO<ApplicationMetric> {
public AbstractApplicationMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected final ApplicationMetric h2DataToStreamData(ResultSet resultSet) throws SQLException {
ApplicationMetric applicationMetric = new ApplicationMetric();
applicationMetric.setId(resultSet.getString(ApplicationMetricTable.COLUMN_ID));
applicationMetric.setMetricId(resultSet.getString(ApplicationMetricTable.COLUMN_METRIC_ID));
applicationMetric.setApplicationId(resultSet.getInt(ApplicationMetricTable.COLUMN_APPLICATION_ID));
applicationMetric.setSourceValue(resultSet.getInt(ApplicationMetricTable.COLUMN_SOURCE_VALUE));
applicationMetric.setTransactionCalls(resultSet.getLong(ApplicationMetricTable.COLUMN_TRANSACTION_CALLS));
applicationMetric.setTransactionErrorCalls(resultSet.getLong(ApplicationMetricTable.COLUMN_TRANSACTION_ERROR_CALLS));
applicationMetric.setTransactionDurationSum(resultSet.getLong(ApplicationMetricTable.COLUMN_TRANSACTION_DURATION_SUM));
applicationMetric.setTransactionErrorDurationSum(resultSet.getLong(ApplicationMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM));
applicationMetric.setBusinessTransactionCalls(resultSet.getLong(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS));
applicationMetric.setBusinessTransactionErrorCalls(resultSet.getLong(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS));
applicationMetric.setBusinessTransactionDurationSum(resultSet.getLong(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM));
applicationMetric.setBusinessTransactionErrorDurationSum(resultSet.getLong(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM));
applicationMetric.setMqTransactionCalls(resultSet.getLong(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_CALLS));
applicationMetric.setMqTransactionErrorCalls(resultSet.getLong(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS));
applicationMetric.setMqTransactionDurationSum(resultSet.getLong(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM));
applicationMetric.setMqTransactionErrorDurationSum(resultSet.getLong(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM));
applicationMetric.setSatisfiedCount(resultSet.getLong(ApplicationMetricTable.COLUMN_SATISFIED_COUNT));
applicationMetric.setToleratingCount(resultSet.getLong(ApplicationMetricTable.COLUMN_TOLERATING_COUNT));
applicationMetric.setFrustratedCount(resultSet.getLong(ApplicationMetricTable.COLUMN_FRUSTRATED_COUNT));
applicationMetric.setTimeBucket(resultSet.getLong(ApplicationMetricTable.COLUMN_TIME_BUCKET));
return applicationMetric;
}
@Override protected final Map<String, Object> streamDataToH2Data(ApplicationMetric streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ApplicationMetricTable.COLUMN_METRIC_ID, streamData.getMetricId());
source.put(ApplicationMetricTable.COLUMN_APPLICATION_ID, streamData.getApplicationId());
source.put(ApplicationMetricTable.COLUMN_SOURCE_VALUE, streamData.getSourceValue());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_CALLS, streamData.getTransactionCalls());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_ERROR_CALLS, streamData.getTransactionErrorCalls());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_DURATION_SUM, streamData.getTransactionDurationSum());
source.put(ApplicationMetricTable.COLUMN_TRANSACTION_ERROR_DURATION_SUM, streamData.getTransactionErrorDurationSum());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_CALLS, streamData.getBusinessTransactionCalls());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_CALLS, streamData.getBusinessTransactionErrorCalls());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_DURATION_SUM, streamData.getBusinessTransactionDurationSum());
source.put(ApplicationMetricTable.COLUMN_BUSINESS_TRANSACTION_ERROR_DURATION_SUM, streamData.getBusinessTransactionErrorDurationSum());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_CALLS, streamData.getMqTransactionCalls());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_ERROR_CALLS, streamData.getMqTransactionErrorCalls());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_DURATION_SUM, streamData.getMqTransactionDurationSum());
source.put(ApplicationMetricTable.COLUMN_MQ_TRANSACTION_ERROR_DURATION_SUM, streamData.getMqTransactionErrorDurationSum());
source.put(ApplicationMetricTable.COLUMN_SATISFIED_COUNT, streamData.getSatisfiedCount());
source.put(ApplicationMetricTable.COLUMN_TOLERATING_COUNT, streamData.getToleratingCount());
source.put(ApplicationMetricTable.COLUMN_FRUSTRATED_COUNT, streamData.getFrustratedCount());
source.put(ApplicationMetricTable.COLUMN_TIME_BUCKET, streamData.getTimeBucket());
return source;
}
}
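// The satisfied/tolerating/frustrated counters persisted above are the classic Apdex buckets.
// As a hedged illustration only (the exact formula and thresholds used elsewhere in the collector
// are not part of this diff), such counters are conventionally folded into a score like this:
public final class ApdexSketch {
    // Standard Apdex: satisfied counts fully, tolerating counts half, frustrated counts zero.
    static double apdex(long satisfied, long tolerating, long frustrated) {
        long total = satisfied + tolerating + frustrated;
        return total == 0 ? 1.0 : (satisfied + tolerating / 2.0) / total;
    }

    public static void main(String[] args) {
        // 900 satisfied, 80 tolerating, 20 frustrated calls -> (900 + 40) / 1000 = 0.94
        System.out.println(apdex(900, 80, 20));
    }
}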
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.amp;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.storage.TimePyramid;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.amp.IApplicationDayMetricPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetric;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetricTable;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationDayMetricH2PersistenceDAO extends AbstractApplicationMetricH2PersistenceDAO implements IApplicationDayMetricPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationMetric> {
public ApplicationDayMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationMetricTable.TABLE + Const.ID_SPLIT + TimePyramid.Day.getName();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.amp;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.storage.TimePyramid;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.amp.IApplicationHourMetricPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetric;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetricTable;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationHourMetricH2PersistenceDAO extends AbstractApplicationMetricH2PersistenceDAO implements IApplicationHourMetricPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationMetric> {
public ApplicationHourMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationMetricTable.TABLE + Const.ID_SPLIT + TimePyramid.Hour.getName();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.amp;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.storage.TimePyramid;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.amp.IApplicationMinuteMetricPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetric;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetricTable;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationMinuteMetricH2PersistenceDAO extends AbstractApplicationMetricH2PersistenceDAO implements IApplicationMinuteMetricPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationMetric> {
public ApplicationMinuteMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationMetricTable.TABLE + Const.ID_SPLIT + TimePyramid.Minute.getName();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.amp;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.storage.TimePyramid;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.amp.IApplicationMonthMetricPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetric;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMetricTable;
/**
* @author peng-yongsheng, clevertension
*/
public class ApplicationMonthMetricH2PersistenceDAO extends AbstractApplicationMetricH2PersistenceDAO implements IApplicationMonthMetricPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationMetric> {
public ApplicationMonthMetricH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationMetricTable.TABLE + Const.ID_SPLIT + TimePyramid.Month.getName();
}
}
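// The Day/Hour/Minute/Month DAOs above differ only in tableName(), which suffixes the base metric
// table with a time-pyramid granularity. Assuming Const.ID_SPLIT is an underscore and
// TimePyramid#getName() returns the lower-case granularity (both assumptions, not verified in this
// diff), the per-granularity tables would be named application_metric_day, application_metric_hour,
// application_metric_minute and application_metric_month. A stand-in illustration of the rule:
public final class MetricTableNameSketch {
    private static final String ID_SPLIT = "_";               // stand-in for Const.ID_SPLIT
    private static final String BASE = "application_metric";  // stand-in for ApplicationMetricTable.TABLE

    static String tableFor(String granularity) {
        return BASE + ID_SPLIT + granularity;
    }

    public static void main(String[] args) {
        for (String granularity : new String[] {"day", "hour", "minute", "month"}) {
            System.out.println(tableFor(granularity));
        }
    }
}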
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.ampp;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.storage.h2.base.dao.AbstractPersistenceH2DAO;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMapping;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMappingTable;
/**
* @author peng-yongsheng
*/
public abstract class AbstractApplicationMappingH2PersistenceDAO extends AbstractPersistenceH2DAO<ApplicationMapping> {
AbstractApplicationMappingH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected final ApplicationMapping h2DataToStreamData(ResultSet resultSet) throws SQLException {
ApplicationMapping applicationMapping = new ApplicationMapping();
applicationMapping.setId(resultSet.getString(ApplicationMappingTable.COLUMN_ID));
applicationMapping.setMetricId(resultSet.getString(ApplicationMappingTable.COLUMN_METRIC_ID));
applicationMapping.setApplicationId(resultSet.getInt(ApplicationMappingTable.COLUMN_APPLICATION_ID));
applicationMapping.setAddressId(resultSet.getInt(ApplicationMappingTable.COLUMN_ADDRESS_ID));
applicationMapping.setTimeBucket(resultSet.getLong(ApplicationMappingTable.COLUMN_TIME_BUCKET));
return applicationMapping;
}
@Override protected final Map<String, Object> streamDataToH2Data(ApplicationMapping streamData) {
Map<String, Object> source = new HashMap<>();
source.put(ApplicationMappingTable.COLUMN_METRIC_ID, streamData.getMetricId());
source.put(ApplicationMappingTable.COLUMN_APPLICATION_ID, streamData.getApplicationId());
source.put(ApplicationMappingTable.COLUMN_ADDRESS_ID, streamData.getAddressId());
source.put(ApplicationMappingTable.COLUMN_TIME_BUCKET, streamData.getTimeBucket());
return source;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.ampp;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.storage.TimePyramid;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.ampp.IApplicationMappingDayPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMapping;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMappingTable;
/**
* @author peng-yongsheng
*/
public class ApplicationMappingDayH2PersistenceDAO extends AbstractApplicationMappingH2PersistenceDAO implements IApplicationMappingDayPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationMapping> {
public ApplicationMappingDayH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationMappingTable.TABLE + Const.ID_SPLIT + TimePyramid.Day.getName();
}
}
......@@ -16,40 +16,26 @@
*
*/
package org.apache.skywalking.apm.collector.storage.h2.dao.ampp;
import org.apache.skywalking.apm.collector.client.h2.H2Client;
import org.apache.skywalking.apm.collector.core.storage.TimePyramid;
import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.dao.ampp.IApplicationMappingHourPersistenceDAO;
import org.apache.skywalking.apm.collector.storage.h2.base.define.H2SqlEntity;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMapping;
import org.apache.skywalking.apm.collector.storage.table.application.ApplicationMappingTable;
/**
* @author peng-yongsheng
*/
public class ApplicationMappingHourH2PersistenceDAO extends AbstractApplicationMappingH2PersistenceDAO implements IApplicationMappingHourPersistenceDAO<H2SqlEntity, H2SqlEntity, ApplicationMapping> {
public ApplicationMappingHourH2PersistenceDAO(H2Client client) {
super(client);
}
@Override protected String tableName() {
return ApplicationMappingTable.TABLE + Const.ID_SPLIT + TimePyramid.Hour.getName();
}
}