Unverified commit fc23dabf authored by wu-sheng, committed by GitHub

Support multiple implementations of StorageBuilder in different storage implementations - stage 1. (#6334)
Parent 5445b7d5
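At a glance, this stage-1 change splits the old Map-bound builder contract in two: a new generic StorageBuilder<T, K> in the storage.type package, plus the previous contract renamed to StorageHashMapBuilder<T>, whose methods data2Map/map2Data become entity2Storage/storage2Entity. A simplified sketch of the resulting hierarchy, condensed from the interface hunks further down (license headers and package declarations omitted; the two interfaces live in separate files):

import java.util.Map;

// In org.apache.skywalking.oap.server.core.storage.type
// Generic converter: T is the OAP entity, K is whatever record type the
// concrete storage implementation natively understands.
public interface StorageBuilder<T extends StorageData, K> {
    T storage2Entity(K storageData);
    K entity2Storage(T entity);
}

// In org.apache.skywalking.oap.server.core.storage
// The previous Map-based contract survives as a specialization with
// K = Map<String, Object>; existing builders only need the method rename.
public interface StorageHashMapBuilder<T extends StorageData> extends StorageBuilder<T, Map<String, Object>> {
    T storage2Entity(Map<String, Object> dbMap);

    Map<String, Object> entity2Storage(T storageData);
}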
......@@ -80,7 +80,7 @@ public class OALRuntime implements OALEngine {
private static final String CLASS_FILE_CHARSET = "UTF-8";
private static final String METRICS_FUNCTION_PACKAGE = "org.apache.skywalking.oap.server.core.analysis.metrics.";
private static final String WITH_METADATA_INTERFACE = "org.apache.skywalking.oap.server.core.analysis.metrics.WithMetadata";
private static final String STORAGE_BUILDER_INTERFACE = "org.apache.skywalking.oap.server.core.storage.StorageBuilder";
private static final String STORAGE_BUILDER_INTERFACE = "org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder";
private static final String DISPATCHER_INTERFACE = "org.apache.skywalking.oap.server.core.analysis.SourceDispatcher";
private static final String METRICS_STREAM_PROCESSOR = "org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor";
private static final String[] METRICS_CLASS_METHODS = {
......@@ -95,8 +95,8 @@ public class OALRuntime implements OALEngine {
"toDay"
};
private static final String[] METRICS_BUILDER_CLASS_METHODS = {
"data2Map",
"map2Data"
"entity2Storage",
"storage2Entity"
};
private static boolean IS_RT_TEMP_FOLDER_INIT_COMPLETED = false;
......
public java.util.Map data2Map(org.apache.skywalking.oap.server.core.storage.StorageData input) {
public java.util.Map entity2Storage(org.apache.skywalking.oap.server.core.storage.StorageData input) {
${metricsClassPackage}${metricsName}Metrics storageData = (${metricsClassPackage}${metricsName}Metrics)input;
java.util.Map map = new java.util.HashMap();
<#list fieldsFromSource as field>
......
public org.apache.skywalking.oap.server.core.storage.StorageData map2Data(java.util.Map dbMap) {
public org.apache.skywalking.oap.server.core.storage.StorageData storage2Entity(java.util.Map dbMap) {
${metricsClassPackage}${metricsName}Metrics metrics = new ${metricsClassPackage}${metricsName}Metrics();
<#list fieldsFromSource as field>
<#if field.typeName == "long" || field.typeName == "int" || field.typeName == "double" || field.typeName == "float">
......
......@@ -26,7 +26,7 @@ import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.source.ScopeDeclaration;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import java.util.HashMap;
......@@ -69,10 +69,10 @@ public class AlarmRecord extends Record {
@Column(columnName = RULE_NAME)
private String ruleName;
public static class Builder implements StorageBuilder<AlarmRecord> {
public static class Builder implements StorageHashMapBuilder<AlarmRecord> {
@Override
public Map<String, Object> data2Map(AlarmRecord storageData) {
public Map<String, Object> entity2Storage(AlarmRecord storageData) {
Map<String, Object> map = new HashMap<>();
map.put(SCOPE, storageData.getScope());
map.put(NAME, storageData.getName());
......@@ -86,7 +86,7 @@ public class AlarmRecord extends Record {
}
@Override
public AlarmRecord map2Data(Map<String, Object> dbMap) {
public AlarmRecord storage2Entity(Map<String, Object> dbMap) {
AlarmRecord record = new AlarmRecord();
record.setScope(((Number) dbMap.get(SCOPE)).intValue());
record.setName((String) dbMap.get(NAME));
......
......@@ -22,13 +22,12 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.Map;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.analysis.worker.NoneStreamProcessor;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.analysis.worker.TopNStreamProcessor;
import org.apache.skywalking.oap.server.core.source.ScopeDeclaration;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
/**
* Stream annotation represents a metadata definition. Include the key values of the distributed streaming calculation.
......@@ -49,7 +48,9 @@ public @interface Stream {
int scopeId();
/**
* @return the converter type between {@link StorageBuilder} and {@link Map} for persistence.
     * @return the converter type between the entity and the persisted storage record. The converter could be overridden by the
     * storage implementation if necessary. By default, it returns {@link org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder},
     * which suits the general case.
*/
Class<? extends StorageBuilder> builder();
......
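For context, the builder attribute is wired on each stream entity roughly as below; a minimal sketch based on the EndpointTraffic hunks later in this diff, with the processor attribute assumed to be declared as in other @Stream usages:

@Stream(name = EndpointTraffic.INDEX_NAME,
        scopeId = DefaultScopeDefine.ENDPOINT,
        builder = EndpointTraffic.Builder.class,   // a StorageHashMapBuilder unless the storage overrides it
        processor = MetricsStreamProcessor.class)
public class EndpointTraffic extends Metrics {
    // columns and the Builder implementation are shown in the EndpointTraffic hunks below
}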
......@@ -20,7 +20,7 @@ package org.apache.skywalking.oap.server.core.analysis;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@RequiredArgsConstructor
@Getter
......
......@@ -27,7 +27,7 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.topn.TopN;
import org.apache.skywalking.oap.server.core.analysis.worker.TopNStreamProcessor;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
/**
......@@ -64,10 +64,10 @@ public class TopNDatabaseStatement extends TopN {
return Objects.hash(getServiceId());
}
public static class Builder implements StorageBuilder<TopNDatabaseStatement> {
public static class Builder implements StorageHashMapBuilder<TopNDatabaseStatement> {
@Override
public TopNDatabaseStatement map2Data(Map<String, Object> dbMap) {
public TopNDatabaseStatement storage2Entity(Map<String, Object> dbMap) {
TopNDatabaseStatement statement = new TopNDatabaseStatement();
statement.setStatement((String) dbMap.get(STATEMENT));
statement.setTraceId((String) dbMap.get(TRACE_ID));
......@@ -78,7 +78,7 @@ public class TopNDatabaseStatement extends TopN {
}
@Override
public Map<String, Object> data2Map(TopNDatabaseStatement storageData) {
public Map<String, Object> entity2Storage(TopNDatabaseStatement storageData) {
Map<String, Object> map = new HashMap<>();
map.put(STATEMENT, storageData.getStatement());
map.put(TRACE_ID, storageData.getTraceId());
......
......@@ -32,7 +32,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@Stream(name = EndpointTraffic.INDEX_NAME, scopeId = DefaultScopeDefine.ENDPOINT,
......@@ -86,10 +86,10 @@ public class EndpointTraffic extends Metrics {
return hashCode();
}
public static class Builder implements StorageBuilder<EndpointTraffic> {
public static class Builder implements StorageHashMapBuilder<EndpointTraffic> {
@Override
public EndpointTraffic map2Data(Map<String, Object> dbMap) {
public EndpointTraffic storage2Entity(Map<String, Object> dbMap) {
EndpointTraffic inventory = new EndpointTraffic();
inventory.setServiceId((String) dbMap.get(SERVICE_ID));
inventory.setName((String) dbMap.get(NAME));
......@@ -98,7 +98,7 @@ public class EndpointTraffic extends Metrics {
}
@Override
public Map<String, Object> data2Map(EndpointTraffic storageData) {
public Map<String, Object> entity2Storage(EndpointTraffic storageData) {
Map<String, Object> map = new HashMap<>();
map.put(SERVICE_ID, storageData.getServiceId());
map.put(NAME, storageData.getName());
......
......@@ -33,7 +33,7 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.SERVICE_INSTANCE;
......@@ -125,9 +125,9 @@ public class InstanceTraffic extends Metrics {
return IDManager.ServiceInstanceID.buildId(serviceId, name);
}
public static class Builder implements StorageBuilder<InstanceTraffic> {
public static class Builder implements StorageHashMapBuilder<InstanceTraffic> {
@Override
public InstanceTraffic map2Data(final Map<String, Object> dbMap) {
public InstanceTraffic storage2Entity(final Map<String, Object> dbMap) {
InstanceTraffic instanceTraffic = new InstanceTraffic();
instanceTraffic.setServiceId((String) dbMap.get(SERVICE_ID));
instanceTraffic.setName((String) dbMap.get(NAME));
......@@ -141,7 +141,7 @@ public class InstanceTraffic extends Metrics {
}
@Override
public Map<String, Object> data2Map(final InstanceTraffic storageData) {
public Map<String, Object> entity2Storage(final InstanceTraffic storageData) {
Map<String, Object> map = new HashMap<>();
map.put(SERVICE_ID, storageData.getServiceId());
map.put(NAME, storageData.getName());
......
......@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
......@@ -114,7 +114,7 @@ public abstract class AbstractLogRecord extends Record {
throw new UnexpectedException("AbstractLogRecord doesn't provide id()");
}
public static abstract class Builder<T extends AbstractLogRecord> implements StorageBuilder<T> {
public static abstract class Builder<T extends AbstractLogRecord> implements StorageHashMapBuilder<T> {
protected void data2Map(Map<String, Object> map, AbstractLogRecord record) {
map.put(SERVICE_ID, record.getServiceId());
......
......@@ -48,7 +48,7 @@ public class LogRecord extends AbstractLogRecord {
public static class Builder extends AbstractLogRecord.Builder<LogRecord> {
@Override
public LogRecord map2Data(final Map<String, Object> dbMap) {
public LogRecord storage2Entity(final Map<String, Object> dbMap) {
LogRecord record = new LogRecord();
map2Data(record, dbMap);
record.setUniqueId((String) dbMap.get(UNIQUE_ID));
......@@ -56,7 +56,7 @@ public class LogRecord extends AbstractLogRecord {
}
@Override
public Map<String, Object> data2Map(final LogRecord record) {
public Map<String, Object> entity2Storage(final LogRecord record) {
Map<String, Object> dbMap = new HashMap<>();
data2Map(dbMap, record);
dbMap.put(UNIQUE_ID, record.getUniqueId());
......
......@@ -30,7 +30,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.source.ScopeDeclaration;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.NETWORK_ADDRESS_ALIAS;
......@@ -113,9 +113,9 @@ public class NetworkAddressAlias extends Metrics {
return builder;
}
public static class Builder implements StorageBuilder<NetworkAddressAlias> {
public static class Builder implements StorageHashMapBuilder<NetworkAddressAlias> {
@Override
public NetworkAddressAlias map2Data(final Map<String, Object> dbMap) {
public NetworkAddressAlias storage2Entity(final Map<String, Object> dbMap) {
final NetworkAddressAlias networkAddressAlias = new NetworkAddressAlias();
networkAddressAlias.setAddress((String) dbMap.get(ADDRESS));
networkAddressAlias.setRepresentServiceId((String) dbMap.get(REPRESENT_SERVICE_ID));
......@@ -126,7 +126,7 @@ public class NetworkAddressAlias extends Metrics {
}
@Override
public Map<String, Object> data2Map(final NetworkAddressAlias storageData) {
public Map<String, Object> entity2Storage(final NetworkAddressAlias storageData) {
Map<String, Object> map = new HashMap<>();
map.put(ADDRESS, storageData.getAddress());
map.put(REPRESENT_SERVICE_ID, storageData.getRepresentServiceId());
......
......@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@Stream(name = EndpointRelationServerSideMetrics.INDEX_NAME, scopeId = DefaultScopeDefine.ENDPOINT_RELATION,
......@@ -133,10 +133,10 @@ public class EndpointRelationServerSideMetrics extends Metrics {
return remoteBuilder;
}
public static class Builder implements StorageBuilder<EndpointRelationServerSideMetrics> {
public static class Builder implements StorageHashMapBuilder<EndpointRelationServerSideMetrics> {
@Override
public EndpointRelationServerSideMetrics map2Data(Map<String, Object> dbMap) {
public EndpointRelationServerSideMetrics storage2Entity(Map<String, Object> dbMap) {
EndpointRelationServerSideMetrics metrics = new EndpointRelationServerSideMetrics();
metrics.setSourceEndpoint((String) dbMap.get(SOURCE_ENDPOINT));
metrics.setDestEndpoint((String) dbMap.get(DEST_ENDPOINT));
......@@ -147,7 +147,7 @@ public class EndpointRelationServerSideMetrics extends Metrics {
}
@Override
public Map<String, Object> data2Map(EndpointRelationServerSideMetrics storageData) {
public Map<String, Object> entity2Storage(EndpointRelationServerSideMetrics storageData) {
Map<String, Object> map = new HashMap<>();
map.put(SOURCE_ENDPOINT, storageData.getSourceEndpoint());
map.put(DEST_ENDPOINT, storageData.getDestEndpoint());
......
......@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@Stream(name = ServiceInstanceRelationClientSideMetrics.INDEX_NAME, scopeId = DefaultScopeDefine.SERVICE_INSTANCE_RELATION,
......@@ -146,10 +146,10 @@ public class ServiceInstanceRelationClientSideMetrics extends Metrics {
return remoteBuilder;
}
public static class Builder implements StorageBuilder<ServiceInstanceRelationClientSideMetrics> {
public static class Builder implements StorageHashMapBuilder<ServiceInstanceRelationClientSideMetrics> {
@Override
public ServiceInstanceRelationClientSideMetrics map2Data(Map<String, Object> dbMap) {
public ServiceInstanceRelationClientSideMetrics storage2Entity(Map<String, Object> dbMap) {
ServiceInstanceRelationClientSideMetrics metrics = new ServiceInstanceRelationClientSideMetrics();
metrics.setEntityId((String) dbMap.get(ENTITY_ID));
metrics.setSourceServiceId((String) dbMap.get(SOURCE_SERVICE_ID));
......@@ -162,7 +162,7 @@ public class ServiceInstanceRelationClientSideMetrics extends Metrics {
}
@Override
public Map<String, Object> data2Map(ServiceInstanceRelationClientSideMetrics storageData) {
public Map<String, Object> entity2Storage(ServiceInstanceRelationClientSideMetrics storageData) {
Map<String, Object> map = new HashMap<>();
map.put(ENTITY_ID, storageData.getEntityId());
map.put(SOURCE_SERVICE_ID, storageData.getSourceServiceId());
......
......@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@Stream(name = ServiceInstanceRelationServerSideMetrics.INDEX_NAME, scopeId = DefaultScopeDefine.SERVICE_INSTANCE_RELATION,
......@@ -146,10 +146,10 @@ public class ServiceInstanceRelationServerSideMetrics extends Metrics {
return remoteBuilder;
}
public static class Builder implements StorageBuilder<ServiceInstanceRelationServerSideMetrics> {
public static class Builder implements StorageHashMapBuilder<ServiceInstanceRelationServerSideMetrics> {
@Override
public ServiceInstanceRelationServerSideMetrics map2Data(Map<String, Object> dbMap) {
public ServiceInstanceRelationServerSideMetrics storage2Entity(Map<String, Object> dbMap) {
ServiceInstanceRelationServerSideMetrics metrics = new ServiceInstanceRelationServerSideMetrics();
metrics.setEntityId((String) dbMap.get(ENTITY_ID));
metrics.setSourceServiceId((String) dbMap.get(SOURCE_SERVICE_ID));
......@@ -162,7 +162,7 @@ public class ServiceInstanceRelationServerSideMetrics extends Metrics {
}
@Override
public Map<String, Object> data2Map(ServiceInstanceRelationServerSideMetrics storageData) {
public Map<String, Object> entity2Storage(ServiceInstanceRelationServerSideMetrics storageData) {
Map<String, Object> map = new HashMap<>();
map.put(ENTITY_ID, storageData.getEntityId());
map.put(SOURCE_SERVICE_ID, storageData.getSourceServiceId());
......
......@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@Stream(name = ServiceRelationClientSideMetrics.INDEX_NAME, scopeId = DefaultScopeDefine.SERVICE_RELATION,
......@@ -127,10 +127,10 @@ public class ServiceRelationClientSideMetrics extends Metrics {
return remoteBuilder;
}
public static class Builder implements StorageBuilder<ServiceRelationClientSideMetrics> {
public static class Builder implements StorageHashMapBuilder<ServiceRelationClientSideMetrics> {
@Override
public ServiceRelationClientSideMetrics map2Data(Map<String, Object> dbMap) {
public ServiceRelationClientSideMetrics storage2Entity(Map<String, Object> dbMap) {
ServiceRelationClientSideMetrics metrics = new ServiceRelationClientSideMetrics();
metrics.setSourceServiceId((String) dbMap.get(SOURCE_SERVICE_ID));
metrics.setDestServiceId((String) dbMap.get(DEST_SERVICE_ID));
......@@ -141,7 +141,7 @@ public class ServiceRelationClientSideMetrics extends Metrics {
}
@Override
public Map<String, Object> data2Map(ServiceRelationClientSideMetrics storageData) {
public Map<String, Object> entity2Storage(ServiceRelationClientSideMetrics storageData) {
Map<String, Object> map = new HashMap<>();
map.put(TIME_BUCKET, storageData.getTimeBucket());
map.put(SOURCE_SERVICE_ID, storageData.getSourceServiceId());
......
......@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@Stream(name = ServiceRelationServerSideMetrics.INDEX_NAME, scopeId = DefaultScopeDefine.SERVICE_RELATION,
......@@ -127,10 +127,10 @@ public class ServiceRelationServerSideMetrics extends Metrics {
return remoteBuilder;
}
public static class Builder implements StorageBuilder<ServiceRelationServerSideMetrics> {
public static class Builder implements StorageHashMapBuilder<ServiceRelationServerSideMetrics> {
@Override
public ServiceRelationServerSideMetrics map2Data(Map<String, Object> dbMap) {
public ServiceRelationServerSideMetrics storage2Entity(Map<String, Object> dbMap) {
ServiceRelationServerSideMetrics metrics = new ServiceRelationServerSideMetrics();
metrics.setEntityId((String) dbMap.get(ENTITY_ID));
metrics.setSourceServiceId((String) dbMap.get(SOURCE_SERVICE_ID));
......@@ -141,7 +141,7 @@ public class ServiceRelationServerSideMetrics extends Metrics {
}
@Override
public Map<String, Object> data2Map(ServiceRelationServerSideMetrics storageData) {
public Map<String, Object> entity2Storage(ServiceRelationServerSideMetrics storageData) {
Map<String, Object> map = new HashMap<>();
map.put(ENTITY_ID, storageData.getEntityId());
map.put(SOURCE_SERVICE_ID, storageData.getSourceServiceId());
......
......@@ -33,7 +33,7 @@ import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.analysis.topn.TopN;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import org.apache.skywalking.oap.server.core.storage.annotation.SuperDataset;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
......@@ -126,10 +126,10 @@ public class SegmentRecord extends Record {
return segmentId;
}
public static class Builder implements StorageBuilder<SegmentRecord> {
public static class Builder implements StorageHashMapBuilder<SegmentRecord> {
@Override
public Map<String, Object> data2Map(SegmentRecord storageData) {
public Map<String, Object> entity2Storage(SegmentRecord storageData) {
storageData.statement = Strings.join(new String[] {
storageData.endpointName,
storageData.traceId
......@@ -158,7 +158,7 @@ public class SegmentRecord extends Record {
}
@Override
public SegmentRecord map2Data(Map<String, Object> dbMap) {
public SegmentRecord storage2Entity(Map<String, Object> dbMap) {
SegmentRecord record = new SegmentRecord();
record.setSegmentId((String) dbMap.get(SEGMENT_ID));
record.setTraceId((String) dbMap.get(TRACE_ID));
......
......@@ -32,7 +32,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import static org.apache.skywalking.oap.server.core.Const.DOUBLE_COLONS_SPLIT;
......@@ -94,10 +94,10 @@ public class ServiceTraffic extends Metrics {
return builder;
}
public static class Builder implements StorageBuilder<ServiceTraffic> {
public static class Builder implements StorageHashMapBuilder<ServiceTraffic> {
@Override
public ServiceTraffic map2Data(final Map<String, Object> dbMap) {
public ServiceTraffic storage2Entity(final Map<String, Object> dbMap) {
ServiceTraffic serviceTraffic = new ServiceTraffic();
serviceTraffic.setName((String) dbMap.get(NAME));
serviceTraffic.setNodeType(NodeType.valueOf(((Number) dbMap.get(NODE_TYPE)).intValue()));
......@@ -106,7 +106,7 @@ public class ServiceTraffic extends Metrics {
}
@Override
public Map<String, Object> data2Map(final ServiceTraffic storageData) {
public Map<String, Object> entity2Storage(final ServiceTraffic storageData) {
final String serviceName = storageData.getName();
if (NodeType.Normal.equals(storageData.getNodeType())) {
int groupIdx = serviceName.indexOf(DOUBLE_COLONS_SPLIT);
......
......@@ -19,7 +19,7 @@
package org.apache.skywalking.oap.server.core.analysis.meter.function;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
/**
 * Indicates this function accepts data of type T.
......@@ -35,7 +35,7 @@ public interface AcceptableValue<T> {
/**
* @return builder
*/
Class<? extends StorageBuilder> builder();
Class<? extends StorageHashMapBuilder> builder();
void setTimeBucket(long timeBucket);
......
......@@ -32,7 +32,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.query.type.Bucket;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
/**
......@@ -143,14 +143,14 @@ public abstract class HistogramFunction extends Metrics implements AcceptableVal
}
@Override
public Class<? extends StorageBuilder> builder() {
public Class<? extends StorageHashMapBuilder> builder() {
return HistogramFunctionBuilder.class;
}
public static class HistogramFunctionBuilder implements StorageBuilder<HistogramFunction> {
public static class HistogramFunctionBuilder implements StorageHashMapBuilder<HistogramFunction> {
@Override
public HistogramFunction map2Data(final Map<String, Object> dbMap) {
public HistogramFunction storage2Entity(final Map<String, Object> dbMap) {
HistogramFunction metrics = new HistogramFunction() {
@Override
public AcceptableValue<BucketedValues> createNew() {
......@@ -164,7 +164,7 @@ public abstract class HistogramFunction extends Metrics implements AcceptableVal
}
@Override
public Map<String, Object> data2Map(final HistogramFunction storageData) {
public Map<String, Object> entity2Storage(final HistogramFunction storageData) {
Map<String, Object> map = new HashMap<>();
map.put(DATASET, storageData.getDataset());
map.put(TIME_BUCKET, storageData.getTimeBucket());
......
......@@ -38,7 +38,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.MultiIntValuesHold
import org.apache.skywalking.oap.server.core.analysis.metrics.PercentileMetrics;
import org.apache.skywalking.oap.server.core.query.type.Bucket;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
/**
......@@ -251,7 +251,7 @@ public abstract class PercentileFunction extends Metrics implements AcceptableVa
}
@Override
public Class<? extends StorageBuilder> builder() {
public Class<? extends StorageHashMapBuilder> builder() {
return PercentileFunctionBuilder.class;
}
......@@ -262,10 +262,10 @@ public abstract class PercentileFunction extends Metrics implements AcceptableVa
private final int[] ranks;
}
public static class PercentileFunctionBuilder implements StorageBuilder<PercentileFunction> {
public static class PercentileFunctionBuilder implements StorageHashMapBuilder<PercentileFunction> {
@Override
public PercentileFunction map2Data(final Map<String, Object> dbMap) {
public PercentileFunction storage2Entity(final Map<String, Object> dbMap) {
PercentileFunction metrics = new PercentileFunction() {
@Override
public AcceptableValue<PercentileArgument> createNew() {
......@@ -281,7 +281,7 @@ public abstract class PercentileFunction extends Metrics implements AcceptableVa
}
@Override
public Map<String, Object> data2Map(final PercentileFunction storageData) {
public Map<String, Object> entity2Storage(final PercentileFunction storageData) {
Map<String, Object> map = new HashMap<>();
map.put(DATASET, storageData.getDataset());
map.put(RANKS, storageData.getRanks());
......
......@@ -37,7 +37,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.annotation.Entranc
import org.apache.skywalking.oap.server.core.analysis.metrics.annotation.SourceFrom;
import org.apache.skywalking.oap.server.core.query.sql.Function;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@MeterFunction(functionName = "avg")
......@@ -161,13 +161,13 @@ public abstract class AvgFunction extends Metrics implements AcceptableValue<Lon
}
@Override
public Class<? extends StorageBuilder> builder() {
public Class<? extends StorageHashMapBuilder> builder() {
return AvgStorageBuilder.class;
}
public static class AvgStorageBuilder implements StorageBuilder<AvgFunction> {
public static class AvgStorageBuilder implements StorageHashMapBuilder<AvgFunction> {
@Override
public AvgFunction map2Data(final Map<String, Object> dbMap) {
public AvgFunction storage2Entity(final Map<String, Object> dbMap) {
AvgFunction metrics = new AvgFunction() {
@Override
public AcceptableValue<Long> createNew() {
......@@ -184,7 +184,7 @@ public abstract class AvgFunction extends Metrics implements AcceptableValue<Lon
}
@Override
public Map<String, Object> data2Map(final AvgFunction storageData) {
public Map<String, Object> entity2Storage(final AvgFunction storageData) {
Map<String, Object> map = new HashMap<>();
map.put(SUMMATION, storageData.getSummation());
map.put(VALUE, storageData.getValue());
......
......@@ -36,7 +36,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.query.type.Bucket;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
/**
......@@ -182,10 +182,10 @@ public abstract class AvgHistogramFunction extends Metrics implements Acceptable
return AvgHistogramFunctionBuilder.class;
}
public static class AvgHistogramFunctionBuilder implements StorageBuilder<AvgHistogramFunction> {
public static class AvgHistogramFunctionBuilder implements StorageHashMapBuilder<AvgHistogramFunction> {
@Override
public AvgHistogramFunction map2Data(final Map<String, Object> dbMap) {
public AvgHistogramFunction storage2Entity(final Map<String, Object> dbMap) {
AvgHistogramFunction metrics = new AvgHistogramFunction() {
@Override
public AcceptableValue<BucketedValues> createNew() {
......@@ -201,7 +201,7 @@ public abstract class AvgHistogramFunction extends Metrics implements Acceptable
}
@Override
public Map<String, Object> data2Map(final AvgHistogramFunction storageData) {
public Map<String, Object> entity2Storage(final AvgHistogramFunction storageData) {
Map<String, Object> map = new HashMap<>();
map.put(DATASET, storageData.getDataset());
map.put(COUNT, storageData.getCount());
......
......@@ -44,7 +44,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.metrics.MultiIntValuesHolder;
import org.apache.skywalking.oap.server.core.query.type.Bucket;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import static java.util.stream.Collectors.groupingBy;
......@@ -326,10 +326,10 @@ public abstract class AvgHistogramPercentileFunction extends Metrics implements
return AvgPercentileFunctionBuilder.class;
}
public static class AvgPercentileFunctionBuilder implements StorageBuilder<AvgHistogramPercentileFunction> {
public static class AvgPercentileFunctionBuilder implements StorageHashMapBuilder<AvgHistogramPercentileFunction> {
@Override
public AvgHistogramPercentileFunction map2Data(final Map<String, Object> dbMap) {
public AvgHistogramPercentileFunction storage2Entity(final Map<String, Object> dbMap) {
AvgHistogramPercentileFunction metrics = new AvgHistogramPercentileFunction() {
@Override
public AcceptableValue<PercentileArgument> createNew() {
......@@ -347,7 +347,7 @@ public abstract class AvgHistogramPercentileFunction extends Metrics implements
}
@Override
public Map<String, Object> data2Map(final AvgHistogramPercentileFunction storageData) {
public Map<String, Object> entity2Storage(final AvgHistogramPercentileFunction storageData) {
Map<String, Object> map = new HashMap<>();
map.put(SUMMATION, storageData.getSummation());
map.put(COUNT, storageData.getCount());
......
......@@ -35,7 +35,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.analysis.metrics.LabeledValueHolder;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@MeterFunction(functionName = "avgLabeled")
......@@ -169,9 +169,9 @@ public abstract class AvgLabeledFunction extends Metrics implements AcceptableVa
return AvgLabeledStorageBuilder.class;
}
public static class AvgLabeledStorageBuilder implements StorageBuilder<AvgLabeledFunction> {
public static class AvgLabeledStorageBuilder implements StorageHashMapBuilder<AvgLabeledFunction> {
@Override
public AvgLabeledFunction map2Data(final Map<String, Object> dbMap) {
public AvgLabeledFunction storage2Entity(final Map<String, Object> dbMap) {
AvgLabeledFunction metrics = new AvgLabeledFunction() {
@Override
public AcceptableValue<DataTable> createNew() {
......@@ -188,7 +188,7 @@ public abstract class AvgLabeledFunction extends Metrics implements AcceptableVa
}
@Override
public Map<String, Object> data2Map(final AvgLabeledFunction storageData) {
public Map<String, Object> entity2Storage(final AvgLabeledFunction storageData) {
Map<String, Object> map = new HashMap<>();
map.put(SUMMATION, storageData.getSummation());
map.put(VALUE, storageData.getValue());
......
......@@ -36,7 +36,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.annotation.Entranc
import org.apache.skywalking.oap.server.core.analysis.metrics.annotation.SourceFrom;
import org.apache.skywalking.oap.server.core.query.sql.Function;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@MeterFunction(functionName = "latest")
......@@ -138,9 +138,9 @@ public abstract class LatestFunction extends Metrics implements AcceptableValue<
return LatestFunction.LastestStorageBuilder.class;
}
public static class LastestStorageBuilder implements StorageBuilder<LatestFunction> {
public static class LastestStorageBuilder implements StorageHashMapBuilder<LatestFunction> {
@Override
public LatestFunction map2Data(final Map<String, Object> dbMap) {
public LatestFunction storage2Entity(final Map<String, Object> dbMap) {
LatestFunction metrics = new LatestFunction() {
@Override
public AcceptableValue<Long> createNew() {
......@@ -155,7 +155,7 @@ public abstract class LatestFunction extends Metrics implements AcceptableValue<
}
@Override
public Map<String, Object> data2Map(final LatestFunction storageData) {
public Map<String, Object> entity2Storage(final LatestFunction storageData) {
Map<String, Object> map = new HashMap<>();
map.put(VALUE, storageData.getValue());
map.put(TIME_BUCKET, storageData.getTimeBucket());
......
......@@ -28,7 +28,7 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import org.apache.skywalking.oap.server.core.storage.annotation.SuperDataset;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
......@@ -91,9 +91,9 @@ public class BrowserErrorLogRecord extends Record {
@Column(columnName = DATA_BINARY)
private byte[] dataBinary;
public static class Builder implements StorageBuilder<BrowserErrorLogRecord> {
public static class Builder implements StorageHashMapBuilder<BrowserErrorLogRecord> {
@Override
public BrowserErrorLogRecord map2Data(final Map<String, Object> dbMap) {
public BrowserErrorLogRecord storage2Entity(final Map<String, Object> dbMap) {
BrowserErrorLogRecord record = new BrowserErrorLogRecord();
record.setUniqueId((String) dbMap.get(UNIQUE_ID));
record.setServiceId((String) dbMap.get(SERVICE_ID));
......@@ -113,7 +113,7 @@ public class BrowserErrorLogRecord extends Record {
}
@Override
public Map<String, Object> data2Map(final BrowserErrorLogRecord storageData) {
public Map<String, Object> entity2Storage(final BrowserErrorLogRecord storageData) {
Map<String, Object> map = new HashMap<>();
map.put(UNIQUE_ID, storageData.getUniqueId());
map.put(SERVICE_ID, storageData.getServiceId());
......
......@@ -31,7 +31,7 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
import org.apache.skywalking.oap.server.core.source.ScopeDeclaration;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.EVENT;
......@@ -193,9 +193,9 @@ public class Event extends Metrics {
return hashCode();
}
public static class Builder implements StorageBuilder<Event> {
public static class Builder implements StorageHashMapBuilder<Event> {
@Override
public Map<String, Object> data2Map(Event storageData) {
public Map<String, Object> entity2Storage(Event storageData) {
Map<String, Object> map = new HashMap<>();
map.put(UUID, storageData.getUuid());
map.put(SERVICE, storageData.getService());
......@@ -212,7 +212,7 @@ public class Event extends Metrics {
}
@Override
public Event map2Data(Map<String, Object> dbMap) {
public Event storage2Entity(Map<String, Object> dbMap) {
Event record = new Event();
record.setUuid((String) dbMap.get(UUID));
record.setService((String) dbMap.get(SERVICE));
......
......@@ -27,7 +27,7 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.management.ManagementData;
import org.apache.skywalking.oap.server.core.analysis.worker.ManagementStreamProcessor;
import org.apache.skywalking.oap.server.core.source.ScopeDeclaration;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.UI_TEMPLATE;
......@@ -66,9 +66,9 @@ public class UITemplate extends ManagementData {
return name;
}
public static class Builder implements StorageBuilder<UITemplate> {
public static class Builder implements StorageHashMapBuilder<UITemplate> {
@Override
public UITemplate map2Data(final Map<String, Object> dbMap) {
public UITemplate storage2Entity(final Map<String, Object> dbMap) {
UITemplate uiTemplate = new UITemplate();
uiTemplate.setName((String) dbMap.get(NAME));
uiTemplate.setType((String) dbMap.get(TYPE));
......@@ -79,7 +79,7 @@ public class UITemplate extends ManagementData {
}
@Override
public Map<String, Object> data2Map(final UITemplate storageData) {
public Map<String, Object> entity2Storage(final UITemplate storageData) {
final HashMap<String, Object> map = new HashMap<>();
map.put(NAME, storageData.getName());
map.put(TYPE, storageData.getType());
......
......@@ -27,7 +27,7 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.source.ScopeDeclaration;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.PROFILE_TASK_LOG;
......@@ -61,10 +61,10 @@ public class ProfileTaskLogRecord extends Record {
return getTaskId() + Const.ID_CONNECTOR + getInstanceId() + Const.ID_CONNECTOR + getOperationType() + Const.ID_CONNECTOR + getOperationTime();
}
public static class Builder implements StorageBuilder<ProfileTaskLogRecord> {
public static class Builder implements StorageHashMapBuilder<ProfileTaskLogRecord> {
@Override
public ProfileTaskLogRecord map2Data(Map<String, Object> dbMap) {
public ProfileTaskLogRecord storage2Entity(Map<String, Object> dbMap) {
final ProfileTaskLogRecord log = new ProfileTaskLogRecord();
log.setTaskId((String) dbMap.get(TASK_ID));
log.setInstanceId((String) dbMap.get(INSTANCE_ID));
......@@ -75,7 +75,7 @@ public class ProfileTaskLogRecord extends Record {
}
@Override
public Map<String, Object> data2Map(ProfileTaskLogRecord storageData) {
public Map<String, Object> entity2Storage(ProfileTaskLogRecord storageData) {
final HashMap<String, Object> map = new HashMap<>();
map.put(TASK_ID, storageData.getTaskId());
map.put(INSTANCE_ID, storageData.getInstanceId());
......
......@@ -27,7 +27,7 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.config.NoneStream;
import org.apache.skywalking.oap.server.core.analysis.worker.NoneStreamProcessor;
import org.apache.skywalking.oap.server.core.source.ScopeDeclaration;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.PROFILE_TASK;
......@@ -73,10 +73,10 @@ public class ProfileTaskRecord extends NoneStream {
@Column(columnName = MAX_SAMPLING_COUNT)
private int maxSamplingCount;
public static class Builder implements StorageBuilder<ProfileTaskRecord> {
public static class Builder implements StorageHashMapBuilder<ProfileTaskRecord> {
@Override
public ProfileTaskRecord map2Data(Map<String, Object> dbMap) {
public ProfileTaskRecord storage2Entity(Map<String, Object> dbMap) {
final ProfileTaskRecord record = new ProfileTaskRecord();
record.setServiceId((String) dbMap.get(SERVICE_ID));
record.setEndpointName((String) dbMap.get(ENDPOINT_NAME));
......@@ -91,7 +91,7 @@ public class ProfileTaskRecord extends NoneStream {
}
@Override
public Map<String, Object> data2Map(ProfileTaskRecord storageData) {
public Map<String, Object> entity2Storage(ProfileTaskRecord storageData) {
final HashMap<String, Object> map = new HashMap<>();
map.put(SERVICE_ID, storageData.getServiceId());
map.put(ENDPOINT_NAME, storageData.getEndpointName());
......
......@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.source.ScopeDeclaration;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import org.apache.skywalking.oap.server.core.storage.annotation.QueryUnifiedIndex;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
......@@ -71,10 +71,10 @@ public class ProfileThreadSnapshotRecord extends Record {
return getTaskId() + Const.ID_CONNECTOR + getSegmentId() + Const.ID_CONNECTOR + getSequence() + Const.ID_CONNECTOR;
}
public static class Builder implements StorageBuilder<ProfileThreadSnapshotRecord> {
public static class Builder implements StorageHashMapBuilder<ProfileThreadSnapshotRecord> {
@Override
public ProfileThreadSnapshotRecord map2Data(Map<String, Object> dbMap) {
public ProfileThreadSnapshotRecord storage2Entity(Map<String, Object> dbMap) {
final ProfileThreadSnapshotRecord snapshot = new ProfileThreadSnapshotRecord();
snapshot.setTaskId((String) dbMap.get(TASK_ID));
snapshot.setSegmentId((String) dbMap.get(SEGMENT_ID));
......@@ -90,7 +90,7 @@ public class ProfileThreadSnapshotRecord extends Record {
}
@Override
public Map<String, Object> data2Map(ProfileThreadSnapshotRecord storageData) {
public Map<String, Object> entity2Storage(ProfileThreadSnapshotRecord storageData) {
final HashMap<String, Object> map = new HashMap<>();
map.put(TASK_ID, storageData.getTaskId());
map.put(SEGMENT_ID, storageData.getSegmentId());
......
......@@ -18,10 +18,7 @@
package org.apache.skywalking.oap.server.core.storage;
import org.apache.skywalking.oap.server.core.analysis.config.NoneStream;
import org.apache.skywalking.oap.server.core.analysis.management.ManagementData;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import org.apache.skywalking.oap.server.library.module.Service;
/**
......@@ -29,11 +26,11 @@ import org.apache.skywalking.oap.server.library.module.Service;
*/
public interface StorageDAO extends Service {
IMetricsDAO newMetricsDao(StorageBuilder<Metrics> storageBuilder);
IMetricsDAO newMetricsDao(StorageBuilder storageBuilder);
IRecordDAO newRecordDao(StorageBuilder<Record> storageBuilder);
IRecordDAO newRecordDao(StorageBuilder storageBuilder);
INoneStreamDAO newNoneStreamDao(StorageBuilder<NoneStream> storageBuilder);
INoneStreamDAO newNoneStreamDao(StorageBuilder storageBuilder);
IManagementDAO newManagementDao(StorageBuilder<ManagementData> storageBuilder);
IManagementDAO newManagementDao(StorageBuilder storageBuilder);
}
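Since the factory methods now take the raw StorageBuilder, each storage module decides which specialization it expects. A minimal sketch of such a StorageDAO, assuming a hypothetical Map-based module; ExampleStorageDAO and the Example*DAO classes are illustrative names, not part of this commit:

public class ExampleStorageDAO implements StorageDAO {
    @Override
    @SuppressWarnings("unchecked")
    public IMetricsDAO newMetricsDao(StorageBuilder storageBuilder) {
        // This module persists rows as Map<String, Object>, so it expects the
        // hash-map flavour; another storage could require its own builder type.
        return new ExampleMetricsDAO((StorageHashMapBuilder<Metrics>) storageBuilder);
    }

    @Override
    @SuppressWarnings("unchecked")
    public IRecordDAO newRecordDao(StorageBuilder storageBuilder) {
        return new ExampleRecordDAO((StorageHashMapBuilder<Record>) storageBuilder);
    }

    @Override
    @SuppressWarnings("unchecked")
    public INoneStreamDAO newNoneStreamDao(StorageBuilder storageBuilder) {
        return new ExampleNoneStreamDAO((StorageHashMapBuilder<NoneStream>) storageBuilder);
    }

    @Override
    @SuppressWarnings("unchecked")
    public IManagementDAO newManagementDao(StorageBuilder storageBuilder) {
        return new ExampleManagementDAO((StorageHashMapBuilder<ManagementData>) storageBuilder);
    }
}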
......@@ -19,15 +19,16 @@
package org.apache.skywalking.oap.server.core.storage;
import java.util.Map;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
/**
* Converter between the give T and Map.
 * Converter between the given T and Map. Map is a generally usable type in storage implementations.
*
* @param <T> A storage entity implementation.
*/
public interface StorageBuilder<T extends StorageData> {
public interface StorageHashMapBuilder<T extends StorageData> extends StorageBuilder<T, Map<String, Object>> {
T map2Data(Map<String, Object> dbMap);
T storage2Entity(Map<String, Object> dbMap);
Map<String, Object> data2Map(T storageData);
Map<String, Object> entity2Storage(T storageData);
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.storage.type;
import org.apache.skywalking.oap.server.core.storage.StorageData;
/**
 * Converter between the given T and K.
*
* @param <T> A storage entity implementation.
* @param <K> The data structure recognized by the storage implementation.
*/
public interface StorageBuilder<T extends StorageData, K> {
T storage2Entity(K storageData);
K entity2Storage(T entity);
}
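To show what the new type parameter K enables (the point of "stage 1"), here is a minimal sketch of a converter for a hypothetical storage whose native record is a String-to-String document rather than Map<String, Object>. EventDocumentBuilder and the literal column names are illustrative only; the Event entity and its uuid/service accessors come from the hunks above, and its import plus the remaining columns are omitted:

import java.util.HashMap;
import java.util.Map;

import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;

public class EventDocumentBuilder implements StorageBuilder<Event, Map<String, String>> {
    @Override
    public Event storage2Entity(final Map<String, String> doc) {
        final Event event = new Event();
        event.setUuid(doc.get("uuid"));
        event.setService(doc.get("service"));
        return event;
    }

    @Override
    public Map<String, String> entity2Storage(final Event entity) {
        final Map<String, String> doc = new HashMap<>();
        doc.put("uuid", entity.getUuid());
        doc.put("service", entity.getService());
        return doc;
    }
}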
......@@ -52,7 +52,7 @@ public class EndpointTrafficTest {
endpointTraffic.setName(endpointName);
final EndpointTraffic.Builder builder = new EndpointTraffic.Builder();
final EndpointTraffic another = builder.map2Data(builder.data2Map(endpointTraffic));
final EndpointTraffic another = builder.storage2Entity(builder.entity2Storage(endpointTraffic));
Assert.assertEquals(endpointTraffic, another);
}
......
......@@ -29,7 +29,7 @@ public class ServiceTrafficTest {
ServiceTraffic traffic = new ServiceTraffic();
traffic.setName("group-name::service-name");
traffic.setNodeType(NodeType.Normal);
final Map<String, Object> stringObjectMap = new ServiceTraffic.Builder().data2Map(traffic);
final Map<String, Object> stringObjectMap = new ServiceTraffic.Builder().entity2Storage(traffic);
Assert.assertEquals("group-name", stringObjectMap.get(ServiceTraffic.GROUP));
}
......@@ -38,7 +38,7 @@ public class ServiceTrafficTest {
ServiceTraffic traffic = new ServiceTraffic();
traffic.setName("group-name:service-name:no");
traffic.setNodeType(NodeType.Normal);
final Map<String, Object> stringObjectMap = new ServiceTraffic.Builder().data2Map(traffic);
final Map<String, Object> stringObjectMap = new ServiceTraffic.Builder().entity2Storage(traffic);
Assert.assertNull(stringObjectMap.get(ServiceTraffic.GROUP));
}
}
......@@ -24,7 +24,7 @@ import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.query.type.Bucket;
import org.apache.skywalking.oap.server.core.query.type.HeatMap;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.junit.Assert;
import org.junit.Test;
......@@ -200,13 +200,13 @@ public class HistogramFunctionTest {
})
);
final StorageBuilder storageBuilder = inst.builder().newInstance();
final StorageHashMapBuilder storageBuilder = inst.builder().newInstance();
// Simulate what the storage layer does: convert the DataTable to a string.
final Map map = storageBuilder.data2Map(inst);
final Map map = storageBuilder.entity2Storage(inst);
map.put(DATASET, ((DataTable) map.get(DATASET)).toStorageData());
final HistogramFunction inst2 = (HistogramFunction) storageBuilder.map2Data(map);
final HistogramFunction inst2 = (HistogramFunction) storageBuilder.storage2Entity(map);
Assert.assertEquals(inst, inst2);
// HistogramFunction equal doesn't include dataset.
Assert.assertEquals(inst.getDataset(), inst2.getDataset());
......
......@@ -22,7 +22,7 @@ import java.util.Map;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.analysis.metrics.IntList;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.junit.Assert;
import org.junit.Test;
......@@ -183,15 +183,15 @@ public class PercentileFunctionTest {
);
inst.calculate();
final StorageBuilder storageBuilder = inst.builder().newInstance();
final StorageHashMapBuilder storageBuilder = inst.builder().newInstance();
// Simulate what the storage layer does: convert the DataTable to a string.
final Map map = storageBuilder.data2Map(inst);
final Map map = storageBuilder.entity2Storage(inst);
map.put(PercentileFunction.DATASET, ((DataTable) map.get(PercentileFunction.DATASET)).toStorageData());
map.put(PercentileFunction.VALUE, ((DataTable) map.get(PercentileFunction.VALUE)).toStorageData());
map.put(PercentileFunction.RANKS, ((IntList) map.get(PercentileFunction.RANKS)).toStorageData());
final PercentileFunction inst2 = (PercentileFunction) storageBuilder.map2Data(map);
final PercentileFunction inst2 = (PercentileFunction) storageBuilder.storage2Entity(map);
Assert.assertEquals(inst, inst2);
// HistogramFunction equal doesn't include dataset.
Assert.assertEquals(inst.getDataset(), inst2.getDataset());
......
......@@ -26,7 +26,7 @@ import org.apache.skywalking.oap.server.core.analysis.meter.function.BucketedVal
import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.query.type.Bucket;
import org.apache.skywalking.oap.server.core.query.type.HeatMap;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.junit.Assert;
import org.junit.Test;
......@@ -176,15 +176,15 @@ public class AvgHistogramFunctionTest {
);
inst.calculate();
final StorageBuilder storageBuilder = inst.builder().newInstance();
final StorageHashMapBuilder storageBuilder = inst.builder().newInstance();
// Simulate what the storage layer does: convert the DataTable to a string.
Map<String, Object> map = storageBuilder.data2Map(inst);
Map<String, Object> map = storageBuilder.entity2Storage(inst);
map.put(SUMMATION, ((DataTable) map.get(SUMMATION)).toStorageData());
map.put(COUNT, ((DataTable) map.get(COUNT)).toStorageData());
map.put(DATASET, ((DataTable) map.get(DATASET)).toStorageData());
final AvgHistogramFunction inst2 = (AvgHistogramFunction) storageBuilder.map2Data(map);
final AvgHistogramFunction inst2 = (AvgHistogramFunction) storageBuilder.storage2Entity(map);
Assert.assertEquals(inst, inst2);
// HistogramFunction equal doesn't include dataset.
Assert.assertEquals(inst.getDataset(), inst2.getDataset());
......
......@@ -25,7 +25,7 @@ import org.apache.skywalking.oap.server.core.analysis.meter.function.BucketedVal
import org.apache.skywalking.oap.server.core.analysis.meter.function.PercentileArgument;
import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.analysis.metrics.IntList;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.junit.Assert;
import org.junit.Test;
......@@ -146,10 +146,10 @@ public class AvgHistogramPercentileFunctionTest {
);
inst.calculate();
final StorageBuilder storageBuilder = inst.builder().newInstance();
final StorageHashMapBuilder storageBuilder = inst.builder().newInstance();
// Simulate what the storage layer does: convert the DataTable to its string form.
final Map map = storageBuilder.data2Map(inst);
final Map map = storageBuilder.entity2Storage(inst);
map.put(
AvgHistogramPercentileFunction.COUNT,
((DataTable) map.get(AvgHistogramPercentileFunction.COUNT)).toStorageData()
......@@ -171,7 +171,7 @@ public class AvgHistogramPercentileFunctionTest {
((IntList) map.get(AvgHistogramPercentileFunction.RANKS)).toStorageData()
);
final AvgHistogramPercentileFunction inst2 = (AvgHistogramPercentileFunction) storageBuilder.map2Data(map);
final AvgHistogramPercentileFunction inst2 = (AvgHistogramPercentileFunction) storageBuilder.storage2Entity(map);
assertEquals(inst, inst2);
// AvgHistogramPercentileFunction equals() doesn't include the dataset.
assertEquals(inst.getDataset(), inst2.getDataset());
......
......@@ -24,7 +24,7 @@ import java.util.List;
import java.util.Map;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
......@@ -74,14 +74,14 @@ public class AvgLabeledFunctionTest {
public void testBuilder() throws IllegalAccessException, InstantiationException {
function.accept(MeterEntity.newService("request_count"), build(asList("200", "404"), asList(10L, 2L)));
function.calculate();
StorageBuilder<AvgLabeledFunction> storageBuilder = function.builder().newInstance();
StorageHashMapBuilder<AvgLabeledFunction> storageBuilder = function.builder().newInstance();
Map<String, Object> map = storageBuilder.data2Map(function);
Map<String, Object> map = storageBuilder.entity2Storage(function);
map.put(SUMMATION, ((DataTable) map.get(SUMMATION)).toStorageData());
map.put(COUNT, ((DataTable) map.get(COUNT)).toStorageData());
map.put(VALUE, ((DataTable) map.get(VALUE)).toStorageData());
AvgLabeledFunction function2 = storageBuilder.map2Data(map);
AvgLabeledFunction function2 = storageBuilder.storage2Entity(map);
assertThat(function2.getValue(), is(function.getValue()));
}
......
......@@ -20,7 +20,7 @@ package org.apache.skywalking.oap.server.core.analysis.meter.function.latest;
import java.util.Map;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
......@@ -71,12 +71,12 @@ public class LatestFunctionTest {
long time = 1597113447737L;
function.accept(MeterEntity.newService("latest_sync_time"), time);
function.calculate();
StorageBuilder<LatestFunction> storageBuilder = function.builder().newInstance();
StorageHashMapBuilder<LatestFunction> storageBuilder = function.builder().newInstance();
Map<String, Object> map = storageBuilder.data2Map(function);
Map<String, Object> map = storageBuilder.entity2Storage(function);
map.put(LatestFunction.VALUE, map.get(LatestFunction.VALUE));
LatestFunction function2 = storageBuilder.map2Data(map);
LatestFunction function2 = storageBuilder.storage2Entity(map);
assertThat(function2.getValue(), is(function.getValue()));
}
}
\ No newline at end of file
......@@ -34,7 +34,7 @@ public class UITemplateTest {
uiTemplate.setDisabled(BooleanUtils.FALSE);
final UITemplate.Builder builder = new UITemplate.Builder();
final UITemplate uiTemplate2 = builder.map2Data(builder.data2Map(uiTemplate));
final UITemplate uiTemplate2 = builder.storage2Entity(builder.entity2Storage(uiTemplate));
Assert.assertEquals(uiTemplate, uiTemplate2);
......
......@@ -24,7 +24,7 @@ import org.apache.skywalking.oap.server.core.analysis.DownSampling;
import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageData;
import org.apache.skywalking.oap.server.core.storage.StorageException;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
......@@ -92,15 +92,15 @@ public class StorageModelsTest {
@Column(columnName = "column", storageOnly = true)
private String column4;
static class Builder implements StorageBuilder {
static class Builder implements StorageHashMapBuilder<StorageData> {
@Override
public StorageData map2Data(final Map dbMap) {
public StorageData storage2Entity(final Map dbMap) {
return null;
}
@Override
public Map<String, Object> data2Map(final StorageData storageData) {
public Map<String, Object> entity2Storage(final StorageData storageData) {
return null;
}
}
......
......@@ -21,16 +21,16 @@ package org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base;
import java.io.IOException;
import org.apache.skywalking.oap.server.core.analysis.management.ManagementData;
import org.apache.skywalking.oap.server.core.storage.IManagementDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.xcontent.XContentBuilder;
public class ManagementEsDAO extends EsDAO implements IManagementDAO {
private final StorageBuilder<ManagementData> storageBuilder;
private final StorageHashMapBuilder<ManagementData> storageBuilder;
public ManagementEsDAO(ElasticSearchClient client, StorageBuilder<ManagementData> storageBuilder) {
public ManagementEsDAO(ElasticSearchClient client, StorageHashMapBuilder<ManagementData> storageBuilder) {
super(client);
this.storageBuilder = storageBuilder;
}
......@@ -44,7 +44,7 @@ public class ManagementEsDAO extends EsDAO implements IManagementDAO {
return;
}
XContentBuilder builder = map2builder(storageBuilder.data2Map(managementData));
XContentBuilder builder = map2builder(storageBuilder.entity2Storage(managementData));
getClient().forceInsert(modelName, id, builder);
}
}
\ No newline at end of file
......@@ -23,7 +23,7 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
......@@ -33,9 +33,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
public class MetricsEsDAO extends EsDAO implements IMetricsDAO {
protected final StorageBuilder<Metrics> storageBuilder;
protected final StorageHashMapBuilder<Metrics> storageBuilder;
protected MetricsEsDAO(ElasticSearchClient client, StorageBuilder<Metrics> storageBuilder) {
protected MetricsEsDAO(ElasticSearchClient client, StorageHashMapBuilder<Metrics> storageBuilder) {
super(client);
this.storageBuilder = storageBuilder;
}
......@@ -47,7 +47,7 @@ public class MetricsEsDAO extends EsDAO implements IMetricsDAO {
List<Metrics> result = new ArrayList<>(response.getHits().getHits().length);
for (int i = 0; i < response.getHits().getHits().length; i++) {
Metrics source = storageBuilder.map2Data(response.getHits().getAt(i).getSourceAsMap());
Metrics source = storageBuilder.storage2Entity(response.getHits().getAt(i).getSourceAsMap());
result.add(source);
}
return result;
......@@ -55,14 +55,14 @@ public class MetricsEsDAO extends EsDAO implements IMetricsDAO {
@Override
public InsertRequest prepareBatchInsert(Model model, Metrics metrics) throws IOException {
XContentBuilder builder = map2builder(storageBuilder.data2Map(metrics));
XContentBuilder builder = map2builder(storageBuilder.entity2Storage(metrics));
String modelName = TimeSeriesUtils.writeIndexName(model, metrics.getTimeBucket());
return getClient().prepareInsert(modelName, metrics.id(), builder);
}
@Override
public UpdateRequest prepareBatchUpdate(Model model, Metrics metrics) throws IOException {
XContentBuilder builder = map2builder(storageBuilder.data2Map(metrics));
XContentBuilder builder = map2builder(storageBuilder.entity2Storage(metrics));
String modelName = TimeSeriesUtils.writeIndexName(model, metrics.getTimeBucket());
return getClient().prepareUpdate(modelName, metrics.id(), builder);
}
......
......@@ -21,7 +21,7 @@ package org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base;
import java.io.IOException;
import org.apache.skywalking.oap.server.core.analysis.config.NoneStream;
import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.elasticsearch.common.xcontent.XContentBuilder;
......@@ -31,16 +31,16 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
*/
public class NoneStreamEsDAO extends EsDAO implements INoneStreamDAO {
private final StorageBuilder<NoneStream> storageBuilder;
private final StorageHashMapBuilder<NoneStream> storageBuilder;
public NoneStreamEsDAO(ElasticSearchClient client, StorageBuilder<NoneStream> storageBuilder) {
public NoneStreamEsDAO(ElasticSearchClient client, StorageHashMapBuilder<NoneStream> storageBuilder) {
super(client);
this.storageBuilder = storageBuilder;
}
@Override
public void insert(Model model, NoneStream noneStream) throws IOException {
XContentBuilder builder = map2builder(storageBuilder.data2Map(noneStream));
XContentBuilder builder = map2builder(storageBuilder.entity2Storage(noneStream));
String modelName = TimeSeriesUtils.writeIndexName(model, noneStream.getTimeBucket());
getClient().forceInsert(modelName, noneStream.id(), builder);
}
......
......@@ -21,7 +21,7 @@ package org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base;
import java.io.IOException;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
......@@ -29,16 +29,16 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
public class RecordEsDAO extends EsDAO implements IRecordDAO {
private final StorageBuilder<Record> storageBuilder;
private final StorageHashMapBuilder<Record> storageBuilder;
public RecordEsDAO(ElasticSearchClient client, StorageBuilder<Record> storageBuilder) {
public RecordEsDAO(ElasticSearchClient client, StorageHashMapBuilder<Record> storageBuilder) {
super(client);
this.storageBuilder = storageBuilder;
}
@Override
public InsertRequest prepareBatchInsert(Model model, Record record) throws IOException {
XContentBuilder builder = map2builder(storageBuilder.data2Map(record));
XContentBuilder builder = map2builder(storageBuilder.entity2Storage(record));
String modelName = TimeSeriesUtils.writeIndexName(model, record.getTimeBucket());
return getClient().prepareInsert(modelName, record.id(), builder);
}
......
......@@ -26,8 +26,9 @@ import org.apache.skywalking.oap.server.core.storage.IManagementDAO;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageDAO;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
public class StorageEsDAO extends EsDAO implements StorageDAO {
......@@ -37,22 +38,22 @@ public class StorageEsDAO extends EsDAO implements StorageDAO {
}
@Override
public IMetricsDAO newMetricsDao(StorageBuilder<Metrics> storageBuilder) {
return new MetricsEsDAO(getClient(), storageBuilder);
public IMetricsDAO newMetricsDao(StorageBuilder storageBuilder) {
return new MetricsEsDAO(getClient(), (StorageHashMapBuilder<Metrics>) storageBuilder);
}
@Override
public IRecordDAO newRecordDao(StorageBuilder<Record> storageBuilder) {
return new RecordEsDAO(getClient(), storageBuilder);
public IRecordDAO newRecordDao(StorageBuilder storageBuilder) {
return new RecordEsDAO(getClient(), (StorageHashMapBuilder<Record>) storageBuilder);
}
@Override
public INoneStreamDAO newNoneStreamDao(StorageBuilder<NoneStream> storageBuilder) {
return new NoneStreamEsDAO(getClient(), storageBuilder);
public INoneStreamDAO newNoneStreamDao(StorageBuilder storageBuilder) {
return new NoneStreamEsDAO(getClient(), (StorageHashMapBuilder<NoneStream>) storageBuilder);
}
@Override
public IManagementDAO newManagementDao(final StorageBuilder<ManagementData> storageBuilder) {
return new ManagementEsDAO(getClient(), storageBuilder);
public IManagementDAO newManagementDao(StorageBuilder storageBuilder) {
return new ManagementEsDAO(getClient(), (StorageHashMapBuilder<ManagementData>) storageBuilder);
}
}
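In the StorageEsDAO change above, the StorageDAO factory methods stop taking builder generics such as StorageBuilder<Metrics> and instead accept the new top-level org.apache.skywalking.oap.server.core.storage.type.StorageBuilder, with each storage implementation downcasting to the builder flavor it actually supports (here StorageHashMapBuilder). The standalone sketch below mirrors that dispatch pattern; all interface and class names in it are simplified, hypothetical stand-ins rather than the real SkyWalking API.

```java
import java.util.HashMap;
import java.util.Map;

// Hypothetical mirror of the "accept the base builder, cast to the supported flavor" pattern.
interface StorageBuilderBase<T> { }              // stands in for storage.type.StorageBuilder

interface HashMapBuilder<T> extends StorageBuilderBase<T> {
    Map<String, Object> entity2Storage(T entity);
    T storage2Entity(Map<String, Object> dbMap);
}

class MetricsDaoSketch<T> {
    private final HashMapBuilder<T> builder;

    MetricsDaoSketch(HashMapBuilder<T> builder) {
        this.builder = builder;
    }

    Map<String, Object> toDocument(T metrics) {
        return builder.entity2Storage(metrics);
    }
}

class StorageDaoSketch {
    // The factory receives the base type and casts, because only this backend
    // knows which concrete builder flavor its DAOs require.
    @SuppressWarnings("unchecked")
    <T> MetricsDaoSketch<T> newMetricsDao(StorageBuilderBase<T> storageBuilder) {
        return new MetricsDaoSketch<>((HashMapBuilder<T>) storageBuilder);
    }
}

public class StorageDaoCastSketch {
    public static void main(String[] args) {
        HashMapBuilder<String> builder = new HashMapBuilder<String>() {
            @Override
            public Map<String, Object> entity2Storage(String entity) {
                Map<String, Object> map = new HashMap<>();
                map.put("value", entity);
                return map;
            }

            @Override
            public String storage2Entity(Map<String, Object> dbMap) {
                return (String) dbMap.get("value");
            }
        };
        MetricsDaoSketch<String> dao = new StorageDaoSketch().newMetricsDao(builder);
        System.out.println(dao.toDocument("hello"));
    }
}
```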
......@@ -53,7 +53,7 @@ public class NetworkAddressAliasEsDAO extends EsDAO implements INetworkAddressAl
final NetworkAddressAlias.Builder builder = new NetworkAddressAlias.Builder();
for (SearchHit searchHit : response.getHits().getHits()) {
networkAddressAliases.add(builder.map2Data(searchHit.getSourceAsMap()));
networkAddressAliases.add(builder.storage2Entity(searchHit.getSourceAsMap()));
}
} catch (Throwable t) {
log.error(t.getMessage(), t);
......
......@@ -70,7 +70,7 @@ public class AlarmQueryEsDAO extends EsDAO implements IAlarmQueryDAO {
for (SearchHit searchHit : response.getHits().getHits()) {
AlarmRecord.Builder builder = new AlarmRecord.Builder();
AlarmRecord alarmRecord = builder.map2Data(searchHit.getSourceAsMap());
AlarmRecord alarmRecord = builder.storage2Entity(searchHit.getSourceAsMap());
AlarmMessage message = new AlarmMessage();
message.setId(String.valueOf(alarmRecord.getId0()));
......
......@@ -166,7 +166,7 @@ public class MetadataQueryEsDAO extends EsDAO implements IMetadataQueryDAO {
for (SearchHit searchHit : response.getHits()) {
Map<String, Object> sourceAsMap = searchHit.getSourceAsMap();
final EndpointTraffic endpointTraffic = new EndpointTraffic.Builder().map2Data(sourceAsMap);
final EndpointTraffic endpointTraffic = new EndpointTraffic.Builder().storage2Entity(sourceAsMap);
Endpoint endpoint = new Endpoint();
endpoint.setId(endpointTraffic.id());
......@@ -199,7 +199,7 @@ public class MetadataQueryEsDAO extends EsDAO implements IMetadataQueryDAO {
for (SearchHit searchHit : response.getHits()) {
Map<String, Object> sourceAsMap = searchHit.getSourceAsMap();
final InstanceTraffic instanceTraffic = new InstanceTraffic.Builder().map2Data(sourceAsMap);
final InstanceTraffic instanceTraffic = new InstanceTraffic.Builder().storage2Entity(sourceAsMap);
ServiceInstance serviceInstance = new ServiceInstance();
serviceInstance.setId(instanceTraffic.id());
......@@ -231,7 +231,7 @@ public class MetadataQueryEsDAO extends EsDAO implements IMetadataQueryDAO {
Map<String, Object> sourceAsMap = searchHit.getSourceAsMap();
final ServiceTraffic.Builder builder = new ServiceTraffic.Builder();
final ServiceTraffic serviceTraffic = builder.map2Data(sourceAsMap);
final ServiceTraffic serviceTraffic = builder.storage2Entity(sourceAsMap);
Service service = new Service();
service.setId(serviceTraffic.id());
......
......@@ -141,7 +141,7 @@ public class ProfileThreadSnapshotQueryEsDAO extends EsDAO implements IProfileTh
List<ProfileThreadSnapshotRecord> result = new ArrayList<>(maxSequence - minSequence);
for (SearchHit searchHit : response.getHits().getHits()) {
ProfileThreadSnapshotRecord record = builder.map2Data(searchHit.getSourceAsMap());
ProfileThreadSnapshotRecord record = builder.storage2Entity(searchHit.getSourceAsMap());
result.add(record);
}
......
......@@ -65,7 +65,7 @@ public class UITemplateManagementEsDAO extends EsDAO implements UITemplateManage
for (SearchHit searchHit : response.getHits()) {
Map<String, Object> sourceAsMap = searchHit.getSourceAsMap();
final UITemplate uiTemplate = builder.map2Data(sourceAsMap);
final UITemplate uiTemplate = builder.storage2Entity(sourceAsMap);
configs.add(new DashboardConfiguration().fromEntity(uiTemplate));
}
return configs;
......@@ -82,7 +82,7 @@ public class UITemplateManagementEsDAO extends EsDAO implements UITemplateManage
return TemplateChangeStatus.builder().status(false).message("Template exists").build();
}
XContentBuilder xContentBuilder = map2builder(builder.data2Map(uiTemplate));
XContentBuilder xContentBuilder = map2builder(builder.entity2Storage(uiTemplate));
getClient().forceInsert(UITemplate.INDEX_NAME, uiTemplate.id(), xContentBuilder);
return TemplateChangeStatus.builder().status(true).build();
} catch (IOException e) {
......@@ -102,7 +102,7 @@ public class UITemplateManagementEsDAO extends EsDAO implements UITemplateManage
return TemplateChangeStatus.builder().status(false).message("Can't find the template").build();
}
XContentBuilder xContentBuilder = map2builder(builder.data2Map(uiTemplate));
XContentBuilder xContentBuilder = map2builder(builder.entity2Storage(uiTemplate));
getClient().forceUpdate(UITemplate.INDEX_NAME, uiTemplate.id(), xContentBuilder);
return TemplateChangeStatus.builder().status(true).build();
} catch (IOException e) {
......@@ -116,10 +116,10 @@ public class UITemplateManagementEsDAO extends EsDAO implements UITemplateManage
final GetResponse response = getClient().get(UITemplate.INDEX_NAME, name);
if (response.isExists()) {
final UITemplate.Builder builder = new UITemplate.Builder();
final UITemplate uiTemplate = builder.map2Data(response.getSourceAsMap());
final UITemplate uiTemplate = builder.storage2Entity(response.getSourceAsMap());
uiTemplate.setDisabled(BooleanUtils.TRUE);
XContentBuilder xContentBuilder = map2builder(builder.data2Map(uiTemplate));
XContentBuilder xContentBuilder = map2builder(builder.entity2Storage(uiTemplate));
getClient().forceUpdate(UITemplate.INDEX_NAME, uiTemplate.id(), xContentBuilder);
return TemplateChangeStatus.builder().status(true).build();
} else {
......
......@@ -22,7 +22,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.MetricsEsDAO;
......@@ -30,7 +30,7 @@ import org.elasticsearch.action.search.SearchResponse;
public class MetricsEs7DAO extends MetricsEsDAO {
MetricsEs7DAO(final ElasticSearchClient client, final StorageBuilder<Metrics> storageBuilder) {
MetricsEs7DAO(final ElasticSearchClient client, final StorageHashMapBuilder<Metrics> storageBuilder) {
super(client, storageBuilder);
}
......@@ -41,7 +41,7 @@ public class MetricsEs7DAO extends MetricsEsDAO {
List<Metrics> result = new ArrayList<>(response.getHits().getHits().length);
for (int i = 0; i < response.getHits().getHits().length; i++) {
Metrics source = storageBuilder.map2Data(response.getHits().getAt(i).getSourceAsMap());
Metrics source = storageBuilder.storage2Entity(response.getHits().getAt(i).getSourceAsMap());
result.add(source);
}
return result;
......
......@@ -26,8 +26,9 @@ import org.apache.skywalking.oap.server.core.storage.IManagementDAO;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageDAO;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.EsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.ManagementEsDAO;
......@@ -41,22 +42,22 @@ public class StorageEs7DAO extends EsDAO implements StorageDAO {
}
@Override
public IMetricsDAO newMetricsDao(StorageBuilder<Metrics> storageBuilder) {
return new MetricsEs7DAO(getClient(), storageBuilder);
public IMetricsDAO newMetricsDao(StorageBuilder storageBuilder) {
return new MetricsEs7DAO(getClient(), (StorageHashMapBuilder<Metrics>) storageBuilder);
}
@Override
public IRecordDAO newRecordDao(StorageBuilder<Record> storageBuilder) {
return new RecordEsDAO(getClient(), storageBuilder);
public IRecordDAO newRecordDao(StorageBuilder storageBuilder) {
return new RecordEsDAO(getClient(), (StorageHashMapBuilder<Record>) storageBuilder);
}
@Override
public INoneStreamDAO newNoneStreamDao(StorageBuilder<NoneStream> storageBuilder) {
return new NoneStreamEsDAO(getClient(), storageBuilder);
public INoneStreamDAO newNoneStreamDao(StorageBuilder storageBuilder) {
return new NoneStreamEsDAO(getClient(), (StorageHashMapBuilder<NoneStream>) storageBuilder);
}
@Override
public IManagementDAO newManagementDao(final StorageBuilder<ManagementData> storageBuilder) {
return new ManagementEsDAO(getClient(), storageBuilder);
public IManagementDAO newManagementDao(StorageBuilder storageBuilder) {
return new ManagementEsDAO(getClient(), (StorageHashMapBuilder<ManagementData>) storageBuilder);
}
}
......@@ -71,7 +71,7 @@ public class AlarmQueryEs7DAO extends EsDAO implements IAlarmQueryDAO {
for (SearchHit searchHit : response.getHits().getHits()) {
AlarmRecord.Builder builder = new AlarmRecord.Builder();
AlarmRecord alarmRecord = builder.map2Data(searchHit.getSourceAsMap());
AlarmRecord alarmRecord = builder.storage2Entity(searchHit.getSourceAsMap());
AlarmMessage message = new AlarmMessage();
message.setId(String.valueOf(alarmRecord.getId0()));
......
......@@ -23,7 +23,7 @@ import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageData;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.storage.model.ModelColumn;
......@@ -40,8 +40,8 @@ public class InfluxInsertRequest implements InsertRequest, UpdateRequest {
private final Point.Builder builder;
private final Map<String, Object> fields = Maps.newHashMap();
public <T extends StorageData> InfluxInsertRequest(Model model, T storageData, StorageBuilder<T> storageBuilder) {
final Map<String, Object> objectMap = storageBuilder.data2Map(storageData);
public <T extends StorageData> InfluxInsertRequest(Model model, T storageData, StorageHashMapBuilder<T> storageBuilder) {
final Map<String, Object> objectMap = storageBuilder.entity2Storage(storageData);
if (SegmentRecord.INDEX_NAME.equals(model.getName()) || LogRecord.INDEX_NAME.equals(model.getName())) {
objectMap.remove(SegmentRecord.TAGS);
}
......
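The InfluxInsertRequest change above keeps the same flow: the builder turns the entity into a plain map, tag-like columns are dropped for the segment/log models, and the rest becomes the point's fields. A small sketch of that map-to-point step is below, using the same influxdb-java Point builder calls that appear elsewhere in this diff (measurement/tag/time/fields/build); it assumes the influxdb-java client is on the classpath, and the measurement name, tag, and field values are made up for illustration.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.influxdb.dto.Point;

public class InfluxPointSketch {
    public static void main(String[] args) {
        // Hypothetical map as produced by a StorageHashMapBuilder's entity2Storage().
        Map<String, Object> fields = new HashMap<>();
        fields.put("time_bucket", 20210131L);
        fields.put("latency", 42L);
        fields.put("tags", "http.method=GET");

        // Tag-like columns are removed before writing, as the diff does for segment/log models.
        fields.remove("tags");

        Point point = Point.measurement("sketch_metrics")        // made-up measurement name
                           .tag("_id", "service-a_20210131")     // id column kept as an Influx tag
                           .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                           .fields(fields)
                           .build();
        System.out.println(point.lineProtocol());
    }
}
```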
......@@ -26,8 +26,9 @@ import org.apache.skywalking.oap.server.core.storage.IManagementDAO;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageDAO;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.InfluxClient;
public class InfluxStorageDAO implements StorageDAO {
......@@ -38,22 +39,22 @@ public class InfluxStorageDAO implements StorageDAO {
}
@Override
public IMetricsDAO newMetricsDao(StorageBuilder<Metrics> storageBuilder) {
return new MetricsDAO(influxClient, storageBuilder);
public IMetricsDAO newMetricsDao(StorageBuilder storageBuilder) {
return new MetricsDAO(influxClient, (StorageHashMapBuilder<Metrics>) storageBuilder);
}
@Override
public IRecordDAO newRecordDao(StorageBuilder<Record> storageBuilder) {
return new RecordDAO(storageBuilder);
public IRecordDAO newRecordDao(StorageBuilder storageBuilder) {
return new RecordDAO((StorageHashMapBuilder<Record>) storageBuilder);
}
@Override
public INoneStreamDAO newNoneStreamDao(StorageBuilder<NoneStream> storageBuilder) {
return new NoneStreamDAO(influxClient, storageBuilder);
public INoneStreamDAO newNoneStreamDao(StorageBuilder storageBuilder) {
return new NoneStreamDAO(influxClient, (StorageHashMapBuilder<NoneStream>) storageBuilder);
}
@Override
public IManagementDAO newManagementDao(final StorageBuilder<ManagementData> storageBuilder) {
return new ManagementDAO(influxClient, storageBuilder);
public IManagementDAO newManagementDao(StorageBuilder storageBuilder) {
return new ManagementDAO(influxClient, (StorageHashMapBuilder<ManagementData>) storageBuilder);
}
}
......@@ -24,7 +24,7 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.oap.server.core.analysis.management.ManagementData;
import org.apache.skywalking.oap.server.core.management.ui.template.UITemplate;
import org.apache.skywalking.oap.server.core.storage.IManagementDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.InfluxClient;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.InfluxConstants;
......@@ -41,9 +41,9 @@ import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.select;
@Slf4j
public class ManagementDAO implements IManagementDAO {
private final InfluxClient client;
private final StorageBuilder<ManagementData> storageBuilder;
private final StorageHashMapBuilder<ManagementData> storageBuilder;
public ManagementDAO(InfluxClient client, StorageBuilder<ManagementData> storageBuilder) {
public ManagementDAO(InfluxClient client, StorageHashMapBuilder<ManagementData> storageBuilder) {
this.client = client;
this.storageBuilder = storageBuilder;
}
......@@ -65,7 +65,7 @@ public class ManagementDAO implements IManagementDAO {
Point point = Point.measurement(UITemplate.INDEX_NAME)
.tag(InfluxConstants.TagName.ID_COLUMN, managementData.id())
.time(1L, TimeUnit.NANOSECONDS)
.fields(storageBuilder.data2Map(managementData)).build();
.fields(storageBuilder.entity2Storage(managementData)).build();
client.write(point);
}
}
......@@ -34,7 +34,7 @@ import org.apache.skywalking.oap.server.core.analysis.manual.instance.InstanceTr
import org.apache.skywalking.oap.server.core.analysis.manual.service.ServiceTraffic;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.storage.type.StorageDataComplexObject;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
......@@ -54,10 +54,10 @@ import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.select;
@Slf4j
public class MetricsDAO implements IMetricsDAO {
private final StorageBuilder<Metrics> storageBuilder;
private final StorageHashMapBuilder<Metrics> storageBuilder;
private final InfluxClient client;
public MetricsDAO(InfluxClient client, StorageBuilder<Metrics> storageBuilder) {
public MetricsDAO(InfluxClient client, StorageHashMapBuilder<Metrics> storageBuilder) {
this.client = client;
this.storageBuilder = storageBuilder;
}
......@@ -127,7 +127,7 @@ public class MetricsDAO implements IMetricsDAO {
data.put(storageAndColumnMap.get(columns.get(i)), value);
}
newMetrics.add(storageBuilder.map2Data(data));
newMetrics.add(storageBuilder.storage2Entity(data));
});
return newMetrics;
......
......@@ -23,7 +23,7 @@ import org.apache.skywalking.apm.commons.datacarrier.common.AtomicRangeInteger;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.config.NoneStream;
import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.InfluxClient;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.TableMetaInfo;
......@@ -33,9 +33,9 @@ public class NoneStreamDAO implements INoneStreamDAO {
private static final AtomicRangeInteger SUFFIX = new AtomicRangeInteger(0, PADDING_SIZE);
private final InfluxClient client;
private final StorageBuilder<NoneStream> storageBuilder;
private final StorageHashMapBuilder<NoneStream> storageBuilder;
public NoneStreamDAO(InfluxClient client, StorageBuilder<NoneStream> storageBuilder) {
public NoneStreamDAO(InfluxClient client, StorageHashMapBuilder<NoneStream> storageBuilder) {
this.client = client;
this.storageBuilder = storageBuilder;
}
......
......@@ -30,7 +30,7 @@ import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.TableMetaInfo;
......@@ -41,9 +41,9 @@ public class RecordDAO implements IRecordDAO {
private static final int PADDING_SIZE = 1_000_000;
private static final AtomicRangeInteger SUFFIX = new AtomicRangeInteger(0, PADDING_SIZE);
private final StorageBuilder<Record> storageBuilder;
private final StorageHashMapBuilder<Record> storageBuilder;
public RecordDAO(StorageBuilder<Record> storageBuilder) {
public RecordDAO(StorageHashMapBuilder<Record> storageBuilder) {
this.storageBuilder = storageBuilder;
}
......
......@@ -75,7 +75,7 @@ public class NetworkAddressAliasDAO implements INetworkAddressAliasDAO {
for (int i = 1; i < columns.size(); i++) {
map.put(columnAndFieldMap.get(columns.get(i)), values.get(i));
}
networkAddressAliases.add(builder.map2Data(map));
networkAddressAliases.add(builder.storage2Entity(map));
}
} catch (IOException e) {
log.error(e.getMessage(), e);
......
......@@ -70,7 +70,7 @@ public class UITemplateManagementDAOImpl implements UITemplateManagementDAO {
for (int i = 1; i < columnNames.size(); i++) {
data.put(columnNames.get(i), columnValues.get(i));
}
UITemplate uiTemplate = builder.map2Data(data);
UITemplate uiTemplate = builder.storage2Entity(data);
configs.add(new DashboardConfiguration().fromEntity(uiTemplate));
}
}
......@@ -84,7 +84,7 @@ public class UITemplateManagementDAOImpl implements UITemplateManagementDAO {
final Point point = Point.measurement(UITemplate.INDEX_NAME)
.tag(InfluxConstants.TagName.ID_COLUMN, uiTemplate.id())
.fields(builder.data2Map(uiTemplate))
.fields(builder.entity2Storage(uiTemplate))
.time(1L, TimeUnit.NANOSECONDS)
.build();
client.write(point);
......@@ -103,7 +103,7 @@ public class UITemplateManagementDAOImpl implements UITemplateManagementDAO {
QueryResult.Series series = client.queryForSingleSeries(query);
if (Objects.nonNull(series)) {
final Point point = Point.measurement(UITemplate.INDEX_NAME)
.fields(builder.data2Map(uiTemplate))
.fields(builder.entity2Storage(uiTemplate))
.tag(InfluxConstants.TagName.ID_COLUMN, uiTemplate.id())
.time(1L, TimeUnit.NANOSECONDS)
.build();
......
......@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import org.apache.skywalking.oap.server.core.storage.annotation.SuperDataset;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
......@@ -105,10 +105,10 @@ public class JaegerSpanRecord extends Record {
return traceId + "-" + spanId;
}
public static class Builder implements StorageBuilder<JaegerSpanRecord> {
public static class Builder implements StorageHashMapBuilder<JaegerSpanRecord> {
@Override
public Map<String, Object> data2Map(JaegerSpanRecord storageData) {
public Map<String, Object> entity2Storage(JaegerSpanRecord storageData) {
Map<String, Object> map = new HashMap<>();
map.put(TRACE_ID, storageData.getTraceId());
map.put(SPAN_ID, storageData.getSpanId());
......@@ -131,7 +131,7 @@ public class JaegerSpanRecord extends Record {
}
@Override
public JaegerSpanRecord map2Data(Map<String, Object> dbMap) {
public JaegerSpanRecord storage2Entity(Map<String, Object> dbMap) {
JaegerSpanRecord record = new JaegerSpanRecord();
record.setTraceId((String) dbMap.get(TRACE_ID));
record.setSpanId((String) dbMap.get(SPAN_ID));
......
......@@ -21,9 +21,9 @@ import java.util.List;
import java.util.Map;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
public abstract class AbstractSearchTagBuilder<T extends Record> implements StorageBuilder<T> {
public abstract class AbstractSearchTagBuilder<T extends Record> implements StorageHashMapBuilder<T> {
private final int numOfSearchableValuesPerTag;
private final List<String> searchTagKeys;
......
......@@ -50,7 +50,7 @@ public class H2LogRecordBuilder extends AbstractSearchTagBuilder<Record> {
}
@Override
public Record map2Data(final Map<String, Object> dbMap) {
public Record storage2Entity(final Map<String, Object> dbMap) {
LogRecord record = new LogRecord();
record.setUniqueId((String) dbMap.get(UNIQUE_ID));
record.setServiceId((String) dbMap.get(SERVICE_ID));
......@@ -74,7 +74,7 @@ public class H2LogRecordBuilder extends AbstractSearchTagBuilder<Record> {
}
@Override
public Map<String, Object> data2Map(final Record record) {
public Map<String, Object> entity2Storage(final Record record) {
LogRecord storageData = (LogRecord) record;
Map<String, Object> map = new HashMap<>();
map.put(UNIQUE_ID, storageData.getUniqueId());
......
......@@ -23,7 +23,7 @@ import java.sql.Connection;
import java.sql.SQLException;
import org.apache.skywalking.oap.server.core.analysis.management.ManagementData;
import org.apache.skywalking.oap.server.core.storage.IManagementDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageData;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
......@@ -35,9 +35,9 @@ import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLExecutor;
public class H2ManagementDAO extends H2SQLExecutor implements IManagementDAO {
private JDBCHikariCPClient h2Client;
private StorageBuilder<ManagementData> storageBuilder;
private StorageHashMapBuilder<ManagementData> storageBuilder;
public H2ManagementDAO(JDBCHikariCPClient h2Client, StorageBuilder<ManagementData> storageBuilder) {
public H2ManagementDAO(JDBCHikariCPClient h2Client, StorageHashMapBuilder<ManagementData> storageBuilder) {
this.h2Client = h2Client;
this.storageBuilder = storageBuilder;
}
......
......@@ -24,7 +24,7 @@ import java.util.List;
import java.util.stream.Collectors;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageData;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
......@@ -33,9 +33,9 @@ import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLExecutor;
public class H2MetricsDAO extends H2SQLExecutor implements IMetricsDAO {
private JDBCHikariCPClient h2Client;
private StorageBuilder<Metrics> storageBuilder;
private StorageHashMapBuilder<Metrics> storageBuilder;
public H2MetricsDAO(JDBCHikariCPClient h2Client, StorageBuilder<Metrics> storageBuilder) {
public H2MetricsDAO(JDBCHikariCPClient h2Client, StorageHashMapBuilder<Metrics> storageBuilder) {
this.h2Client = h2Client;
this.storageBuilder = storageBuilder;
}
......
......@@ -20,7 +20,7 @@ package org.apache.skywalking.oap.server.storage.plugin.jdbc.h2.dao;
import org.apache.skywalking.oap.server.core.analysis.config.NoneStream;
import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLExecutor;
......@@ -35,9 +35,9 @@ import java.sql.SQLException;
public class H2NoneStreamDAO extends H2SQLExecutor implements INoneStreamDAO {
private JDBCHikariCPClient h2Client;
private StorageBuilder<NoneStream> storageBuilder;
private StorageHashMapBuilder<NoneStream> storageBuilder;
public H2NoneStreamDAO(JDBCHikariCPClient h2Client, StorageBuilder<NoneStream> storageBuilder) {
public H2NoneStreamDAO(JDBCHikariCPClient h2Client, StorageHashMapBuilder<NoneStream> storageBuilder) {
this.h2Client = h2Client;
this.storageBuilder = storageBuilder;
}
......
......@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentReco
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.config.ConfigService;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
......@@ -37,19 +37,19 @@ import org.apache.skywalking.oap.server.library.module.ModuleManager;
public class H2RecordDAO extends H2SQLExecutor implements IRecordDAO {
private JDBCHikariCPClient h2Client;
private StorageBuilder<Record> storageBuilder;
private StorageHashMapBuilder<Record> storageBuilder;
private final int maxSizeOfArrayColumn;
public H2RecordDAO(ModuleManager manager,
JDBCHikariCPClient h2Client,
StorageBuilder<Record> storageBuilder,
StorageHashMapBuilder<Record> storageBuilder,
final int maxSizeOfArrayColumn,
final int numOfSearchableValuesPerTag) {
this.h2Client = h2Client;
try {
if (SegmentRecord.class
.equals(
storageBuilder.getClass().getMethod("map2Data", Map.class).getReturnType()
storageBuilder.getClass().getMethod("storage2Entity", Map.class).getReturnType()
)
) {
this.maxSizeOfArrayColumn = maxSizeOfArrayColumn;
......@@ -62,7 +62,7 @@ public class H2RecordDAO extends H2SQLExecutor implements IRecordDAO {
Arrays.asList(configService.getSearchableTracesTags().split(Const.COMMA))
);
} else if (LogRecord.class.equals(
storageBuilder.getClass().getMethod("map2Data", Map.class).getReturnType())) {
storageBuilder.getClass().getMethod("storage2Entity", Map.class).getReturnType())) {
this.maxSizeOfArrayColumn = maxSizeOfArrayColumn;
final ConfigService configService = manager.find(CoreModule.NAME)
.provider()
......
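The H2RecordDAO constructor above now probes the renamed storage2Entity method reflectively to learn which record type a builder produces (segment and log builders get the extra searchable-tag handling). The standalone sketch below shows that return-type probe; FakeRecord, FakeSegmentRecord, and FakeSegmentBuilder are hypothetical stand-ins, not the real classes.

```java
import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-ins for the reflection check in H2RecordDAO's constructor.
class FakeRecord { }
class FakeSegmentRecord extends FakeRecord { }

class FakeSegmentBuilder {
    public FakeSegmentRecord storage2Entity(Map<String, Object> dbMap) {
        return new FakeSegmentRecord();
    }

    public Map<String, Object> entity2Storage(FakeSegmentRecord entity) {
        return new HashMap<>();
    }
}

public class ReturnTypeProbeSketch {
    public static void main(String[] args) throws NoSuchMethodException {
        Object builder = new FakeSegmentBuilder();
        // Same idea as the diff: ask the builder's storage2Entity(Map) method what it returns,
        // then branch on whether that type is the segment (or log) record class.
        Class<?> produced = builder.getClass()
                                   .getMethod("storage2Entity", Map.class)
                                   .getReturnType();
        boolean isSegment = FakeSegmentRecord.class.equals(produced);
        System.out.println("builder produces segment records: " + isSegment);
    }
}
```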
......@@ -27,7 +27,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageData;
import org.apache.skywalking.oap.server.core.storage.model.ModelColumn;
import org.apache.skywalking.oap.server.core.storage.type.StorageDataComplexObject;
......@@ -43,7 +43,7 @@ public class H2SQLExecutor {
protected <T extends StorageData> List<StorageData> getByIDs(JDBCHikariCPClient h2Client,
String modelName,
String[] ids,
StorageBuilder<T> storageBuilder) throws IOException {
StorageHashMapBuilder<T> storageBuilder) throws IOException {
/*
* Although H2 database or other database support createArrayOf and setArray operate,
* Mysql 5.1.44 driver doesn't.
......@@ -70,7 +70,7 @@ public class H2SQLExecutor {
}
protected <T extends StorageData> StorageData getByID(JDBCHikariCPClient h2Client, String modelName, String id,
StorageBuilder<T> storageBuilder) throws IOException {
StorageHashMapBuilder<T> storageBuilder) throws IOException {
try (Connection connection = h2Client.getConnection();
ResultSet rs = h2Client.executeQuery(connection, "SELECT * FROM " + modelName + " WHERE id = ?", id)) {
return toStorageData(rs, modelName, storageBuilder);
......@@ -80,7 +80,7 @@ public class H2SQLExecutor {
}
protected StorageData getByColumn(JDBCHikariCPClient h2Client, String modelName, String columnName, Object value,
StorageBuilder<? extends StorageData> storageBuilder) throws IOException {
StorageHashMapBuilder<? extends StorageData> storageBuilder) throws IOException {
try (Connection connection = h2Client.getConnection();
ResultSet rs = h2Client.executeQuery(
connection, "SELECT * FROM " + modelName + " WHERE " + columnName + " = ?", value)) {
......@@ -91,27 +91,27 @@ public class H2SQLExecutor {
}
protected StorageData toStorageData(ResultSet rs, String modelName,
StorageBuilder<? extends StorageData> storageBuilder) throws SQLException {
StorageHashMapBuilder<? extends StorageData> storageBuilder) throws SQLException {
if (rs.next()) {
Map<String, Object> data = new HashMap<>();
List<ModelColumn> columns = TableMetaInfo.get(modelName).getColumns();
for (ModelColumn column : columns) {
data.put(column.getColumnName().getName(), rs.getObject(column.getColumnName().getStorageName()));
}
return storageBuilder.map2Data(data);
return storageBuilder.storage2Entity(data);
}
return null;
}
protected <T extends StorageData> SQLExecutor getInsertExecutor(String modelName, T metrics,
StorageBuilder<T> storageBuilder) throws IOException {
StorageHashMapBuilder<T> storageBuilder) throws IOException {
return getInsertExecutor(modelName, metrics, storageBuilder, 1);
}
protected <T extends StorageData> SQLExecutor getInsertExecutor(String modelName, T metrics,
StorageBuilder<T> storageBuilder,
StorageHashMapBuilder<T> storageBuilder,
int maxSizeOfArrayColumn) throws IOException {
Map<String, Object> objectMap = storageBuilder.data2Map(metrics);
Map<String, Object> objectMap = storageBuilder.entity2Storage(metrics);
SQLBuilder sqlBuilder = new SQLBuilder("INSERT INTO " + modelName + " VALUES");
List<ModelColumn> columns = TableMetaInfo.get(modelName).getColumns();
......@@ -149,8 +149,8 @@ public class H2SQLExecutor {
}
protected <T extends StorageData> SQLExecutor getUpdateExecutor(String modelName, T metrics,
StorageBuilder<T> storageBuilder) throws IOException {
Map<String, Object> objectMap = storageBuilder.data2Map(metrics);
StorageHashMapBuilder<T> storageBuilder) throws IOException {
Map<String, Object> objectMap = storageBuilder.entity2Storage(metrics);
SQLBuilder sqlBuilder = new SQLBuilder("UPDATE " + modelName + " SET ");
List<ModelColumn> columns = TableMetaInfo.get(modelName).getColumns();
......
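H2SQLExecutor#getInsertExecutor above builds its INSERT from the map returned by entity2Storage, adding one placeholder per model column. The minimal sketch below mirrors that map-to-SQL step without touching a database; the table name, column names, and values are hypothetical.

```java
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class InsertSqlSketch {
    public static void main(String[] args) {
        // Hypothetical map as returned by a StorageHashMapBuilder's entity2Storage().
        Map<String, Object> objectMap = new LinkedHashMap<>();
        objectMap.put("id", "service-a_20210131");
        objectMap.put("time_bucket", 20210131L);
        objectMap.put("latency", 42L);

        // Mirror of the getInsertExecutor idea: one "?" per column, values collected in order.
        StringBuilder sql = new StringBuilder("INSERT INTO sketch_table VALUES (");
        List<Object> params = new ArrayList<>();
        int i = 0;
        for (Map.Entry<String, Object> column : objectMap.entrySet()) {
            sql.append(i++ == 0 ? "?" : ", ?");
            params.add(column.getValue());
        }
        sql.append(")");

        System.out.println(sql);
        System.out.println(params);
    }
}
```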
......@@ -58,7 +58,7 @@ public class H2SegmentRecordBuilder extends AbstractSearchTagBuilder<Record> {
}
@Override
public Map<String, Object> data2Map(Record record) {
public Map<String, Object> entity2Storage(Record record) {
SegmentRecord storageData = (SegmentRecord) record;
storageData.setStatement(Strings.join(new String[] {
storageData.getEndpointName(),
......@@ -88,7 +88,7 @@ public class H2SegmentRecordBuilder extends AbstractSearchTagBuilder<Record> {
}
@Override
public Record map2Data(Map<String, Object> dbMap) {
public Record storage2Entity(Map<String, Object> dbMap) {
SegmentRecord record = new SegmentRecord();
record.setSegmentId((String) dbMap.get(SEGMENT_ID));
record.setTraceId((String) dbMap.get(TRACE_ID));
......
......@@ -27,8 +27,9 @@ import org.apache.skywalking.oap.server.core.storage.IManagementDAO;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageDAO;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
......@@ -40,22 +41,22 @@ public class H2StorageDAO implements StorageDAO {
private final int numOfSearchableValuesPerTag;
@Override
public IMetricsDAO newMetricsDao(StorageBuilder<Metrics> storageBuilder) {
return new H2MetricsDAO(h2Client, storageBuilder);
public IMetricsDAO newMetricsDao(StorageBuilder storageBuilder) {
return new H2MetricsDAO(h2Client, (StorageHashMapBuilder<Metrics>) storageBuilder);
}
@Override
public IRecordDAO newRecordDao(StorageBuilder<Record> storageBuilder) {
return new H2RecordDAO(manager, h2Client, storageBuilder, maxSizeOfArrayColumn, numOfSearchableValuesPerTag);
public IRecordDAO newRecordDao(StorageBuilder storageBuilder) {
return new H2RecordDAO(manager, h2Client, (StorageHashMapBuilder<Record>) storageBuilder, maxSizeOfArrayColumn, numOfSearchableValuesPerTag);
}
@Override
public INoneStreamDAO newNoneStreamDao(StorageBuilder<NoneStream> storageBuilder) {
return new H2NoneStreamDAO(h2Client, storageBuilder);
public INoneStreamDAO newNoneStreamDao(StorageBuilder storageBuilder) {
return new H2NoneStreamDAO(h2Client, (StorageHashMapBuilder<NoneStream>) storageBuilder);
}
@Override
public IManagementDAO newManagementDao(final StorageBuilder<ManagementData> storageBuilder) {
return new H2ManagementDAO(h2Client, storageBuilder);
public IManagementDAO newManagementDao(StorageBuilder storageBuilder) {
return new H2ManagementDAO(h2Client, (StorageHashMapBuilder<ManagementData>) storageBuilder);
}
}
......@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageHashMapBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import org.apache.skywalking.oap.server.core.storage.annotation.SuperDataset;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
......@@ -105,10 +105,10 @@ public class ZipkinSpanRecord extends Record {
return traceId + "-" + spanId;
}
public static class Builder implements StorageBuilder<ZipkinSpanRecord> {
public static class Builder implements StorageHashMapBuilder<ZipkinSpanRecord> {
@Override
public Map<String, Object> data2Map(ZipkinSpanRecord storageData) {
public Map<String, Object> entity2Storage(ZipkinSpanRecord storageData) {
Map<String, Object> map = new HashMap<>();
map.put(TRACE_ID, storageData.getTraceId());
map.put(SPAN_ID, storageData.getSpanId());
......@@ -131,7 +131,7 @@ public class ZipkinSpanRecord extends Record {
}
@Override
public ZipkinSpanRecord map2Data(Map<String, Object> dbMap) {
public ZipkinSpanRecord storage2Entity(Map<String, Object> dbMap) {
ZipkinSpanRecord record = new ZipkinSpanRecord();
record.setTraceId((String) dbMap.get(TRACE_ID));
record.setSpanId((String) dbMap.get(SPAN_ID));
......