Commit 8d234a91 authored by 彭勇升 pengys, committed by wu-sheng

Feature of database session in OAP server. (#3147)

* Feature of database session

* Make it configurable.

* Fix the OAP server failing to start up.
Parent bf504ab7
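In short, this commit adds an in-memory "database session" to the metrics persistent worker: recently persisted metrics stay cached in a map for roughly one minute, so each flush round can merge incoming values into the cached copy (combine/calculate) and issue an UPDATE without first reading the row back from storage. A minimal standalone sketch of the idea follows; the MetricsSessionSketch class, its Entry type, and the values are illustrative stand-ins, not code from this patch.

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

// Hypothetical sketch of the session idea: entries live in the cache for
// roughly one persistence minute (70s in the patch), then get evicted so a
// fresh copy is read from storage on the next round.
class MetricsSessionSketch {
    static class Entry {
        long value;        // stand-in for real metric state
        long survivalTime; // how long the entry has been in the session
    }

    private final Map<String, Entry> session = new HashMap<>(100);

    void mergeOrInsert(String id, long newValue) {
        Entry cached = session.get(id);
        if (cached != null) {
            cached.value += newValue; // "combine" with the cached copy -> UPDATE
        } else {
            Entry e = new Entry();
            e.value = newValue;       // nothing cached -> INSERT
            session.put(id, e);
        }
    }

    // Called once per persistence round with the round's elapsed time.
    void endOfRound(long tookTime) {
        Iterator<Entry> it = session.values().iterator();
        while (it.hasNext()) {
            Entry e = it.next();
            e.survivalTime += tookTime;
            if (e.survivalTime > 70_000L) {
                it.remove();
            }
        }
    }
}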
@@ -37,6 +37,7 @@ public class CoreModuleConfig extends ModuleConfig {
     @Setter private int gRPCPort;
     @Setter private int maxConcurrentCallsPerConnection;
     @Setter private int maxMessageSize;
+    @Setter private boolean enableDatabaseSession;
     private final List<String> downsampling;
     /**
      * The period of doing data persistence.
......
@@ -20,65 +20,25 @@ package org.apache.skywalking.oap.server.core;
 import java.io.IOException;
 import org.apache.skywalking.oap.server.configuration.api.ConfigurationModule;
-import org.apache.skywalking.oap.server.core.analysis.DisableRegister;
-import org.apache.skywalking.oap.server.core.analysis.StreamAnnotationListener;
+import org.apache.skywalking.oap.server.core.analysis.*;
+import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.annotation.AnnotationScan;
-import org.apache.skywalking.oap.server.core.cache.CacheUpdateTimer;
-import org.apache.skywalking.oap.server.core.cache.EndpointInventoryCache;
-import org.apache.skywalking.oap.server.core.cache.NetworkAddressInventoryCache;
-import org.apache.skywalking.oap.server.core.cache.ServiceInstanceInventoryCache;
-import org.apache.skywalking.oap.server.core.cache.ServiceInventoryCache;
-import org.apache.skywalking.oap.server.core.cluster.ClusterModule;
-import org.apache.skywalking.oap.server.core.cluster.ClusterRegister;
-import org.apache.skywalking.oap.server.core.cluster.RemoteInstance;
-import org.apache.skywalking.oap.server.core.config.ComponentLibraryCatalogService;
-import org.apache.skywalking.oap.server.core.config.ConfigService;
-import org.apache.skywalking.oap.server.core.config.DownsamplingConfigService;
-import org.apache.skywalking.oap.server.core.config.IComponentLibraryCatalogService;
-import org.apache.skywalking.oap.server.core.oal.rt.OALEngine;
-import org.apache.skywalking.oap.server.core.oal.rt.OALEngineLoader;
-import org.apache.skywalking.oap.server.core.query.AggregationQueryService;
-import org.apache.skywalking.oap.server.core.query.AlarmQueryService;
-import org.apache.skywalking.oap.server.core.query.LogQueryService;
-import org.apache.skywalking.oap.server.core.query.MetadataQueryService;
-import org.apache.skywalking.oap.server.core.query.MetricQueryService;
-import org.apache.skywalking.oap.server.core.query.TopNRecordsQueryService;
-import org.apache.skywalking.oap.server.core.query.TopologyQueryService;
-import org.apache.skywalking.oap.server.core.query.TraceQueryService;
-import org.apache.skywalking.oap.server.core.register.service.EndpointInventoryRegister;
-import org.apache.skywalking.oap.server.core.register.service.IEndpointInventoryRegister;
-import org.apache.skywalking.oap.server.core.register.service.INetworkAddressInventoryRegister;
-import org.apache.skywalking.oap.server.core.register.service.IServiceInstanceInventoryRegister;
-import org.apache.skywalking.oap.server.core.register.service.IServiceInventoryRegister;
-import org.apache.skywalking.oap.server.core.register.service.NetworkAddressInventoryRegister;
-import org.apache.skywalking.oap.server.core.register.service.ServiceInstanceInventoryRegister;
-import org.apache.skywalking.oap.server.core.register.service.ServiceInventoryRegister;
-import org.apache.skywalking.oap.server.core.remote.RemoteSenderService;
-import org.apache.skywalking.oap.server.core.remote.RemoteServiceHandler;
-import org.apache.skywalking.oap.server.core.remote.client.Address;
-import org.apache.skywalking.oap.server.core.remote.client.RemoteClientManager;
+import org.apache.skywalking.oap.server.core.cache.*;
+import org.apache.skywalking.oap.server.core.cluster.*;
+import org.apache.skywalking.oap.server.core.config.*;
+import org.apache.skywalking.oap.server.core.oal.rt.*;
+import org.apache.skywalking.oap.server.core.query.*;
+import org.apache.skywalking.oap.server.core.register.service.*;
+import org.apache.skywalking.oap.server.core.remote.*;
+import org.apache.skywalking.oap.server.core.remote.client.*;
 import org.apache.skywalking.oap.server.core.remote.health.HealthCheckServiceHandler;
-import org.apache.skywalking.oap.server.core.server.GRPCHandlerRegister;
-import org.apache.skywalking.oap.server.core.server.GRPCHandlerRegisterImpl;
-import org.apache.skywalking.oap.server.core.server.JettyHandlerRegister;
-import org.apache.skywalking.oap.server.core.server.JettyHandlerRegisterImpl;
-import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.source.SourceReceiver;
-import org.apache.skywalking.oap.server.core.source.SourceReceiverImpl;
+import org.apache.skywalking.oap.server.core.server.*;
+import org.apache.skywalking.oap.server.core.source.*;
 import org.apache.skywalking.oap.server.core.storage.PersistenceTimer;
-import org.apache.skywalking.oap.server.core.storage.model.IModelGetter;
-import org.apache.skywalking.oap.server.core.storage.model.IModelOverride;
-import org.apache.skywalking.oap.server.core.storage.model.IModelSetter;
-import org.apache.skywalking.oap.server.core.storage.model.StorageModels;
+import org.apache.skywalking.oap.server.core.storage.model.*;
 import org.apache.skywalking.oap.server.core.storage.ttl.DataTTLKeeperTimer;
-import org.apache.skywalking.oap.server.core.worker.IWorkerInstanceGetter;
-import org.apache.skywalking.oap.server.core.worker.IWorkerInstanceSetter;
-import org.apache.skywalking.oap.server.core.worker.WorkerInstancesService;
-import org.apache.skywalking.oap.server.library.module.ModuleConfig;
-import org.apache.skywalking.oap.server.library.module.ModuleDefine;
-import org.apache.skywalking.oap.server.library.module.ModuleProvider;
-import org.apache.skywalking.oap.server.library.module.ModuleStartException;
-import org.apache.skywalking.oap.server.library.module.ServiceNotProvidedException;
+import org.apache.skywalking.oap.server.core.worker.*;
+import org.apache.skywalking.oap.server.library.module.*;
 import org.apache.skywalking.oap.server.library.server.ServerException;
 import org.apache.skywalking.oap.server.library.server.grpc.GRPCServer;
 import org.apache.skywalking.oap.server.library.server.jetty.JettyServer;
@@ -96,7 +56,6 @@ public class CoreModuleProvider extends ModuleProvider {
     private final AnnotationScan annotationScan;
     private final StorageModels storageModels;
     private final SourceReceiverImpl receiver;
-    private StreamAnnotationListener streamAnnotationListener;
     private OALEngine oalEngine;

     public CoreModuleProvider() {
@@ -120,7 +79,7 @@ public class CoreModuleProvider extends ModuleProvider {
     }

     @Override public void prepare() throws ServiceNotProvidedException, ModuleStartException {
-        streamAnnotationListener = new StreamAnnotationListener(getManager());
+        StreamAnnotationListener streamAnnotationListener = new StreamAnnotationListener(getManager());

         AnnotationScan scopeScan = new AnnotationScan();
         scopeScan.registerListener(new DefaultScopeDefine.Listener());
@@ -200,6 +159,8 @@ public class CoreModuleProvider extends ModuleProvider {
         this.remoteClientManager = new RemoteClientManager(getManager());
         this.registerServiceImplementation(RemoteClientManager.class, remoteClientManager);
+
+        MetricsStreamProcessor.getInstance().setEnableDatabaseSession(moduleConfig.isEnableDatabaseSession());
     }

     @Override public void start() throws ModuleStartException {
......
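Note that moduleConfig.isEnableDatabaseSession() is read once here during prepare(), and the persistent workers are created afterwards with that value, so changing enableDatabaseSession only takes effect after an OAP server restart.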
@@ -35,6 +35,7 @@ public abstract class Metrics extends StreamData implements StorageData {
     public static final String ENTITY_ID = "entity_id";

     @Getter @Setter @Column(columnName = TIME_BUCKET) private long timeBucket;
+    @Getter @Setter private long survivalTime = 0L;

     public abstract String id();
......
@@ -43,7 +43,7 @@ public abstract class PxxMetrics extends Metrics implements IntValueHolder {
     @Getter @Setter @Column(columnName = DETAIL_GROUP) private IntKeyLongValueArray detailGroup;

     private final int percentileRank;
-    private Map<Integer, IntKeyLongValue> detailIndex;
+    @Getter private Map<Integer, IntKeyLongValue> detailIndex;

     public PxxMetrics(int percentileRank) {
         this.percentileRank = percentileRank;
......
@@ -18,6 +18,7 @@
 package org.apache.skywalking.oap.server.core.analysis.worker;

+import java.io.IOException;
 import java.util.*;
 import org.apache.skywalking.apm.commons.datacarrier.DataCarrier;
 import org.apache.skywalking.apm.commons.datacarrier.consumer.*;
@@ -40,16 +41,20 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDataCache<Metrics>> {
     private static final Logger logger = LoggerFactory.getLogger(MetricsPersistentWorker.class);

     private final Model model;
+    private final Map<Metrics, Metrics> databaseSession;
     private final MergeDataCache<Metrics> mergeDataCache;
     private final IMetricsDAO metricsDAO;
     private final AbstractWorker<Metrics> nextAlarmWorker;
     private final AbstractWorker<ExportEvent> nextExportWorker;
     private final DataCarrier<Metrics> dataCarrier;
+    private final boolean enableDatabaseSession;

     MetricsPersistentWorker(ModuleDefineHolder moduleDefineHolder, Model model, IMetricsDAO metricsDAO, AbstractWorker<Metrics> nextAlarmWorker,
-                            AbstractWorker<ExportEvent> nextExportWorker) {
+                            AbstractWorker<ExportEvent> nextExportWorker, boolean enableDatabaseSession) {
         super(moduleDefineHolder);
         this.model = model;
+        this.databaseSession = new HashMap<>(100);
+        this.enableDatabaseSession = enableDatabaseSession;
         this.mergeDataCache = new MergeDataCache<>();
         this.metricsDAO = metricsDAO;
         this.nextAlarmWorker = nextAlarmWorker;
@@ -83,23 +88,21 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDataCache<Metrics>> {
         return mergeDataCache;
     }

-    @Override public void prepareBatch(MergeDataCache<Metrics> cache, List<PrepareRequest> prepareRequests) {
+    @Override public void prepareBatch(Collection<Metrics> lastCollection, List<PrepareRequest> prepareRequests) {
         long start = System.currentTimeMillis();

-        Collection<Metrics> collection = cache.getLast().collection();
-
         int i = 0;
-        int batchGetSize = 2000;
         Metrics[] metrics = null;
-        for (Metrics data : collection) {
+        for (Metrics data : lastCollection) {
             if (Objects.nonNull(nextExportWorker)) {
                 ExportEvent event = new ExportEvent(data, ExportEvent.EventType.INCREMENT);
                 nextExportWorker.in(event);
             }

+            int batchGetSize = 2000;
             int mod = i % batchGetSize;
             if (mod == 0) {
-                int residual = collection.size() - i;
+                int residual = lastCollection.size() - i;
                 if (residual >= batchGetSize) {
                     metrics = new Metrics[batchGetSize];
                 } else {
@@ -110,23 +113,18 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDataCache<Metrics>> {
             if (mod == metrics.length - 1) {
                 try {
-                    Map<String, Metrics> dbMetricsMap = metricsDAO.get(model, metrics);
+                    syncStorageToCache(metrics);

                     for (Metrics metric : metrics) {
-                        if (dbMetricsMap.containsKey(metric.id())) {
-                            metric.combine(dbMetricsMap.get(metric.id()));
-                            metric.calculate();
-                            prepareRequests.add(metricsDAO.prepareBatchUpdate(model, metric));
+                        Metrics cacheMetric = databaseSession.get(metric);
+                        if (cacheMetric != null) {
+                            cacheMetric.combine(metric);
+                            cacheMetric.calculate();
+                            prepareRequests.add(metricsDAO.prepareBatchUpdate(model, cacheMetric));
+                            nextWorker(cacheMetric);
                         } else {
                             prepareRequests.add(metricsDAO.prepareBatchInsert(model, metric));
-                        }
-
-                        if (Objects.nonNull(nextAlarmWorker)) {
-                            nextAlarmWorker.in(metric);
-                        }
-                        if (Objects.nonNull(nextExportWorker)) {
-                            ExportEvent event = new ExportEvent(metric, ExportEvent.EventType.TOTAL);
-                            nextExportWorker.in(event);
+                            nextWorker(metric);
                         }
                     }
                 } catch (Throwable t) {
@@ -142,6 +140,16 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDataCache<Metrics>> {
         }
     }

+    private void nextWorker(Metrics metric) {
+        if (Objects.nonNull(nextAlarmWorker)) {
+            nextAlarmWorker.in(metric);
+        }
+        if (Objects.nonNull(nextExportWorker)) {
+            ExportEvent event = new ExportEvent(metric, ExportEvent.EventType.TOTAL);
+            nextExportWorker.in(event);
+        }
+    }
+
     @Override public void cacheData(Metrics input) {
         mergeDataCache.writing();
         if (mergeDataCache.containsKey(input)) {
@@ -156,6 +164,39 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDataCache<Metrics>> {
         mergeDataCache.finishWriting();
     }

+    private void syncStorageToCache(Metrics[] metrics) throws IOException {
+        if (!enableDatabaseSession) {
+            databaseSession.clear();
+        }
+
+        List<String> notInCacheIds = new ArrayList<>();
+        for (Metrics metric : metrics) {
+            if (!databaseSession.containsKey(metric)) {
+                notInCacheIds.add(metric.id());
+            }
+        }
+
+        if (notInCacheIds.size() > 0) {
+            List<Metrics> metricsList = metricsDAO.multiGet(model, notInCacheIds);
+            for (Metrics metric : metricsList) {
+                databaseSession.put(metric, metric);
+            }
+        }
+    }
+
+    @Override public void endOfRound(long tookTime) {
+        if (enableDatabaseSession) {
+            Iterator<Metrics> iterator = databaseSession.values().iterator();
+            while (iterator.hasNext()) {
+                Metrics metrics = iterator.next();
+                metrics.setSurvivalTime(tookTime + metrics.getSurvivalTime());
+                if (metrics.getSurvivalTime() > 70000) {
+                    iterator.remove();
+                }
+            }
+        }
+    }
+
     private class PersistentConsumer implements IConsumer<Metrics> {

         private final MetricsPersistentWorker persistent;
......
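One detail worth calling out: databaseSession is a Map<Metrics, Metrics> whose key is the metric object itself, which presumes Metrics subclasses define equals()/hashCode() over their identity fields (time bucket plus entity id) rather than their values. The following self-contained toy, with a hypothetical SumMetricSketch class, shows why the session lookup and the combine()/calculate() merge behave the way the worker expects:

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

// Minimal, self-contained illustration (not patch code) of why
// Map<Metrics, Metrics> works as a session: the metric is its own key,
// so equals()/hashCode() must cover only the metric's identity.
class SumMetricSketch {
    final String entityId;   // identity
    final long timeBucket;   // identity
    long sum;                // value state, excluded from equality

    SumMetricSketch(String entityId, long timeBucket, long sum) {
        this.entityId = entityId;
        this.timeBucket = timeBucket;
        this.sum = sum;
    }

    void combine(SumMetricSketch other) {
        this.sum += other.sum;
    }

    @Override public boolean equals(Object o) {
        if (!(o instanceof SumMetricSketch)) return false;
        SumMetricSketch m = (SumMetricSketch)o;
        return timeBucket == m.timeBucket && entityId.equals(m.entityId);
    }

    @Override public int hashCode() {
        return Objects.hash(entityId, timeBucket);
    }

    public static void main(String[] args) {
        Map<SumMetricSketch, SumMetricSketch> session = new HashMap<>(100);

        SumMetricSketch first = new SumMetricSketch("service-a", 201907201200L, 5);
        session.put(first, first);                 // first round: INSERT path

        SumMetricSketch incoming = new SumMetricSketch("service-a", 201907201200L, 3);
        SumMetricSketch cached = session.get(incoming);
        if (cached != null) {
            cached.combine(incoming);              // later round: UPDATE path
        }
        System.out.println(cached.sum);            // 8 -> one UPDATE, no read-back
    }
}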
@@ -19,7 +19,7 @@
 package org.apache.skywalking.oap.server.core.analysis.worker;

 import java.util.*;
-import lombok.Getter;
+import lombok.*;
 import org.apache.skywalking.oap.server.core.*;
 import org.apache.skywalking.oap.server.core.analysis.*;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
@@ -39,6 +39,7 @@ public class MetricsStreamProcessor implements StreamProcessor<Metrics> {
     private Map<Class<? extends Metrics>, MetricsAggregateWorker> entryWorkers = new HashMap<>();
     @Getter private List<MetricsPersistentWorker> persistentWorkers = new ArrayList<>();
+    @Setter @Getter private boolean enableDatabaseSession;

     public static MetricsStreamProcessor getInstance() {
         return PROCESSOR;
@@ -100,19 +101,18 @@ public class MetricsStreamProcessor implements StreamProcessor<Metrics> {
         entryWorkers.put(metricsClass, aggregateWorker);
     }

-    private MetricsPersistentWorker minutePersistentWorker(ModuleDefineHolder moduleDefineHolder,
-                                                           IMetricsDAO metricsDAO, Model model) {
+    private MetricsPersistentWorker minutePersistentWorker(ModuleDefineHolder moduleDefineHolder, IMetricsDAO metricsDAO, Model model) {
         AlarmNotifyWorker alarmNotifyWorker = new AlarmNotifyWorker(moduleDefineHolder);
         ExportWorker exportWorker = new ExportWorker(moduleDefineHolder);

-        MetricsPersistentWorker minutePersistentWorker = new MetricsPersistentWorker(moduleDefineHolder, model, metricsDAO, alarmNotifyWorker, exportWorker);
+        MetricsPersistentWorker minutePersistentWorker = new MetricsPersistentWorker(moduleDefineHolder, model, metricsDAO, alarmNotifyWorker, exportWorker, enableDatabaseSession);
         persistentWorkers.add(minutePersistentWorker);

         return minutePersistentWorker;
     }

     private MetricsPersistentWorker worker(ModuleDefineHolder moduleDefineHolder, IMetricsDAO metricsDAO, Model model) {
-        MetricsPersistentWorker persistentWorker = new MetricsPersistentWorker(moduleDefineHolder, model, metricsDAO, null, null);
+        MetricsPersistentWorker persistentWorker = new MetricsPersistentWorker(moduleDefineHolder, model, metricsDAO, null, null, enableDatabaseSession);
         persistentWorkers.add(persistentWorker);

         return persistentWorker;
......
@@ -18,8 +18,8 @@
 package org.apache.skywalking.oap.server.core.analysis.worker;

-import java.util.List;
-import org.apache.skywalking.oap.server.core.analysis.data.Window;
+import java.util.*;
+import org.apache.skywalking.oap.server.core.analysis.data.*;
 import org.apache.skywalking.oap.server.core.storage.StorageData;
 import org.apache.skywalking.oap.server.core.worker.AbstractWorker;
 import org.apache.skywalking.oap.server.library.client.request.PrepareRequest;
@@ -45,6 +45,8 @@ public abstract class PersistenceWorker<INPUT extends StorageData, CACHE extends Window<INPUT>> {
     public abstract CACHE getCache();

+    public abstract void endOfRound(long tookTime);
+
     public boolean flushAndSwitch() {
         boolean isSwitch;
         try {
@@ -57,11 +59,12 @@ public abstract class PersistenceWorker<INPUT extends StorageData, CACHE extends Window<INPUT>> {
         return isSwitch;
     }

-    public abstract void prepareBatch(CACHE cache, List<PrepareRequest> prepareRequests);
+    public abstract void prepareBatch(Collection<INPUT> lastCollection, List<PrepareRequest> prepareRequests);

     public final void buildBatchRequests(List<PrepareRequest> prepareRequests) {
         try {
-            while (getCache().getLast().isWriting()) {
+            SWCollection<INPUT> last = getCache().getLast();
+            while (last.isWriting()) {
                 try {
                     Thread.sleep(10);
                 } catch (InterruptedException e) {
@@ -69,8 +72,8 @@ public abstract class PersistenceWorker<INPUT extends StorageData, CACHE extends Window<INPUT>> {
                 }
             }

-            if (getCache().getLast().collection() != null) {
-                prepareBatch(getCache(), prepareRequests);
+            if (last.collection() != null) {
+                prepareBatch(last.collection(), prepareRequests);
             }
         } finally {
             getCache().finishReadingLast();
......
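Under the new contract, buildBatchRequests resolves getCache().getLast() exactly once and hands the switched-out collection to prepareBatch, instead of letting each subclass reach back into the cache; subclasses additionally get an endOfRound callback once per persistence round. A hedged sketch of what an implementor now provides, with simplified stand-in types rather than the real framework classes:

import java.util.Collection;
import java.util.List;

// Stand-in type so the sketch compiles on its own; the real PrepareRequest
// lives in the SkyWalking library modules.
interface PrepareRequest {}

abstract class PersistenceContractSketch<INPUT> {
    // New signature: operate only on the already-switched collection.
    public abstract void prepareBatch(Collection<INPUT> lastCollection, List<PrepareRequest> prepareRequests);

    // New hook: called once per round with the round's duration; used by
    // MetricsPersistentWorker to age out session entries.
    public abstract void endOfRound(long tookTime);
}

class RecordOnlyWorker extends PersistenceContractSketch<String> {
    @Override public void prepareBatch(Collection<String> lastCollection, List<PrepareRequest> prepareRequests) {
        for (String data : lastCollection) {
            prepareRequests.add(new PrepareRequest() {}); // one insert per element
        }
    }

    @Override public void endOfRound(long tookTime) {
        // Like TopNWorker below: no session to expire, so a no-op.
    }
}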
@@ -18,7 +18,7 @@
 package org.apache.skywalking.oap.server.core.analysis.worker;

-import java.util.List;
+import java.util.*;
 import org.apache.skywalking.apm.commons.datacarrier.DataCarrier;
 import org.apache.skywalking.apm.commons.datacarrier.consumer.IConsumer;
 import org.apache.skywalking.oap.server.core.analysis.data.LimitedSizeDataCache;
@@ -86,8 +86,8 @@ public class TopNWorker extends PersistenceWorker<TopN, LimitedSizeDataCache<TopN>> {
         return super.flushAndSwitch();
     }

-    @Override public void prepareBatch(LimitedSizeDataCache<TopN> cache, List<PrepareRequest> prepareRequests) {
-        cache.getLast().collection().forEach(record -> {
+    @Override public void prepareBatch(Collection<TopN> lastCollection, List<PrepareRequest> prepareRequests) {
+        lastCollection.forEach(record -> {
             try {
                 prepareRequests.add(recordDAO.prepareBatchInsert(model, record));
             } catch (Throwable t) {
@@ -96,6 +96,12 @@ public class TopNWorker extends PersistenceWorker<TopN, LimitedSizeDataCache<TopN>> {
         });
     }

+    /**
+     * This method is used to clear the expired session cache, but TopN does not use it.
+     */
+    @Override public void endOfRound(long tookTime) {
+    }
+
     @Override public void in(TopN n) {
         dataCarrier.produce(n);
     }
......
@@ -19,7 +19,7 @@
 package org.apache.skywalking.oap.server.core.storage;

 import java.io.IOException;
-import java.util.Map;
+import java.util.List;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.storage.model.Model;
 import org.apache.skywalking.oap.server.library.client.request.*;
@@ -29,7 +29,7 @@ import org.apache.skywalking.oap.server.library.client.request.*;
  */
 public interface IMetricsDAO extends DAO {

-    Map<String, Metrics> get(Model model, Metrics[] metrics) throws IOException;
+    List<Metrics> multiGet(Model model, List<String> ids) throws IOException;

     InsertRequest prepareBatchInsert(Model model, Metrics metrics) throws IOException;
......
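The interface change replaces the map-returning get(Model, Metrics[]) with multiGet(Model, List<String>), which returns only the metrics that exist for the requested ids. A minimal in-memory sketch of that contract, useful for unit tests; the InMemoryMetricsDao class and its String-based store are hypothetical simplifications, not part of the patch:

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical in-memory DAO showing the multiGet contract: return whatever
// exists for the given ids; missing ids are simply absent from the result,
// which is what lets the worker fall back to the INSERT path.
class InMemoryMetricsDao {
    private final Map<String, String> store = new HashMap<>(); // id -> serialized metric

    public List<String> multiGet(List<String> ids) throws IOException {
        List<String> result = new ArrayList<>(ids.size());
        for (String id : ids) {
            String metric = store.get(id);
            if (metric != null) {
                result.add(metric);
            }
        }
        return result;
    }
}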
@@ -43,6 +43,8 @@ public enum PersistenceTimer {
     private CounterMetrics errorCounter;
     private HistogramMetrics prepareLatency;
     private HistogramMetrics executeLatency;
+    private long lastTime = System.currentTimeMillis();
+    private final List<PrepareRequest> prepareRequests = new ArrayList<>(50000);

     PersistenceTimer() {
         this.debug = System.getProperty("debug") != null;
@@ -61,7 +63,7 @@ public enum PersistenceTimer {
             MetricsTag.EMPTY_KEY, MetricsTag.EMPTY_VALUE);

         if (!isStarted) {
-            Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(
+            Executors.newSingleThreadScheduledExecutor().scheduleWithFixedDelay(
                 new RunnableWithExceptionProtection(() -> extractDataAndSave(batchDAO),
                     t -> logger.error("Extract data and save failure.", t)), 5, moduleConfig.getPersistentPeriod(), TimeUnit.SECONDS);
@@ -75,10 +77,10 @@ public enum PersistenceTimer {
         }

         long startTime = System.currentTimeMillis();
         try {
             HistogramMetrics.Timer timer = prepareLatency.createTimer();

-            List<PrepareRequest> prepareRequests = new LinkedList<>();
             try {
                 List<PersistenceWorker> persistenceWorkers = new ArrayList<>();
                 persistenceWorkers.addAll(TopNStreamProcessor.getInstance().getPersistentWorkers());
@@ -92,6 +94,8 @@ public enum PersistenceTimer {
                     if (worker.flushAndSwitch()) {
                         worker.buildBatchRequests(prepareRequests);
                     }
+
+                    worker.endOfRound(System.currentTimeMillis() - lastTime);
                 });

                 if (debug) {
@@ -116,6 +120,9 @@ public enum PersistenceTimer {
             if (logger.isDebugEnabled()) {
                 logger.debug("Persistence data save finish");
             }
+
+            prepareRequests.clear();
+            lastTime = System.currentTimeMillis();
         }

         if (debug) {
......
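The scheduler change is subtle but important: scheduleAtFixedRate counts the period from each run's start, so a flush round that overruns the period makes the next round fire immediately, while scheduleWithFixedDelay counts from each run's end. A small standalone demonstration of the difference under an overrunning task (durations are arbitrary):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class FixedDelayDemo {
    public static void main(String[] args) throws InterruptedException {
        ScheduledExecutorService pool = Executors.newSingleThreadScheduledExecutor();

        // With fixed delay, the next run starts 1s AFTER the previous run ends,
        // so a slow persistence round cannot pile up back-to-back executions.
        pool.scheduleWithFixedDelay(() -> {
            System.out.println("round start: " + System.currentTimeMillis());
            try {
                Thread.sleep(1500); // simulate a flush that overruns the period
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }, 0, 1, TimeUnit.SECONDS);

        Thread.sleep(8000);
        pool.shutdownNow();
    }
}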
@@ -52,9 +52,9 @@ core:
   gRPCHost: ${SW_CORE_GRPC_HOST:0.0.0.0}
   gRPCPort: ${SW_CORE_GRPC_PORT:11800}
   downsampling:
   - Hour
   - Day
   - Month
   # Set a timeout on metrics data. After the timeout has expired, the metrics data will automatically be deleted.
   enableDataKeeperExecutor: ${SW_CORE_ENABLE_DATA_KEEPER_EXECUTOR:true} # Turn it off then automatically metrics data delete will be close.
   recordDataTTL: ${SW_CORE_RECORD_DATA_TTL:90} # Unit is minute
@@ -62,6 +62,9 @@ core:
   hourMetricsDataTTL: ${SW_CORE_HOUR_METRIC_DATA_TTL:36} # Unit is hour
   dayMetricsDataTTL: ${SW_CORE_DAY_METRIC_DATA_TTL:45} # Unit is day
   monthMetricsDataTTL: ${SW_CORE_MONTH_METRIC_DATA_TTL:18} # Unit is month
+  # Cache metrics data for 1 minute to reduce database queries; if the OAP cluster changes within that minute,
+  # the metrics may not be accurate within that minute.
+  enableDatabaseSession: ${SW_CORE_ENABLE_DATABASE_SESSION:true}
 storage:
 #  elasticsearch:
 #    nameSpace: ${SW_NAMESPACE:""}
......
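Operationally this means the session cache ships enabled; to turn it off without editing the file, set the environment variable SW_CORE_ENABLE_DATABASE_SESSION=false before starting the OAP server, since the ${VAR:default} placeholders fall back to the default only when the variable is unset.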
@@ -62,6 +62,9 @@ core:
   hourMetricsDataTTL: ${SW_CORE_HOUR_METRIC_DATA_TTL:36} # Unit is hour
   dayMetricsDataTTL: ${SW_CORE_DAY_METRIC_DATA_TTL:45} # Unit is day
   monthMetricsDataTTL: ${SW_CORE_MONTH_METRIC_DATA_TTL:18} # Unit is month
+  # Cache metrics data for 1 minute to reduce database queries; if the OAP cluster changes within that minute,
+  # the metrics may not be accurate within that minute.
+  enableDatabaseSession: ${SW_CORE_ENABLE_DATABASE_SESSION:true}
 storage:
   elasticsearch:
     nameSpace: ${SW_NAMESPACE:""}
......
@@ -39,18 +39,13 @@ public class MetricsEsDAO extends EsDAO implements IMetricsDAO {
         this.storageBuilder = storageBuilder;
     }

-    @Override public Map<String, Metrics> get(Model model, Metrics[] metrics) throws IOException {
-        Map<String, Metrics> result = new HashMap<>();
-
-        String[] ids = new String[metrics.length];
-        for (int i = 0; i < metrics.length; i++) {
-            ids[i] = metrics[i].id();
-        }
-
-        SearchResponse response = getClient().ids(model.getName(), ids);
+    @Override public List<Metrics> multiGet(Model model, List<String> ids) throws IOException {
+        SearchResponse response = getClient().ids(model.getName(), ids.toArray(new String[0]));
+        List<Metrics> result = new ArrayList<>((int)response.getHits().totalHits);
         for (int i = 0; i < response.getHits().totalHits; i++) {
             Metrics source = storageBuilder.map2Data(response.getHits().getAt(i).getSourceAsMap());
-            result.put(source.id(), source);
+            result.add(source);
         }
         return result;
     }
......
@@ -39,18 +39,11 @@ public class H2MetricsDAO extends H2SQLExecutor implements IMetricsDAO {
         this.storageBuilder = storageBuilder;
     }

-    @Override public Map<String, Metrics> get(Model model, Metrics[] metrics) throws IOException {
-        Map<String, Metrics> result = new HashMap<>();
-
-        String[] ids = new String[metrics.length];
-        for (int i = 0; i < metrics.length; i++) {
-            ids[i] = metrics[i].id();
-        }
-
-        List<StorageData> storageDataList = getByIDs(h2Client, model.getName(), ids, storageBuilder);
+    @Override public List<Metrics> multiGet(Model model, List<String> ids) throws IOException {
+        List<StorageData> storageDataList = getByIDs(h2Client, model.getName(), ids.toArray(new String[0]), storageBuilder);
+        List<Metrics> result = new ArrayList<>(storageDataList.size());
         for (StorageData storageData : storageDataList) {
-            result.put(storageData.id(), (Metrics)storageData);
+            result.add((Metrics)storageData);
         }
         return result;
     }
......