提交 e577b791 编写于 作者: wu-sheng 提交者: GitHub

Merge pull request #165 from wu-sheng/feature/159

Fix a bug and improve performance
...@@ -26,6 +26,11 @@ ...@@ -26,6 +26,11 @@
<artifactId>akka-cluster_2.11</artifactId> <artifactId>akka-cluster_2.11</artifactId>
<version>${akka.version}</version> <version>${akka.version}</version>
</dependency> </dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-slf4j_2.11</artifactId>
<version>${akka.version}</version>
</dependency>
<dependency> <dependency>
<groupId>org.apache.logging.log4j</groupId> <groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId> <artifactId>log4j-core</artifactId>
......
...@@ -6,6 +6,7 @@ import com.a.eye.skywalking.collector.cluster.ClusterConfig; ...@@ -6,6 +6,7 @@ import com.a.eye.skywalking.collector.cluster.ClusterConfig;
import com.a.eye.skywalking.collector.cluster.Const; import com.a.eye.skywalking.collector.cluster.Const;
import com.typesafe.config.Config; import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory; import com.typesafe.config.ConfigFactory;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
...@@ -17,10 +18,12 @@ public enum AkkaSystem { ...@@ -17,10 +18,12 @@ public enum AkkaSystem {
private Logger logger = LogManager.getFormatterLogger(AkkaSystem.class); private Logger logger = LogManager.getFormatterLogger(AkkaSystem.class);
public ActorSystem create() { public ActorSystem create() {
Level logLevel = logger.getLevel();
final Config config = ConfigFactory.parseString("akka.remote.netty.tcp.HOSTNAME=" + ClusterConfig.Cluster.Current.HOSTNAME). final Config config = ConfigFactory.parseString("akka.remote.netty.tcp.HOSTNAME=" + ClusterConfig.Cluster.Current.HOSTNAME).
withFallback(ConfigFactory.parseString("akka.remote.netty.tcp.PORT=" + ClusterConfig.Cluster.Current.PORT)). withFallback(ConfigFactory.parseString("akka.remote.netty.tcp.PORT=" + ClusterConfig.Cluster.Current.PORT)).
withFallback(ConfigFactory.parseString("akka.loggers=[\"akka.event.slf4j.Slf4jLogger\"]")). withFallback(ConfigFactory.parseString("akka.loggers=[\"akka.event.slf4j.Slf4jLogger\"]")).
withFallback(ConfigFactory.parseString("akka.loglevel=\"ERROR\"")). withFallback(ConfigFactory.parseString("akka.loglevel=\"" + logLevel.name() + "\"")).
withFallback(ConfigFactory.load("application.conf")); withFallback(ConfigFactory.load("application.conf"));
if (!StringUtil.isEmpty(ClusterConfig.Cluster.SEED_NODES)) { if (!StringUtil.isEmpty(ClusterConfig.Cluster.SEED_NODES)) {
......
...@@ -7,7 +7,7 @@ import akka.cluster.Member; ...@@ -7,7 +7,7 @@ import akka.cluster.Member;
import akka.cluster.MemberStatus; import akka.cluster.MemberStatus;
import com.a.eye.skywalking.collector.cluster.WorkerListenerMessage; import com.a.eye.skywalking.collector.cluster.WorkerListenerMessage;
import com.a.eye.skywalking.collector.cluster.WorkersListener; import com.a.eye.skywalking.collector.cluster.WorkersListener;
import org.apache.logging.log4j.LogManager; import com.a.eye.skywalking.collector.log.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
/** /**
...@@ -52,7 +52,7 @@ public abstract class AbstractClusterWorker extends AbstractWorker { ...@@ -52,7 +52,7 @@ public abstract class AbstractClusterWorker extends AbstractWorker {
protected abstract void onWork(Object message) throws Exception; protected abstract void onWork(Object message) throws Exception;
static class WorkerWithAkka extends UntypedActor { static class WorkerWithAkka extends UntypedActor {
private Logger logger = LogManager.getFormatterLogger(WorkerWithAkka.class); private Logger logger = LogManager.INSTANCE.getFormatterLogger(WorkerWithAkka.class);
private Cluster cluster; private Cluster cluster;
private final AbstractClusterWorker ownerWorker; private final AbstractClusterWorker ownerWorker;
......
package com.a.eye.skywalking.collector.actor; package com.a.eye.skywalking.collector.actor;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public abstract class AbstractWorker { public abstract class AbstractWorker {
private final Logger logger;
private final LocalWorkerContext selfContext; private final LocalWorkerContext selfContext;
private final Role role; private final Role role;
...@@ -14,6 +20,11 @@ public abstract class AbstractWorker { ...@@ -14,6 +20,11 @@ public abstract class AbstractWorker {
this.role = role; this.role = role;
this.clusterContext = clusterContext; this.clusterContext = clusterContext;
this.selfContext = selfContext; this.selfContext = selfContext;
this.logger = LogManager.getFormatterLogger(role.roleName());
}
final public Logger logger() {
return logger;
} }
public abstract void preStart() throws ProviderNotFoundException; public abstract void preStart() throws ProviderNotFoundException;
...@@ -35,6 +46,6 @@ public abstract class AbstractWorker { ...@@ -35,6 +46,6 @@ public abstract class AbstractWorker {
} }
final protected void saveException(Exception e) { final protected void saveException(Exception e) {
// e.printStackTrace(); logger().error(e);
} }
} }
...@@ -24,9 +24,6 @@ public abstract class AbstractWorkerProvider<T extends AbstractWorker> implement ...@@ -24,9 +24,6 @@ public abstract class AbstractWorkerProvider<T extends AbstractWorker> implement
final public WorkerRef create( final public WorkerRef create(
AbstractWorker workerOwner) throws IllegalArgumentException, ProviderNotFoundException { AbstractWorker workerOwner) throws IllegalArgumentException, ProviderNotFoundException {
if (workerInstance(clusterContext) == null) {
throw new IllegalArgumentException("cannot get worker instance with nothing obtained from workerInstance()");
}
if (workerOwner == null) { if (workerOwner == null) {
return onCreate(null); return onCreate(null);
......
package com.a.eye.skywalking.collector.log;

import org.apache.logging.log4j.Logger;

/**
 * Singleton facade over log4j2's {@code org.apache.logging.log4j.LogManager}.
 * <p>
 * Exists so production code obtains loggers through an instance
 * ({@code LogManager.INSTANCE.getFormatterLogger(...)}) rather than a static
 * call, which lets tests swap {@code INSTANCE} for a mock via reflection
 * (see AbstractClusterWorkerProviderTestCase).
 *
 * @author pengys5
 */
public enum LogManager {
    INSTANCE;

    /**
     * Returns a formatter logger (printf-style, %s placeholders) for the given class.
     *
     * @param clazz the class whose name becomes the logger name
     * @return the log4j2 formatter logger for {@code clazz}
     */
    public Logger getFormatterLogger(final Class<?> clazz) {
        return org.apache.logging.log4j.LogManager.getFormatterLogger(clazz);
    }
}
package com.a.eye.skywalking.collector.actor;

import akka.actor.ActorSystem;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.log.LogManager;
import org.apache.logging.log4j.Logger;
import org.mockito.Mockito;
import org.powermock.reflect.Whitebox;

/**
 * Test case for {@code AbstractClusterWorkerProvider#onCreate}.
 * <p>
 * NOTE(review): the {@code @RunWith}/{@code @PrepareForTest} annotations and
 * the {@code @Test} marker are all commented out, so this test is currently
 * DISABLED and never runs under JUnit. Confirm whether that is intentional.
 *
 * @author pengys5
 */
//@RunWith(PowerMockRunner.class)
//@PrepareForTest({LogManager.class})
public class AbstractClusterWorkerProviderTestCase {

    // @Test
    public void testOnCreate() throws ProviderNotFoundException {
        // Replace the LogManager singleton with a mock via reflection so that
        // any worker constructed below receives a mocked Logger instead of a
        // real log4j2 one.
        LogManager logManager = Mockito.mock(LogManager.class);
        Whitebox.setInternalState(LogManager.class, "INSTANCE", logManager);
        Logger logger = Mockito.mock(Logger.class);
        Mockito.when(logManager.getFormatterLogger(Mockito.any())).thenReturn(logger);

        ActorSystem actorSystem = Mockito.mock(ActorSystem.class);
        // NOTE(review): clusterWorkerContext is built but never handed to impl;
        // workerInstance() receives whatever onCreate passes it (here: null context path).
        ClusterWorkerContext clusterWorkerContext = new ClusterWorkerContext(actorSystem);
        Impl impl = new Impl();
        impl.onCreate(null);
    }

    /** Minimal provider used only to drive onCreate; workerNum of 0. */
    class Impl extends AbstractClusterWorkerProvider<AbstractClusterWorkerTestCase.Impl> {
        @Override public Role role() {
            return Role.INSTANCE;
        }

        @Override public AbstractClusterWorkerTestCase.Impl workerInstance(ClusterWorkerContext clusterContext) {
            return new AbstractClusterWorkerTestCase.Impl(role(), clusterContext, new LocalWorkerContext());
        }

        @Override public int workerNum() {
            return 0;
        }
    }

    /** Role naming the Impl worker, selected round-robin. */
    enum Role implements com.a.eye.skywalking.collector.actor.Role {
        INSTANCE;

        @Override
        public String roleName() {
            return AbstractClusterWorkerTestCase.Impl.class.getSimpleName();
        }

        @Override
        public WorkerSelector workerSelector() {
            return new RollingSelector();
        }
    }
}
package com.a.eye.skywalking.collector.actor; package com.a.eye.skywalking.collector.actor;
import akka.actor.Address;
import akka.cluster.ClusterEvent;
import akka.cluster.Member;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import org.apache.logging.log4j.Logger;
import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import org.mockito.Mockito; import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito; import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.reflect.Whitebox;
import static org.mockito.Mockito.CALLS_REAL_METHODS;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/** /**
* @author pengys5 * @author pengys5
*/ */
@RunWith(PowerMockRunner.class)
@PrepareForTest({ClusterEvent.MemberUp.class, Address.class})
public class AbstractClusterWorkerTestCase { public class AbstractClusterWorkerTestCase {
private AbstractClusterWorker.WorkerWithAkka workerWithAkka = mock(AbstractClusterWorker.WorkerWithAkka.class, CALLS_REAL_METHODS);
private AbstractClusterWorker worker = PowerMockito.spy(new Impl(WorkerRole.INSTANCE, null, null));
@Before
public void init(){
Logger logger = mock(Logger.class);
Whitebox.setInternalState(workerWithAkka, "logger", logger);
Whitebox.setInternalState(workerWithAkka, "ownerWorker", worker);
}
@Test @Test
public void testAllocateJob() throws Exception { public void testAllocateJob() throws Exception {
AbstractClusterWorker worker = PowerMockito.mock(AbstractClusterWorker.class);
String jobStr = "TestJob"; String jobStr = "TestJob";
worker.allocateJob(jobStr); worker.allocateJob(jobStr);
Mockito.verify(worker).onWork(jobStr); verify(worker).onWork(jobStr);
}
@Test
public void testMemberUp() throws Throwable {
ClusterEvent.MemberUp memberUp = mock(ClusterEvent.MemberUp.class);
Address address = mock(Address.class);
when(address.toString()).thenReturn("address");
Member member = mock(Member.class);
when(member.address()).thenReturn(address);
when(memberUp.member()).thenReturn(member);
workerWithAkka.onReceive(memberUp);
verify(workerWithAkka).register(member);
}
@Test
public void testMessage() throws Throwable {
String message = "test";
workerWithAkka.onReceive(message);
verify(worker).allocateJob(message);
}
static class Impl extends AbstractClusterWorker {
@Override public void preStart() throws ProviderNotFoundException {
}
public Impl(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext);
}
@Override protected void onWork(Object message) throws Exception {
}
}
public enum WorkerRole implements Role {
INSTANCE;
@Override
public String roleName() {
return Impl.class.getSimpleName();
}
@Override
public WorkerSelector workerSelector() {
return new RollingSelector();
}
} }
} }
package com.a.eye.skywalking.collector.actor.selector;

import org.junit.Assert;
import org.junit.Test;

/**
 * Verifies that {@code AbstractHashMessage} derives its routing hash code
 * directly from the key supplied to its constructor.
 *
 * @author pengys5
 */
public class AbstractHashMessageTestCase {

    @Test
    public void testGetHashCode() {
        final String hashKey = "key";
        final Impl message = new Impl(hashKey);
        // The message must expose exactly the key's own hashCode().
        Assert.assertEquals(hashKey.hashCode(), message.getHashCode());
    }

    /** Minimal concrete subclass used only to instantiate the abstract base. */
    class Impl extends AbstractHashMessage {
        public Impl(String key) {
            super(key);
        }
    }
}
...@@ -46,4 +46,10 @@ public class HashCodeSelectorTestCase { ...@@ -46,4 +46,10 @@ public class HashCodeSelectorTestCase {
WorkerRef select_3 = selector.select(members, message_3); WorkerRef select_3 = selector.select(members, message_3);
Assert.assertEquals(workerRef_3.hashCode(), select_3.hashCode()); Assert.assertEquals(workerRef_3.hashCode(), select_3.hashCode());
} }
@Test(expected = IllegalArgumentException.class)
public void testSelectError() {
HashCodeSelector selector = new HashCodeSelector();
selector.select(null, new Object());
}
} }
package com.a.eye.skywalking.collector.log;

import org.apache.logging.log4j.Logger;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;

/**
 * Test helper that builds a Mockito-mocked {@code Logger} backed by a mocked
 * {@code LogManager}.
 * <p>
 * NOTE(review): the mocked LogManager is a local variable and is never
 * installed into {@code LogManager.INSTANCE} here — production code will not
 * pick up this mock unless the caller also injects it (e.g. via Whitebox).
 * Confirm whether callers rely on that injection happening elsewhere.
 *
 * @author pengys5
 */
public class MockLog {

    /**
     * @return a Mockito mock Logger; the backing LogManager mock returns it
     *         for any class passed to getFormatterLogger.
     */
    public Logger mockito() {
        LogManager logManager = PowerMockito.mock(LogManager.class);
        Logger logger = Mockito.mock(Logger.class);
        Mockito.when(logManager.getFormatterLogger(Mockito.any())).thenReturn(logger);
        return logger;
    }
}
akka {
  actor {
    # Cluster-aware actor references (remote deployment + cluster membership).
    provider = "akka.cluster.ClusterActorRefProvider"
    serializers {
      java = "akka.serialization.JavaSerializer"
      proto = "akka.remote.serialization.ProtobufSerializer"
      // data = "com.a.eye.skywalking.collector.worker.TraceSegmentSerializer"
      // json = "com.a.eye.skywalking.collector.commons.serializer.JsonSerializer"
    }
    serialization-bindings {
      "java.lang.String" = java
      "com.google.protobuf.Message" = proto
      // "com.a.eye.skywalking.messages.ISerializable" = data
      // "com.google.gson.JsonObject" = json
      // "java.io.Serializable" = none
    }
    // serialize-messages = on
    # Warn whenever Java serialization is used (slow and version-fragile).
    warn-about-java-serializer-usage = on
  }
  remote {
    log-remote-lifecycle-events = off
    # Default transport endpoint; overridden at runtime by
    # akka.remote.netty.tcp.HOSTNAME / PORT parsed from ClusterConfig.
    netty.tcp {
      hostname = "127.0.0.1"
      port = 1000
    }
  }
  cluster {
    # Never auto-remove unreachable members; operator must down them manually.
    auto-down-unreachable-after = off
    metrics.enabled = off
  }
}
\ No newline at end of file
...@@ -2,6 +2,7 @@ package com.a.eye.skywalking.collector.worker; ...@@ -2,6 +2,7 @@ package com.a.eye.skywalking.collector.worker;
import com.a.eye.skywalking.collector.actor.*; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.queue.EndOfBatchCommand; import com.a.eye.skywalking.collector.queue.EndOfBatchCommand;
import com.a.eye.skywalking.collector.worker.config.CacheSizeConfig;
/** /**
* @author pengys5 * @author pengys5
...@@ -12,6 +13,8 @@ public abstract class AnalysisMember extends AbstractLocalAsyncWorker { ...@@ -12,6 +13,8 @@ public abstract class AnalysisMember extends AbstractLocalAsyncWorker {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
private int messageNum;
public abstract void analyse(Object message) throws Exception; public abstract void analyse(Object message) throws Exception;
@Override @Override
...@@ -23,11 +26,17 @@ public abstract class AnalysisMember extends AbstractLocalAsyncWorker { ...@@ -23,11 +26,17 @@ public abstract class AnalysisMember extends AbstractLocalAsyncWorker {
if (message instanceof EndOfBatchCommand) { if (message instanceof EndOfBatchCommand) {
aggregation(); aggregation();
} else { } else {
messageNum++;
try { try {
analyse(message); analyse(message);
} catch (Exception e) { } catch (Exception e) {
saveException(e); saveException(e);
} }
if (messageNum >= CacheSizeConfig.Cache.Analysis.SIZE) {
aggregation();
messageNum = 0;
}
} }
} }
......
...@@ -5,6 +5,7 @@ import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; ...@@ -5,6 +5,7 @@ import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.worker.httpserver.HttpServer; import com.a.eye.skywalking.collector.worker.httpserver.HttpServer;
import com.a.eye.skywalking.collector.worker.storage.EsClient; import com.a.eye.skywalking.collector.worker.storage.EsClient;
import com.a.eye.skywalking.collector.worker.storage.IndexCreator; import com.a.eye.skywalking.collector.worker.storage.IndexCreator;
import com.a.eye.skywalking.collector.worker.storage.PersistenceTimer;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
...@@ -20,6 +21,7 @@ public class CollectorBootStartUp { ...@@ -20,6 +21,7 @@ public class CollectorBootStartUp {
collectorSystem.boot(); collectorSystem.boot();
EsClient.INSTANCE.boot(); EsClient.INSTANCE.boot();
IndexCreator.INSTANCE.create(); IndexCreator.INSTANCE.create();
PersistenceTimer.INSTANCE.boot();
HttpServer.INSTANCE.boot((ClusterWorkerContext)collectorSystem.getClusterContext()); HttpServer.INSTANCE.boot((ClusterWorkerContext)collectorSystem.getClusterContext());
} }
} }
package com.a.eye.skywalking.collector.worker;

import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.Role;
import com.a.eye.skywalking.collector.actor.WorkerRefs;
import com.a.eye.skywalking.collector.worker.storage.JoinAndSplitAnalysisData;

/**
 * Analysis member that buffers attribute values per record id in a
 * {@code JoinAndSplitAnalysisData} map, and on aggregation forwards each
 * buffered entry to the aggregation worker refs before clearing the buffer.
 *
 * @author pengys5
 */
public abstract class JoinAndSplitAnalysisMember extends AnalysisMember {

    private final JoinAndSplitAnalysisData analysisData;

    public JoinAndSplitAnalysisMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
        super(role, clusterContext, selfContext);
        analysisData = new JoinAndSplitAnalysisData();
    }

    /**
     * Records one attribute value for the record identified by {@code id},
     * creating the record's buffer entry on first use.
     */
    final protected void set(String id, String attributeName, String value) throws Exception {
        analysisData.getOrCreate(id).set(attributeName, value);
    }

    /**
     * Forwards every buffered entry to the aggregation workers. A failure to
     * deliver one entry is logged and does not stop the remaining entries;
     * the buffer is cleared afterwards either way.
     */
    @Override
    final protected void aggregation() throws Exception {
        analysisData.asMap().forEach((id, data) -> {
            try {
                aggWorkRefs().tell(data);
            } catch (Exception e) {
                logger().error(e);
            }
        });
        analysisData.asMap().clear();
    }

    /** @return the worker refs that receive aggregated entries. */
    protected abstract WorkerRefs aggWorkRefs();
}
package com.a.eye.skywalking.collector.worker;

import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.Role;
import com.a.eye.skywalking.collector.worker.storage.EsClient;
import com.a.eye.skywalking.collector.worker.storage.JoinAndSplitData;
import com.a.eye.skywalking.collector.worker.storage.JoinAndSplitPersistenceData;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.Client;

import java.util.List;
import java.util.Map;

/**
 * Persistence member that merges incoming {@code JoinAndSplitData} messages
 * into a windowed in-memory buffer and, on prepareIndex, converts the
 * inactive window side into Elasticsearch index requests.
 *
 * @author pengys5
 */
public abstract class JoinAndSplitPersistenceMember extends PersistenceMember<JoinAndSplitPersistenceData, JoinAndSplitData> {

    private Logger logger = LogManager.getFormatterLogger(JoinAndSplitPersistenceMember.class);

    protected JoinAndSplitPersistenceMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
        super(role, clusterContext, selfContext);
    }

    /** Supplies the windowed buffer the PersistenceMember base class manages. */
    @Override
    public JoinAndSplitPersistenceData initializeData() {
        return new JoinAndSplitPersistenceData();
    }

    /**
     * Merges one JoinAndSplitData message into the current window; any other
     * message type is logged as an error and dropped.
     */
    @Override
    final public void analyse(Object message) throws Exception {
        if (message instanceof JoinAndSplitData) {
            JoinAndSplitData joinAndSplitData = (JoinAndSplitData) message;
            JoinAndSplitPersistenceData data = getPersistenceData();
            // hold()/release() bracket the mutation — presumably guarding
            // against a concurrent window switch (PersistenceMember.onWork
            // spins on getLast().isHolding()); TODO confirm against Window.
            data.hold();
            data.getOrCreate(joinAndSplitData.getId()).merge(joinAndSplitData);
            data.release();
        } else {
            logger.error("unhandled message, message instance must JoinAndSplitData, but is %s", message.getClass().toString());
        }
    }

    /**
     * Drains the inactive ("last") side of the double-buffered window into
     * index requests, one document per record id, then clears that side.
     */
    @Override
    final protected void prepareIndex(List<IndexRequestBuilder> builderList) {
        Map<String, JoinAndSplitData> lastData = getPersistenceData().getLast().asMap();
        extractData(lastData);

        Client client = EsClient.INSTANCE.getClient();
        lastData.forEach((key, value) -> {
            IndexRequestBuilder builder = client.prepareIndex(esIndex(), esType(), key).setSource(value.asMap());
            builderList.add(builder);
        });
        lastData.clear();
    }
}
package com.a.eye.skywalking.collector.worker;

import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.Role;
import com.a.eye.skywalking.collector.worker.config.CacheSizeConfig;
import com.a.eye.skywalking.collector.worker.storage.MergeData;
import com.a.eye.skywalking.collector.worker.storage.MergePersistenceData;

/**
 * Analysis member that accumulates column values per record id in a
 * {@code MergePersistenceData} buffer and triggers aggregation once the
 * buffer reaches the configured analysis cache size.
 *
 * @author pengys5
 */
public abstract class MergeAnalysisMember extends AnalysisMember {

    private MergePersistenceData persistenceData;

    protected MergeAnalysisMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
        super(role, clusterContext, selfContext);
        persistenceData = new MergePersistenceData();
    }

    private MergePersistenceData getPersistenceData() {
        return persistenceData;
    }

    /**
     * Records one column value for {@code id}; when the buffer reaches
     * CacheSizeConfig.Cache.Analysis.SIZE, aggregation() is invoked to flush it.
     */
    final protected void setMergeData(String id, String column, String value) throws Exception {
        getPersistenceData().getElseCreate(id).setMergeData(column, value);
        if (getPersistenceData().size() >= CacheSizeConfig.Cache.Analysis.SIZE) {
            aggregation();
        }
    }

    /**
     * Pops one buffered MergeData entry, or returns null when the buffer is
     * empty. NOTE(review): hasNext() is checked on a fresh iterator() call and
     * pushOne() is a separate call — confirm MergePersistenceData keeps these
     * consistent.
     */
    final public MergeData pushOne() {
        if (getPersistenceData().iterator().hasNext()) {
            return getPersistenceData().pushOne();
        }
        return null;
    }
}
package com.a.eye.skywalking.collector.worker;

import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.Role;
import com.a.eye.skywalking.collector.worker.config.CacheSizeConfig;
import com.a.eye.skywalking.collector.worker.storage.EsClient;
import com.a.eye.skywalking.collector.worker.storage.MergeData;
import com.a.eye.skywalking.collector.worker.storage.MergePersistenceData;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.client.Client;

import java.util.Iterator;
import java.util.Map;

/**
 * Persistence member that buffers {@code MergeData} messages, and once the
 * buffer reaches the persistence cache size performs a read-merge-write
 * cycle against Elasticsearch: fetch existing documents, merge them into the
 * buffered entries, then bulk-index the result.
 *
 * @author pengys5
 */
public abstract class MergePersistenceMember extends PersistenceMember {

    private Logger logger = LogManager.getFormatterLogger(MergePersistenceMember.class);

    private MergePersistenceData persistenceData;

    protected MergePersistenceMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
        super(role, clusterContext, selfContext);
        persistenceData = new MergePersistenceData();
    }

    private MergePersistenceData getPersistenceData() {
        return persistenceData;
    }

    /**
     * Merges one MergeData message into the buffer; flushes via persistence()
     * once the buffer reaches CacheSizeConfig.Cache.Persistence.SIZE.
     * Non-MergeData messages are logged and dropped.
     */
    @Override final public void analyse(Object message) throws Exception {
        if (message instanceof MergeData) {
            MergeData mergeData = (MergeData)message;
            getPersistenceData().getElseCreate(mergeData.getId()).merge(mergeData);
            if (getPersistenceData().size() >= CacheSizeConfig.Cache.Persistence.SIZE) {
                persistence();
            }
        } else {
            logger.error("message unhandled");
        }
    }

    /**
     * Read-merge-write flush: pull existing documents for every buffered id,
     * merge their sources into the buffer, then bulk-save. The buffer is only
     * cleared when the bulk save reports no failures, so failed batches are
     * retried on the next flush.
     */
    final protected void persistence() {
        MultiGetResponse multiGetResponse = searchFromEs();
        for (MultiGetItemResponse itemResponse : multiGetResponse) {
            GetResponse response = itemResponse.getResponse();
            if (response != null && response.isExists()) {
                getPersistenceData().getElseCreate(response.getId()).merge(response.getSource());
            }
        }

        boolean success = saveToEs();
        if (success) {
            getPersistenceData().clear();
        }
    }

    /**
     * Multi-gets the current documents for every buffered id.
     * NOTE(review): if the buffer were empty this would issue an empty
     * multi-get, which the ES client rejects — confirm persistence() is only
     * reached with a non-empty buffer (true for the size-threshold path).
     */
    private MultiGetResponse searchFromEs() {
        Client client = EsClient.INSTANCE.getClient();
        MultiGetRequestBuilder multiGetRequestBuilder = client.prepareMultiGet();

        Iterator<Map.Entry<String, MergeData>> iterator = getPersistenceData().iterator();
        while (iterator.hasNext()) {
            multiGetRequestBuilder.add(esIndex(), esType(), iterator.next().getKey());
        }
        return multiGetRequestBuilder.get();
    }

    /**
     * Bulk-indexes every buffered entry.
     *
     * @return true when the bulk request completed without any item failure
     */
    private boolean saveToEs() {
        Client client = EsClient.INSTANCE.getClient();
        BulkRequestBuilder bulkRequest = client.prepareBulk();
        logger.debug("persistenceData SIZE: %s", getPersistenceData().size());

        Iterator<Map.Entry<String, MergeData>> iterator = getPersistenceData().iterator();
        while (iterator.hasNext()) {
            MergeData mergeData = iterator.next().getValue();
            bulkRequest.add(client.prepareIndex(esIndex(), esType(), mergeData.getId()).setSource(mergeData.toMap()));
        }
        BulkResponse bulkResponse = bulkRequest.execute().actionGet();
        return !bulkResponse.hasFailures();
    }
}
...@@ -3,32 +3,38 @@ package com.a.eye.skywalking.collector.worker; ...@@ -3,32 +3,38 @@ package com.a.eye.skywalking.collector.worker;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.Role; import com.a.eye.skywalking.collector.actor.Role;
import com.a.eye.skywalking.collector.worker.config.CacheSizeConfig; import com.a.eye.skywalking.collector.actor.WorkerRefs;
import com.a.eye.skywalking.collector.worker.storage.MetricData; import com.a.eye.skywalking.collector.worker.storage.MetricAnalysisData;
import com.a.eye.skywalking.collector.worker.storage.MetricPersistenceData;
/** /**
* @author pengys5 * @author pengys5
*/ */
public abstract class MetricAnalysisMember extends AnalysisMember { public abstract class MetricAnalysisMember extends AnalysisMember {
private MetricAnalysisData metricAnalysisData = new MetricAnalysisData();
private MetricPersistenceData persistenceData = new MetricPersistenceData();
public MetricAnalysisMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) { public MetricAnalysisMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
final protected void setMetric(String id, String column, Long value) throws Exception { final protected void set(String id, String metricName, Long value) throws Exception {
persistenceData.getElseCreate(id).setMetric(column, value); getMetricAnalysisData().getOrCreate(id).set(metricName, value);
if (persistenceData.size() >= CacheSizeConfig.Cache.Persistence.SIZE) { }
aggregation();
} private MetricAnalysisData getMetricAnalysisData() {
return metricAnalysisData;
} }
final public MetricData pushOne() { @Override
if (persistenceData.iterator().hasNext()) { final protected void aggregation() throws Exception {
return persistenceData.pushOne(); getMetricAnalysisData().asMap().forEach((key, value) -> {
} try {
return null; aggWorkRefs().tell(value);
} catch (Exception e) {
e.printStackTrace();
}
});
getMetricAnalysisData().asMap().clear();
} }
protected abstract WorkerRefs aggWorkRefs();
} }
...@@ -3,89 +3,55 @@ package com.a.eye.skywalking.collector.worker; ...@@ -3,89 +3,55 @@ package com.a.eye.skywalking.collector.worker;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.Role; import com.a.eye.skywalking.collector.actor.Role;
import com.a.eye.skywalking.collector.worker.config.CacheSizeConfig;
import com.a.eye.skywalking.collector.worker.storage.EsClient; import com.a.eye.skywalking.collector.worker.storage.EsClient;
import com.a.eye.skywalking.collector.worker.storage.MetricData; import com.a.eye.skywalking.collector.worker.storage.MetricData;
import com.a.eye.skywalking.collector.worker.storage.MetricPersistenceData; import com.a.eye.skywalking.collector.worker.storage.MetricPersistenceData;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
import java.util.Iterator;
import java.util.Map;
/** /**
* @author pengys5 * @author pengys5
*/ */
public abstract class MetricPersistenceMember extends PersistenceMember { public abstract class MetricPersistenceMember extends PersistenceMember<MetricPersistenceData, MetricData> {
private Logger logger = LogManager.getFormatterLogger(MetricPersistenceMember.class); private Logger logger = LogManager.getFormatterLogger(MetricPersistenceMember.class);
private MetricPersistenceData persistenceData = new MetricPersistenceData();
public MetricPersistenceMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) { public MetricPersistenceMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
@Override final public void analyse(Object message) throws Exception { @Override
if (message instanceof MetricData) { public MetricPersistenceData initializeData() {
MetricData metricData = (MetricData)message; return new MetricPersistenceData();
persistenceData.getElseCreate(metricData.getId()).merge(metricData);
if (persistenceData.size() >= CacheSizeConfig.Cache.Persistence.SIZE) {
persistence();
}
} else {
logger.error("message unhandled");
}
} }
final protected void persistence() { @Override
MultiGetResponse multiGetResponse = searchFromEs(); final public void analyse(Object message) throws Exception {
for (MultiGetItemResponse itemResponse : multiGetResponse) { if (message instanceof MetricData) {
GetResponse response = itemResponse.getResponse(); MetricData metricData = (MetricData) message;
if (response != null && response.isExists()) { MetricPersistenceData data = getPersistenceData();
persistenceData.getElseCreate(response.getId()).merge(response.getSource()); data.hold();
} data.getOrCreate(metricData.getId()).merge(metricData);
} data.release();
} else {
boolean success = saveToEs(); logger.error("unhandled message, message instance must MetricData, but is %s", message.getClass().toString());
if (success) {
persistenceData.clear();
} }
} }
private MultiGetResponse searchFromEs() { @Override
Client client = EsClient.INSTANCE.getClient(); final protected void prepareIndex(List<IndexRequestBuilder> builderList) {
MultiGetRequestBuilder multiGetRequestBuilder = client.prepareMultiGet(); Map<String, MetricData> lastData = getPersistenceData().getLast().asMap();
extractData(lastData);
Iterator<Map.Entry<String, MetricData>> iterator = persistenceData.iterator();
while (iterator.hasNext()) {
multiGetRequestBuilder.add(esIndex(), esType(), iterator.next().getKey());
}
MultiGetResponse multiGetResponse = multiGetRequestBuilder.get();
return multiGetResponse;
}
private boolean saveToEs() {
Client client = EsClient.INSTANCE.getClient(); Client client = EsClient.INSTANCE.getClient();
BulkRequestBuilder bulkRequest = client.prepareBulk(); lastData.forEach((key, value) -> {
logger.debug("persistenceData SIZE: %s", persistenceData.size()); IndexRequestBuilder builder = client.prepareIndex(esIndex(), esType(), key).setSource(value.asMap());
builderList.add(builder);
Iterator<Map.Entry<String, MetricData>> iterator = persistenceData.iterator(); });
while (iterator.hasNext()) { lastData.clear();
MetricData metricData = iterator.next().getValue();
bulkRequest.add(client.prepareIndex(esIndex(), esType(), metricData.getId()).setSource(metricData.toMap()));
}
BulkResponse bulkResponse = bulkRequest.execute().actionGet();
return !bulkResponse.hasFailures();
} }
} }
\ No newline at end of file
package com.a.eye.skywalking.collector.worker; package com.a.eye.skywalking.collector.worker;
import com.a.eye.skywalking.collector.actor.*; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.queue.EndOfBatchCommand; import com.a.eye.skywalking.collector.worker.storage.*;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.Client;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/** /**
* @author pengys5 * @author pengys5
*/ */
public abstract class PersistenceMember extends AbstractLocalAsyncWorker { public abstract class PersistenceMember<T extends Window & PersistenceData, D extends Data> extends AbstractLocalSyncWorker {
private Logger logger = LogManager.getFormatterLogger(PersistenceMember.class); private Logger logger = LogManager.getFormatterLogger(PersistenceMember.class);
public PersistenceMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) { public PersistenceMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
persistenceData = initializeData();
}
private T persistenceData;
public abstract T initializeData();
protected T getPersistenceData() {
return persistenceData;
} }
public abstract String esIndex(); public abstract String esIndex();
...@@ -22,17 +41,76 @@ public abstract class PersistenceMember extends AbstractLocalAsyncWorker { ...@@ -22,17 +41,76 @@ public abstract class PersistenceMember extends AbstractLocalAsyncWorker {
public abstract void analyse(Object message) throws Exception; public abstract void analyse(Object message) throws Exception;
@Override final public void preStart() throws ProviderNotFoundException { @Override
final public void preStart() throws ProviderNotFoundException {
}
@Override
protected void onWork(Object request, Object response) throws Exception {
if (request instanceof FlushAndSwitch) {
persistenceData.switchPointer();
while (persistenceData.getLast().isHolding()) {
Thread.sleep(10);
}
if (response instanceof LinkedList) {
prepareIndex((LinkedList) response);
} else {
logger.error("unhandled response, response instance must LinkedList, but is %s", response.getClass().toString());
}
} else {
analyse(request);
}
} }
@Override final protected void onWork(Object message) throws Exception { private MultiGetResponse searchFromEs(Map<String, D> dataMap) {
if (message instanceof EndOfBatchCommand) { Client client = EsClient.INSTANCE.getClient();
persistence(); MultiGetRequestBuilder multiGetRequestBuilder = client.prepareMultiGet();
HasDataFlag flag = new HasDataFlag();
dataMap.forEach((key, value) -> {
multiGetRequestBuilder.add(esIndex(), esType(), value.getId());
flag.doTagHasData();
});
if (flag.isHasData()) {
return multiGetRequestBuilder.get();
} else { } else {
analyse(message); return null;
}
}
final void extractData(Map<String, D> dataMap) {
MultiGetResponse multiGetResponse = searchFromEs(dataMap);
if (multiGetResponse != null) {
for (MultiGetItemResponse itemResponse : multiGetResponse) {
GetResponse response = itemResponse.getResponse();
if (response != null && response.isExists()) {
if (dataMap.containsKey(response.getId())) {
dataMap.get(response.getId()).merge(response.getSource());
}
}
}
} }
} }
protected abstract void persistence(); protected abstract void prepareIndex(List<IndexRequestBuilder> builderList);
class HasDataFlag {
private boolean hasData;
HasDataFlag() {
hasData = false;
}
boolean isHasData() {
return hasData;
}
void doTagHasData() {
this.hasData = true;
}
}
} }
...@@ -3,33 +3,43 @@ package com.a.eye.skywalking.collector.worker; ...@@ -3,33 +3,43 @@ package com.a.eye.skywalking.collector.worker;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.Role; import com.a.eye.skywalking.collector.actor.Role;
import com.a.eye.skywalking.collector.worker.config.CacheSizeConfig; import com.a.eye.skywalking.collector.actor.WorkerRefs;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import com.a.eye.skywalking.collector.worker.storage.RecordAnalysisData;
import com.a.eye.skywalking.collector.worker.storage.RecordPersistenceData;
import com.google.gson.JsonObject; import com.google.gson.JsonObject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public abstract class RecordAnalysisMember extends AnalysisMember { public abstract class RecordAnalysisMember extends AnalysisMember {
private RecordPersistenceData persistenceData = new RecordPersistenceData(); private Logger logger = LogManager.getFormatterLogger(RecordAnalysisMember.class);
private RecordAnalysisData recordAnalysisData = new RecordAnalysisData();
public RecordAnalysisMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) { public RecordAnalysisMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
final public void setRecord(String id, JsonObject record) throws Exception { final public void set(String id, JsonObject record) throws Exception {
persistenceData.getElseCreate(id).setRecord(record); getRecordAnalysisData().getOrCreate(id).set(record);
if (persistenceData.size() >= CacheSizeConfig.Cache.Analysis.SIZE) { }
aggregation();
} private RecordAnalysisData getRecordAnalysisData() {
return recordAnalysisData;
} }
final public RecordData pushOne() { @Override final protected void aggregation() throws Exception {
if (persistenceData.hasNext()) { getRecordAnalysisData().asMap().forEach((key, value) -> {
return persistenceData.pushOne(); try {
} aggWorkRefs().tell(value);
return null; } catch (Exception e) {
logger.error(e);
}
});
getRecordAnalysisData().asMap().clear();
} }
protected abstract WorkerRefs aggWorkRefs();
} }
...@@ -3,75 +3,52 @@ package com.a.eye.skywalking.collector.worker; ...@@ -3,75 +3,52 @@ package com.a.eye.skywalking.collector.worker;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.Role; import com.a.eye.skywalking.collector.actor.Role;
import com.a.eye.skywalking.collector.worker.config.CacheSizeConfig;
import com.a.eye.skywalking.collector.worker.storage.EsClient; import com.a.eye.skywalking.collector.worker.storage.EsClient;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import com.a.eye.skywalking.collector.worker.storage.RecordData;
import com.a.eye.skywalking.collector.worker.storage.RecordPersistenceData; import com.a.eye.skywalking.collector.worker.storage.RecordPersistenceData;
import org.apache.logging.log4j.LogManager; import java.util.List;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;
import java.util.Iterator;
import java.util.Map; import java.util.Map;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.Client;
/** /**
* @author pengys5 * @author pengys5
*/ */
public abstract class RecordPersistenceMember extends PersistenceMember { public abstract class RecordPersistenceMember extends PersistenceMember<RecordPersistenceData, RecordData> {
private Logger logger = LogManager.getFormatterLogger(RecordPersistenceMember.class);
private RecordPersistenceData persistenceData;
public RecordPersistenceMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) { public RecordPersistenceMember(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
persistenceData = new RecordPersistenceData();
} }
private RecordPersistenceData getPersistenceData() { @Override
return this.persistenceData; final public RecordPersistenceData initializeData() {
return new RecordPersistenceData();
} }
@Override @Override
public void analyse(Object message) throws Exception { public void analyse(Object message) throws Exception {
if (message instanceof RecordData) { if (message instanceof RecordData) {
RecordData recordData = (RecordData)message; RecordData recordData = (RecordData) message;
logger.debug("setRecord: id: %s, data: %s", recordData.getId(), recordData.getRecord()); logger().debug("set: id: %s, data: %s", recordData.getId(), recordData.get());
getPersistenceData().getElseCreate(recordData.getId()).setRecord(recordData.getRecord()); RecordPersistenceData data = getPersistenceData();
if (getPersistenceData().size() >= CacheSizeConfig.Cache.Persistence.SIZE) { data.hold();
persistence(); data.getOrCreate(recordData.getId()).set(recordData.get());
} data.release();
} else { } else {
logger.error("message unhandled"); logger().error("message unhandled");
} }
} }
protected void persistence() { @Override
boolean success = saveToEs(); final protected void prepareIndex(List<IndexRequestBuilder> builderList) {
if (success) { Map<String, RecordData> lastData = getPersistenceData().getLast().asMap();
getPersistenceData().clear(); extractData(lastData);
}
}
private boolean saveToEs() {
Client client = EsClient.INSTANCE.getClient(); Client client = EsClient.INSTANCE.getClient();
BulkRequestBuilder bulkRequest = client.prepareBulk(); lastData.forEach((key, value) -> {
logger.debug("persistenceData SIZE: %s", getPersistenceData().size()); IndexRequestBuilder builder = client.prepareIndex(esIndex(), esType(), key).setSource(value.get().toString());
builderList.add(builder);
Iterator<Map.Entry<String, RecordData>> iterator = getPersistenceData().iterator(); });
lastData.clear();
while (iterator.hasNext()) {
Map.Entry<String, RecordData> recordData = iterator.next();
logger.debug("saveToEs: key: %s, data: %s", recordData.getKey(), recordData.getValue().getRecord().toString());
bulkRequest.add(client.prepareIndex(esIndex(), esType(), recordData.getKey()).setSource(recordData.getValue().getRecord().toString()));
}
BulkResponse bulkResponse = bulkRequest.execute().actionGet();
if (bulkResponse.hasFailures()) {
logger.error(bulkResponse.buildFailureMessage());
}
return !bulkResponse.hasFailures();
} }
} }
...@@ -7,11 +7,11 @@ public class CacheSizeConfig { ...@@ -7,11 +7,11 @@ public class CacheSizeConfig {
public static class Cache { public static class Cache {
public static class Analysis { public static class Analysis {
public static int SIZE = 1000; public static int SIZE = 1024;
} }
public static class Persistence { public static class Persistence {
public static int SIZE = 1000; public static int SIZE = 5000;
} }
} }
} }
...@@ -15,6 +15,12 @@ public class EsConfig { ...@@ -15,6 +15,12 @@ public class EsConfig {
} }
} }
public static class Persistence {
public static class Timer {
public static Integer VALUE = 3;
}
}
public static class Index { public static class Index {
public static class Initialize { public static class Initialize {
...@@ -28,6 +34,40 @@ public class EsConfig { ...@@ -28,6 +34,40 @@ public class EsConfig {
public static class Replicas { public static class Replicas {
public static String NUMBER = ""; public static String NUMBER = "";
} }
public static class RefreshInterval {
public static class GlobalTraceIndex {
public static Integer VALUE = 1;
}
public static class NodeCompIndex {
public static Integer VALUE = 1;
}
public static class NodeMappingIndex {
public static Integer VALUE = 1;
}
public static class NodeRefIndex {
public static Integer VALUE = 1;
}
public static class NodeRefResSumIndex {
public static Integer VALUE = 1;
}
public static class SegmentCostIndex {
public static Integer VALUE = 10;
}
public static class SegmentExceptionIndex {
public static Integer VALUE = 10;
}
public static class SegmentIndex {
public static Integer VALUE = 10;
}
}
} }
} }
......
...@@ -8,61 +8,57 @@ public class WorkerConfig { ...@@ -8,61 +8,57 @@ public class WorkerConfig {
public static class WorkerNum { public static class WorkerNum {
public static class Node { public static class Node {
public static class NodeCompAgg { public static class NodeCompAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
public static class NodeMappingDayAgg { public static class NodeMappingDayAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
public static class NodeMappingHourAgg { public static class NodeMappingHourAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
public static class NodeMappingMinuteAgg { public static class NodeMappingMinuteAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
} }
public static class NodeRef { public static class NodeRef {
public static class NodeRefDayAgg { public static class NodeRefDayAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
public static class NodeRefHourAgg { public static class NodeRefHourAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
public static class NodeRefMinuteAgg { public static class NodeRefMinuteAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
public static class NodeRefResSumDayAgg { public static class NodeRefResSumDayAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
public static class NodeRefResSumHourAgg { public static class NodeRefResSumHourAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
public static class NodeRefResSumMinuteAgg { public static class NodeRefResSumMinuteAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
} }
public static class GlobalTrace { public static class GlobalTrace {
public static class GlobalTraceAgg { public static class GlobalTraceAgg {
public static int VALUE = 10; public static int VALUE = 2;
} }
} }
} }
public static class Queue { public static class Queue {
public static class GlobalTrace { public static class GlobalTrace {
public static class GlobalTraceSave {
public static int SIZE = 1024;
}
public static class GlobalTraceAnalysis { public static class GlobalTraceAnalysis {
public static int SIZE = 1024; public static int SIZE = 1024;
} }
...@@ -70,19 +66,19 @@ public class WorkerConfig { ...@@ -70,19 +66,19 @@ public class WorkerConfig {
public static class Segment { public static class Segment {
public static class SegmentPost { public static class SegmentPost {
public static int SIZE = 1024; public static int SIZE = 4096;
} }
public static class SegmentCostSave { public static class SegmentAnalysis {
public static int SIZE = 1024; public static int SIZE = 1024;
} }
public static class SegmentSave { public static class SegmentCostAnalysis {
public static int SIZE = 1024; public static int SIZE = 4096;
} }
public static class SegmentExceptionSave { public static class SegmentExceptionAnalysis {
public static int SIZE = 1024; public static int SIZE = 4096;
} }
} }
...@@ -102,22 +98,6 @@ public class WorkerConfig { ...@@ -102,22 +98,6 @@ public class WorkerConfig {
public static class NodeMappingMinuteAnalysis { public static class NodeMappingMinuteAnalysis {
public static int SIZE = 1024; public static int SIZE = 1024;
} }
public static class NodeCompSave {
public static int SIZE = 1024;
}
public static class NodeMappingDaySave {
public static int SIZE = 1024;
}
public static class NodeMappingHourSave {
public static int SIZE = 1024;
}
public static class NodeMappingMinuteSave {
public static int SIZE = 1024;
}
} }
public static class NodeRef { public static class NodeRef {
...@@ -133,30 +113,6 @@ public class WorkerConfig { ...@@ -133,30 +113,6 @@ public class WorkerConfig {
public static int SIZE = 1024; public static int SIZE = 1024;
} }
public static class NodeRefDaySave {
public static int SIZE = 1024;
}
public static class NodeRefHourSave {
public static int SIZE = 1024;
}
public static class NodeRefMinuteSave {
public static int SIZE = 1024;
}
public static class NodeRefResSumDaySave {
public static int SIZE = 1024;
}
public static class NodeRefResSumHourSave {
public static int SIZE = 1024;
}
public static class NodeRefResSumMinuteSave {
public static int SIZE = 1024;
}
public static class NodeRefResSumDayAnalysis { public static class NodeRefResSumDayAnalysis {
public static int SIZE = 1024; public static int SIZE = 1024;
} }
......
...@@ -46,8 +46,6 @@ public class GlobalTraceGetWithGlobalId extends AbstractGet { ...@@ -46,8 +46,6 @@ public class GlobalTraceGetWithGlobalId extends AbstractGet {
} }
public static class Factory extends AbstractGetProvider<GlobalTraceGetWithGlobalId> { public static class Factory extends AbstractGetProvider<GlobalTraceGetWithGlobalId> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.globaltrace; package com.a.eye.skywalking.collector.worker.globaltrace;
import com.a.eye.skywalking.collector.worker.config.EsConfig;
import com.a.eye.skywalking.collector.worker.storage.AbstractIndex; import com.a.eye.skywalking.collector.worker.storage.AbstractIndex;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
...@@ -24,14 +25,18 @@ public class GlobalTraceIndex extends AbstractIndex { ...@@ -24,14 +25,18 @@ public class GlobalTraceIndex extends AbstractIndex {
return true; return true;
} }
@Override
public int refreshInterval() {
return EsConfig.Es.Index.RefreshInterval.GlobalTraceIndex.VALUE;
}
@Override @Override
public XContentBuilder createMappingBuilder() throws IOException { public XContentBuilder createMappingBuilder() throws IOException {
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder() XContentBuilder mappingBuilder = XContentFactory.jsonBuilder()
.startObject() .startObject()
.startObject("properties") .startObject("properties")
.startObject(SUB_SEG_IDS) .startObject(SUB_SEG_IDS)
.field("type", "text") .field("type", "keyword")
.field("index", "not_analyzed")
.endObject() .endObject()
.endObject() .endObject()
.endObject(); .endObject();
......
package com.a.eye.skywalking.collector.worker.globaltrace.analysis; package com.a.eye.skywalking.collector.worker.globaltrace.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.MergeAnalysisMember; import com.a.eye.skywalking.collector.worker.JoinAndSplitAnalysisMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.globaltrace.GlobalTraceIndex; import com.a.eye.skywalking.collector.worker.globaltrace.GlobalTraceIndex;
import com.a.eye.skywalking.collector.worker.globaltrace.persistence.GlobalTraceAgg; import com.a.eye.skywalking.collector.worker.globaltrace.persistence.GlobalTraceAgg;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost; import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.GlobalTraceId; import com.a.eye.skywalking.collector.worker.segment.entity.GlobalTraceId;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.storage.MergeData;
import com.a.eye.skywalking.collector.worker.tools.CollectionTools; import com.a.eye.skywalking.collector.worker.tools.CollectionTools;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.List; import java.util.List;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class GlobalTraceAnalysis extends MergeAnalysisMember { public class GlobalTraceAnalysis extends JoinAndSplitAnalysisMember {
private Logger logger = LogManager.getFormatterLogger(GlobalTraceAnalysis.class);
GlobalTraceAnalysis(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) { GlobalTraceAnalysis(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -36,23 +37,25 @@ public class GlobalTraceAnalysis extends MergeAnalysisMember { ...@@ -36,23 +37,25 @@ public class GlobalTraceAnalysis extends MergeAnalysisMember {
if (CollectionTools.isNotEmpty(globalTraceIdList)) { if (CollectionTools.isNotEmpty(globalTraceIdList)) {
for (GlobalTraceId disTraceId : globalTraceIdList) { for (GlobalTraceId disTraceId : globalTraceIdList) {
String traceId = disTraceId.get(); String traceId = disTraceId.get();
setMergeData(traceId, GlobalTraceIndex.SUB_SEG_IDS, subSegmentId); set(traceId, GlobalTraceIndex.SUB_SEG_IDS, subSegmentId);
} }
} }
} else {
logger.error("unhandled message, message instance must SegmentPost.SegmentWithTimeSlice, but is %s", message.getClass().toString());
} }
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
MergeData oneRecord; try {
while ((oneRecord = pushOne()) != null) { return getClusterContext().lookup(GlobalTraceAgg.Role.INSTANCE);
getClusterContext().lookup(GlobalTraceAgg.Role.INSTANCE).tell(oneRecord); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", GlobalTraceAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<GlobalTraceAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<GlobalTraceAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
...@@ -4,7 +4,7 @@ import com.a.eye.skywalking.collector.actor.*; ...@@ -4,7 +4,7 @@ import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.storage.MergeData; import com.a.eye.skywalking.collector.worker.storage.JoinAndSplitData;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
...@@ -27,16 +27,14 @@ public class GlobalTraceAgg extends AbstractClusterWorker { ...@@ -27,16 +27,14 @@ public class GlobalTraceAgg extends AbstractClusterWorker {
@Override @Override
protected void onWork(Object message) throws Exception { protected void onWork(Object message) throws Exception {
if (message instanceof MergeData) { if (message instanceof JoinAndSplitData) {
getSelfContext().lookup(GlobalTraceSave.Role.INSTANCE).tell(message); getSelfContext().lookup(GlobalTraceSave.Role.INSTANCE).tell(message);
} else { } else {
logger.error("message unhandled"); logger.error("unhandled message, message instance must JoinAndSplitData, but is %s", message.getClass().toString());
} }
} }
public static class Factory extends AbstractClusterWorkerProvider<GlobalTraceAgg> { public static class Factory extends AbstractClusterWorkerProvider<GlobalTraceAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.globaltrace.persistence; package com.a.eye.skywalking.collector.worker.globaltrace.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.MergePersistenceMember; import com.a.eye.skywalking.collector.worker.JoinAndSplitPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.globaltrace.GlobalTraceIndex; import com.a.eye.skywalking.collector.worker.globaltrace.GlobalTraceIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class GlobalTraceSave extends MergePersistenceMember { public class GlobalTraceSave extends JoinAndSplitPersistenceMember {
GlobalTraceSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, GlobalTraceSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,22 +29,17 @@ public class GlobalTraceSave extends MergePersistenceMember { ...@@ -29,22 +29,17 @@ public class GlobalTraceSave extends MergePersistenceMember {
return GlobalTraceIndex.TYPE_RECORD; return GlobalTraceIndex.TYPE_RECORD;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<GlobalTraceSave> { public static class Factory extends AbstractLocalSyncWorkerProvider<GlobalTraceSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
} }
@Override
public int queueSize() {
return WorkerConfig.Queue.GlobalTrace.GlobalTraceSave.SIZE;
}
@Override @Override
public GlobalTraceSave workerInstance(ClusterWorkerContext clusterContext) { public GlobalTraceSave workerInstance(ClusterWorkerContext clusterContext) {
return new GlobalTraceSave(role(), clusterContext, new LocalWorkerContext()); GlobalTraceSave worker = new GlobalTraceSave(role(), clusterContext, new LocalWorkerContext());
PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
} }
} }
......
...@@ -8,7 +8,7 @@ import com.a.eye.skywalking.collector.worker.globaltrace.GlobalTraceIndex; ...@@ -8,7 +8,7 @@ import com.a.eye.skywalking.collector.worker.globaltrace.GlobalTraceIndex;
import com.a.eye.skywalking.collector.worker.segment.SegmentIndex; import com.a.eye.skywalking.collector.worker.segment.SegmentIndex;
import com.a.eye.skywalking.collector.worker.segment.entity.*; import com.a.eye.skywalking.collector.worker.segment.entity.*;
import com.a.eye.skywalking.collector.worker.storage.GetResponseFromEs; import com.a.eye.skywalking.collector.worker.storage.GetResponseFromEs;
import com.a.eye.skywalking.collector.worker.storage.MergeData; import com.a.eye.skywalking.collector.worker.storage.JoinAndSplitData;
import com.a.eye.skywalking.collector.worker.tools.CollectionTools; import com.a.eye.skywalking.collector.worker.tools.CollectionTools;
import com.google.gson.Gson; import com.google.gson.Gson;
import com.google.gson.JsonObject; import com.google.gson.JsonObject;
...@@ -28,7 +28,7 @@ public class GlobalTraceSearchWithGlobalId extends AbstractLocalSyncWorker { ...@@ -28,7 +28,7 @@ public class GlobalTraceSearchWithGlobalId extends AbstractLocalSyncWorker {
private Gson gson = new Gson(); private Gson gson = new Gson();
GlobalTraceSearchWithGlobalId(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) { public GlobalTraceSearchWithGlobalId(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -41,7 +41,7 @@ public class GlobalTraceSearchWithGlobalId extends AbstractLocalSyncWorker { ...@@ -41,7 +41,7 @@ public class GlobalTraceSearchWithGlobalId extends AbstractLocalSyncWorker {
logger.debug("globalTraceObj: %s", globalTraceObj); logger.debug("globalTraceObj: %s", globalTraceObj);
String subSegIdsStr = globalTraceObj.get(GlobalTraceIndex.SUB_SEG_IDS).getAsString(); String subSegIdsStr = globalTraceObj.get(GlobalTraceIndex.SUB_SEG_IDS).getAsString();
String[] subSegIds = subSegIdsStr.split(MergeData.SPLIT); String[] subSegIds = subSegIdsStr.split(JoinAndSplitData.SPLIT);
List<SpanView> spanViewList = new ArrayList<>(); List<SpanView> spanViewList = new ArrayList<>();
for (String subSegId : subSegIds) { for (String subSegId : subSegIds) {
...@@ -60,6 +60,8 @@ public class GlobalTraceSearchWithGlobalId extends AbstractLocalSyncWorker { ...@@ -60,6 +60,8 @@ public class GlobalTraceSearchWithGlobalId extends AbstractLocalSyncWorker {
JsonObject responseObj = (JsonObject)response; JsonObject responseObj = (JsonObject)response;
responseObj.addProperty("result", buildTree(spanViewList)); responseObj.addProperty("result", buildTree(spanViewList));
} else {
logger.error("unhandled message, message instance must String, but is %s", request.getClass().toString());
} }
} }
...@@ -153,8 +155,6 @@ public class GlobalTraceSearchWithGlobalId extends AbstractLocalSyncWorker { ...@@ -153,8 +155,6 @@ public class GlobalTraceSearchWithGlobalId extends AbstractLocalSyncWorker {
} }
public static class Factory extends AbstractLocalSyncWorkerProvider<GlobalTraceSearchWithGlobalId> { public static class Factory extends AbstractLocalSyncWorkerProvider<GlobalTraceSearchWithGlobalId> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.httpserver; package com.a.eye.skywalking.collector.worker.httpserver;
import com.a.eye.skywalking.collector.actor.*; import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorker;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalAsyncWorkerRef;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.Role;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.google.gson.JsonObject; import com.google.gson.JsonObject;
import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonReader;
import java.io.BufferedReader;
import java.io.IOException;
import javax.servlet.ServletException; import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import java.io.BufferedReader; import org.apache.logging.log4j.LogManager;
import java.io.IOException; import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
...@@ -21,8 +26,7 @@ public abstract class AbstractPost extends AbstractLocalAsyncWorker { ...@@ -21,8 +26,7 @@ public abstract class AbstractPost extends AbstractLocalAsyncWorker {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
@Override @Override final public void onWork(Object message) throws Exception {
final public void onWork(Object message) throws Exception {
onReceive(message); onReceive(message);
} }
...@@ -30,21 +34,23 @@ public abstract class AbstractPost extends AbstractLocalAsyncWorker { ...@@ -30,21 +34,23 @@ public abstract class AbstractPost extends AbstractLocalAsyncWorker {
static class PostWithHttpServlet extends AbstractHttpServlet { static class PostWithHttpServlet extends AbstractHttpServlet {
private Logger logger = LogManager.getFormatterLogger(PostWithHttpServlet.class);
private final LocalAsyncWorkerRef ownerWorkerRef; private final LocalAsyncWorkerRef ownerWorkerRef;
PostWithHttpServlet(LocalAsyncWorkerRef ownerWorkerRef) { PostWithHttpServlet(LocalAsyncWorkerRef ownerWorkerRef) {
this.ownerWorkerRef = ownerWorkerRef; this.ownerWorkerRef = ownerWorkerRef;
} }
@Override @Override final protected void doPost(HttpServletRequest request,
final protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { HttpServletResponse response) throws ServletException, IOException {
JsonObject resJson = new JsonObject(); JsonObject resJson = new JsonObject();
try { try {
BufferedReader bufferedReader = request.getReader(); BufferedReader bufferedReader = request.getReader();
streamReader(bufferedReader); streamReader(bufferedReader);
reply(response, resJson, HttpServletResponse.SC_OK); reply(response, resJson, HttpServletResponse.SC_OK);
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); logger.error(e);
resJson.addProperty("error", e.getMessage()); resJson.addProperty("error", e.getMessage());
reply(response, resJson, HttpServletResponse.SC_INTERNAL_SERVER_ERROR); reply(response, resJson, HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
} }
......
package com.a.eye.skywalking.collector.worker.node; package com.a.eye.skywalking.collector.worker.node;
import com.a.eye.skywalking.collector.worker.config.EsConfig;
import com.a.eye.skywalking.collector.worker.storage.AbstractIndex; import com.a.eye.skywalking.collector.worker.storage.AbstractIndex;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
...@@ -25,25 +26,27 @@ public class NodeCompIndex extends AbstractIndex { ...@@ -25,25 +26,27 @@ public class NodeCompIndex extends AbstractIndex {
return false; return false;
} }
@Override
public int refreshInterval() {
return EsConfig.Es.Index.RefreshInterval.NodeCompIndex.VALUE;
}
@Override @Override
public XContentBuilder createMappingBuilder() throws IOException { public XContentBuilder createMappingBuilder() throws IOException {
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder() XContentBuilder mappingBuilder = XContentFactory.jsonBuilder()
.startObject() .startObject()
.startObject("properties") .startObject("properties")
.startObject(NAME) .startObject(NAME)
.field("type", "string") .field("type", "keyword")
.field("index", "not_analyzed") .endObject()
.endObject() .startObject(PEERS)
.startObject(PEERS) .field("type", "keyword")
.field("type", "string") .endObject()
.field("index", "not_analyzed") .startObject(AGG_COLUMN)
.endObject() .field("type", "keyword")
.startObject(AGG_COLUMN) .endObject()
.field("type", "string") .endObject()
.field("index", "not_analyzed") .endObject();
.endObject()
.endObject()
.endObject();
return mappingBuilder; return mappingBuilder;
} }
} }
package com.a.eye.skywalking.collector.worker.node; package com.a.eye.skywalking.collector.worker.node;
import com.a.eye.skywalking.collector.worker.config.EsConfig;
import com.a.eye.skywalking.collector.worker.storage.AbstractIndex; import com.a.eye.skywalking.collector.worker.storage.AbstractIndex;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
...@@ -25,29 +26,34 @@ public class NodeMappingIndex extends AbstractIndex { ...@@ -25,29 +26,34 @@ public class NodeMappingIndex extends AbstractIndex {
return false; return false;
} }
@Override
public int refreshInterval() {
return EsConfig.Es.Index.RefreshInterval.NodeMappingIndex.VALUE;
}
@Override @Override
public XContentBuilder createMappingBuilder() throws IOException { public XContentBuilder createMappingBuilder() throws IOException {
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder() XContentBuilder mappingBuilder = XContentFactory.jsonBuilder()
.startObject() .startObject()
.startObject("properties") .startObject("properties")
.startObject(CODE) .startObject(CODE)
.field("type", "string") .field("type", "string")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(PEERS) .startObject(PEERS)
.field("type", "string") .field("type", "string")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(AGG_COLUMN) .startObject(AGG_COLUMN)
.field("type", "string") .field("type", "string")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(TIME_SLICE) .startObject(TIME_SLICE)
.field("type", "long") .field("type", "long")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.endObject() .endObject()
.endObject(); .endObject();
return mappingBuilder; return mappingBuilder;
} }
} }
...@@ -28,7 +28,7 @@ abstract class AbstractNodeCompAnalysis extends RecordAnalysisMember { ...@@ -28,7 +28,7 @@ abstract class AbstractNodeCompAnalysis extends RecordAnalysisMember {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
void analyseSpans(Segment segment) throws Exception { final void analyseSpans(Segment segment) throws Exception {
List<Span> spanList = segment.getSpans(); List<Span> spanList = segment.getSpans();
logger.debug("node analysis span isNotEmpty %s", CollectionTools.isNotEmpty(spanList)); logger.debug("node analysis span isNotEmpty %s", CollectionTools.isNotEmpty(spanList));
...@@ -43,7 +43,7 @@ abstract class AbstractNodeCompAnalysis extends RecordAnalysisMember { ...@@ -43,7 +43,7 @@ abstract class AbstractNodeCompAnalysis extends RecordAnalysisMember {
compJsonObj.addProperty(NodeCompIndex.PEERS, peers); compJsonObj.addProperty(NodeCompIndex.PEERS, peers);
compJsonObj.addProperty(NodeCompIndex.NAME, Tags.COMPONENT.get(span)); compJsonObj.addProperty(NodeCompIndex.NAME, Tags.COMPONENT.get(span));
setRecord(peers, compJsonObj); set(peers, compJsonObj);
} else if (Tags.SPAN_KIND_SERVER.equals(kind) && span.getParentSpanId() == -1) { } else if (Tags.SPAN_KIND_SERVER.equals(kind) && span.getParentSpanId() == -1) {
String peers = segment.getApplicationCode(); String peers = segment.getApplicationCode();
...@@ -51,9 +51,7 @@ abstract class AbstractNodeCompAnalysis extends RecordAnalysisMember { ...@@ -51,9 +51,7 @@ abstract class AbstractNodeCompAnalysis extends RecordAnalysisMember {
compJsonObj.addProperty(NodeCompIndex.PEERS, peers); compJsonObj.addProperty(NodeCompIndex.PEERS, peers);
compJsonObj.addProperty(NodeCompIndex.NAME, Tags.COMPONENT.get(span)); compJsonObj.addProperty(NodeCompIndex.NAME, Tags.COMPONENT.get(span));
setRecord(peers, compJsonObj); set(peers, compJsonObj);
} else {
logger.error("The span kind value is incorrect which segment record id is %s, the value must client or server", segment.getTraceSegmentId());
} }
} }
} }
......
...@@ -26,7 +26,7 @@ abstract class AbstractNodeMappingAnalysis extends RecordAnalysisMember { ...@@ -26,7 +26,7 @@ abstract class AbstractNodeMappingAnalysis extends RecordAnalysisMember {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
void analyseRefs(Segment segment, long timeSlice) throws Exception { final void analyseRefs(Segment segment, long timeSlice) throws Exception {
List<TraceSegmentRef> segmentRefList = segment.getRefs(); List<TraceSegmentRef> segmentRefList = segment.getRefs();
logger.debug("node mapping analysis refs isNotEmpty %s", CollectionTools.isNotEmpty(segmentRefList)); logger.debug("node mapping analysis refs isNotEmpty %s", CollectionTools.isNotEmpty(segmentRefList));
...@@ -42,7 +42,7 @@ abstract class AbstractNodeMappingAnalysis extends RecordAnalysisMember { ...@@ -42,7 +42,7 @@ abstract class AbstractNodeMappingAnalysis extends RecordAnalysisMember {
nodeMappingJsonObj.addProperty(NodeMappingIndex.TIME_SLICE, timeSlice); nodeMappingJsonObj.addProperty(NodeMappingIndex.TIME_SLICE, timeSlice);
String id = timeSlice + Const.ID_SPLIT + code + Const.ID_SPLIT + peers; String id = timeSlice + Const.ID_SPLIT + code + Const.ID_SPLIT + peers;
setRecord(id, nodeMappingJsonObj); set(id, nodeMappingJsonObj);
} }
} }
} }
......
package com.a.eye.skywalking.collector.worker.node.analysis; package com.a.eye.skywalking.collector.worker.node.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.node.persistence.NodeCompAgg; import com.a.eye.skywalking.collector.worker.node.persistence.NodeCompAgg;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost; import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeCompAnalysis extends AbstractNodeCompAnalysis { public class NodeCompAnalysis extends AbstractNodeCompAnalysis {
private Logger logger = LogManager.getFormatterLogger(NodeCompAnalysis.class);
NodeCompAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeCompAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -27,20 +28,22 @@ public class NodeCompAnalysis extends AbstractNodeCompAnalysis { ...@@ -27,20 +28,22 @@ public class NodeCompAnalysis extends AbstractNodeCompAnalysis {
SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice) message; SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice) message;
Segment segment = segmentWithTimeSlice.getSegment(); Segment segment = segmentWithTimeSlice.getSegment();
analyseSpans(segment); analyseSpans(segment);
} else {
logger.error("unhandled message, message instance must SegmentPost.SegmentWithTimeSlice, but is %s", message.getClass().toString());
} }
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
RecordData oneRecord; try {
while ((oneRecord = pushOne()) != null) { return getClusterContext().lookup(NodeCompAgg.Role.INSTANCE);
getClusterContext().lookup(NodeCompAgg.Role.INSTANCE).tell(oneRecord); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", NodeCompAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeCompAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeCompAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.node.analysis; package com.a.eye.skywalking.collector.worker.node.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.node.persistence.NodeMappingDayAgg; import com.a.eye.skywalking.collector.worker.node.persistence.NodeMappingDayAgg;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost; import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeMappingDayAnalysis extends AbstractNodeMappingAnalysis { public class NodeMappingDayAnalysis extends AbstractNodeMappingAnalysis {
private Logger logger = LogManager.getFormatterLogger(NodeMappingDayAnalysis.class);
public NodeMappingDayAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, public NodeMappingDayAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -27,20 +28,22 @@ public class NodeMappingDayAnalysis extends AbstractNodeMappingAnalysis { ...@@ -27,20 +28,22 @@ public class NodeMappingDayAnalysis extends AbstractNodeMappingAnalysis {
SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice) message; SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice) message;
Segment segment = segmentWithTimeSlice.getSegment(); Segment segment = segmentWithTimeSlice.getSegment();
analyseRefs(segment, segmentWithTimeSlice.getDay()); analyseRefs(segment, segmentWithTimeSlice.getDay());
} else {
logger.error("unhandled message, message instance must SegmentPost.SegmentWithTimeSlice, but is %s", message.getClass().toString());
} }
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
RecordData oneRecord; try {
while ((oneRecord = pushOne()) != null) { return getClusterContext().lookup(NodeMappingDayAgg.Role.INSTANCE);
getClusterContext().lookup(NodeMappingDayAgg.Role.INSTANCE).tell(oneRecord); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", NodeMappingDayAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeMappingDayAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeMappingDayAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.node.analysis; package com.a.eye.skywalking.collector.worker.node.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.node.persistence.NodeMappingHourAgg; import com.a.eye.skywalking.collector.worker.node.persistence.NodeMappingHourAgg;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost; import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeMappingHourAnalysis extends AbstractNodeMappingAnalysis { public class NodeMappingHourAnalysis extends AbstractNodeMappingAnalysis {
private Logger logger = LogManager.getFormatterLogger(NodeMappingHourAnalysis.class);
NodeMappingHourAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeMappingHourAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -27,20 +28,22 @@ public class NodeMappingHourAnalysis extends AbstractNodeMappingAnalysis { ...@@ -27,20 +28,22 @@ public class NodeMappingHourAnalysis extends AbstractNodeMappingAnalysis {
SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice) message; SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice) message;
Segment segment = segmentWithTimeSlice.getSegment(); Segment segment = segmentWithTimeSlice.getSegment();
analyseRefs(segment, segmentWithTimeSlice.getHour()); analyseRefs(segment, segmentWithTimeSlice.getHour());
} else {
logger.error("unhandled message, message instance must SegmentPost.SegmentWithTimeSlice, but is %s", message.getClass().toString());
} }
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
RecordData oneRecord; try {
while ((oneRecord = pushOne()) != null) { return getClusterContext().lookup(NodeMappingHourAgg.Role.INSTANCE);
getClusterContext().lookup(NodeMappingHourAgg.Role.INSTANCE).tell(oneRecord); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", NodeMappingHourAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeMappingHourAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeMappingHourAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.node.analysis; package com.a.eye.skywalking.collector.worker.node.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.node.persistence.NodeMappingMinuteAgg; import com.a.eye.skywalking.collector.worker.node.persistence.NodeMappingMinuteAgg;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost; import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeMappingMinuteAnalysis extends AbstractNodeMappingAnalysis { public class NodeMappingMinuteAnalysis extends AbstractNodeMappingAnalysis {
private Logger logger = LogManager.getFormatterLogger(NodeMappingMinuteAnalysis.class);
NodeMappingMinuteAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeMappingMinuteAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -27,20 +28,22 @@ public class NodeMappingMinuteAnalysis extends AbstractNodeMappingAnalysis { ...@@ -27,20 +28,22 @@ public class NodeMappingMinuteAnalysis extends AbstractNodeMappingAnalysis {
SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice) message; SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice) message;
Segment segment = segmentWithTimeSlice.getSegment(); Segment segment = segmentWithTimeSlice.getSegment();
analyseRefs(segment, segmentWithTimeSlice.getMinute()); analyseRefs(segment, segmentWithTimeSlice.getMinute());
} else {
logger.error("unhandled message, message instance must SegmentPost.SegmentWithTimeSlice, but is %s", message.getClass().toString());
} }
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
RecordData oneRecord; try {
while ((oneRecord = pushOne()) != null) { return getClusterContext().lookup(NodeMappingMinuteAgg.Role.INSTANCE);
getClusterContext().lookup(NodeMappingMinuteAgg.Role.INSTANCE).tell(oneRecord); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", NodeMappingMinuteAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeMappingMinuteAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeMappingMinuteAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
...@@ -5,12 +5,16 @@ import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; ...@@ -5,12 +5,16 @@ import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import com.a.eye.skywalking.collector.worker.storage.RecordData;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeCompAgg extends AbstractClusterWorker { public class NodeCompAgg extends AbstractClusterWorker {
private Logger logger = LogManager.getFormatterLogger(NodeCompAgg.class);
NodeCompAgg(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeCompAgg(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -25,13 +29,12 @@ public class NodeCompAgg extends AbstractClusterWorker { ...@@ -25,13 +29,12 @@ public class NodeCompAgg extends AbstractClusterWorker {
protected void onWork(Object message) throws Exception { protected void onWork(Object message) throws Exception {
if (message instanceof RecordData) { if (message instanceof RecordData) {
getSelfContext().lookup(NodeCompSave.Role.INSTANCE).tell(message); getSelfContext().lookup(NodeCompSave.Role.INSTANCE).tell(message);
} else } else {
throw new IllegalArgumentException("message instance must RecordData"); logger.error("unhandled message, message instance must RecordData, but is %s", message.getClass().toString());
}
} }
public static class Factory extends AbstractClusterWorkerProvider<NodeCompAgg> { public static class Factory extends AbstractClusterWorkerProvider<NodeCompAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
...@@ -49,8 +49,6 @@ public class NodeCompLoad extends AbstractLocalSyncWorker { ...@@ -49,8 +49,6 @@ public class NodeCompLoad extends AbstractLocalSyncWorker {
} }
public static class Factory extends AbstractLocalSyncWorkerProvider<NodeCompLoad> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeCompLoad> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.node.persistence; package com.a.eye.skywalking.collector.worker.node.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordPersistenceMember; import com.a.eye.skywalking.collector.worker.RecordPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.node.NodeCompIndex; import com.a.eye.skywalking.collector.worker.node.NodeCompIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.node.NodeCompIndex; ...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.node.NodeCompIndex;
public class NodeCompSave extends RecordPersistenceMember { public class NodeCompSave extends RecordPersistenceMember {
NodeCompSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeCompSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,9 +29,7 @@ public class NodeCompSave extends RecordPersistenceMember { ...@@ -29,9 +29,7 @@ public class NodeCompSave extends RecordPersistenceMember {
return NodeCompIndex.TYPE_RECORD; return NodeCompIndex.TYPE_RECORD;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeCompSave> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeCompSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
...@@ -39,12 +37,9 @@ public class NodeCompSave extends RecordPersistenceMember { ...@@ -39,12 +37,9 @@ public class NodeCompSave extends RecordPersistenceMember {
@Override @Override
public NodeCompSave workerInstance(ClusterWorkerContext clusterContext) { public NodeCompSave workerInstance(ClusterWorkerContext clusterContext) {
return new NodeCompSave(role(), clusterContext, new LocalWorkerContext()); NodeCompSave worker = new NodeCompSave(role(), clusterContext, new LocalWorkerContext());
} PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
@Override
public int queueSize() {
return WorkerConfig.Queue.Node.NodeCompSave.SIZE;
} }
} }
......
...@@ -5,12 +5,16 @@ import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; ...@@ -5,12 +5,16 @@ import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import com.a.eye.skywalking.collector.worker.storage.RecordData;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeMappingDayAgg extends AbstractClusterWorker { public class NodeMappingDayAgg extends AbstractClusterWorker {
private Logger logger = LogManager.getFormatterLogger(NodeMappingDayAgg.class);
NodeMappingDayAgg(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeMappingDayAgg(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -26,13 +30,11 @@ public class NodeMappingDayAgg extends AbstractClusterWorker { ...@@ -26,13 +30,11 @@ public class NodeMappingDayAgg extends AbstractClusterWorker {
if (message instanceof RecordData) { if (message instanceof RecordData) {
getSelfContext().lookup(NodeMappingDaySave.Role.INSTANCE).tell(message); getSelfContext().lookup(NodeMappingDaySave.Role.INSTANCE).tell(message);
} else { } else {
throw new IllegalArgumentException("message instance must RecordData"); logger.error("unhandled message, message instance must RecordData, but is %s", message.getClass().toString());
} }
} }
public static class Factory extends AbstractClusterWorkerProvider<NodeMappingDayAgg> { public static class Factory extends AbstractClusterWorkerProvider<NodeMappingDayAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.node.persistence; package com.a.eye.skywalking.collector.worker.node.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordPersistenceMember; import com.a.eye.skywalking.collector.worker.RecordPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex; import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex; ...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex;
public class NodeMappingDaySave extends RecordPersistenceMember { public class NodeMappingDaySave extends RecordPersistenceMember {
NodeMappingDaySave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeMappingDaySave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,9 +29,7 @@ public class NodeMappingDaySave extends RecordPersistenceMember { ...@@ -29,9 +29,7 @@ public class NodeMappingDaySave extends RecordPersistenceMember {
return NodeMappingIndex.TYPE_DAY; return NodeMappingIndex.TYPE_DAY;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeMappingDaySave> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeMappingDaySave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
...@@ -39,12 +37,9 @@ public class NodeMappingDaySave extends RecordPersistenceMember { ...@@ -39,12 +37,9 @@ public class NodeMappingDaySave extends RecordPersistenceMember {
@Override @Override
public NodeMappingDaySave workerInstance(ClusterWorkerContext clusterContext) { public NodeMappingDaySave workerInstance(ClusterWorkerContext clusterContext) {
return new NodeMappingDaySave(role(), clusterContext, new LocalWorkerContext()); NodeMappingDaySave worker = new NodeMappingDaySave(role(), clusterContext, new LocalWorkerContext());
} PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
@Override
public int queueSize() {
return WorkerConfig.Queue.Node.NodeMappingDaySave.SIZE;
} }
} }
......
...@@ -5,12 +5,16 @@ import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; ...@@ -5,12 +5,16 @@ import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import com.a.eye.skywalking.collector.worker.storage.RecordData;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeMappingHourAgg extends AbstractClusterWorker { public class NodeMappingHourAgg extends AbstractClusterWorker {
private Logger logger = LogManager.getFormatterLogger(NodeMappingHourAgg.class);
NodeMappingHourAgg(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeMappingHourAgg(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -26,13 +30,11 @@ public class NodeMappingHourAgg extends AbstractClusterWorker { ...@@ -26,13 +30,11 @@ public class NodeMappingHourAgg extends AbstractClusterWorker {
if (message instanceof RecordData) { if (message instanceof RecordData) {
getSelfContext().lookup(NodeMappingHourSave.Role.INSTANCE).tell(message); getSelfContext().lookup(NodeMappingHourSave.Role.INSTANCE).tell(message);
} else { } else {
throw new IllegalArgumentException("message instance must RecordData"); logger.error("unhandled message, message instance must RecordData, but is %s", message.getClass().toString());
} }
} }
public static class Factory extends AbstractClusterWorkerProvider<NodeMappingHourAgg> { public static class Factory extends AbstractClusterWorkerProvider<NodeMappingHourAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.node.persistence; package com.a.eye.skywalking.collector.worker.node.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordPersistenceMember; import com.a.eye.skywalking.collector.worker.RecordPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex; import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex; ...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex;
public class NodeMappingHourSave extends RecordPersistenceMember { public class NodeMappingHourSave extends RecordPersistenceMember {
NodeMappingHourSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeMappingHourSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,9 +29,7 @@ public class NodeMappingHourSave extends RecordPersistenceMember { ...@@ -29,9 +29,7 @@ public class NodeMappingHourSave extends RecordPersistenceMember {
return NodeMappingIndex.TYPE_HOUR; return NodeMappingIndex.TYPE_HOUR;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeMappingHourSave> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeMappingHourSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
...@@ -39,12 +37,9 @@ public class NodeMappingHourSave extends RecordPersistenceMember { ...@@ -39,12 +37,9 @@ public class NodeMappingHourSave extends RecordPersistenceMember {
@Override @Override
public NodeMappingHourSave workerInstance(ClusterWorkerContext clusterContext) { public NodeMappingHourSave workerInstance(ClusterWorkerContext clusterContext) {
return new NodeMappingHourSave(role(), clusterContext, new LocalWorkerContext()); NodeMappingHourSave worker = new NodeMappingHourSave(role(), clusterContext, new LocalWorkerContext());
} PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
@Override
public int queueSize() {
return WorkerConfig.Queue.Node.NodeMappingHourSave.SIZE;
} }
} }
......
...@@ -5,12 +5,16 @@ import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; ...@@ -5,12 +5,16 @@ import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import com.a.eye.skywalking.collector.worker.storage.RecordData;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeMappingMinuteAgg extends AbstractClusterWorker { public class NodeMappingMinuteAgg extends AbstractClusterWorker {
private Logger logger = LogManager.getFormatterLogger(NodeMappingMinuteAgg.class);
NodeMappingMinuteAgg(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeMappingMinuteAgg(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -26,13 +30,11 @@ public class NodeMappingMinuteAgg extends AbstractClusterWorker { ...@@ -26,13 +30,11 @@ public class NodeMappingMinuteAgg extends AbstractClusterWorker {
if (message instanceof RecordData) { if (message instanceof RecordData) {
getSelfContext().lookup(NodeMappingMinuteSave.Role.INSTANCE).tell(message); getSelfContext().lookup(NodeMappingMinuteSave.Role.INSTANCE).tell(message);
} else { } else {
throw new IllegalArgumentException("message instance must RecordData"); logger.error("unhandled message, message instance must RecordData, but is %s", message.getClass().toString());
} }
} }
public static class Factory extends AbstractClusterWorkerProvider<NodeMappingMinuteAgg> { public static class Factory extends AbstractClusterWorkerProvider<NodeMappingMinuteAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.node.persistence; package com.a.eye.skywalking.collector.worker.node.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordPersistenceMember; import com.a.eye.skywalking.collector.worker.RecordPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex; import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex; ...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.node.NodeMappingIndex;
public class NodeMappingMinuteSave extends RecordPersistenceMember { public class NodeMappingMinuteSave extends RecordPersistenceMember {
NodeMappingMinuteSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeMappingMinuteSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,9 +29,7 @@ public class NodeMappingMinuteSave extends RecordPersistenceMember { ...@@ -29,9 +29,7 @@ public class NodeMappingMinuteSave extends RecordPersistenceMember {
return NodeMappingIndex.TYPE_MINUTE; return NodeMappingIndex.TYPE_MINUTE;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeMappingMinuteSave> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeMappingMinuteSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
...@@ -39,12 +37,9 @@ public class NodeMappingMinuteSave extends RecordPersistenceMember { ...@@ -39,12 +37,9 @@ public class NodeMappingMinuteSave extends RecordPersistenceMember {
@Override @Override
public NodeMappingMinuteSave workerInstance(ClusterWorkerContext clusterContext) { public NodeMappingMinuteSave workerInstance(ClusterWorkerContext clusterContext) {
return new NodeMappingMinuteSave(role(), clusterContext, new LocalWorkerContext()); NodeMappingMinuteSave worker = new NodeMappingMinuteSave(role(), clusterContext, new LocalWorkerContext());
} PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
@Override
public int queueSize() {
return WorkerConfig.Queue.Node.NodeMappingMinuteSave.SIZE;
} }
} }
......
...@@ -62,7 +62,7 @@ public class NodeMappingSearchWithTimeSlice extends AbstractLocalSyncWorker { ...@@ -62,7 +62,7 @@ public class NodeMappingSearchWithTimeSlice extends AbstractLocalSyncWorker {
JsonObject resJsonObj = (JsonObject)response; JsonObject resJsonObj = (JsonObject)response;
resJsonObj.add(Const.RESULT, nodeMappingArray); resJsonObj.add(Const.RESULT, nodeMappingArray);
} else { } else {
throw new IllegalArgumentException("message instance must be RequestEntity"); logger.error("unhandled message, message instance must NodeMappingSearchWithTimeSlice.RequestEntity, but is %s", request.getClass().toString());
} }
} }
...@@ -73,8 +73,6 @@ public class NodeMappingSearchWithTimeSlice extends AbstractLocalSyncWorker { ...@@ -73,8 +73,6 @@ public class NodeMappingSearchWithTimeSlice extends AbstractLocalSyncWorker {
} }
public static class Factory extends AbstractLocalSyncWorkerProvider<NodeMappingSearchWithTimeSlice> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeMappingSearchWithTimeSlice> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef; package com.a.eye.skywalking.collector.worker.noderef;
import com.a.eye.skywalking.collector.worker.config.EsConfig;
import com.a.eye.skywalking.collector.worker.storage.AbstractIndex; import com.a.eye.skywalking.collector.worker.storage.AbstractIndex;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
...@@ -26,30 +27,32 @@ public class NodeRefIndex extends AbstractIndex { ...@@ -26,30 +27,32 @@ public class NodeRefIndex extends AbstractIndex {
return false; return false;
} }
@Override
public int refreshInterval() {
return EsConfig.Es.Index.RefreshInterval.NodeRefIndex.VALUE;
}
@Override @Override
public XContentBuilder createMappingBuilder() throws IOException { public XContentBuilder createMappingBuilder() throws IOException {
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder() XContentBuilder mappingBuilder = XContentFactory.jsonBuilder()
.startObject() .startObject()
.startObject("properties") .startObject("properties")
.startObject(FRONT) .startObject(FRONT)
.field("type", "string") .field("type", "keyword")
.field("index", "not_analyzed")
.endObject() .endObject()
.startObject(FRONT_IS_REAL_CODE) .startObject(FRONT_IS_REAL_CODE)
.field("type", "boolean") .field("type", "boolean")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(BEHIND) .startObject(BEHIND)
.field("type", "string") .field("type", "keyword")
.field("index", "not_analyzed")
.endObject() .endObject()
.startObject(BEHIND_IS_REAL_CODE) .startObject(BEHIND_IS_REAL_CODE)
.field("type", "boolean") .field("type", "boolean")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(AGG_COLUMN) .startObject(AGG_COLUMN)
.field("type", "string") .field("type", "keyword")
.field("index", "not_analyzed")
.endObject() .endObject()
.startObject(TIME_SLICE) .startObject(TIME_SLICE)
.field("type", "long") .field("type", "long")
......
...@@ -61,8 +61,6 @@ public class NodeRefResSumGetGroupWithTimeSlice extends AbstractGet { ...@@ -61,8 +61,6 @@ public class NodeRefResSumGetGroupWithTimeSlice extends AbstractGet {
} }
public static class Factory extends AbstractGetProvider<NodeRefResSumGetGroupWithTimeSlice> { public static class Factory extends AbstractGetProvider<NodeRefResSumGetGroupWithTimeSlice> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef; package com.a.eye.skywalking.collector.worker.noderef;
import com.a.eye.skywalking.collector.worker.config.EsConfig;
import com.a.eye.skywalking.collector.worker.storage.AbstractIndex; import com.a.eye.skywalking.collector.worker.storage.AbstractIndex;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
...@@ -28,45 +29,49 @@ public class NodeRefResSumIndex extends AbstractIndex { ...@@ -28,45 +29,49 @@ public class NodeRefResSumIndex extends AbstractIndex {
return false; return false;
} }
@Override
public int refreshInterval() {
return EsConfig.Es.Index.RefreshInterval.NodeRefResSumIndex.VALUE;
}
@Override @Override
public XContentBuilder createMappingBuilder() throws IOException { public XContentBuilder createMappingBuilder() throws IOException {
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder() XContentBuilder mappingBuilder = XContentFactory.jsonBuilder()
.startObject() .startObject()
.startObject("properties") .startObject("properties")
.startObject(ONE_SECOND_LESS) .startObject(ONE_SECOND_LESS)
.field("type", "long") .field("type", "long")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(THREE_SECOND_LESS) .startObject(THREE_SECOND_LESS)
.field("type", "long") .field("type", "long")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(FIVE_SECOND_LESS) .startObject(FIVE_SECOND_LESS)
.field("type", "long") .field("type", "long")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(FIVE_SECOND_GREATER) .startObject(FIVE_SECOND_GREATER)
.field("type", "long") .field("type", "long")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(ERROR) .startObject(ERROR)
.field("type", "long") .field("type", "long")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(SUMMARY) .startObject(SUMMARY)
.field("type", "long") .field("type", "long")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject() .endObject()
.startObject(AGG_COLUMN) .startObject(AGG_COLUMN)
.field("type", "string") .field("type", "keyword")
.field("index", "not_analyzed") .endObject()
.endObject() .startObject(TIME_SLICE)
.startObject(TIME_SLICE) .field("type", "long")
.field("type", "long") .field("index", "not_analyzed")
.field("index", "not_analyzed") .endObject()
.endObject() .endObject()
.endObject() .endObject();
.endObject();
return mappingBuilder; return mappingBuilder;
} }
} }
...@@ -49,7 +49,7 @@ abstract class AbstractNodeRefAnalysis extends RecordAnalysisMember { ...@@ -49,7 +49,7 @@ abstract class AbstractNodeRefAnalysis extends RecordAnalysisMember {
String id = timeSlice + Const.ID_SPLIT + front + Const.ID_SPLIT + behind; String id = timeSlice + Const.ID_SPLIT + front + Const.ID_SPLIT + behind;
logger.debug("dag node ref: %s", dataJsonObj.toString()); logger.debug("dag node ref: %s", dataJsonObj.toString());
setRecord(id, dataJsonObj); set(id, dataJsonObj);
buildNodeRefResRecordData(id, span, minute, hour, day, second); buildNodeRefResRecordData(id, span, minute, hour, day, second);
} else if (Tags.SPAN_KIND_SERVER.equals(Tags.SPAN_KIND.get(span))) { } else if (Tags.SPAN_KIND_SERVER.equals(Tags.SPAN_KIND.get(span))) {
if (span.getParentSpanId() == -1 && CollectionTools.isEmpty(segment.getRefs())) { if (span.getParentSpanId() == -1 && CollectionTools.isEmpty(segment.getRefs())) {
...@@ -60,7 +60,7 @@ abstract class AbstractNodeRefAnalysis extends RecordAnalysisMember { ...@@ -60,7 +60,7 @@ abstract class AbstractNodeRefAnalysis extends RecordAnalysisMember {
dataJsonObj.addProperty(NodeRefIndex.FRONT, front); dataJsonObj.addProperty(NodeRefIndex.FRONT, front);
String id = timeSlice + Const.ID_SPLIT + front + Const.ID_SPLIT + behind; String id = timeSlice + Const.ID_SPLIT + front + Const.ID_SPLIT + behind;
setRecord(id, dataJsonObj); set(id, dataJsonObj);
buildNodeRefResRecordData(id, span, minute, hour, day, second); buildNodeRefResRecordData(id, span, minute, hour, day, second);
} }
} }
......
...@@ -22,23 +22,23 @@ abstract class AbstractNodeRefResSumAnalysis extends MetricAnalysisMember { ...@@ -22,23 +22,23 @@ abstract class AbstractNodeRefResSumAnalysis extends MetricAnalysisMember {
boolean isError = nodeRefRes.isError; boolean isError = nodeRefRes.isError;
long cost = endTime - startTime; long cost = endTime - startTime;
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.ONE_SECOND_LESS, 0L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.ONE_SECOND_LESS, 0L);
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.THREE_SECOND_LESS, 0L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.THREE_SECOND_LESS, 0L);
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.FIVE_SECOND_LESS, 0L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.FIVE_SECOND_LESS, 0L);
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.FIVE_SECOND_GREATER, 0L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.FIVE_SECOND_GREATER, 0L);
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.ERROR, 0L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.ERROR, 0L);
if (cost <= 1000 && !isError) { if (cost <= 1000 && !isError) {
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.ONE_SECOND_LESS, 1L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.ONE_SECOND_LESS, 1L);
} else if (1000 < cost && cost <= 3000 && !isError) { } else if (1000 < cost && cost <= 3000 && !isError) {
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.THREE_SECOND_LESS, 1L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.THREE_SECOND_LESS, 1L);
} else if (3000 < cost && cost <= 5000 && !isError) { } else if (3000 < cost && cost <= 5000 && !isError) {
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.FIVE_SECOND_LESS, 1L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.FIVE_SECOND_LESS, 1L);
} else if (5000 < cost && !isError) { } else if (5000 < cost && !isError) {
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.FIVE_SECOND_GREATER, 1L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.FIVE_SECOND_GREATER, 1L);
} else { } else {
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.ERROR, 1L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.ERROR, 1L);
} }
setMetric(nodeRefRes.nodeRefId, NodeRefResSumIndex.SUMMARY, 1L); set(nodeRefRes.nodeRefId, NodeRefResSumIndex.SUMMARY, 1L);
} }
public static class NodeRefResRecord extends AbstractTimeSlice { public static class NodeRefResRecord extends AbstractTimeSlice {
......
package com.a.eye.skywalking.collector.worker.noderef.analysis; package com.a.eye.skywalking.collector.worker.noderef.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.ProviderNotFoundException;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefDayAgg; import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefDayAgg;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost; import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeRefDayAnalysis extends AbstractNodeRefAnalysis { public class NodeRefDayAnalysis extends AbstractNodeRefAnalysis {
private Logger logger = LogManager.getFormatterLogger(NodeRefDayAnalysis.class);
protected NodeRefDayAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, protected NodeRefDayAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -39,6 +39,8 @@ public class NodeRefDayAnalysis extends AbstractNodeRefAnalysis { ...@@ -39,6 +39,8 @@ public class NodeRefDayAnalysis extends AbstractNodeRefAnalysis {
long day = segmentWithTimeSlice.getDay(); long day = segmentWithTimeSlice.getDay();
int second = segmentWithTimeSlice.getSecond(); int second = segmentWithTimeSlice.getSecond();
analyseNodeRef(segment, segmentWithTimeSlice.getDay(), minute, hour, day, second); analyseNodeRef(segment, segmentWithTimeSlice.getDay(), minute, hour, day, second);
} else {
logger.error("unhandled message, message instance must SegmentPost.SegmentWithTimeSlice, but is %s", message.getClass().toString());
} }
} }
...@@ -48,17 +50,16 @@ public class NodeRefDayAnalysis extends AbstractNodeRefAnalysis { ...@@ -48,17 +50,16 @@ public class NodeRefDayAnalysis extends AbstractNodeRefAnalysis {
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
RecordData oneRecord; try {
while ((oneRecord = pushOne()) != null) { return getClusterContext().lookup(NodeRefDayAgg.Role.INSTANCE);
getClusterContext().lookup(NodeRefDayAgg.Role.INSTANCE).tell(oneRecord); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", NodeRefDayAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefDayAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefDayAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.analysis; package com.a.eye.skywalking.collector.worker.noderef.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.ProviderNotFoundException;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefHourAgg; import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefHourAgg;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost; import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeRefHourAnalysis extends AbstractNodeRefAnalysis { public class NodeRefHourAnalysis extends AbstractNodeRefAnalysis {
private Logger logger = LogManager.getFormatterLogger(NodeRefHourAnalysis.class);
protected NodeRefHourAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, protected NodeRefHourAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -39,6 +39,8 @@ public class NodeRefHourAnalysis extends AbstractNodeRefAnalysis { ...@@ -39,6 +39,8 @@ public class NodeRefHourAnalysis extends AbstractNodeRefAnalysis {
long day = segmentWithTimeSlice.getDay(); long day = segmentWithTimeSlice.getDay();
int second = segmentWithTimeSlice.getSecond(); int second = segmentWithTimeSlice.getSecond();
analyseNodeRef(segment, segmentWithTimeSlice.getHour(), minute, hour, day, second); analyseNodeRef(segment, segmentWithTimeSlice.getHour(), minute, hour, day, second);
} else {
logger.error("unhandled message, message instance must SegmentPost.SegmentWithTimeSlice, but is %s", message.getClass().toString());
} }
} }
...@@ -48,17 +50,16 @@ public class NodeRefHourAnalysis extends AbstractNodeRefAnalysis { ...@@ -48,17 +50,16 @@ public class NodeRefHourAnalysis extends AbstractNodeRefAnalysis {
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
RecordData oneRecord; try {
while ((oneRecord = pushOne()) != null) { return getClusterContext().lookup(NodeRefHourAgg.Role.INSTANCE);
getClusterContext().lookup(NodeRefHourAgg.Role.INSTANCE).tell(oneRecord); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", NodeRefHourAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefHourAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefHourAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.analysis; package com.a.eye.skywalking.collector.worker.noderef.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.ProviderNotFoundException;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefMinuteAgg; import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefMinuteAgg;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost; import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeRefMinuteAnalysis extends AbstractNodeRefAnalysis { public class NodeRefMinuteAnalysis extends AbstractNodeRefAnalysis {
private Logger logger = LogManager.getFormatterLogger(NodeRefMinuteAnalysis.class);
protected NodeRefMinuteAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, protected NodeRefMinuteAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
...@@ -38,6 +38,8 @@ public class NodeRefMinuteAnalysis extends AbstractNodeRefAnalysis { ...@@ -38,6 +38,8 @@ public class NodeRefMinuteAnalysis extends AbstractNodeRefAnalysis {
long day = segmentWithTimeSlice.getDay(); long day = segmentWithTimeSlice.getDay();
int second = segmentWithTimeSlice.getSecond(); int second = segmentWithTimeSlice.getSecond();
analyseNodeRef(segment, segmentWithTimeSlice.getMinute(), minute, hour, day, second); analyseNodeRef(segment, segmentWithTimeSlice.getMinute(), minute, hour, day, second);
} else {
logger.error("unhandled message, message instance must SegmentPost.SegmentWithTimeSlice, but is %s", message.getClass().toString());
} }
} }
...@@ -47,17 +49,16 @@ public class NodeRefMinuteAnalysis extends AbstractNodeRefAnalysis { ...@@ -47,17 +49,16 @@ public class NodeRefMinuteAnalysis extends AbstractNodeRefAnalysis {
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
RecordData oneRecord; try {
while ((oneRecord = pushOne()) != null) { return getClusterContext().lookup(NodeRefMinuteAgg.Role.INSTANCE);
getClusterContext().lookup(NodeRefMinuteAgg.Role.INSTANCE).tell(oneRecord); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", NodeRefMinuteAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefMinuteAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefMinuteAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.analysis; package com.a.eye.skywalking.collector.worker.noderef.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefResSumDayAgg; import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefResSumDayAgg;
import com.a.eye.skywalking.collector.worker.storage.MetricData; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeRefResSumDayAnalysis extends AbstractNodeRefResSumAnalysis { public class NodeRefResSumDayAnalysis extends AbstractNodeRefResSumAnalysis {
private Logger logger = LogManager.getFormatterLogger(NodeRefResSumDayAnalysis.class);
NodeRefResSumDayAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeRefResSumDayAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
@Override @Override
public void analyse(Object message) throws Exception { public void analyse(Object message) throws Exception {
if (message instanceof NodeRefResRecord) { if (message instanceof NodeRefResRecord) {
NodeRefResRecord refResRecord = (NodeRefResRecord)message; NodeRefResRecord refResRecord = (NodeRefResRecord) message;
analyseResSum(refResRecord); analyseResSum(refResRecord);
} else {
logger.error("unhandled message, message instance must NodeRefResRecord, but is %s", message.getClass().toString());
} }
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
MetricData oneMetric; try {
while ((oneMetric = pushOne()) != null) { return getClusterContext().lookup(NodeRefResSumDayAgg.Role.INSTANCE);
getClusterContext().lookup(NodeRefResSumDayAgg.Role.INSTANCE).tell(oneMetric); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", NodeRefResSumDayAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefResSumDayAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefResSumDayAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.analysis; package com.a.eye.skywalking.collector.worker.noderef.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefResSumHourAgg; import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefResSumHourAgg;
import com.a.eye.skywalking.collector.worker.storage.MetricData; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeRefResSumHourAnalysis extends AbstractNodeRefResSumAnalysis { public class NodeRefResSumHourAnalysis extends AbstractNodeRefResSumAnalysis {
private Logger logger = LogManager.getFormatterLogger(NodeRefResSumHourAnalysis.class);
NodeRefResSumHourAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeRefResSumHourAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
@Override @Override
public void analyse(Object message) throws Exception { public void analyse(Object message) throws Exception {
if (message instanceof NodeRefResRecord) { if (message instanceof NodeRefResRecord) {
NodeRefResRecord refResRecord = (NodeRefResRecord)message; NodeRefResRecord refResRecord = (NodeRefResRecord) message;
analyseResSum(refResRecord); analyseResSum(refResRecord);
} else {
logger.error("unhandled message, message instance must NodeRefResRecord, but is %s", message.getClass().toString());
} }
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
MetricData oneMetric; try {
while ((oneMetric = pushOne()) != null) { return getClusterContext().lookup(NodeRefResSumHourAgg.Role.INSTANCE);
getClusterContext().lookup(NodeRefResSumHourAgg.Role.INSTANCE).tell(oneMetric); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", NodeRefResSumHourAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefResSumHourAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefResSumHourAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.analysis; package com.a.eye.skywalking.collector.worker.noderef.analysis;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig; import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefResSumMinuteAgg; import com.a.eye.skywalking.collector.worker.noderef.persistence.NodeRefResSumMinuteAgg;
import com.a.eye.skywalking.collector.worker.storage.MetricData; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class NodeRefResSumMinuteAnalysis extends AbstractNodeRefResSumAnalysis { public class NodeRefResSumMinuteAnalysis extends AbstractNodeRefResSumAnalysis {
private Logger logger = LogManager.getFormatterLogger(NodeRefResSumMinuteAnalysis.class);
NodeRefResSumMinuteAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeRefResSumMinuteAnalysis(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
@Override @Override
public void analyse(Object message) throws Exception { public void analyse(Object message) throws Exception {
if (message instanceof NodeRefResRecord) { if (message instanceof NodeRefResRecord) {
NodeRefResRecord refResRecord = (NodeRefResRecord)message; NodeRefResRecord refResRecord = (NodeRefResRecord) message;
analyseResSum(refResRecord); analyseResSum(refResRecord);
} else {
logger.error("unhandled message, message instance must NodeRefResRecord, but is %s", message.getClass().toString());
} }
} }
@Override @Override
protected void aggregation() throws Exception { protected WorkerRefs aggWorkRefs() {
MetricData oneMetric; try {
while ((oneMetric = pushOne()) != null) { return getClusterContext().lookup(NodeRefResSumMinuteAgg.Role.INSTANCE);
getClusterContext().lookup(NodeRefResSumMinuteAgg.Role.INSTANCE).tell(oneMetric); } catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", NodeRefResSumMinuteAgg.Role.INSTANCE.roleName());
} }
return null;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefResSumMinuteAnalysis> { public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefResSumMinuteAnalysis> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
...@@ -30,13 +30,11 @@ public class NodeRefDayAgg extends AbstractClusterWorker { ...@@ -30,13 +30,11 @@ public class NodeRefDayAgg extends AbstractClusterWorker {
if (message instanceof RecordData) { if (message instanceof RecordData) {
getSelfContext().lookup(NodeRefDaySave.Role.INSTANCE).tell(message); getSelfContext().lookup(NodeRefDaySave.Role.INSTANCE).tell(message);
} else { } else {
logger.error("message unhandled"); logger.error("unhandled message, message instance must RecordData, but is %s", message.getClass().toString());
} }
} }
public static class Factory extends AbstractClusterWorkerProvider<NodeRefDayAgg> { public static class Factory extends AbstractClusterWorkerProvider<NodeRefDayAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.persistence; package com.a.eye.skywalking.collector.worker.noderef.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordPersistenceMember; import com.a.eye.skywalking.collector.worker.RecordPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex; import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex; ...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex;
public class NodeRefDaySave extends RecordPersistenceMember { public class NodeRefDaySave extends RecordPersistenceMember {
NodeRefDaySave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeRefDaySave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,10 +29,7 @@ public class NodeRefDaySave extends RecordPersistenceMember { ...@@ -29,10 +29,7 @@ public class NodeRefDaySave extends RecordPersistenceMember {
return NodeRefIndex.TYPE_DAY; return NodeRefIndex.TYPE_DAY;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefDaySave> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefDaySave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
...@@ -40,12 +37,9 @@ public class NodeRefDaySave extends RecordPersistenceMember { ...@@ -40,12 +37,9 @@ public class NodeRefDaySave extends RecordPersistenceMember {
@Override @Override
public NodeRefDaySave workerInstance(ClusterWorkerContext clusterContext) { public NodeRefDaySave workerInstance(ClusterWorkerContext clusterContext) {
return new NodeRefDaySave(role(), clusterContext, new LocalWorkerContext()); NodeRefDaySave worker = new NodeRefDaySave(role(), clusterContext, new LocalWorkerContext());
} PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
@Override
public int queueSize() {
return WorkerConfig.Queue.NodeRef.NodeRefDaySave.SIZE;
} }
} }
......
...@@ -30,13 +30,11 @@ public class NodeRefHourAgg extends AbstractClusterWorker { ...@@ -30,13 +30,11 @@ public class NodeRefHourAgg extends AbstractClusterWorker {
if (message instanceof RecordData) { if (message instanceof RecordData) {
getSelfContext().lookup(NodeRefHourSave.Role.INSTANCE).tell(message); getSelfContext().lookup(NodeRefHourSave.Role.INSTANCE).tell(message);
} else { } else {
logger.error("message unhandled"); logger.error("unhandled message, message instance must RecordData, but is %s", message.getClass().toString());
} }
} }
public static class Factory extends AbstractClusterWorkerProvider<NodeRefHourAgg> { public static class Factory extends AbstractClusterWorkerProvider<NodeRefHourAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.persistence; package com.a.eye.skywalking.collector.worker.noderef.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordPersistenceMember; import com.a.eye.skywalking.collector.worker.RecordPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex; import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex; ...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex;
public class NodeRefHourSave extends RecordPersistenceMember { public class NodeRefHourSave extends RecordPersistenceMember {
NodeRefHourSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeRefHourSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,10 +29,7 @@ public class NodeRefHourSave extends RecordPersistenceMember { ...@@ -29,10 +29,7 @@ public class NodeRefHourSave extends RecordPersistenceMember {
return NodeRefIndex.TYPE_HOUR; return NodeRefIndex.TYPE_HOUR;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefHourSave> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefHourSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
...@@ -40,12 +37,9 @@ public class NodeRefHourSave extends RecordPersistenceMember { ...@@ -40,12 +37,9 @@ public class NodeRefHourSave extends RecordPersistenceMember {
@Override @Override
public NodeRefHourSave workerInstance(ClusterWorkerContext clusterContext) { public NodeRefHourSave workerInstance(ClusterWorkerContext clusterContext) {
return new NodeRefHourSave(role(), clusterContext, new LocalWorkerContext()); NodeRefHourSave worker = new NodeRefHourSave(role(), clusterContext, new LocalWorkerContext());
} PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
@Override
public int queueSize() {
return WorkerConfig.Queue.NodeRef.NodeRefHourSave.SIZE;
} }
} }
......
...@@ -30,13 +30,11 @@ public class NodeRefMinuteAgg extends AbstractClusterWorker { ...@@ -30,13 +30,11 @@ public class NodeRefMinuteAgg extends AbstractClusterWorker {
if (message instanceof RecordData) { if (message instanceof RecordData) {
getSelfContext().lookup(NodeRefMinuteSave.Role.INSTANCE).tell(message); getSelfContext().lookup(NodeRefMinuteSave.Role.INSTANCE).tell(message);
} else { } else {
logger.error("message unhandled"); logger.error("unhandled message, message instance must RecordData, but is %s", message.getClass().toString());
} }
} }
public static class Factory extends AbstractClusterWorkerProvider<NodeRefMinuteAgg> { public static class Factory extends AbstractClusterWorkerProvider<NodeRefMinuteAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.persistence; package com.a.eye.skywalking.collector.worker.noderef.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordPersistenceMember; import com.a.eye.skywalking.collector.worker.RecordPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex; import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex; ...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefIndex;
public class NodeRefMinuteSave extends RecordPersistenceMember { public class NodeRefMinuteSave extends RecordPersistenceMember {
NodeRefMinuteSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeRefMinuteSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,10 +29,7 @@ public class NodeRefMinuteSave extends RecordPersistenceMember { ...@@ -29,10 +29,7 @@ public class NodeRefMinuteSave extends RecordPersistenceMember {
return NodeRefIndex.TYPE_MINUTE; return NodeRefIndex.TYPE_MINUTE;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefMinuteSave> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefMinuteSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
...@@ -40,12 +37,9 @@ public class NodeRefMinuteSave extends RecordPersistenceMember { ...@@ -40,12 +37,9 @@ public class NodeRefMinuteSave extends RecordPersistenceMember {
@Override @Override
public NodeRefMinuteSave workerInstance(ClusterWorkerContext clusterContext) { public NodeRefMinuteSave workerInstance(ClusterWorkerContext clusterContext) {
return new NodeRefMinuteSave(role(), clusterContext, new LocalWorkerContext()); NodeRefMinuteSave worker = new NodeRefMinuteSave(role(), clusterContext, new LocalWorkerContext());
} PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
@Override
public int queueSize() {
return WorkerConfig.Queue.NodeRef.NodeRefMinuteSave.SIZE;
} }
} }
......
...@@ -30,13 +30,11 @@ public class NodeRefResSumDayAgg extends AbstractClusterWorker { ...@@ -30,13 +30,11 @@ public class NodeRefResSumDayAgg extends AbstractClusterWorker {
if (message instanceof MetricData) { if (message instanceof MetricData) {
getSelfContext().lookup(NodeRefResSumDaySave.Role.INSTANCE).tell(message); getSelfContext().lookup(NodeRefResSumDaySave.Role.INSTANCE).tell(message);
} else { } else {
logger.error("message unhandled"); logger.error("unhandled message, message instance must MetricData, but is %s", message.getClass().toString());
} }
} }
public static class Factory extends AbstractClusterWorkerProvider<NodeRefResSumDayAgg> { public static class Factory extends AbstractClusterWorkerProvider<NodeRefResSumDayAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.persistence; package com.a.eye.skywalking.collector.worker.noderef.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.MetricPersistenceMember; import com.a.eye.skywalking.collector.worker.MetricPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex; import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex; ...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex;
public class NodeRefResSumDaySave extends MetricPersistenceMember { public class NodeRefResSumDaySave extends MetricPersistenceMember {
NodeRefResSumDaySave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeRefResSumDaySave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,9 +29,7 @@ public class NodeRefResSumDaySave extends MetricPersistenceMember { ...@@ -29,9 +29,7 @@ public class NodeRefResSumDaySave extends MetricPersistenceMember {
return NodeRefResSumIndex.TYPE_DAY; return NodeRefResSumIndex.TYPE_DAY;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefResSumDaySave> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefResSumDaySave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
...@@ -39,12 +37,9 @@ public class NodeRefResSumDaySave extends MetricPersistenceMember { ...@@ -39,12 +37,9 @@ public class NodeRefResSumDaySave extends MetricPersistenceMember {
@Override @Override
public NodeRefResSumDaySave workerInstance(ClusterWorkerContext clusterContext) { public NodeRefResSumDaySave workerInstance(ClusterWorkerContext clusterContext) {
return new NodeRefResSumDaySave(role(), clusterContext, new LocalWorkerContext()); NodeRefResSumDaySave worker = new NodeRefResSumDaySave(role(), clusterContext, new LocalWorkerContext());
} PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
@Override
public int queueSize() {
return WorkerConfig.Queue.NodeRef.NodeRefResSumDaySave.SIZE;
} }
} }
......
...@@ -80,7 +80,7 @@ public class NodeRefResSumGroupWithTimeSlice extends AbstractLocalSyncWorker { ...@@ -80,7 +80,7 @@ public class NodeRefResSumGroupWithTimeSlice extends AbstractLocalSyncWorker {
JsonObject resJsonObj = (JsonObject)response; JsonObject resJsonObj = (JsonObject)response;
resJsonObj.add("result", nodeRefResSumArray); resJsonObj.add("result", nodeRefResSumArray);
} else { } else {
throw new IllegalArgumentException("message instance must be RequestEntity"); logger.error("unhandled message, message instance must NodeRefResSumGroupWithTimeSlice.RequestEntity, but is %s", request.getClass().toString());
} }
} }
...@@ -91,8 +91,6 @@ public class NodeRefResSumGroupWithTimeSlice extends AbstractLocalSyncWorker { ...@@ -91,8 +91,6 @@ public class NodeRefResSumGroupWithTimeSlice extends AbstractLocalSyncWorker {
} }
public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefResSumGroupWithTimeSlice> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefResSumGroupWithTimeSlice> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
...@@ -30,13 +30,11 @@ public class NodeRefResSumHourAgg extends AbstractClusterWorker { ...@@ -30,13 +30,11 @@ public class NodeRefResSumHourAgg extends AbstractClusterWorker {
if (message instanceof MetricData) { if (message instanceof MetricData) {
getSelfContext().lookup(NodeRefResSumHourSave.Role.INSTANCE).tell(message); getSelfContext().lookup(NodeRefResSumHourSave.Role.INSTANCE).tell(message);
} else { } else {
logger.error("message unhandled"); logger.error("unhandled message, message instance must MetricData, but is %s", message.getClass().toString());
} }
} }
public static class Factory extends AbstractClusterWorkerProvider<NodeRefResSumHourAgg> { public static class Factory extends AbstractClusterWorkerProvider<NodeRefResSumHourAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.persistence; package com.a.eye.skywalking.collector.worker.noderef.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.MetricPersistenceMember; import com.a.eye.skywalking.collector.worker.MetricPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex; import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex; ...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex;
public class NodeRefResSumHourSave extends MetricPersistenceMember { public class NodeRefResSumHourSave extends MetricPersistenceMember {
NodeRefResSumHourSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeRefResSumHourSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,9 +29,7 @@ public class NodeRefResSumHourSave extends MetricPersistenceMember { ...@@ -29,9 +29,7 @@ public class NodeRefResSumHourSave extends MetricPersistenceMember {
return NodeRefResSumIndex.TYPE_HOUR; return NodeRefResSumIndex.TYPE_HOUR;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefResSumHourSave> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefResSumHourSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
...@@ -39,12 +37,9 @@ public class NodeRefResSumHourSave extends MetricPersistenceMember { ...@@ -39,12 +37,9 @@ public class NodeRefResSumHourSave extends MetricPersistenceMember {
@Override @Override
public NodeRefResSumHourSave workerInstance(ClusterWorkerContext clusterContext) { public NodeRefResSumHourSave workerInstance(ClusterWorkerContext clusterContext) {
return new NodeRefResSumHourSave(role(), clusterContext, new LocalWorkerContext()); NodeRefResSumHourSave worker = new NodeRefResSumHourSave(role(), clusterContext, new LocalWorkerContext());
} PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
@Override
public int queueSize() {
return WorkerConfig.Queue.NodeRef.NodeRefResSumHourSave.SIZE;
} }
} }
......
...@@ -30,13 +30,11 @@ public class NodeRefResSumMinuteAgg extends AbstractClusterWorker { ...@@ -30,13 +30,11 @@ public class NodeRefResSumMinuteAgg extends AbstractClusterWorker {
if (message instanceof MetricData) { if (message instanceof MetricData) {
getSelfContext().lookup(NodeRefResSumMinuteSave.Role.INSTANCE).tell(message); getSelfContext().lookup(NodeRefResSumMinuteSave.Role.INSTANCE).tell(message);
} else { } else {
logger.error("message unhandled"); logger.error("unhandled message, message instance must MetricData, but is %s", message.getClass().toString());
} }
} }
public static class Factory extends AbstractClusterWorkerProvider<NodeRefResSumMinuteAgg> { public static class Factory extends AbstractClusterWorkerProvider<NodeRefResSumMinuteAgg> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.noderef.persistence; package com.a.eye.skywalking.collector.worker.noderef.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector; import com.a.eye.skywalking.collector.actor.selector.HashCodeSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.MetricPersistenceMember; import com.a.eye.skywalking.collector.worker.MetricPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex; import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex;
import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
/** /**
* @author pengys5 * @author pengys5
...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex; ...@@ -15,7 +15,7 @@ import com.a.eye.skywalking.collector.worker.noderef.NodeRefResSumIndex;
public class NodeRefResSumMinuteSave extends MetricPersistenceMember { public class NodeRefResSumMinuteSave extends MetricPersistenceMember {
NodeRefResSumMinuteSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, NodeRefResSumMinuteSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
...@@ -29,9 +29,7 @@ public class NodeRefResSumMinuteSave extends MetricPersistenceMember { ...@@ -29,9 +29,7 @@ public class NodeRefResSumMinuteSave extends MetricPersistenceMember {
return NodeRefResSumIndex.TYPE_MINUTE; return NodeRefResSumIndex.TYPE_MINUTE;
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<NodeRefResSumMinuteSave> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefResSumMinuteSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
...@@ -39,12 +37,9 @@ public class NodeRefResSumMinuteSave extends MetricPersistenceMember { ...@@ -39,12 +37,9 @@ public class NodeRefResSumMinuteSave extends MetricPersistenceMember {
@Override @Override
public NodeRefResSumMinuteSave workerInstance(ClusterWorkerContext clusterContext) { public NodeRefResSumMinuteSave workerInstance(ClusterWorkerContext clusterContext) {
return new NodeRefResSumMinuteSave(role(), clusterContext, new LocalWorkerContext()); NodeRefResSumMinuteSave worker = new NodeRefResSumMinuteSave(role(), clusterContext, new LocalWorkerContext());
} PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
@Override
public int queueSize() {
return WorkerConfig.Queue.NodeRef.NodeRefResSumMinuteSave.SIZE;
} }
} }
......
...@@ -91,7 +91,7 @@ public class NodeRefResSumSearchWithTimeSlice extends AbstractLocalSyncWorker { ...@@ -91,7 +91,7 @@ public class NodeRefResSumSearchWithTimeSlice extends AbstractLocalSyncWorker {
JsonObject resJsonObj = (JsonObject)response; JsonObject resJsonObj = (JsonObject)response;
resJsonObj.add("result", nodeRefResSumArray); resJsonObj.add("result", nodeRefResSumArray);
} else { } else {
throw new IllegalArgumentException("message instance must be RequestEntity"); logger.error("unhandled message, message instance must NodeRefResSumSearchWithTimeSlice.RequestEntity, but is %s", request.getClass().toString());
} }
} }
...@@ -102,8 +102,6 @@ public class NodeRefResSumSearchWithTimeSlice extends AbstractLocalSyncWorker { ...@@ -102,8 +102,6 @@ public class NodeRefResSumSearchWithTimeSlice extends AbstractLocalSyncWorker {
} }
public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefResSumSearchWithTimeSlice> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefResSumSearchWithTimeSlice> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
...@@ -63,7 +63,7 @@ public class NodeRefSearchWithTimeSlice extends AbstractLocalSyncWorker { ...@@ -63,7 +63,7 @@ public class NodeRefSearchWithTimeSlice extends AbstractLocalSyncWorker {
JsonObject resJsonObj = (JsonObject)response; JsonObject resJsonObj = (JsonObject)response;
resJsonObj.add("result", nodeRefArray); resJsonObj.add("result", nodeRefArray);
} else { } else {
throw new IllegalArgumentException("message instance must be RequestEntity"); logger.error("unhandled message, message instance must NodeRefSearchWithTimeSlice.RequestEntity, but is %s", request.getClass().toString());
} }
} }
...@@ -74,8 +74,6 @@ public class NodeRefSearchWithTimeSlice extends AbstractLocalSyncWorker { ...@@ -74,8 +74,6 @@ public class NodeRefSearchWithTimeSlice extends AbstractLocalSyncWorker {
} }
public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefSearchWithTimeSlice> { public static class Factory extends AbstractLocalSyncWorkerProvider<NodeRefSearchWithTimeSlice> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.segment; package com.a.eye.skywalking.collector.worker.segment;
import com.a.eye.skywalking.collector.worker.config.EsConfig;
import com.a.eye.skywalking.collector.worker.storage.AbstractIndex; import com.a.eye.skywalking.collector.worker.storage.AbstractIndex;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
...@@ -28,32 +29,35 @@ public class SegmentCostIndex extends AbstractIndex { ...@@ -28,32 +29,35 @@ public class SegmentCostIndex extends AbstractIndex {
return true; return true;
} }
@Override
public int refreshInterval() {
return EsConfig.Es.Index.RefreshInterval.SegmentCostIndex.VALUE;
}
@Override @Override
public XContentBuilder createMappingBuilder() throws IOException { public XContentBuilder createMappingBuilder() throws IOException {
return XContentFactory.jsonBuilder() return XContentFactory.jsonBuilder()
.startObject() .startObject()
.startObject("properties") .startObject("properties")
.startObject(SEG_ID) .startObject(SEG_ID)
.field("type", "string") .field("type", "keyword")
.field("index", "not_analyzed") .endObject()
.endObject() .startObject(START_TIME)
.startObject(START_TIME) .field("type", "long")
.field("type", "long") .field("index", "not_analyzed")
.field("index", "not_analyzed") .endObject()
.endObject() .startObject(END_TIME)
.startObject(END_TIME) .field("type", "long")
.field("type", "long") .field("index", "not_analyzed")
.field("index", "not_analyzed") .endObject()
.endObject() .startObject(OPERATION_NAME)
.startObject(OPERATION_NAME) .field("type", "keyword")
.field("type", "string") .endObject()
.field("index", "not_analyzed") .startObject(COST)
.endObject() .field("type", "long")
.startObject(COST) .field("index", "not_analyzed")
.field("type", "long") .endObject()
.field("index", "not_analyzed") .endObject()
.endObject() .endObject();
.endObject()
.endObject();
} }
} }
package com.a.eye.skywalking.collector.worker.segment; package com.a.eye.skywalking.collector.worker.segment;
import com.a.eye.skywalking.collector.worker.config.EsConfig;
import com.a.eye.skywalking.collector.worker.storage.AbstractIndex; import com.a.eye.skywalking.collector.worker.storage.AbstractIndex;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
...@@ -25,25 +26,28 @@ public class SegmentExceptionIndex extends AbstractIndex { ...@@ -25,25 +26,28 @@ public class SegmentExceptionIndex extends AbstractIndex {
return true; return true;
} }
@Override
public int refreshInterval() {
return EsConfig.Es.Index.RefreshInterval.SegmentExceptionIndex.VALUE;
}
@Override @Override
public XContentBuilder createMappingBuilder() throws IOException { public XContentBuilder createMappingBuilder() throws IOException {
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder() XContentBuilder mappingBuilder = XContentFactory.jsonBuilder()
.startObject() .startObject()
.startObject("properties") .startObject("properties")
.startObject(SEG_ID) .startObject(SEG_ID)
.field("type", "string") .field("type", "keyword")
.field("index", "not_analyzed") .endObject()
.endObject() .startObject(IS_ERROR)
.startObject(IS_ERROR) .field("type", "boolean")
.field("type", "boolean") .field("index", "not_analyzed")
.field("index", "not_analyzed") .endObject()
.endObject() .startObject(ERROR_KIND)
.startObject(ERROR_KIND) .field("type", "keyword")
.field("type", "string") .endObject()
.field("index", "not_analyzed") .endObject()
.endObject() .endObject();
.endObject()
.endObject();
return mappingBuilder; return mappingBuilder;
} }
} }
package com.a.eye.skywalking.collector.worker.segment; package com.a.eye.skywalking.collector.worker.segment;
import com.a.eye.skywalking.collector.worker.config.EsConfig;
import com.a.eye.skywalking.collector.worker.storage.AbstractIndex; import com.a.eye.skywalking.collector.worker.storage.AbstractIndex;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
...@@ -23,40 +24,43 @@ public class SegmentIndex extends AbstractIndex { ...@@ -23,40 +24,43 @@ public class SegmentIndex extends AbstractIndex {
return true; return true;
} }
@Override
public int refreshInterval() {
return EsConfig.Es.Index.RefreshInterval.SegmentIndex.VALUE;
}
@Override @Override
public XContentBuilder createMappingBuilder() throws IOException { public XContentBuilder createMappingBuilder() throws IOException {
return XContentFactory.jsonBuilder() return XContentFactory.jsonBuilder()
.startObject() .startObject()
.startObject("properties") .startObject("properties")
.startObject("traceSegmentId") .startObject("traceSegmentId")
.field("type", "string") .field("type", "keyword")
.field("index", "not_analyzed") .endObject()
.endObject() .startObject("startTime")
.startObject("startTime") .field("type", "date")
.field("type", "date") .field("index", "not_analyzed")
.field("index", "not_analyzed") .endObject()
.endObject() .startObject("endTime")
.startObject("endTime") .field("type", "date")
.field("type", "date") .field("index", "not_analyzed")
.field("index", "not_analyzed") .endObject()
.endObject() .startObject("applicationCode")
.startObject("applicationCode") .field("type", "keyword")
.field("type", "string") .endObject()
.field("index", "not_analyzed") .startObject("minute")
.endObject() .field("type", "long")
.startObject("minute") .field("index", "not_analyzed")
.field("type", "long") .endObject()
.field("index", "not_analyzed") .startObject("hour")
.endObject() .field("type", "long")
.startObject("hour") .field("index", "not_analyzed")
.field("type", "long") .endObject()
.field("index", "not_analyzed") .startObject("day")
.endObject() .field("type", "long")
.startObject("day") .field("index", "not_analyzed")
.field("type", "long") .endObject()
.field("index", "not_analyzed") .endObject()
.endObject() .endObject();
.endObject()
.endObject();
} }
} }
...@@ -18,10 +18,10 @@ import com.a.eye.skywalking.collector.worker.node.analysis.NodeMappingMinuteAnal ...@@ -18,10 +18,10 @@ import com.a.eye.skywalking.collector.worker.node.analysis.NodeMappingMinuteAnal
import com.a.eye.skywalking.collector.worker.noderef.analysis.NodeRefDayAnalysis; import com.a.eye.skywalking.collector.worker.noderef.analysis.NodeRefDayAnalysis;
import com.a.eye.skywalking.collector.worker.noderef.analysis.NodeRefHourAnalysis; import com.a.eye.skywalking.collector.worker.noderef.analysis.NodeRefHourAnalysis;
import com.a.eye.skywalking.collector.worker.noderef.analysis.NodeRefMinuteAnalysis; import com.a.eye.skywalking.collector.worker.noderef.analysis.NodeRefMinuteAnalysis;
import com.a.eye.skywalking.collector.worker.segment.analysis.SegmentAnalysis;
import com.a.eye.skywalking.collector.worker.segment.analysis.SegmentCostAnalysis;
import com.a.eye.skywalking.collector.worker.segment.analysis.SegmentExceptionAnalysis;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.segment.persistence.SegmentCostSave;
import com.a.eye.skywalking.collector.worker.segment.persistence.SegmentExceptionSave;
import com.a.eye.skywalking.collector.worker.segment.persistence.SegmentSave;
import com.a.eye.skywalking.collector.worker.storage.AbstractTimeSlice; import com.a.eye.skywalking.collector.worker.storage.AbstractTimeSlice;
import com.a.eye.skywalking.collector.worker.tools.DateTools; import com.a.eye.skywalking.collector.worker.tools.DateTools;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
...@@ -41,16 +41,16 @@ public class SegmentPost extends AbstractPost { ...@@ -41,16 +41,16 @@ public class SegmentPost extends AbstractPost {
public void preStart() throws ProviderNotFoundException { public void preStart() throws ProviderNotFoundException {
getClusterContext().findProvider(GlobalTraceAnalysis.Role.INSTANCE).create(this); getClusterContext().findProvider(GlobalTraceAnalysis.Role.INSTANCE).create(this);
getClusterContext().findProvider(NodeCompAnalysis.Role.INSTANCE).create(this); getClusterContext().findProvider(SegmentAnalysis.Role.INSTANCE).create(this);
getClusterContext().findProvider(SegmentCostAnalysis.Role.INSTANCE).create(this);
getClusterContext().findProvider(SegmentSave.Role.INSTANCE).create(this); getClusterContext().findProvider(SegmentExceptionAnalysis.Role.INSTANCE).create(this);
getClusterContext().findProvider(SegmentCostSave.Role.INSTANCE).create(this);
getClusterContext().findProvider(SegmentExceptionSave.Role.INSTANCE).create(this);
getClusterContext().findProvider(NodeRefMinuteAnalysis.Role.INSTANCE).create(this); getClusterContext().findProvider(NodeRefMinuteAnalysis.Role.INSTANCE).create(this);
getClusterContext().findProvider(NodeRefHourAnalysis.Role.INSTANCE).create(this); getClusterContext().findProvider(NodeRefHourAnalysis.Role.INSTANCE).create(this);
getClusterContext().findProvider(NodeRefDayAnalysis.Role.INSTANCE).create(this); getClusterContext().findProvider(NodeRefDayAnalysis.Role.INSTANCE).create(this);
getClusterContext().findProvider(NodeCompAnalysis.Role.INSTANCE).create(this);
getClusterContext().findProvider(NodeMappingDayAnalysis.Role.INSTANCE).create(this); getClusterContext().findProvider(NodeMappingDayAnalysis.Role.INSTANCE).create(this);
getClusterContext().findProvider(NodeMappingHourAnalysis.Role.INSTANCE).create(this); getClusterContext().findProvider(NodeMappingHourAnalysis.Role.INSTANCE).create(this);
getClusterContext().findProvider(NodeMappingMinuteAnalysis.Role.INSTANCE).create(this); getClusterContext().findProvider(NodeMappingMinuteAnalysis.Role.INSTANCE).create(this);
...@@ -75,11 +75,11 @@ public class SegmentPost extends AbstractPost { ...@@ -75,11 +75,11 @@ public class SegmentPost extends AbstractPost {
logger.debug("minuteSlice: %s, hourSlice: %s, daySlice: %s, second:%s", minuteSlice, hourSlice, daySlice, second); logger.debug("minuteSlice: %s, hourSlice: %s, daySlice: %s, second:%s", minuteSlice, hourSlice, daySlice, second);
SegmentWithTimeSlice segmentWithTimeSlice = new SegmentWithTimeSlice(segment, minuteSlice, hourSlice, daySlice, second); SegmentWithTimeSlice segmentWithTimeSlice = new SegmentWithTimeSlice(segment, minuteSlice, hourSlice, daySlice, second);
getSelfContext().lookup(SegmentSave.Role.INSTANCE).tell(segment); getSelfContext().lookup(SegmentAnalysis.Role.INSTANCE).tell(segment);
getSelfContext().lookup(SegmentCostSave.Role.INSTANCE).tell(segmentWithTimeSlice); getSelfContext().lookup(SegmentCostAnalysis.Role.INSTANCE).tell(segmentWithTimeSlice);
getSelfContext().lookup(GlobalTraceAnalysis.Role.INSTANCE).tell(segmentWithTimeSlice); getSelfContext().lookup(GlobalTraceAnalysis.Role.INSTANCE).tell(segmentWithTimeSlice);
getSelfContext().lookup(SegmentExceptionSave.Role.INSTANCE).tell(segmentWithTimeSlice); getSelfContext().lookup(SegmentExceptionAnalysis.Role.INSTANCE).tell(segmentWithTimeSlice);
getSelfContext().lookup(NodeCompAnalysis.Role.INSTANCE).tell(segmentWithTimeSlice); getSelfContext().lookup(NodeCompAnalysis.Role.INSTANCE).tell(segmentWithTimeSlice);
...@@ -110,8 +110,6 @@ public class SegmentPost extends AbstractPost { ...@@ -110,8 +110,6 @@ public class SegmentPost extends AbstractPost {
} }
public static class Factory extends AbstractPostProvider<SegmentPost> { public static class Factory extends AbstractPostProvider<SegmentPost> {
public static Factory INSTANCE = new Factory();
@Override @Override
public String servletPath() { public String servletPath() {
return "/segments"; return "/segments";
......
...@@ -62,8 +62,6 @@ public class SegmentTopGetWithGlobalTraceId extends AbstractGet { ...@@ -62,8 +62,6 @@ public class SegmentTopGetWithGlobalTraceId extends AbstractGet {
} }
public static class Factory extends AbstractGetProvider<SegmentTopGetWithGlobalTraceId> { public static class Factory extends AbstractGetProvider<SegmentTopGetWithGlobalTraceId> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
...@@ -86,8 +86,6 @@ public class SegmentTopGetWithTimeSlice extends AbstractGet { ...@@ -86,8 +86,6 @@ public class SegmentTopGetWithTimeSlice extends AbstractGet {
} }
public static class Factory extends AbstractGetProvider<SegmentTopGetWithTimeSlice> { public static class Factory extends AbstractGetProvider<SegmentTopGetWithTimeSlice> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.segment.analysis;
import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordAnalysisMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.segment.persistence.SegmentSave;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
* @author pengys5
*/
public class SegmentAnalysis extends RecordAnalysisMember {

    private Logger logger = LogManager.getFormatterLogger(SegmentAnalysis.class);

    SegmentAnalysis(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
        super(role, clusterContext, selfContext);
    }

    /**
     * Wires this worker to the {@link SegmentSave} provider before any
     * message is analysed, so segments can be forwarded for persistence.
     *
     * @throws ProviderNotFoundException when no provider is registered for
     *                                   {@link SegmentSave.Role}
     */
    @Override
    public void preStart() throws ProviderNotFoundException {
        getClusterContext().findProvider(SegmentSave.Role.INSTANCE).create(this);
    }

    /**
     * Forwards every {@link Segment} straight to the {@link SegmentSave}
     * worker; any other message type is logged as unhandled.
     *
     * @param message expected to be a {@link Segment}
     */
    @Override
    public void analyse(Object message) throws Exception {
        if (!(message instanceof Segment)) {
            logger.error("unhandled message, message instance must Segment, but is %s", message.getClass().toString());
            return;
        }
        getSelfContext().lookup(SegmentSave.Role.INSTANCE).tell((Segment)message);
    }

    /**
     * This pass-through worker exposes no aggregation worker reference.
     */
    @Override
    protected WorkerRefs aggWorkRefs() {
        return null;
    }

    public static class Factory extends AbstractLocalAsyncWorkerProvider<SegmentAnalysis> {
        @Override
        public Role role() {
            return SegmentAnalysis.Role.INSTANCE;
        }

        @Override
        public SegmentAnalysis workerInstance(ClusterWorkerContext clusterContext) {
            return new SegmentAnalysis(role(), clusterContext, new LocalWorkerContext());
        }

        @Override
        public int queueSize() {
            return WorkerConfig.Queue.Segment.SegmentAnalysis.SIZE;
        }
    }

    public enum Role implements com.a.eye.skywalking.collector.actor.Role {
        INSTANCE;

        @Override
        public String roleName() {
            return SegmentAnalysis.class.getSimpleName();
        }

        @Override
        public WorkerSelector workerSelector() {
            return new RollingSelector();
        }
    }
}
package com.a.eye.skywalking.collector.worker.segment.analysis;
import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordAnalysisMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.segment.SegmentCostIndex;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.segment.entity.Span;
import com.a.eye.skywalking.collector.worker.segment.persistence.SegmentCostSave;
import com.a.eye.skywalking.collector.worker.tools.CollectionTools;
import com.google.gson.JsonObject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
* @author pengys5
*/
public class SegmentCostAnalysis extends RecordAnalysisMember {
private Logger logger = LogManager.getFormatterLogger(SegmentCostAnalysis.class);
SegmentCostAnalysis(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext);
}
@Override
public void preStart() throws ProviderNotFoundException {
getClusterContext().findProvider(SegmentCostSave.Role.INSTANCE).create(this);
}
@Override
public void analyse(Object message) throws Exception {
if (message instanceof SegmentPost.SegmentWithTimeSlice) {
SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice) message;
Segment segment = segmentWithTimeSlice.getSegment();
if (CollectionTools.isNotEmpty(segment.getSpans())) {
for (Span span : segment.getSpans()) {
if (span.getParentSpanId() == -1) {
JsonObject dataJsonObj = new JsonObject();
dataJsonObj.addProperty(SegmentCostIndex.SEG_ID, segment.getTraceSegmentId());
dataJsonObj.addProperty(SegmentCostIndex.START_TIME, span.getStartTime());
dataJsonObj.addProperty(SegmentCostIndex.END_TIME, span.getEndTime());
dataJsonObj.addProperty(SegmentCostIndex.OPERATION_NAME, span.getOperationName());
dataJsonObj.addProperty(SegmentCostIndex.TIME_SLICE, segmentWithTimeSlice.getMinute());
long startTime = span.getStartTime();
long endTime = span.getEndTime();
long cost = endTime - startTime;
if (cost == 0) {
cost = 1;
}
dataJsonObj.addProperty(SegmentCostIndex.COST, cost);
set(segment.getTraceSegmentId(), dataJsonObj);
}
}
}
} else {
logger.error("unhandled message, message instance must SegmentPost.SegmentWithTimeSlice, but is %s", message.getClass().toString());
}
}
@Override
protected WorkerRefs aggWorkRefs() {
try {
return getSelfContext().lookup(SegmentCostSave.Role.INSTANCE);
} catch (WorkerNotFoundException e) {
logger.error("The role of %s worker not found", SegmentCostSave.Role.INSTANCE.roleName());
}
return null;
}
public static class Factory extends AbstractLocalAsyncWorkerProvider<SegmentCostAnalysis> {
@Override
public Role role() {
return SegmentCostAnalysis.Role.INSTANCE;
}
@Override
public SegmentCostAnalysis workerInstance(ClusterWorkerContext clusterContext) {
return new SegmentCostAnalysis(role(), clusterContext, new LocalWorkerContext());
}
@Override
public int queueSize() {
return WorkerConfig.Queue.Segment.SegmentCostAnalysis.SIZE;
}
}
public enum Role implements com.a.eye.skywalking.collector.actor.Role {
INSTANCE;
@Override
public String roleName() {
return SegmentCostAnalysis.class.getSimpleName();
}
@Override
public WorkerSelector workerSelector() {
return new RollingSelector();
}
}
}
package com.a.eye.skywalking.collector.worker.segment.analysis;
import com.a.eye.skywalking.collector.actor.*;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordAnalysisMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.segment.SegmentExceptionIndex;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.LogData;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.segment.entity.Span;
import com.a.eye.skywalking.collector.worker.segment.entity.tag.Tags;
import com.a.eye.skywalking.collector.worker.segment.persistence.SegmentExceptionSave;
import com.a.eye.skywalking.collector.worker.tools.CollectionTools;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.List;
/**
* @author pengys5
*/
public class SegmentExceptionAnalysis extends RecordAnalysisMember {

    private Logger logger = LogManager.getFormatterLogger(SegmentExceptionAnalysis.class);

    SegmentExceptionAnalysis(Role role, ClusterWorkerContext clusterContext, LocalWorkerContext selfContext) {
        super(role, clusterContext, selfContext);
    }

    /**
     * Wires this worker to the {@link SegmentExceptionSave} provider before any
     * message is analysed.
     *
     * @throws ProviderNotFoundException when no provider is registered for
     *                                   {@link SegmentExceptionSave.Role}
     */
    @Override
    public void preStart() throws ProviderNotFoundException {
        getClusterContext().findProvider(SegmentExceptionSave.Role.INSTANCE).create(this);
    }

    /**
     * For every span of the received segment, records whether it is flagged as
     * an error and collects the "error.kind" values from its log entries, then
     * buffers the result under the trace segment id.
     *
     * @param message expected to be a {@link SegmentPost.SegmentWithTimeSlice};
     *                any other type is logged as unhandled.
     */
    @Override
    public void analyse(Object message) throws Exception {
        if (message instanceof SegmentPost.SegmentWithTimeSlice) {
            SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice) message;
            Segment segment = segmentWithTimeSlice.getSegment();
            if (CollectionTools.isNotEmpty(segment.getSpans())) {
                for (Span span : segment.getSpans()) {
                    boolean isError = Tags.ERROR.get(span);
                    JsonObject dataJsonObj = new JsonObject();
                    dataJsonObj.addProperty(SegmentExceptionIndex.IS_ERROR, isError);
                    dataJsonObj.addProperty(SegmentExceptionIndex.SEG_ID, segment.getTraceSegmentId());

                    JsonArray errorKind = new JsonArray();
                    if (isError) {
                        List<LogData> logDataList = span.getLogs();
                        // Guard against error-flagged spans carrying no logs: the
                        // previous code iterated unconditionally and would throw a
                        // NullPointerException on a null log list.
                        if (logDataList != null) {
                            for (LogData logData : logDataList) {
                                if (logData.getFields().containsKey("error.kind")) {
                                    errorKind.add(String.valueOf(logData.getFields().get("error.kind")));
                                }
                            }
                        }
                    }
                    dataJsonObj.add(SegmentExceptionIndex.ERROR_KIND, errorKind);
                    set(segment.getTraceSegmentId(), dataJsonObj);
                }
            }
        } else {
            logger.error("unhandled message, message instance must SegmentPost.SegmentWithTimeSlice, but is %s", message.getClass().toString());
        }
    }

    /**
     * The buffered records are aggregated through the
     * {@link SegmentExceptionSave} worker reference; returns null (with an
     * error log) when it is missing.
     */
    @Override
    protected WorkerRefs aggWorkRefs() {
        try {
            return getSelfContext().lookup(SegmentExceptionSave.Role.INSTANCE);
        } catch (WorkerNotFoundException e) {
            logger.error("The role of %s worker not found", SegmentExceptionSave.Role.INSTANCE.roleName());
        }
        return null;
    }

    public static class Factory extends AbstractLocalAsyncWorkerProvider<SegmentExceptionAnalysis> {
        @Override
        public Role role() {
            return SegmentExceptionAnalysis.Role.INSTANCE;
        }

        @Override
        public SegmentExceptionAnalysis workerInstance(ClusterWorkerContext clusterContext) {
            return new SegmentExceptionAnalysis(role(), clusterContext, new LocalWorkerContext());
        }

        @Override
        public int queueSize() {
            return WorkerConfig.Queue.Segment.SegmentExceptionAnalysis.SIZE;
        }
    }

    public enum Role implements com.a.eye.skywalking.collector.actor.Role {
        INSTANCE;

        @Override
        public String roleName() {
            return SegmentExceptionAnalysis.class.getSimpleName();
        }

        @Override
        public WorkerSelector workerSelector() {
            return new RollingSelector();
        }
    }
}
...@@ -40,7 +40,7 @@ public class TraceSegmentRef extends DeserializeObject { ...@@ -40,7 +40,7 @@ public class TraceSegmentRef extends DeserializeObject {
reader.beginObject(); reader.beginObject();
while (reader.hasNext()) { while (reader.hasNext()) {
switch (reader.nextName()) { switch (reader.nextName()) {
case "rs": case "ts":
String ts = reader.nextString(); String ts = reader.nextString();
this.traceSegmentId = ts; this.traceSegmentId = ts;
JsonBuilder.INSTANCE.append(stringBuilder, "ts", ts, first); JsonBuilder.INSTANCE.append(stringBuilder, "ts", ts, first);
......
package com.a.eye.skywalking.collector.worker.segment.persistence; package com.a.eye.skywalking.collector.worker.segment.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordPersistenceMember; import com.a.eye.skywalking.collector.worker.RecordPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.segment.SegmentCostIndex; import com.a.eye.skywalking.collector.worker.segment.SegmentCostIndex;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost; import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.segment.entity.Span;
import com.a.eye.skywalking.collector.worker.storage.RecordData;
import com.a.eye.skywalking.collector.worker.tools.CollectionTools;
import com.google.gson.JsonObject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class SegmentCostSave extends RecordPersistenceMember { public class SegmentCostSave extends RecordPersistenceMember {
private Logger logger = LogManager.getFormatterLogger(SegmentCostSave.class);
@Override @Override
public String esIndex() { public String esIndex() {
return SegmentCostIndex.INDEX; return SegmentCostIndex.INDEX;
...@@ -35,61 +24,21 @@ public class SegmentCostSave extends RecordPersistenceMember { ...@@ -35,61 +24,21 @@ public class SegmentCostSave extends RecordPersistenceMember {
} }
protected SegmentCostSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, protected SegmentCostSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
@Override public static class Factory extends AbstractLocalSyncWorkerProvider<SegmentCostSave> {
public void analyse(Object message) throws Exception {
if (message instanceof SegmentPost.SegmentWithTimeSlice) {
SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice)message;
Segment segment = segmentWithTimeSlice.getSegment();
if (CollectionTools.isNotEmpty(segment.getSpans())) {
for (Span span : segment.getSpans()) {
if (span.getParentSpanId() == -1) {
JsonObject dataJsonObj = new JsonObject();
dataJsonObj.addProperty(SegmentCostIndex.SEG_ID, segment.getTraceSegmentId());
dataJsonObj.addProperty(SegmentCostIndex.START_TIME, span.getStartTime());
dataJsonObj.addProperty(SegmentCostIndex.END_TIME, span.getEndTime());
dataJsonObj.addProperty(SegmentCostIndex.OPERATION_NAME, span.getOperationName());
dataJsonObj.addProperty(SegmentCostIndex.TIME_SLICE, segmentWithTimeSlice.getMinute());
long startTime = span.getStartTime();
long endTime = span.getEndTime();
long cost = endTime - startTime;
if (cost == 0) {
cost = 1;
}
dataJsonObj.addProperty(SegmentCostIndex.COST, cost);
RecordData recordData = new RecordData(segment.getTraceSegmentId());
recordData.setRecord(dataJsonObj);
super.analyse(recordData);
}
}
}
} else {
logger.error("unhandled message, message instance must JsonObject, but is %s", message.getClass().toString());
}
}
public static class Factory extends AbstractLocalAsyncWorkerProvider<SegmentCostSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
} }
@Override
public int queueSize() {
return WorkerConfig.Queue.Segment.SegmentCostSave.SIZE;
}
@Override @Override
public SegmentCostSave workerInstance(ClusterWorkerContext clusterContext) { public SegmentCostSave workerInstance(ClusterWorkerContext clusterContext) {
return new SegmentCostSave(role(), clusterContext, new LocalWorkerContext()); SegmentCostSave worker = new SegmentCostSave(role(), clusterContext, new LocalWorkerContext());
PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
} }
} }
......
package com.a.eye.skywalking.collector.worker.segment.persistence; package com.a.eye.skywalking.collector.worker.segment.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordPersistenceMember; import com.a.eye.skywalking.collector.worker.RecordPersistenceMember;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.segment.SegmentExceptionIndex; import com.a.eye.skywalking.collector.worker.segment.SegmentExceptionIndex;
import com.a.eye.skywalking.collector.worker.segment.SegmentPost;
import com.a.eye.skywalking.collector.worker.segment.entity.LogData;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.segment.entity.Span;
import com.a.eye.skywalking.collector.worker.segment.entity.tag.Tags;
import com.a.eye.skywalking.collector.worker.storage.AbstractIndex; import com.a.eye.skywalking.collector.worker.storage.AbstractIndex;
import com.a.eye.skywalking.collector.worker.storage.RecordData; import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
import com.a.eye.skywalking.collector.worker.tools.CollectionTools;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.List;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class SegmentExceptionSave extends RecordPersistenceMember { public class SegmentExceptionSave extends RecordPersistenceMember {
private Logger logger = LogManager.getFormatterLogger(SegmentExceptionSave.class);
@Override @Override
public String esIndex() { public String esIndex() {
return SegmentExceptionIndex.INDEX; return SegmentExceptionIndex.INDEX;
...@@ -41,61 +25,21 @@ public class SegmentExceptionSave extends RecordPersistenceMember { ...@@ -41,61 +25,21 @@ public class SegmentExceptionSave extends RecordPersistenceMember {
} }
protected SegmentExceptionSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext, protected SegmentExceptionSave(com.a.eye.skywalking.collector.actor.Role role, ClusterWorkerContext clusterContext,
LocalWorkerContext selfContext) { LocalWorkerContext selfContext) {
super(role, clusterContext, selfContext); super(role, clusterContext, selfContext);
} }
@Override public static class Factory extends AbstractLocalSyncWorkerProvider<SegmentExceptionSave> {
public void analyse(Object message) throws Exception {
if (message instanceof SegmentPost.SegmentWithTimeSlice) {
SegmentPost.SegmentWithTimeSlice segmentWithTimeSlice = (SegmentPost.SegmentWithTimeSlice)message;
Segment segment = segmentWithTimeSlice.getSegment();
if (CollectionTools.isNotEmpty(segment.getSpans())) {
for (Span span : segment.getSpans()) {
boolean isError = Tags.ERROR.get(span);
JsonObject dataJsonObj = new JsonObject();
dataJsonObj.addProperty(SegmentExceptionIndex.IS_ERROR, isError);
dataJsonObj.addProperty(SegmentExceptionIndex.SEG_ID, segment.getTraceSegmentId());
JsonArray errorKind = new JsonArray();
if (isError) {
List<LogData> logDataList = span.getLogs();
for (LogData logData : logDataList) {
if (logData.getFields().containsKey("error.kind")) {
errorKind.add(String.valueOf(logData.getFields().get("error.kind")));
}
}
}
dataJsonObj.add(SegmentExceptionIndex.ERROR_KIND, errorKind);
RecordData recordData = new RecordData(segment.getTraceSegmentId());
recordData.setRecord(dataJsonObj);
super.analyse(recordData);
}
}
} else {
logger.error("unhandled message, message instance must JsonObject, but is %s", message.getClass().toString());
}
}
public static class Factory extends AbstractLocalAsyncWorkerProvider<SegmentExceptionSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
} }
@Override
public int queueSize() {
return WorkerConfig.Queue.Segment.SegmentExceptionSave.SIZE;
}
@Override @Override
public SegmentExceptionSave workerInstance(ClusterWorkerContext clusterContext) { public SegmentExceptionSave workerInstance(ClusterWorkerContext clusterContext) {
return new SegmentExceptionSave(role(), clusterContext, new LocalWorkerContext()); SegmentExceptionSave worker = new SegmentExceptionSave(role(), clusterContext, new LocalWorkerContext());
PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
} }
} }
......
...@@ -42,8 +42,6 @@ public class SegmentExceptionWithSegId extends AbstractLocalSyncWorker { ...@@ -42,8 +42,6 @@ public class SegmentExceptionWithSegId extends AbstractLocalSyncWorker {
} }
public static class Factory extends AbstractLocalSyncWorkerProvider<SegmentExceptionWithSegId> { public static class Factory extends AbstractLocalSyncWorkerProvider<SegmentExceptionWithSegId> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
package com.a.eye.skywalking.collector.worker.segment.persistence; package com.a.eye.skywalking.collector.worker.segment.persistence;
import com.a.eye.skywalking.collector.actor.AbstractLocalAsyncWorkerProvider; import com.a.eye.skywalking.collector.actor.AbstractLocalSyncWorkerProvider;
import com.a.eye.skywalking.collector.actor.ClusterWorkerContext; import com.a.eye.skywalking.collector.actor.ClusterWorkerContext;
import com.a.eye.skywalking.collector.actor.LocalWorkerContext; import com.a.eye.skywalking.collector.actor.LocalWorkerContext;
import com.a.eye.skywalking.collector.actor.selector.RollingSelector; import com.a.eye.skywalking.collector.actor.selector.RollingSelector;
import com.a.eye.skywalking.collector.actor.selector.WorkerSelector; import com.a.eye.skywalking.collector.actor.selector.WorkerSelector;
import com.a.eye.skywalking.collector.worker.RecordPersistenceMember; import com.a.eye.skywalking.collector.worker.PersistenceMember;
import com.a.eye.skywalking.collector.worker.config.CacheSizeConfig; import com.a.eye.skywalking.collector.worker.config.CacheSizeConfig;
import com.a.eye.skywalking.collector.worker.config.WorkerConfig;
import com.a.eye.skywalking.collector.worker.segment.SegmentIndex; import com.a.eye.skywalking.collector.worker.segment.SegmentIndex;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.storage.AbstractIndex; import com.a.eye.skywalking.collector.worker.storage.AbstractIndex;
import com.a.eye.skywalking.collector.worker.storage.EsClient; import com.a.eye.skywalking.collector.worker.storage.EsClient;
import org.apache.logging.log4j.LogManager; import com.a.eye.skywalking.collector.worker.storage.PersistenceWorkerListener;
import org.apache.logging.log4j.Logger; import com.a.eye.skywalking.collector.worker.storage.SegmentData;
import org.elasticsearch.action.bulk.BulkRequestBuilder; import com.a.eye.skywalking.collector.worker.storage.SegmentPersistenceData;
import org.elasticsearch.action.bulk.BulkResponse; import java.util.LinkedList;
import org.elasticsearch.client.Client; import java.util.List;
import java.util.LinkedHashMap;
import java.util.Map; import java.util.Map;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.Client;
/** /**
* @author pengys5 * @author pengys5
*/ */
public class SegmentSave extends RecordPersistenceMember { public class SegmentSave extends PersistenceMember<SegmentPersistenceData, SegmentData> {
private Logger logger = LogManager.getFormatterLogger(SegmentSave.class);
private Map<String, String> persistenceData = new LinkedHashMap<>();
@Override @Override
public String esIndex() { public String esIndex() {
...@@ -46,56 +41,60 @@ public class SegmentSave extends RecordPersistenceMember { ...@@ -46,56 +41,60 @@ public class SegmentSave extends RecordPersistenceMember {
} }
@Override @Override
public void analyse(Object message) throws Exception { public SegmentPersistenceData initializeData() {
return new SegmentPersistenceData();
}
@Override
final public void analyse(Object message) throws Exception {
if (message instanceof Segment) { if (message instanceof Segment) {
Segment segment = (Segment) message; Segment segment = (Segment) message;
persistenceData.put(segment.getTraceSegmentId(), segment.getJsonStr()); SegmentPersistenceData data = getPersistenceData();
if (persistenceData.size() >= CacheSizeConfig.Cache.Persistence.SIZE) { data.hold();
persistence(); data.getOrCreate(segment.getTraceSegmentId()).setSegmentStr(segment.getJsonStr());
if (data.size() >= CacheSizeConfig.Cache.Persistence.SIZE) {
persistence(data.asMap());
} }
data.release();
} else { } else {
logger.error("unhandled message, message instance must JsonObject, but is %s", message.getClass().toString()); logger().error("unhandled message, message instance must Segment, but is %s", message.getClass().toString());
}
}
@Override
protected void persistence() {
boolean success = saveToEs();
if (success) {
persistenceData.clear();
} }
} }
private boolean saveToEs() { private void persistence(Map<String, SegmentData> dataMap) {
List<IndexRequestBuilder> builderList = new LinkedList<>();
Client client = EsClient.INSTANCE.getClient(); Client client = EsClient.INSTANCE.getClient();
BulkRequestBuilder bulkRequest = client.prepareBulk(); dataMap.forEach((key, value) -> {
logger.debug("persistenceData SIZE: %s", persistenceData.size()); IndexRequestBuilder builder = client.prepareIndex(esIndex(), esType(), key).setSource(value.getSegmentStr());
builderList.add(builder);
});
EsClient.INSTANCE.bulk(builderList);
dataMap.clear();
}
persistenceData.forEach((key, value) -> bulkRequest.add(client.prepareIndex(esIndex(), esType(), key).setSource(value))); @Override
final protected void prepareIndex(List<IndexRequestBuilder> builderList) {
Map<String, SegmentData> lastData = getPersistenceData().getLast().asMap();
BulkResponse bulkResponse = bulkRequest.execute().actionGet(); Client client = EsClient.INSTANCE.getClient();
if (bulkResponse.hasFailures()) { lastData.forEach((key, value) -> {
logger.error(bulkResponse.buildFailureMessage()); IndexRequestBuilder builder = client.prepareIndex(esIndex(), esType(), key).setSource(value.getSegmentStr());
} builderList.add(builder);
return !bulkResponse.hasFailures(); });
lastData.clear();
} }
public static class Factory extends AbstractLocalAsyncWorkerProvider<SegmentSave> { public static class Factory extends AbstractLocalSyncWorkerProvider<SegmentSave> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return Role.INSTANCE; return Role.INSTANCE;
} }
@Override
public int queueSize() {
return WorkerConfig.Queue.Segment.SegmentSave.SIZE;
}
@Override @Override
public SegmentSave workerInstance(ClusterWorkerContext clusterContext) { public SegmentSave workerInstance(ClusterWorkerContext clusterContext) {
return new SegmentSave(role(), clusterContext, new LocalWorkerContext()); SegmentSave worker = new SegmentSave(role(), clusterContext, new LocalWorkerContext());
PersistenceWorkerListener.INSTANCE.register(worker);
return worker;
} }
} }
......
...@@ -11,7 +11,7 @@ import com.a.eye.skywalking.collector.worker.segment.entity.GlobalTraceId; ...@@ -11,7 +11,7 @@ import com.a.eye.skywalking.collector.worker.segment.entity.GlobalTraceId;
import com.a.eye.skywalking.collector.worker.segment.entity.Segment; import com.a.eye.skywalking.collector.worker.segment.entity.Segment;
import com.a.eye.skywalking.collector.worker.segment.entity.SegmentDeserialize; import com.a.eye.skywalking.collector.worker.segment.entity.SegmentDeserialize;
import com.a.eye.skywalking.collector.worker.storage.EsClient; import com.a.eye.skywalking.collector.worker.storage.EsClient;
import com.a.eye.skywalking.collector.worker.storage.MergeData; import com.a.eye.skywalking.collector.worker.storage.JoinAndSplitData;
import com.a.eye.skywalking.collector.worker.tools.CollectionTools; import com.a.eye.skywalking.collector.worker.tools.CollectionTools;
import com.google.gson.Gson; import com.google.gson.Gson;
import com.google.gson.JsonArray; import com.google.gson.JsonArray;
...@@ -55,7 +55,7 @@ public class SegmentTopSearchWithGlobalTraceId extends AbstractLocalSyncWorker { ...@@ -55,7 +55,7 @@ public class SegmentTopSearchWithGlobalTraceId extends AbstractLocalSyncWorker {
if (globalTraceObj != null && globalTraceObj.has(GlobalTraceIndex.SUB_SEG_IDS)) { if (globalTraceObj != null && globalTraceObj.has(GlobalTraceIndex.SUB_SEG_IDS)) {
String subSegIdsStr = globalTraceObj.get(GlobalTraceIndex.SUB_SEG_IDS).getAsString(); String subSegIdsStr = globalTraceObj.get(GlobalTraceIndex.SUB_SEG_IDS).getAsString();
String[] subSegIds = subSegIdsStr.split(MergeData.SPLIT); String[] subSegIds = subSegIdsStr.split(JoinAndSplitData.SPLIT);
topSegPaging.addProperty("recordsTotal", subSegIds.length); topSegPaging.addProperty("recordsTotal", subSegIds.length);
...@@ -138,8 +138,6 @@ public class SegmentTopSearchWithGlobalTraceId extends AbstractLocalSyncWorker { ...@@ -138,8 +138,6 @@ public class SegmentTopSearchWithGlobalTraceId extends AbstractLocalSyncWorker {
} }
public static class Factory extends AbstractLocalSyncWorkerProvider<SegmentTopSearchWithGlobalTraceId> { public static class Factory extends AbstractLocalSyncWorkerProvider<SegmentTopSearchWithGlobalTraceId> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
...@@ -168,8 +168,6 @@ public class SegmentTopSearchWithTimeSlice extends AbstractLocalSyncWorker { ...@@ -168,8 +168,6 @@ public class SegmentTopSearchWithTimeSlice extends AbstractLocalSyncWorker {
} }
public static class Factory extends AbstractLocalSyncWorkerProvider<SegmentTopSearchWithTimeSlice> { public static class Factory extends AbstractLocalSyncWorkerProvider<SegmentTopSearchWithTimeSlice> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
...@@ -48,8 +48,6 @@ public class SpanGetWithId extends AbstractGet { ...@@ -48,8 +48,6 @@ public class SpanGetWithId extends AbstractGet {
} }
public static class Factory extends AbstractGetProvider<SpanGetWithId> { public static class Factory extends AbstractGetProvider<SpanGetWithId> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
...@@ -68,8 +68,6 @@ public class SpanSearchWithId extends AbstractLocalSyncWorker { ...@@ -68,8 +68,6 @@ public class SpanSearchWithId extends AbstractLocalSyncWorker {
} }
public static class Factory extends AbstractLocalSyncWorkerProvider<SpanSearchWithId> { public static class Factory extends AbstractLocalSyncWorkerProvider<SpanSearchWithId> {
public static Factory INSTANCE = new Factory();
@Override @Override
public Role role() { public Role role() {
return WorkerRole.INSTANCE; return WorkerRole.INSTANCE;
......
...@@ -30,12 +30,15 @@ public abstract class AbstractIndex { ...@@ -30,12 +30,15 @@ public abstract class AbstractIndex {
final XContentBuilder createSettingBuilder() throws IOException { final XContentBuilder createSettingBuilder() throws IOException {
return XContentFactory.jsonBuilder() return XContentFactory.jsonBuilder()
.startObject() .startObject()
.field("index.number_of_shards", EsConfig.Es.Index.Shards.NUMBER) .field("index.number_of_shards", EsConfig.Es.Index.Shards.NUMBER)
.field("index.number_of_replicas", EsConfig.Es.Index.Replicas.NUMBER) .field("index.number_of_replicas", EsConfig.Es.Index.Replicas.NUMBER)
.endObject(); .field("index.refresh_interval", String.valueOf(refreshInterval()) + "s")
.endObject();
} }
public abstract int refreshInterval();
public abstract boolean isRecord(); public abstract boolean isRecord();
public abstract XContentBuilder createMappingBuilder() throws IOException; public abstract XContentBuilder createMappingBuilder() throws IOException;
......
package com.a.eye.skywalking.collector.worker.storage;
import java.util.Map;
/**
* @author pengys5
*/
/**
 * Contract for a storage-layer data holder that can be identified and
 * merged with an already-persisted copy of itself.
 */
public interface Data {

    /**
     * @return the unique identifier of this data item (used as the storage key).
     */
    String getId();

    /**
     * Merges the values loaded from the database into this in-memory item.
     *
     * @param dbData field-name to value map as read from storage
     *               — presumably the ES document source; verify against callers.
     */
    void merge(Map<String, ?> dbData);
}
...@@ -5,6 +5,9 @@ import org.apache.logging.log4j.LogManager; ...@@ -5,6 +5,9 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress;
...@@ -77,4 +80,21 @@ public enum EsClient { ...@@ -77,4 +80,21 @@ public enum EsClient {
this.port = port; this.port = port;
} }
} }
public void bulk(List<IndexRequestBuilder> dataList) {
Client client = EsClient.INSTANCE.getClient();
BulkRequestBuilder bulkRequest = client.prepareBulk();
logger.info("bulk data size: %s", dataList.size());
if (dataList.size() > 0) {
for (IndexRequestBuilder builder : dataList) {
bulkRequest.add(builder);
}
BulkResponse bulkResponse = bulkRequest.execute().actionGet();
if (bulkResponse.hasFailures()) {
logger.error(bulkResponse.buildFailureMessage());
}
}
}
} }
package com.a.eye.skywalking.collector.worker.storage;
/**
* @author pengys5
*/
/**
 * Empty marker/message class. NOTE(review): presumably sent to persistence
 * workers to signal "flush the current window and switch buffers" — the name
 * suggests it, but no sender/receiver is visible here; confirm against usages.
 */
public class FlushAndSwitch {
}
package com.a.eye.skywalking.collector.worker.storage;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* @author pengys5
*/
public class JoinAndSplitAnalysisData {

    /**
     * Windowed buffer keyed by item id. Parameterized construction (was a raw
     * {@code new WindowData(...)}, which produced an unchecked warning and
     * discarded generic type safety); field is final since it is never reassigned.
     */
    private final WindowData<JoinAndSplitData> windowData =
        new WindowData<>(new LinkedHashMap<String, JoinAndSplitData>());

    /**
     * Returns the buffered item for the given id, creating and registering a
     * new {@link JoinAndSplitData} on first access.
     *
     * @param id item identifier (storage key)
     * @return the existing or newly created item, never {@code null}
     */
    public JoinAndSplitData getOrCreate(String id) {
        if (!windowData.containsKey(id)) {
            windowData.put(id, new JoinAndSplitData(id));
        }
        return windowData.get(id);
    }

    /**
     * @return the backing map view of the current window's contents.
     */
    public Map<String, JoinAndSplitData> asMap() {
        return windowData.asMap();
    }
}
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册