Commit b4f455cc authored by: G gaohongtao

Merge branch 'master' of https://github.com/apache/incubator-skywalking into apache-master

......@@ -10,3 +10,4 @@ target/
packages/
**/dependency-reduced-pom.xml
/skywalking-agent/
/dist/
......@@ -59,13 +59,4 @@
</plugin>
</plugins>
</build>
<distributionManagement>
<repository>
<id>bintray-wu-sheng-sky-walking-repository</id>
<name>wu-sheng-sky-walking-repository</name>
<url>https://api.bintray.com/maven/wu-sheng/skywalking/org.apache.skywalking.apm-toolkit-log4j-1.x/;publish=1
</url>
</repository>
</distributionManagement>
</project>
......@@ -60,12 +60,4 @@
</plugins>
</build>
<distributionManagement>
<repository>
<id>bintray-wu-sheng-sky-walking-repository</id>
<name>wu-sheng-sky-walking-repository</name>
<url>https://api.bintray.com/maven/wu-sheng/skywalking/org.apache.skywalking.apm-toolkit-log4j-2.x/;publish=1
</url>
</repository>
</distributionManagement>
</project>
......@@ -59,13 +59,4 @@
</plugin>
</plugins>
</build>
<distributionManagement>
<repository>
<id>bintray-wu-sheng-sky-walking-repository</id>
<name>wu-sheng-sky-walking-repository</name>
<url>https://api.bintray.com/maven/wu-sheng/skywalking/org.apache.skywalking.apm-toolkit-logback-1.x/;publish=1
</url>
</repository>
</distributionManagement>
</project>
......@@ -42,12 +42,5 @@
</dependency>
</dependencies>
<distributionManagement>
<repository>
<id>bintray-wu-sheng-sky-walking-repository</id>
<name>wu-sheng-sky-walking-repository</name>
<url>https://api.bintray.com/maven/wu-sheng/skywalking/org.apache.skywalking.apm-toolkit-opentracing/;publish=1
</url>
</repository>
</distributionManagement>
</project>
......@@ -52,13 +52,4 @@
</plugins>
</build>
<distributionManagement>
<repository>
<id>bintray-wu-sheng-sky-walking-repository</id>
<name>wu-sheng-sky-walking-repository</name>
<url>
https://api.bintray.com/maven/wu-sheng/skywalking/org.apache.skywalking.apm-toolkit-trace/;publish=1
</url>
</repository>
</distributionManagement>
</project>
......@@ -25,6 +25,8 @@ import org.apache.skywalking.apm.collector.analysis.jvm.define.service.ICpuMetri
import org.apache.skywalking.apm.collector.analysis.jvm.define.service.IGCMetricService;
import org.apache.skywalking.apm.collector.analysis.jvm.define.service.IMemoryMetricService;
import org.apache.skywalking.apm.collector.analysis.jvm.define.service.IMemoryPoolMetricService;
import org.apache.skywalking.apm.collector.analysis.metric.define.AnalysisMetricModule;
import org.apache.skywalking.apm.collector.analysis.metric.define.service.IInstanceHeartBeatService;
import org.apache.skywalking.apm.collector.core.module.ModuleManager;
import org.apache.skywalking.apm.collector.core.util.TimeBucketUtils;
import org.apache.skywalking.apm.collector.server.grpc.GRPCHandler;
......@@ -49,12 +51,14 @@ public class JVMMetricsServiceHandler extends JVMMetricsServiceGrpc.JVMMetricsSe
private final IGCMetricService gcMetricService;
private final IMemoryMetricService memoryMetricService;
private final IMemoryPoolMetricService memoryPoolMetricService;
private final IInstanceHeartBeatService instanceHeartBeatService;
public JVMMetricsServiceHandler(ModuleManager moduleManager) {
this.cpuMetricService = moduleManager.find(AnalysisJVMModule.NAME).getService(ICpuMetricService.class);
this.gcMetricService = moduleManager.find(AnalysisJVMModule.NAME).getService(IGCMetricService.class);
this.memoryMetricService = moduleManager.find(AnalysisJVMModule.NAME).getService(IMemoryMetricService.class);
this.memoryPoolMetricService = moduleManager.find(AnalysisJVMModule.NAME).getService(IMemoryPoolMetricService.class);
this.instanceHeartBeatService = moduleManager.find(AnalysisMetricModule.NAME).getService(IInstanceHeartBeatService.class);
}
@Override public void collect(JVMMetrics request, StreamObserver<Downstream> responseObserver) {
......@@ -67,6 +71,7 @@ public class JVMMetricsServiceHandler extends JVMMetricsServiceGrpc.JVMMetricsSe
sendToMemoryMetricService(instanceId, time, metric.getMemoryList());
sendToMemoryPoolMetricService(instanceId, time, metric.getMemoryPoolList());
sendToGCMetricService(instanceId, time, metric.getGcList());
sendToInstanceHeartBeatService(instanceId, metric.getTime());
});
responseObserver.onNext(Downstream.newBuilder().build());
......@@ -90,4 +95,8 @@ public class JVMMetricsServiceHandler extends JVMMetricsServiceGrpc.JVMMetricsSe
private void sendToGCMetricService(int instanceId, long timeBucket, List<GC> gcs) {
gcs.forEach(gc -> gcMetricService.send(instanceId, timeBucket, gc.getPhraseValue(), gc.getCount(), gc.getTime()));
}
private void sendToInstanceHeartBeatService(int instanceId, long heartBeatTime) {
instanceHeartBeatService.heartBeat(instanceId, heartBeatTime);
}
}
......@@ -54,7 +54,7 @@ public class ServiceNameDiscoveryServiceHandler extends ServiceNameDiscoveryServ
int applicationId = serviceNameElement.getApplicationId();
String serviceName = serviceNameElement.getServiceName();
int srcSpanType = serviceNameElement.getSrcSpanTypeValue();
int serviceId = serviceNameService.getOrCreate(applicationId, srcSpanType, serviceName);
int serviceId = serviceNameService.get(applicationId, srcSpanType, serviceName);
if (serviceId != 0) {
ServiceNameMappingElement.Builder mappingElement = ServiceNameMappingElement.newBuilder();
......
......@@ -24,5 +24,8 @@ import org.apache.skywalking.apm.collector.core.module.Service;
* @author peng-yongsheng
*/
public interface IServiceNameService extends Service {
int getOrCreate(int applicationId, int srcSpanType, String serviceName);
int get(int applicationId, int srcSpanType, String serviceName);
}
......@@ -74,4 +74,8 @@ public class ServiceNameService implements IServiceNameService {
}
return serviceId;
}
@Override public int get(int applicationId, int srcSpanType, String serviceName) {
return getServiceIdCacheService().get(applicationId, srcSpanType, serviceName);
}
}
......@@ -27,6 +27,7 @@ import org.apache.skywalking.apm.network.proto.UniqueId;
* @author peng-yongsheng
*/
public class ReferenceDecorator implements StandardBuilder {
private boolean isOrigin = true;
private StandardBuilder standardBuilder;
private TraceSegmentReference referenceObject;
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.collector.analysis.segment.parser.provider.service;
import com.google.protobuf.InvalidProtocolBufferException;
import java.util.Base64;
import java.util.List;
import org.apache.skywalking.apm.network.proto.SpanObject;
import org.apache.skywalking.apm.network.proto.TraceSegmentObject;
import org.apache.skywalking.apm.network.proto.UniqueId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author peng-yongsheng
*/
public class SegmentBase64Printer {

    private static final Logger LOGGER = LoggerFactory.getLogger(SegmentBase64Printer.class);

    /**
     * Debugging utility: decodes a Base64-encoded {@link TraceSegmentObject}
     * (as serialized by the agent) and logs every field of the segment and its
     * spans for manual inspection.
     */
    public static void main(String[] args) throws InvalidProtocolBufferException {
        String segmentBase64 = "CgwKCgIBsv/x1L2vgBsSggEQ////////////ARirnsP1niwg9Z7D9Z4sOhhIMi9KREJJL0Nvbm5lY3Rpb24vY2xvc2VKDGxvY2FsaG9zdDotMVABWAFgBHoOCgdkYi50eXBlEgNzcWx6GQoLZGIuaW5zdGFuY2USCmRhdGFTb3VyY2V6DgoMZGIuc3RhdGVtZW50GP///////////wEgAg==";
        byte[] binarySegment = Base64.getDecoder().decode(segmentBase64);
        TraceSegmentObject segmentObject = TraceSegmentObject.parseFrom(binarySegment);

        LOGGER.info("SegmentId: {}", formatSegmentId(segmentObject.getTraceSegmentId()));
        LOGGER.info("ApplicationId: {}", segmentObject.getApplicationId());
        LOGGER.info("ApplicationInstanceId: {}", segmentObject.getApplicationInstanceId());
        LOGGER.info("Spans:");
        for (SpanObject span : segmentObject.getSpansList()) {
            printSpan(span);
        }
    }

    /** Joins the id parts of a segment id with "." separators, e.g. "1.2.3". */
    private static String formatSegmentId(UniqueId segmentId) {
        StringBuilder segmentIdBuilder = new StringBuilder();
        String separator = "";
        for (Long idPart : segmentId.getIdPartsList()) {
            segmentIdBuilder.append(separator).append(idPart);
            separator = ".";
        }
        return segmentIdBuilder.toString();
    }

    /** Logs each field of a single span, then its segment references. */
    private static void printSpan(SpanObject span) {
        LOGGER.info("  Span:");
        LOGGER.info("  SpanId: {}", span.getSpanId());
        LOGGER.info("  ParentSpanId: {}", span.getParentSpanId());
        LOGGER.info("  SpanLayer: {}", span.getSpanLayer());
        LOGGER.info("  SpanType: {}", span.getSpanType());
        LOGGER.info("  StartTime: {}", span.getStartTime());
        LOGGER.info("  EndTime: {}", span.getEndTime());
        LOGGER.info("  ComponentId: {}", span.getComponentId());
        LOGGER.info("  Component: {}", span.getComponent());
        LOGGER.info("  OperationNameId: {}", span.getOperationNameId());
        LOGGER.info("  OperationName: {}", span.getOperationName());
        LOGGER.info("  PeerId: {}", span.getPeerId());
        LOGGER.info("  Peer: {}", span.getPeer());
        LOGGER.info("  IsError: {}", span.getIsError());
        LOGGER.info("  reference:");
        span.getRefsList().forEach(reference ->
            LOGGER.info("  EntryApplicationInstanceId: {}", reference.getEntryApplicationInstanceId()));
    }
}
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
~
-->
<Configuration status="info">
    <Appenders>
        <!-- Single console appender: every event goes to stdout as UTF-8. -->
        <Console name="Console" target="SYSTEM_OUT">
            <PatternLayout charset="UTF-8" pattern="%d - %c -%-4r [%t] %-5p %x - %m%n"/>
        </Console>
    </Appenders>
    <Loggers>
        <!-- Pin noisy framework/third-party loggers to INFO so their DEBUG
             output does not drown the collector's own logging. -->
        <logger name="org.eclipse.jetty" level="INFO"/>
        <logger name="org.apache.zookeeper" level="INFO"/>
        <logger name="org.elasticsearch.common.network.IfConfig" level="INFO"/>
        <logger name="org.apache.skywalking.apm.collector.agent.grpc.provider.handler.JVMMetricsServiceHandler" level="INFO"/>
        <logger name="org.apache.skywalking.apm.collector.analysis.worker.timer.PersistenceTimer" level="INFO"/>
        <logger name="io.grpc.netty" level="INFO"/>
        <!-- Everything else logs at INFO through the console appender. -->
        <Root level="info">
            <AppenderRef ref="Console"/>
        </Root>
    </Loggers>
</Configuration>
......@@ -66,8 +66,14 @@ public class MemoryMetricEsUIDAO extends EsDAO implements IMemoryMetricUIDAO {
long max = ((Number)response.getResponse().getSource().get(MemoryMetricTable.COLUMN_MAX)).longValue();
long used = ((Number)response.getResponse().getSource().get(MemoryMetricTable.COLUMN_USED)).longValue();
long times = ((Number)response.getResponse().getSource().get(MemoryMetricTable.COLUMN_TIMES)).longValue();
trend.getMetrics().add((int)(used / times));
trend.getMaxMetrics().add((int)(max / times));
if (max < 0) {
trend.getMaxMetrics().add((int)(used / times));
} else {
trend.getMaxMetrics().add((int)(max / times));
}
} else {
trend.getMetrics().add(0);
trend.getMaxMetrics().add(0);
......
......@@ -71,7 +71,12 @@ public class MemoryMetricH2UIDAO extends H2DAO implements IMemoryMetricUIDAO {
long used = rs.getLong(MemoryMetricTable.COLUMN_USED);
long times = rs.getLong(MemoryMetricTable.COLUMN_TIMES);
trend.getMetrics().add((int)(used / times));
trend.getMaxMetrics().add((int)(max / times));
if (max < 0) {
trend.getMaxMetrics().add((int)(used / times));
} else {
trend.getMaxMetrics().add((int)(max / times));
}
} else {
trend.getMetrics().add(0);
trend.getMaxMetrics().add(0);
......
......@@ -58,11 +58,11 @@ public class AlarmQuery implements Query {
switch (alarmType) {
case APPLICATION:
return getAlarmService().loadApplicationAlarmList(keyword, startTimeBucket, endTimeBucket, page.getLimit(), page.getFrom());
return getAlarmService().loadApplicationAlarmList(keyword, duration.getStep(), startTimeBucket, endTimeBucket, page.getLimit(), page.getFrom());
case SERVER:
return getAlarmService().loadInstanceAlarmList(keyword, startTimeBucket, endTimeBucket, page.getLimit(), page.getFrom());
return getAlarmService().loadInstanceAlarmList(keyword, duration.getStep(), startTimeBucket, endTimeBucket, page.getLimit(), page.getFrom());
case SERVICE:
return getAlarmService().loadServiceAlarmList(keyword, startTimeBucket, endTimeBucket, page.getLimit(), page.getFrom());
return getAlarmService().loadServiceAlarmList(keyword, duration.getStep(), startTimeBucket, endTimeBucket, page.getLimit(), page.getFrom());
default:
return new Alarm();
}
......
......@@ -31,6 +31,7 @@ import org.apache.skywalking.apm.collector.ui.graphql.Query;
import org.apache.skywalking.apm.collector.ui.service.SegmentTopService;
import org.apache.skywalking.apm.collector.ui.service.TraceStackService;
import org.apache.skywalking.apm.collector.ui.utils.DurationUtils;
import org.apache.skywalking.apm.collector.ui.utils.PaginationUtils;
/**
* @author peng-yongsheng
......@@ -64,11 +65,11 @@ public class TraceQuery implements Query {
long endSecondTimeBucket = 0;
String traceId = Const.EMPTY_STRING;
if (ObjectUtils.isNotEmpty(condition.getQueryDuration())) {
if (StringUtils.isNotEmpty(condition.getTraceId())) {
traceId = condition.getTraceId();
} else if (ObjectUtils.isNotEmpty(condition.getQueryDuration())) {
startSecondTimeBucket = DurationUtils.INSTANCE.durationToSecondTimeBucket(condition.getQueryDuration().getStep(), condition.getQueryDuration().getStart());
endSecondTimeBucket = DurationUtils.INSTANCE.durationToSecondTimeBucket(condition.getQueryDuration().getStep(), condition.getQueryDuration().getEnd());
} else if (StringUtils.isNotEmpty(condition.getTraceId())) {
traceId = condition.getTraceId();
} else {
throw new UnexpectedException("The condition must contains either queryDuration or traceId.");
}
......@@ -77,10 +78,9 @@ public class TraceQuery implements Query {
long maxDuration = condition.getMaxTraceDuration();
String operationName = condition.getOperationName();
int applicationId = condition.getApplicationId();
int limit = condition.getPaging().getPageSize();
int from = condition.getPaging().getPageSize() * condition.getPaging().getPageNum();
return getSegmentTopService().loadTop(startSecondTimeBucket, endSecondTimeBucket, minDuration, maxDuration, operationName, traceId, applicationId, limit, from);
PaginationUtils.Page page = PaginationUtils.INSTANCE.exchange(condition.getPaging());
return getSegmentTopService().loadTop(startSecondTimeBucket, endSecondTimeBucket, minDuration, maxDuration, operationName, traceId, applicationId, page.getLimit(), page.getFrom());
}
public Trace queryTrace(String traceId) {
......
......@@ -32,6 +32,7 @@ import org.apache.skywalking.apm.collector.core.util.Const;
import org.apache.skywalking.apm.collector.storage.StorageModule;
import org.apache.skywalking.apm.collector.storage.dao.ui.IApplicationAlarmListUIDAO;
import org.apache.skywalking.apm.collector.storage.dao.ui.IApplicationAlarmUIDAO;
import org.apache.skywalking.apm.collector.storage.dao.ui.IApplicationMappingUIDAO;
import org.apache.skywalking.apm.collector.storage.dao.ui.IInstanceAlarmUIDAO;
import org.apache.skywalking.apm.collector.storage.dao.ui.IInstanceUIDAO;
import org.apache.skywalking.apm.collector.storage.dao.ui.IServiceAlarmUIDAO;
......@@ -56,6 +57,7 @@ public class AlarmService {
private final Gson gson = new Gson();
private final IInstanceUIDAO instanceDAO;
private final IApplicationAlarmUIDAO applicationAlarmUIDAO;
private final IApplicationMappingUIDAO applicationMappingUIDAO;
private final IInstanceAlarmUIDAO instanceAlarmUIDAO;
private final IServiceAlarmUIDAO serviceAlarmUIDAO;
private final IApplicationAlarmListUIDAO applicationAlarmListUIDAO;
......@@ -67,6 +69,7 @@ public class AlarmService {
public AlarmService(ModuleManager moduleManager) {
this.instanceDAO = moduleManager.find(StorageModule.NAME).getService(IInstanceUIDAO.class);
this.applicationAlarmUIDAO = moduleManager.find(StorageModule.NAME).getService(IApplicationAlarmUIDAO.class);
this.applicationMappingUIDAO = moduleManager.find(StorageModule.NAME).getService(IApplicationMappingUIDAO.class);
this.instanceAlarmUIDAO = moduleManager.find(StorageModule.NAME).getService(IInstanceAlarmUIDAO.class);
this.serviceAlarmUIDAO = moduleManager.find(StorageModule.NAME).getService(IServiceAlarmUIDAO.class);
this.applicationAlarmListUIDAO = moduleManager.find(StorageModule.NAME).getService(IApplicationAlarmListUIDAO.class);
......@@ -74,12 +77,16 @@ public class AlarmService {
this.serviceNameCacheService = moduleManager.find(CacheModule.NAME).getService(ServiceNameCacheService.class);
}
public Alarm loadApplicationAlarmList(String keyword, long startTimeBucket, long endTimeBucket,
public Alarm loadApplicationAlarmList(String keyword, Step step, long startTimeBucket, long endTimeBucket,
int limit, int from) throws ParseException {
logger.debug("keyword: {}, startTimeBucket: {}, endTimeBucket: {}, limit: {}, from: {}", keyword, startTimeBucket, endTimeBucket, limit, from);
Alarm alarm = applicationAlarmUIDAO.loadAlarmList(keyword, startTimeBucket, endTimeBucket, limit, from);
List<IApplicationMappingUIDAO.ApplicationMapping> applicationMappings = applicationMappingUIDAO.load(step, startTimeBucket, endTimeBucket);
Map<Integer, Integer> mappings = new HashMap<>();
applicationMappings.forEach(applicationMapping -> mappings.put(applicationMapping.getMappingApplicationId(), applicationMapping.getApplicationId()));
alarm.getItems().forEach(item -> {
String applicationCode = applicationCacheService.getApplicationById(item.getId()).getApplicationCode();
String applicationCode = applicationCacheService.getApplicationById(mappings.getOrDefault(item.getId(), item.getId())).getApplicationCode();
switch (item.getCauseType()) {
case SLOW_RESPONSE:
item.setTitle("Application " + applicationCode + RESPONSE_TIME_ALARM);
......@@ -92,13 +99,18 @@ public class AlarmService {
return alarm;
}
public Alarm loadInstanceAlarmList(String keyword, long startTimeBucket, long endTimeBucket,
public Alarm loadInstanceAlarmList(String keyword, Step step, long startTimeBucket, long endTimeBucket,
int limit, int from) throws ParseException {
logger.debug("keyword: {}, startTimeBucket: {}, endTimeBucket: {}, limit: {}, from: {}", keyword, startTimeBucket, endTimeBucket, limit, from);
Alarm alarm = instanceAlarmUIDAO.loadAlarmList(keyword, startTimeBucket, endTimeBucket, limit, from);
List<IApplicationMappingUIDAO.ApplicationMapping> applicationMappings = applicationMappingUIDAO.load(step, startTimeBucket, endTimeBucket);
Map<Integer, Integer> mappings = new HashMap<>();
applicationMappings.forEach(applicationMapping -> mappings.put(applicationMapping.getMappingApplicationId(), applicationMapping.getApplicationId()));
alarm.getItems().forEach(item -> {
Instance instance = instanceDAO.getInstance(item.getId());
String applicationCode = applicationCacheService.getApplicationById(instance.getApplicationId()).getApplicationCode();
String applicationCode = applicationCacheService.getApplicationById(mappings.getOrDefault(instance.getApplicationId(), instance.getApplicationId())).getApplicationCode();
String serverName = buildServerName(instance.getOsInfo());
switch (item.getCauseType()) {
case SLOW_RESPONSE:
......@@ -113,13 +125,18 @@ public class AlarmService {
return alarm;
}
public Alarm loadServiceAlarmList(String keyword, long startTimeBucket, long endTimeBucket,
public Alarm loadServiceAlarmList(String keyword, Step step, long startTimeBucket, long endTimeBucket,
int limit, int from) throws ParseException {
logger.debug("keyword: {}, startTimeBucket: {}, endTimeBucket: {}, limit: {}, from: {}", keyword, startTimeBucket, endTimeBucket, limit, from);
Alarm alarm = serviceAlarmUIDAO.loadAlarmList(keyword, startTimeBucket, endTimeBucket, limit, from);
List<IApplicationMappingUIDAO.ApplicationMapping> applicationMappings = applicationMappingUIDAO.load(step, startTimeBucket, endTimeBucket);
Map<Integer, Integer> mappings = new HashMap<>();
applicationMappings.forEach(applicationMapping -> mappings.put(applicationMapping.getMappingApplicationId(), applicationMapping.getApplicationId()));
alarm.getItems().forEach(item -> {
ServiceName serviceName = serviceNameCacheService.get(item.getId());
String applicationCode = applicationCacheService.getApplicationById(serviceName.getApplicationId()).getApplicationCode();
String applicationCode = applicationCacheService.getApplicationById(mappings.getOrDefault(serviceName.getApplicationId(), serviceName.getApplicationId())).getApplicationCode();
switch (item.getCauseType()) {
case SLOW_RESPONSE:
item.setTitle("Service " + serviceName.getServiceName() + " of Application " + applicationCode + RESPONSE_TIME_ALARM);
......@@ -146,7 +163,11 @@ public class AlarmService {
AlarmTrend alarmTrend = new AlarmTrend();
durationPoints.forEach(durationPoint -> {
alarmTrend.getNumOfAlarmRate().add((trendsMap.getOrDefault(durationPoint.getPoint(), 0) * 10000) / (applications.size()));
if (applications.size() == 0) {
alarmTrend.getNumOfAlarmRate().add(0);
} else {
alarmTrend.getNumOfAlarmRate().add((trendsMap.getOrDefault(durationPoint.getPoint(), 0) * 10000) / (applications.size()));
}
});
return alarmTrend;
}
......
......@@ -72,11 +72,12 @@ public class ApplicationService {
int... applicationIds) {
List<Application> applications = instanceDAO.getApplications(startSecondTimeBucket, endSecondTimeBucket, applicationIds);
applications.forEach(application -> {
for (int i = applications.size() - 1; i >= 0; i--) {
Application application = applications.get(i);
if (application.getId() == Const.NONE_APPLICATION_ID) {
applications.remove(application);
applications.remove(i);
}
});
}
applications.forEach(application -> {
String applicationCode = applicationCacheService.getApplicationById(application.getId()).getApplicationCode();
......@@ -112,11 +113,12 @@ public class ApplicationService {
return applicationThroughput;
}
public ConjecturalAppBrief getConjecturalApps(Step step, long startSecondTimeBucket, long endSecondTimeBucket) throws ParseException {
public ConjecturalAppBrief getConjecturalApps(Step step, long startSecondTimeBucket,
long endSecondTimeBucket) throws ParseException {
List<ConjecturalApp> conjecturalApps = networkAddressUIDAO.getConjecturalApps();
conjecturalApps.forEach(conjecturalApp -> {
String name = ServerTypeDefine.getInstance().getServerType(conjecturalApp.getId());
conjecturalApp.setName(name);
String serverType = ServerTypeDefine.getInstance().getServerType(conjecturalApp.getId());
conjecturalApp.setName(serverType);
});
ConjecturalAppBrief conjecturalAppBrief = new ConjecturalAppBrief();
......
......@@ -82,6 +82,6 @@ public class ApplicationTopologyService {
TopologyBuilder builder = new TopologyBuilder(moduleManager);
return builder.build(applicationComponents, applicationMappings, applicationMetrics, callerReferenceMetric, calleeReferenceMetric, startTimeBucket, endTimeBucket, startSecondTimeBucket, endSecondTimeBucket);
return builder.build(applicationComponents, applicationMappings, applicationMetrics, callerReferenceMetric, calleeReferenceMetric, step, startTimeBucket, endTimeBucket, startSecondTimeBucket, endSecondTimeBucket);
}
}
......@@ -72,6 +72,6 @@ public class ClusterTopologyService {
TopologyBuilder builder = new TopologyBuilder(moduleManager);
return builder.build(applicationComponents, applicationMappings, applicationMetrics, callerReferenceMetric, calleeReferenceMetric, startTimeBucket, endTimeBucket, startSecondTimeBucket, endSecondTimeBucket);
return builder.build(applicationComponents, applicationMappings, applicationMetrics, callerReferenceMetric, calleeReferenceMetric, step, startTimeBucket, endTimeBucket, startSecondTimeBucket, endSecondTimeBucket);
}
}
......@@ -53,6 +53,10 @@ class SecondBetweenService {
Date startDate = new SimpleDateFormat("yyyyMMddHHmmss").parse(String.valueOf(startSecondTimeBucket));
Date endDate = new SimpleDateFormat("yyyyMMddHHmmss").parse(String.valueOf(endSecondTimeBucket));
return Seconds.secondsBetween(new DateTime(startDate), new DateTime(endDate)).getSeconds();
int seconds = Seconds.secondsBetween(new DateTime(startDate), new DateTime(endDate)).getSeconds();
if (seconds == 0) {
seconds = 1;
}
return seconds;
}
}
......@@ -59,8 +59,8 @@ public class ServerService {
private final ICpuMetricUIDAO cpuMetricUIDAO;
private final IGCMetricUIDAO gcMetricUIDAO;
private final IMemoryMetricUIDAO memoryMetricUIDAO;
private final InstanceCacheService instanceCacheService;
private final ApplicationCacheService applicationCacheService;
private final InstanceCacheService instanceCacheService;
private final SecondBetweenService secondBetweenService;
public ServerService(ModuleManager moduleManager) {
......@@ -69,18 +69,19 @@ public class ServerService {
this.cpuMetricUIDAO = moduleManager.find(StorageModule.NAME).getService(ICpuMetricUIDAO.class);
this.gcMetricUIDAO = moduleManager.find(StorageModule.NAME).getService(IGCMetricUIDAO.class);
this.memoryMetricUIDAO = moduleManager.find(StorageModule.NAME).getService(IMemoryMetricUIDAO.class);
this.instanceCacheService = moduleManager.find(CacheModule.NAME).getService(InstanceCacheService.class);
this.applicationCacheService = moduleManager.find(CacheModule.NAME).getService(ApplicationCacheService.class);
this.instanceCacheService = moduleManager.find(CacheModule.NAME).getService(InstanceCacheService.class);
this.secondBetweenService = new SecondBetweenService(moduleManager);
}
public List<AppServerInfo> searchServer(String keyword, long startSecondTimeBucket, long endSecondTimeBucket) {
List<AppServerInfo> serverInfos = instanceUIDAO.searchServer(keyword, startSecondTimeBucket, endSecondTimeBucket);
serverInfos.forEach(serverInfo -> {
if (serverInfo.getId() == Const.NONE_INSTANCE_ID) {
serverInfos.remove(serverInfo);
for (int i = serverInfos.size() - 1; i >= 0; i--) {
if (serverInfos.get(i).getId() == Const.NONE_INSTANCE_ID) {
serverInfos.remove(i);
}
});
}
buildAppServerInfo(serverInfos);
return serverInfos;
......@@ -107,7 +108,8 @@ public class ServerService {
List<AppServerInfo> serverThroughput = instanceMetricUIDAO.getServerThroughput(applicationId, step, startTimeBucket, endTimeBucket, secondBetween, topN, MetricSource.Callee);
serverThroughput.forEach(appServerInfo -> {
String applicationCode = applicationCacheService.getApplicationById(applicationId).getApplicationCode();
appServerInfo.setApplicationId(instanceCacheService.getApplicationId(appServerInfo.getId()));
String applicationCode = applicationCacheService.getApplicationById(appServerInfo.getApplicationId()).getApplicationCode();
appServerInfo.setApplicationCode(applicationCode);
Instance instance = instanceUIDAO.getInstance(appServerInfo.getId());
appServerInfo.setOsInfo(instance.getOsInfo());
......@@ -164,6 +166,8 @@ public class ServerService {
private void buildAppServerInfo(List<AppServerInfo> serverInfos) {
serverInfos.forEach(serverInfo -> {
serverInfo.setApplicationCode(applicationCacheService.getApplicationById(serverInfo.getApplicationId()).getApplicationCode());
StringBuilder nameBuilder = new StringBuilder();
nameBuilder.append(serverInfo.getApplicationCode());
if (StringUtils.isNotEmpty(serverInfo.getOsInfo())) {
JsonObject osInfoJson = gson.fromJson(serverInfo.getOsInfo(), JsonObject.class);
if (osInfoJson.has("osName")) {
......@@ -180,10 +184,14 @@ public class ServerService {
JsonArray ipv4Array = osInfoJson.get("ipv4s").getAsJsonArray();
List<String> ipv4s = new LinkedList<>();
ipv4Array.forEach(ipv4 -> ipv4s.add(ipv4.getAsString()));
ipv4Array.forEach(ipv4 -> {
ipv4s.add(ipv4.getAsString());
nameBuilder.append(Const.ID_SPLIT).append(ipv4.getAsString());
});
serverInfo.setIpv4(ipv4s);
}
}
serverInfo.setName(nameBuilder.toString());
});
}
}
......@@ -73,8 +73,8 @@ public class ServiceTopologyService {
Map<Integer, String> components = new HashMap<>();
applicationComponents.forEach(component -> components.put(component.getApplicationId(), ComponentsDefine.getInstance().getComponentName(component.getComponentId())));
List<IServiceReferenceMetricUIDAO.ServiceReferenceMetric> referenceMetrics = serviceReferenceMetricUIDAO.getFrontServices(step, startTimeBucket, endTimeBucket, MetricSource.Callee, serviceId);
referenceMetrics.addAll(serviceReferenceMetricUIDAO.getBehindServices(step, startTimeBucket, endTimeBucket, MetricSource.Caller, serviceId));
List<IServiceReferenceMetricUIDAO.ServiceReferenceMetric> referenceMetrics = serviceReferenceMetricUIDAO.getFrontServices(step, startTimeBucket, endTimeBucket, MetricSource.Caller, serviceId);
referenceMetrics.addAll(serviceReferenceMetricUIDAO.getBehindServices(step, startTimeBucket, endTimeBucket, MetricSource.Callee, serviceId));
Set<Integer> nodeIds = new HashSet<>();
......
......@@ -20,9 +20,11 @@ package org.apache.skywalking.apm.collector.ui.service;
import java.text.ParseException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.skywalking.apm.collector.cache.CacheModule;
import org.apache.skywalking.apm.collector.cache.service.ApplicationCacheService;
import org.apache.skywalking.apm.collector.core.module.ModuleManager;
......@@ -38,6 +40,7 @@ import org.apache.skywalking.apm.collector.storage.ui.application.ApplicationNod
import org.apache.skywalking.apm.collector.storage.ui.application.ConjecturalNode;
import org.apache.skywalking.apm.collector.storage.ui.common.Call;
import org.apache.skywalking.apm.collector.storage.ui.common.Node;
import org.apache.skywalking.apm.collector.storage.ui.common.Step;
import org.apache.skywalking.apm.collector.storage.ui.common.Topology;
import org.apache.skywalking.apm.collector.storage.ui.common.VisualUserNode;
import org.apache.skywalking.apm.collector.ui.utils.ApdexCalculator;
......@@ -70,7 +73,7 @@ class TopologyBuilder {
List<IApplicationMetricUIDAO.ApplicationMetric> applicationMetrics,
List<IApplicationReferenceMetricUIDAO.ApplicationReferenceMetric> callerReferenceMetric,
List<IApplicationReferenceMetricUIDAO.ApplicationReferenceMetric> calleeReferenceMetric,
long startTimeBucket, long endTimeBucket, long startSecondTimeBucket, long endSecondTimeBucket) {
Step step, long startTimeBucket, long endTimeBucket, long startSecondTimeBucket, long endSecondTimeBucket) {
Map<Integer, String> components = changeNodeComp2Map(applicationComponents);
Map<Integer, Integer> mappings = changeMapping2Map(applicationMappings);
......@@ -95,7 +98,7 @@ class TopologyBuilder {
applicationNode.setApdex(ApdexCalculator.INSTANCE.calculate(applicationMetric.getSatisfiedCount(), applicationMetric.getToleratingCount(), applicationMetric.getFrustratedCount()));
applicationNode.setAlarm(false);
try {
Alarm alarm = alarmService.loadApplicationAlarmList(Const.EMPTY_STRING, startTimeBucket, endTimeBucket, 1, 0);
Alarm alarm = alarmService.loadApplicationAlarmList(Const.EMPTY_STRING, step, startTimeBucket, endTimeBucket, 1, 0);
if (alarm.getItems().size() > 0) {
applicationNode.setAlarm(true);
}
......@@ -105,14 +108,14 @@ class TopologyBuilder {
applicationNode.setNumOfServer(serverService.getAllServer(applicationId, startSecondTimeBucket, endSecondTimeBucket).size());
try {
Alarm alarm = alarmService.loadInstanceAlarmList(Const.EMPTY_STRING, startTimeBucket, endTimeBucket, 1000, 0);
Alarm alarm = alarmService.loadInstanceAlarmList(Const.EMPTY_STRING, step, startTimeBucket, endTimeBucket, 1000, 0);
applicationNode.setNumOfServerAlarm(alarm.getItems().size());
} catch (ParseException e) {
logger.error(e.getMessage(), e);
}
try {
Alarm alarm = alarmService.loadServiceAlarmList(Const.EMPTY_STRING, startTimeBucket, endTimeBucket, 1000, 0);
Alarm alarm = alarmService.loadServiceAlarmList(Const.EMPTY_STRING, step, startTimeBucket, endTimeBucket, 1000, 0);
applicationNode.setNumOfServiceAlarm(alarm.getItems().size());
} catch (ParseException e) {
logger.error(e.getMessage(), e);
......@@ -133,6 +136,17 @@ class TopologyBuilder {
nodes.add(conjecturalNode);
}
Set<Integer> nodeIds = buildNodeIds(nodes);
if (!nodeIds.contains(source.getApplicationId())) {
ApplicationNode applicationNode = new ApplicationNode();
applicationNode.setId(source.getApplicationId());
applicationNode.setName(source.getApplicationCode());
applicationNode.setType(components.getOrDefault(source.getApplicationId(), Const.UNKNOWN));
applicationNode.setApdex(100);
applicationNode.setSla(100);
nodes.add(applicationNode);
}
Call call = new Call();
call.setSource(source.getApplicationId());
call.setSourceName(source.getApplicationCode());
......@@ -198,6 +212,12 @@ class TopologyBuilder {
return topology;
}
private Set<Integer> buildNodeIds(List<Node> nodes) {
Set<Integer> nodeIds = new HashSet<>();
nodes.forEach(node -> nodeIds.add(node.getId()));
return nodeIds;
}
private List<IApplicationReferenceMetricUIDAO.ApplicationReferenceMetric> calleeReferenceMetricFilter(
List<IApplicationReferenceMetricUIDAO.ApplicationReferenceMetric> calleeReferenceMetric) {
List<IApplicationReferenceMetricUIDAO.ApplicationReferenceMetric> filteredMetrics = new LinkedList<>();
......
public enum ApdexCalculator {
    INSTANCE;

    /**
     * Computes an Apdex-style score in the range [0, 100]:
     * (satisfied + tolerating/2) / total, scaled to a percentage.
     *
     * <p>The stripped diff left an unreachable duplicate {@code return} above the
     * zero-guard; this is the corrected form. When no samples were recorded at all
     * the service is considered fully healthy and 100 is returned, which also
     * avoids a division by zero.
     *
     * @param satisfiedCount  number of satisfied requests
     * @param toleratingCount number of tolerating requests (counted at half weight)
     * @param frustratedCount number of frustrated requests
     * @return score from 0 to 100
     */
    public int calculate(long satisfiedCount, long toleratingCount, long frustratedCount) {
        long total = satisfiedCount + toleratingCount + frustratedCount;
        if (total == 0) {
            // No traffic observed: treat as fully satisfied rather than dividing by zero.
            return 100;
        }
        return (int)(((satisfiedCount + toleratingCount / 2) * 100) / total);
    }
}
......@@ -9,7 +9,7 @@
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>apm-backend-dist</artifactId>
<artifactId>apm-dist</artifactId>
<packaging>pom</packaging>
......
......@@ -74,6 +74,7 @@ public enum OperationNameDictionary {
ServiceNameElement serviceNameElement = ServiceNameElement.newBuilder()
.setApplicationId(operationNameKey.getApplicationId())
.setServiceName(operationNameKey.getOperationName())
.setSrcSpanType(operationNameKey.getSpanType())
.build();
builder.addElements(serviceNameElement);
}
......@@ -122,9 +123,14 @@ public enum OperationNameDictionary {
OperationNameKey key = (OperationNameKey)o;
if (applicationId != key.applicationId)
return false;
return operationName.equals(key.operationName);
boolean isApplicationMatch = false;
if (applicationId == key.applicationId) {
isApplicationMatch = true;
} else if (operationName.equals(key.operationName)) {
isApplicationMatch = true;
}
return isApplicationMatch && isEntry == key.isEntry
&& isExit == key.isExit;
}
@Override public int hashCode() {
......@@ -140,5 +146,15 @@ public enum OperationNameDictionary {
boolean isExit() {
return isExit;
}
SpanType getSpanType() {
if (isEntry) {
return SpanType.Entry;
} else if (isExit) {
return SpanType.Exit;
} else {
return SpanType.Local;
}
}
}
}
......@@ -17,7 +17,7 @@ agent.application_code=Your_ApplicationName
# agent.is_open_debugging_class = true
# Server addresses.
# Mapping to `agent_server/jetty/port` in `config/application.yml` of Collector.
# Mapping to `naming/jetty/ip:port` in `config/application.yml` of Collector.
# Examples:
# Single collector:SERVERS="127.0.0.1:8080"
# Collector cluster:SERVERS="10.2.45.126:8080,10.2.45.127:7600"
......
......@@ -10,7 +10,7 @@
* [OpenTracing中文版](https://github.com/opentracing-contrib/opentracing-specification-zh)
* Application Toolkit,应用程序工具包
* [概述](cn/Application-toolkit-CN.md)
* [OpenTracing Tracer](cn/skywalking-opentracing-CN.md)
* [OpenTracing Tracer](cn/Opentracing-CN.md)
* 日志组件
* [log4j组件](cn/Application-toolkit-log4j-1.x-CN.md)
* [log4j2组件](cn/Application-toolkit-log4j-2.x-CN.md)
......
......@@ -78,3 +78,6 @@ storage:
3. 运行`bin/startup.sh`启动。windows用户为.bat文件。
- **注意:startup.sh将会启动collector和UI两个进程,UI通过127.0.0.1:10800访问本地collector,无需额外配置。
如需保证UI负载均衡,推荐使用类nginx的HTTP代理服务。**
\ No newline at end of file
# 用途说明
单机模式默认使用本地H2数据库,不支持集群部署。主要用于:预览、功能测试、演示和低压力系统。如果使用单机collector用于非演示环境,你可选择使用Elasticsearch作为存储实现。
单机模式默认使用本地H2数据库,不支持集群部署。主要用于:预览、功能测试、演示和低压力系统。
如果使用单机collector用于非演示环境,你可选择使用Elasticsearch作为存储实现。
**在5.0.0-alpha版本中,暂不提供H2实现**
## 所需的第三方软件
- JDK8+
......@@ -14,6 +18,8 @@ Collector单机模拟启动简单,提供和集群模式相同的功能,单
1. 解压安装包`tar -xvf skywalking-collector.tar.gz`,windows用户可以选择zip包
1. 运行`bin/startup.sh`启动。windows用户为.bat文件。
- **注意:startup.sh将会启动collector和UI两个进程,UI通过127.0.0.1:10800访问本地collector,无需额外配置。**
## 使用Elastic Search代替H2存储
- 在单机模式下除了支持内置的H2数据库运行,也支持其他的存储(当前已支持的ElasticSearch 5.3),取消Storage相关配置节的注释,并修改配置。
```yaml
......
......@@ -7,8 +7,6 @@
</dependency>
```
&nbsp;&nbsp;&nbsp;[ ![Download](https://api.bintray.com/packages/wu-sheng/skywalking/org.apache.skywalking.apm-toolkit-opentracing/images/download.svg) ](https://bintray.com/wu-sheng/skywalking/org.apache.skywalking.apm-toolkit-opentracing/_latestVersion)
* 使用OpenTracing的标准API和桥接器,使用手动埋点
```java
Tracer tracer = new org.apache.skywalking.apm.toolkit.opentracing.SkywalkingTracer();
......
# 部署步骤
1. 部署 Collector
1. [单机模式](Deploy-collector-in-standalone-mode-CN.md)
1. [集群模式](Deploy-collector-in-cluster-mode-CN.md)
1. 部署 webui server, [doc](https://github.com/apache/incubator-skywalking-ui#quickstart)
1. 在Maven Central中下载 `org.apache.skywalking.apm-dist`. 或者本地编译程序,在`dist`目录中找到`skywalking-dist.tar.gz/.zip`.
1. 部署 Backend
1. [单机模式](Deploy-backend-in-standalone-mode-CN.md)
1. [集群模式](Deploy-backend-in-cluster-mode-CN.md)
1. 部署 Java Agent,[doc](Deploy-skywalking-agent-CN.md)
1. 重启并访问系统功能,查看UI即可。
\ No newline at end of file
......@@ -25,7 +25,8 @@ thread_pool.bulk.queue_size: 1000
1. Run `tar -xvf skywalking-collector.tar.gz`
2. Config collector in cluster mode.
   Cluster mode depends on Zookeeper register and application discovery capabilities. So, you just need to adjust the IP config items in `config/application.yml`. Change the IP and port configs of naming, remote, agent_gRPC, agent_jetty and ui,
   replacing them with the real IP or hostname which you want to use for the cluster.
- `config/application.yml`
```
......@@ -76,3 +77,6 @@ storage:
3. Run `bin/startup.sh`
- NOTICE: **In 5.0.0-alpha, startup.sh will start two processes, collector and UI, and UI uses 127.0.0.1:10800 as default.
Recommend use http proxy to access UI in product, otherwise, access any UI.**
# Usage scenario
The default standalone mode collector does not support clustering. It uses H2 as the storage layer implementation; we suggest using it only for preview, testing, demonstration, and low-throughput, small-scale systems.
If you are using skywalking in a low-throughput monitoring scenario and don't want to deploy a cluster, at least switch the storage implementation from H2 to Elasticsearch.
**H2 storage implementation is not provided in 5.0.0-alpha, so you must deploy ElasticSearch before try to start backend**
## Requirements
* JDK 8+
......@@ -17,6 +19,8 @@ You can simply tar/unzip and startup if ports 10800, 11800, 12800 are free.
You should keep the `config/application.yml` as default.
- NOTICE: **In 5.0.0-alpha, startup.sh will start two processes, collector and UI, and UI uses 127.0.0.1:10800 as default.**
## Use Elastic Search instead of H2 as storage layer implementation
Even in standalone mode, collector can run with Elastic Search as storage. If so, uncomment the `storage` section in `application.yml`, set the config right. The default configs fit for collector and Elasticsearch both running in same machine, and not cluster.
......@@ -24,7 +28,7 @@ Even in standalone mode, collector can run with Elastic Search as storage. If so
- Modify `elasticsearch.yml`
- Set `cluster.name: CollectorDBCluster`
- Set `node.name: anyname`, this name can be any, it based on Elasticsearch.
- Add the following configurations to
- Add the following configurations
```
# The ip used for listening
......
......@@ -42,5 +42,5 @@ CATALINA_OPTS="$CATALINA_OPTS -javaagent:/path/to/skywalking-agent/skywalking-ag
- Tomcat 8
Change the first line of `tomcat/bin/catalina.sh`.
```shell
set "CATALINA_OPTS=-javaagent:E:\apache-tomcat-8.5.20\skywalking-agent\skywalking-agent.jar -Dconfig=\skywalking\config\dir"
set "CATALINA_OPTS=-javaagent:E:\apache-tomcat-8.5.20\skywalking-agent\skywalking-agent.jar"
```
\ No newline at end of file
......@@ -7,7 +7,6 @@
</dependency>
```
&nbsp;&nbsp;&nbsp;[ ![Download](https://api.bintray.com/packages/wu-sheng/skywalking/org.apache.skywalking.apm-toolkit-opentracing/images/download.svg) ](https://bintray.com/wu-sheng/skywalking/org.apache.skywalking.apm-toolkit-opentracing/_latestVersion)
* Use our OpenTracing tracer implementation
```java
......
# Quick start
1. Deploy Collector
1. [Standalone Mode](Deploy-collector-in-standalone-mode.md)
1. [Cluster Mode](Deploy-collector-in-cluster-mode.md)
1. Deploy webui server, [doc](https://github.com/apache/incubator-skywalking-ui#quickstart)
1. Deploy Java Agent,[doc](Deploy-skywalking-agent.md)
1. Download `org.apache.skywalking.apm-dist` in maven central. Or find `skywalking-dist.tar.gz/.zip` in `dist` folder after `mvn clean package`.
1. Deploy Backend
- [Standalone Mode](Deploy-backend-in-standalone-mode.md)
- [Cluster Mode](Deploy-backend-in-cluster-mode.md)
1. Deploy Java Agent,[doc](Deploy-skywalking-agent.md)
1. Reboot your applications, and open UI.
\ No newline at end of file
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册