Unverified commit a8810b1a authored by: W wankai123, committed by: GitHub

Support metrics grouped by scope labelValue in MAL; no need for a globally...

Support metrics grouped by scope labelValue in MAL; no need for a globally identical labelValue as before. (#6495)
Parent 0be76ace
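For context, a minimal, self-contained sketch (not part of the commit) of what this change enables, assuming the DSL test API exercised by ScopeTest at the end of this diff; the class name ScopeGroupingSketch is hypothetical:

package org.apache.skywalking.oap.meter.analyzer.dsl;

import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;

// Hypothetical illustration class; not part of this commit.
public class ScopeGroupingSketch {
    public static void main(String[] args) {
        // Two samples whose 'idc' label values differ. Before this commit every
        // sample had to share one global label value; now each distinct value
        // becomes its own scope entity.
        ImmutableMap<String, SampleFamily> input = ImmutableMap.of(
            "http_success_request", SampleFamilyBuilder.newBuilder(
                Sample.builder().labels(ImmutableMap.of("idc", "t1")).value(50).build(),
                Sample.builder().labels(ImmutableMap.of("idc", "t3")).value(3).build()
            ).build());
        Expression e = DSL.parse("http_success_request.sum(['idc']).service(['idc'])");
        Result r = e.run(input);
        // The running context now holds one Sample[] per MeterEntity:
        // services "t1" and "t3" are aggregated independently.
        Map<MeterEntity, Sample[]> grouped = r.getData().context.getMeterSamples();
        grouped.forEach((entity, samples) ->
            System.out.println(entity + " -> " + samples[0].getValue()));
    }
}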
......@@ -51,6 +51,7 @@ Release Notes.
* Merge indices by Metrics Function and Meter Function in Elasticsearch Storage.
* Fix: the receiver doesn't need to fetch itself during healthCheck.
* Remove group concept from AvgHistogramFunction. Heatmap(function result) doesn't support labels.
* Support metrics grouped by scope labelValue in MAL; no need for a globally identical labelValue as before.
#### UI
* Update selector scroller to show in all pages.
......
......@@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableMap;
import io.vavr.Tuple;
import io.vavr.Tuple2;
import java.util.List;
import java.util.Map;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import lombok.ToString;
......@@ -63,13 +64,16 @@ import static java.util.stream.Collectors.toList;
*/
@Slf4j
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
@ToString(of = {"metricName", "expression"})
@ToString(of = {
"metricName",
"expression"
})
public class Analyzer {
public static final Tuple2<String, SampleFamily> NIL = Tuple.of("", null);
public static Analyzer build(final String metricName, final String expression,
final MeterSystem meterSystem) {
final MeterSystem meterSystem) {
Expression e = DSL.parse(expression);
ExpressionParsingContext ctx = e.parse();
Analyzer analyzer = new Analyzer(metricName, e, meterSystem);
......@@ -97,8 +101,10 @@ public class Analyzer {
* @param sampleFamilies input samples.
*/
public void analyse(final ImmutableMap<String, SampleFamily> sampleFamilies) {
ImmutableMap<String, SampleFamily> input = samples.stream().map(s -> Tuple.of(s, sampleFamilies.get(s)))
.filter(t -> t._2 != null).collect(ImmutableMap.toImmutableMap(t -> t._1, t -> t._2));
ImmutableMap<String, SampleFamily> input = samples.stream()
.map(s -> Tuple.of(s, sampleFamilies.get(s)))
.filter(t -> t._2 != null)
.collect(ImmutableMap.toImmutableMap(t -> t._1, t -> t._2));
if (input.size() < 1) {
if (log.isDebugEnabled()) {
log.debug("{} is ignored due to the lack of {}", expression, samples);
......@@ -110,52 +116,57 @@ public class Analyzer {
return;
}
SampleFamily.RunningContext ctx = r.getData().context;
Sample[] ss = r.getData().samples;
generateTraffic(ctx.getMeterEntity());
switch (metricType) {
case single:
AcceptableValue<Long> sv = meterSystem.buildMetrics(metricName, Long.class);
sv.accept(ctx.getMeterEntity(), getValue(ss[0]));
send(sv, ss[0].getTimestamp());
break;
case labeled:
AcceptableValue<DataTable> lv = meterSystem.buildMetrics(metricName, DataTable.class);
DataTable dt = new DataTable();
for (Sample each : ss) {
dt.put(composeGroup(each.getLabels()), getValue(each));
}
lv.accept(ctx.getMeterEntity(), dt);
send(lv, ss[0].getTimestamp());
break;
case histogram:
case histogramPercentile:
Stream.of(ss).map(s -> Tuple.of(composeGroup(s.getLabels(), k -> !Objects.equals("le", k)), s))
.collect(groupingBy(Tuple2::_1, mapping(Tuple2::_2, toList())))
.forEach((group, subSs) -> {
if (subSs.size() < 1) {
return;
}
long[] bb = new long[subSs.size()];
long[] vv = new long[bb.length];
for (int i = 0; i < subSs.size(); i++) {
Sample s = subSs.get(i);
bb[i] = Long.parseLong(s.getLabels().get("le"));
vv[i] = getValue(s);
}
BucketedValues bv = new BucketedValues(bb, vv);
long time = subSs.get(0).getTimestamp();
if (metricType == MetricType.histogram) {
AcceptableValue<BucketedValues> v = meterSystem.buildMetrics(metricName, BucketedValues.class);
v.accept(ctx.getMeterEntity(), bv);
Map<MeterEntity, Sample[]> meterSamples = ctx.getMeterSamples();
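// The running context now maps each MeterEntity to its own grouped samples;
// metrics are built and sent per entity rather than once per expression.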
meterSamples.forEach((meterEntity, ss) -> {
generateTraffic(meterEntity);
switch (metricType) {
case single:
AcceptableValue<Long> sv = meterSystem.buildMetrics(metricName, Long.class);
sv.accept(meterEntity, getValue(ss[0]));
send(sv, ss[0].getTimestamp());
break;
case labeled:
AcceptableValue<DataTable> lv = meterSystem.buildMetrics(metricName, DataTable.class);
DataTable dt = new DataTable();
for (Sample each : ss) {
dt.put(composeGroup(each.getLabels()), getValue(each));
}
lv.accept(meterEntity, dt);
send(lv, ss[0].getTimestamp());
break;
case histogram:
case histogramPercentile:
Stream.of(ss).map(s -> Tuple.of(composeGroup(s.getLabels(), k -> !Objects.equals("le", k)), s))
.collect(groupingBy(Tuple2::_1, mapping(Tuple2::_2, toList())))
.forEach((group, subSs) -> {
if (subSs.size() < 1) {
return;
}
long[] bb = new long[subSs.size()];
long[] vv = new long[bb.length];
for (int i = 0; i < subSs.size(); i++) {
Sample s = subSs.get(i);
bb[i] = Long.parseLong(s.getLabels().get("le"));
vv[i] = getValue(s);
}
BucketedValues bv = new BucketedValues(bb, vv);
bv.setGroup(group);
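// The group string identifies this label set, so downstream histogram
// functions keep buckets from different label groups separate.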
long time = subSs.get(0).getTimestamp();
if (metricType == MetricType.histogram) {
AcceptableValue<BucketedValues> v = meterSystem.buildMetrics(
metricName, BucketedValues.class);
v.accept(meterEntity, bv);
send(v, time);
return;
}
AcceptableValue<PercentileArgument> v = meterSystem.buildMetrics(
metricName, PercentileArgument.class);
v.accept(meterEntity, new PercentileArgument(bv, percentiles));
send(v, time);
return;
}
AcceptableValue<PercentileArgument> v = meterSystem.buildMetrics(metricName, PercentileArgument.class);
v.accept(ctx.getMeterEntity(), new PercentileArgument(bv, percentiles));
send(v, time);
});
break;
}
});
break;
}
});
}
private long getValue(Sample sample) {
......@@ -174,7 +185,7 @@ public class Analyzer {
private String composeGroup(ImmutableMap<String, String> labels, Predicate<String> filter) {
return labels.keySet().stream().filter(filter).sorted().map(labels::get)
.collect(Collectors.joining("-"));
.collect(Collectors.joining("-"));
}
@RequiredArgsConstructor
......@@ -210,8 +221,11 @@ public class Analyzer {
createMetric(ctx.getScopeType(), metricType.literal, ctx.getDownsampling());
}
private void createMetric(final ScopeType scopeType, final String dataType, final DownsamplingType downsamplingType) {
String functionName = String.format(FUNCTION_NAME_TEMP, downsamplingType.toString().toLowerCase(), Strings.capitalize(dataType));
private void createMetric(final ScopeType scopeType,
final String dataType,
final DownsamplingType downsamplingType) {
String functionName = String.format(
FUNCTION_NAME_TEMP, downsamplingType.toString().toLowerCase(), Strings.capitalize(dataType));
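// e.g., avg downsampling with the "labeled" data type should yield "avgLabeled"
// (cf. AvgLabeledFunction in the tests); inferred from the naming template.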
meterSystem.create(metricName, functionName, scopeType);
}
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.meter.analyzer.dsl.EntityDescription;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.ToString;
import org.apache.skywalking.oap.server.core.analysis.meter.ScopeType;
@Getter
@RequiredArgsConstructor
@ToString
public class EndpointEntityDescription implements EntityDescription {
private final ScopeType scopeType = ScopeType.ENDPOINT;
private final List<String> serviceKeys;
private final List<String> endpointKeys;
@Override
public List<String> getLabelKeys() {
return Stream.concat(this.serviceKeys.stream(), this.endpointKeys.stream()).collect(Collectors.toList());
}
@Override
public List<String> getInstanceKeys() {
throw new UnsupportedOperationException("Unsupported Operation of getInstanceKeys() " + this.toString());
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.meter.analyzer.dsl.EntityDescription;
import java.util.List;
import org.apache.skywalking.oap.server.core.analysis.meter.ScopeType;
public interface EntityDescription {
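// Each implementation exposes only the key getters relevant to its scope;
// the inapplicable ones throw UnsupportedOperationException.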
ScopeType getScopeType();
List<String> getLabelKeys();
List<String> getServiceKeys();
List<String> getInstanceKeys();
List<String> getEndpointKeys();
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.meter.analyzer.dsl.EntityDescription;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.ToString;
import org.apache.skywalking.oap.server.core.analysis.meter.ScopeType;
@Getter
@RequiredArgsConstructor
@ToString
public class InstanceEntityDescription implements EntityDescription {
private final ScopeType scopeType = ScopeType.SERVICE_INSTANCE;
private final List<String> serviceKeys;
private final List<String> instanceKeys;
@Override
public List<String> getLabelKeys() {
return Stream.concat(this.serviceKeys.stream(), this.instanceKeys.stream()).collect(Collectors.toList());
}
@Override
public List<String> getEndpointKeys() {
throw new UnsupportedOperationException("Unsupported Operation of getEndpointKeys() " + this.toString());
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.meter.analyzer.dsl.EntityDescription;
import java.util.List;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.ToString;
import org.apache.skywalking.oap.server.core.analysis.meter.ScopeType;
@Getter
@RequiredArgsConstructor
@ToString
public class ServiceEntityDescription implements EntityDescription {
private final ScopeType scopeType = ScopeType.SERVICE;
private final List<String> serviceKeys;
@Override
public List<String> getLabelKeys() {
return serviceKeys;
}
@Override
public List<String> getInstanceKeys() {
throw new UnsupportedOperationException("Unsupported Operation of getInstanceKeys() " + this.toString());
}
@Override
public List<String> getEndpointKeys() {
throw new UnsupportedOperationException("Unsupported Operation of getEndpointKeys() " + this.toString());
}
}
......@@ -33,6 +33,11 @@ import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.ToString;
import org.apache.skywalking.oap.meter.analyzer.dsl.EntityDescription.EndpointEntityDescription;
import org.apache.skywalking.oap.meter.analyzer.dsl.EntityDescription.EntityDescription;
import org.apache.skywalking.oap.meter.analyzer.dsl.EntityDescription.InstanceEntityDescription;
import org.apache.skywalking.oap.meter.analyzer.dsl.EntityDescription.ServiceEntityDescription;
import org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
import org.apache.skywalking.oap.server.core.analysis.meter.ScopeType;
......@@ -81,11 +86,11 @@ public class SampleFamily {
/* tag filter operations*/
public SampleFamily tagEqual(String... labels) {
return match(labels, this::stringComp);
return match(labels, InternalOps::stringComp);
}
public SampleFamily tagNotEqual(String[] labels) {
return match(labels, (sv, lv) -> !stringComp(sv, lv));
return match(labels, (sv, lv) -> !InternalOps.stringComp(sv, lv));
}
public SampleFamily tagMatch(String[] labels) {
......@@ -180,15 +185,15 @@ public class SampleFamily {
}
if (by == null) {
double result = Arrays.stream(samples).mapToDouble(Sample::getValue).average().orElse(0.0D);
return SampleFamily.build(this.context, newSample(ImmutableMap.of(), samples[0].timestamp, result));
return SampleFamily.build(this.context, InternalOps.newSample(ImmutableMap.of(), samples[0].timestamp, result));
}
return SampleFamily.build(
this.context,
Arrays.stream(samples)
.collect(groupingBy(it -> getLabels(by, it), mapping(identity(), toList())))
.collect(groupingBy(it -> InternalOps.getLabels(by, it), mapping(identity(), toList())))
.entrySet().stream()
.map(entry -> newSample(
.map(entry -> InternalOps.newSample(
entry.getKey(),
entry.getValue().get(0).getTimestamp(),
entry.getValue().stream().mapToDouble(Sample::getValue).average().orElse(0.0D)
......@@ -204,15 +209,14 @@ public class SampleFamily {
}
if (by == null) {
double result = Arrays.stream(samples).mapToDouble(s -> s.value).reduce(aggregator).orElse(0.0D);
return SampleFamily.build(this.context, newSample(ImmutableMap.of(), samples[0].timestamp, result));
return SampleFamily.build(this.context, InternalOps.newSample(ImmutableMap.of(), samples[0].timestamp, result));
}
return SampleFamily.build(
this.context,
Arrays.stream(samples)
.collect(groupingBy(it -> getLabels(by, it), mapping(identity(), toList())))
.collect(groupingBy(it -> InternalOps.getLabels(by, it), mapping(identity(), toList())))
.entrySet().stream()
.map(entry -> newSample(
.map(entry -> InternalOps.newSample(
entry.getKey(),
entry.getValue().get(0).getTimestamp(),
entry.getValue().stream().mapToDouble(Sample::getValue).reduce(aggregator).orElse(0.0D)
......@@ -221,14 +225,6 @@ public class SampleFamily {
);
}
private ImmutableMap<String, String> getLabels(final List<String> labelKeys, final Sample sample) {
return labelKeys.stream()
.collect(toImmutableMap(
Function.identity(),
labelKey -> sample.labels.getOrDefault(labelKey, "")
));
}
/* Function */
public SampleFamily increase(String range) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(range));
......@@ -319,7 +315,7 @@ public class SampleFamily {
.putAll(Maps.filterKeys(s.labels, key -> !Objects.equals(key, le)))
.put("le", String.valueOf((long) ((Double.parseDouble(this.context.histogramType == HistogramType.ORDINARY ? s.labels.get(le) : preLe.get())) * scale))).build();
preLe.set(s.labels.get(le));
return newSample(ll, s.timestamp, r);
return InternalOps.newSample(ll, s.timestamp, r);
})
).toArray(Sample[]::new)
);
......@@ -344,8 +340,7 @@ public class SampleFamily {
if (this == EMPTY) {
return EMPTY;
}
this.context.setMeterEntity(MeterEntity.newService(dim(labelKeys)));
return left(labelKeys);
return createMeterSamples(new ServiceEntityDescription(labelKeys));
}
public SampleFamily instance(List<String> serviceKeys, List<String> instanceKeys) {
......@@ -360,8 +355,7 @@ public class SampleFamily {
if (this == EMPTY) {
return EMPTY;
}
this.context.setMeterEntity(MeterEntity.newServiceInstance(dim(serviceKeys), dim(instanceKeys)));
return left(io.vavr.collection.Stream.concat(serviceKeys, instanceKeys).asJava());
return createMeterSamples(new InstanceEntityDescription(serviceKeys, instanceKeys));
}
public SampleFamily endpoint(List<String> serviceKeys, List<String> endpointKeys) {
......@@ -376,27 +370,21 @@ public class SampleFamily {
if (this == EMPTY) {
return EMPTY;
}
this.context.setMeterEntity(MeterEntity.newEndpoint(dim(serviceKeys), dim(endpointKeys)));
return left(io.vavr.collection.Stream.concat(serviceKeys, endpointKeys).asJava());
return createMeterSamples(new EndpointEntityDescription(serviceKeys, endpointKeys));
}
private String dim(List<String> labelKeys) {
String name = labelKeys.stream().map(k -> samples[0].labels.getOrDefault(k, "")).collect(Collectors.joining("."));
return CharMatcher.is('.').trimFrom(name);
}
private SampleFamily createMeterSamples(EntityDescription entityDescription) {
Map<MeterEntity, Sample[]> meterSamples = new HashMap<>();
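// Group the samples by the entity-defining label values, build one
// MeterEntity per group, and strip those labels from each group's samples.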
Arrays.stream(samples)
.collect(groupingBy(it -> InternalOps.getLabels(entityDescription.getLabelKeys(), it), mapping(identity(), toList())))
.forEach((labels, samples) -> {
MeterEntity meterEntity = InternalOps.buildMeterEntity(samples, entityDescription);
meterSamples.put(meterEntity, InternalOps.left(samples, entityDescription.getLabelKeys()));
});
private SampleFamily left(List<String> labelKeys) {
return SampleFamily.build(
this.context,
Arrays.stream(samples)
.map(s -> {
ImmutableMap<String, String> ll = ImmutableMap.<String, String>builder()
.putAll(Maps.filterKeys(s.labels, key -> !labelKeys.contains(key)))
.build();
return s.toBuilder().labels(ll).build();
})
.toArray(Sample[]::new)
);
this.context.setMeterSamples(meterSamples);
// These samples are the original ones; the grouped samples live in the context, keyed by MeterEntity.
return SampleFamily.build(this.context, samples);
}
private SampleFamily match(String[] labels, Function2<String, String, Boolean> op) {
......@@ -435,24 +423,6 @@ public class SampleFamily {
return ss.length > 0 ? SampleFamily.build(this.context, ss) : EMPTY;
}
private Sample newSample(ImmutableMap<String, String> labels, long timestamp, double newValue) {
return Sample.builder()
.value(newValue)
.labels(labels)
.timestamp(timestamp)
.build();
}
private boolean stringComp(String a, String b) {
if (Strings.isNullOrEmpty(a) && Strings.isNullOrEmpty(b)) {
return true;
}
if (Strings.isNullOrEmpty(a)) {
return false;
}
return a.equals(b);
}
public SampleFamily downsampling(final DownsamplingType type) {
ExpressionParsingContext.get().ifPresent(it -> it.downsampling = type);
return this;
......@@ -477,10 +447,74 @@ public class SampleFamily {
.build();
}
MeterEntity meterEntity;
private Map<MeterEntity, Sample[]> meterSamples = new HashMap<>();
private HistogramType histogramType;
private TimeUnit defaultHistogramBucketUnit;
}
private static class InternalOps {
private static Sample[] left(List<Sample> samples, List<String> labelKeys) {
return samples.stream().map(s -> {
ImmutableMap<String, String> ll = ImmutableMap.<String, String>builder()
.putAll(Maps.filterKeys(s.labels, key -> !labelKeys.contains(key)))
.build();
return s.toBuilder().labels(ll).build();
}).toArray(Sample[]::new);
}
private static String dim(List<Sample> samples, List<String> labelKeys) {
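// Compose the entity name by joining the first sample's values for
// labelKeys with '.', then trim leading/trailing dots.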
String name = labelKeys.stream()
.map(k -> samples.get(0).labels.getOrDefault(k, ""))
.collect(Collectors.joining("."));
return CharMatcher.is('.').trimFrom(name);
}
private static MeterEntity buildMeterEntity(List<Sample> samples,
EntityDescription entityDescription) {
switch (entityDescription.getScopeType()) {
case SERVICE:
return MeterEntity.newService(InternalOps.dim(samples, entityDescription.getServiceKeys()));
case SERVICE_INSTANCE:
return MeterEntity.newServiceInstance(
InternalOps.dim(samples, entityDescription.getServiceKeys()),
InternalOps.dim(samples, entityDescription.getInstanceKeys())
);
case ENDPOINT:
return MeterEntity.newEndpoint(
InternalOps.dim(samples, entityDescription.getServiceKeys()),
InternalOps.dim(samples, entityDescription.getEndpointKeys())
);
default:
throw new UnexpectedException("Unexpected scope type of entityDescription " + entityDescription.toString());
}
}
private static Sample newSample(ImmutableMap<String, String> labels, long timestamp, double newValue) {
return Sample.builder()
.value(newValue)
.labels(labels)
.timestamp(timestamp)
.build();
}
private static boolean stringComp(String a, String b) {
if (Strings.isNullOrEmpty(a) && Strings.isNullOrEmpty(b)) {
return true;
}
if (Strings.isNullOrEmpty(a)) {
return false;
}
return a.equals(b);
}
private static ImmutableMap<String, String> getLabels(final List<String> labelKeys, final Sample sample) {
return labelKeys.stream()
.collect(toImmutableMap(
Function.identity(),
labelKey -> sample.labels.getOrDefault(labelKey, "")
));
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.meter.analyzer.dsl;
import com.google.common.collect.ImmutableMap;
import java.util.HashMap;
import java.util.Map;
import org.apache.skywalking.oap.meter.analyzer.Analyzer;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.StreamDefinition;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterSystem;
import org.apache.skywalking.oap.server.core.analysis.meter.function.AcceptableValue;
import org.apache.skywalking.oap.server.core.analysis.meter.function.avg.AvgFunction;
import org.apache.skywalking.oap.server.core.analysis.meter.function.avg.AvgHistogramPercentileFunction;
import org.apache.skywalking.oap.server.core.analysis.meter.function.avg.AvgLabeledFunction;
import org.apache.skywalking.oap.server.core.analysis.metrics.IntList;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.storage.StorageException;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.powermock.reflect.Whitebox;
import static com.google.common.collect.ImmutableMap.of;
import static org.hamcrest.core.Is.is;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.spy;
@RunWith(MockitoJUnitRunner.Silent.class)
public class AnalyzerTest {
@Mock
private ModuleManager moduleManager;
private MeterSystem meterSystem;
private Analyzer analyzer;
@Before
public void setup() throws StorageException {
meterSystem = spy(new MeterSystem(moduleManager));
Whitebox.setInternalState(MetricsStreamProcessor.class, "PROCESSOR",
Mockito.spy(MetricsStreamProcessor.getInstance())
);
doNothing().when(MetricsStreamProcessor.getInstance()).create(any(), (StreamDefinition) any(), any());
}
@Test
public void testSingle() {
analyzer = Analyzer.build(
"sum_service_instance",
"http_success_request.sum(['region', 'idc']).instance(['idc'] , ['region'])",
meterSystem
);
ImmutableMap<String, SampleFamily> input = ImmutableMap.of(
"http_success_request", SampleFamilyBuilder.newBuilder(
Sample.builder().labels(of("idc", "t1")).value(50).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "svc", "catalog")).value(51).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "svc", "product")).value(50).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "instance", "10.0.0.1")).value(100).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "instance", "10.0.0.1")).value(3).build()
).build()
);
Map<String, AvgFunction> actValues = new HashMap<>();
doAnswer(invocationOnMock -> {
AvgFunction actValue = (AvgFunction) invocationOnMock.getArgument(
0, AcceptableValue.class);
actValues.put(actValue.getEntityId(), actValue);
return null;
}).when(meterSystem).doStreamingCalculation(any());
analyzer.analyse(input);
AvgFunction t1 = actValues.get(IDManager.ServiceInstanceID.buildId(
IDManager.ServiceID.buildId("t1", true), ""));
AvgFunction t1Us = actValues.get(IDManager.ServiceInstanceID.buildId(
IDManager.ServiceID.buildId("t1", true), "us"));
AvgFunction t3Cn = actValues.get(IDManager.ServiceInstanceID.buildId(
IDManager.ServiceID.buildId("t3", true), "cn"));
Assert.assertEquals(50L, t1.getSummation(), 0.0);
Assert.assertEquals(1L, t1.getCount(), 0.0);
Assert.assertEquals(150L, t1Us.getSummation(), 0.0);
Assert.assertEquals(1L, t1Us.getCount(), 0.0);
Assert.assertEquals(54L, t3Cn.getSummation(), 0.0);
Assert.assertEquals(1L, t3Cn.getCount(), 0.0);
}
@Test
public void testLabeled() {
analyzer = Analyzer.build(
"sum_service_instance_labels",
"http_success_request.sum(['region', 'idc' , 'instance']).instance(['idc'] , ['region'])",
meterSystem
);
ImmutableMap<String, SampleFamily> input = ImmutableMap.of(
"http_success_request", SampleFamilyBuilder.newBuilder(
Sample.builder().labels(of("idc", "t1")).value(50).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "svc", "catalog")).value(51).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "svc", "product")).value(50).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "instance", "10.0.0.1")).value(100).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "instance", "10.0.0.1")).value(3).build()
).build()
);
Map<String, AvgLabeledFunction> actValues = new HashMap<>();
doAnswer(invocationOnMock -> {
AvgLabeledFunction actValue = (AvgLabeledFunction) invocationOnMock.getArgument(
0, AcceptableValue.class);
actValues.put(actValue.getEntityId(), actValue);
return null;
}).when(meterSystem).doStreamingCalculation(any());
analyzer.analyse(input);
AvgLabeledFunction t1 = actValues.get(IDManager.ServiceInstanceID.buildId(
IDManager.ServiceID.buildId("t1", true), ""));
AvgLabeledFunction t1Us = actValues.get(IDManager.ServiceInstanceID.buildId(
IDManager.ServiceID.buildId("t1", true), "us"));
AvgLabeledFunction t3Cn = actValues.get(IDManager.ServiceInstanceID.buildId(
IDManager.ServiceID.buildId("t3", true), "cn"));
Assert.assertEquals(50L, t1.getSummation().get(""), 0.0);
Assert.assertEquals(1L, t1.getCount().get(""), 0.0);
Assert.assertEquals(50L, t1Us.getSummation().get(""), 0.0);
Assert.assertEquals(100L, t1Us.getSummation().get("10.0.0.1"), 0.0);
Assert.assertEquals(1L, t1Us.getCount().get(""), 0.0);
Assert.assertEquals(1L, t1Us.getCount().get("10.0.0.1"), 0.0);
Assert.assertEquals(51L, t3Cn.getSummation().get(""), 0.0);
Assert.assertEquals(3L, t3Cn.getSummation().get("10.0.0.1"), 0.0);
Assert.assertEquals(1L, t3Cn.getCount().get(""), 0.0);
Assert.assertEquals(1L, t3Cn.getCount().get("10.0.0.1"), 0.0);
}
@Test
public void testHistogramPercentile() {
analyzer = Analyzer.build(
"instance_cpu_percentage",
"instance_cpu_percentage.sum(['le' , 'service' , 'instance']).histogram().histogram_percentile([75,99]).service(['service'])",
meterSystem
);
ImmutableMap<String, SampleFamily> input = ImmutableMap.of(
"instance_cpu_percentage", SampleFamilyBuilder.newBuilder(
Sample.builder()
.labels(of("le", "0.025", "service", "service1", "instance", "instance1"))
.value(100)
.build(),
Sample.builder()
.labels(of("le", "1.25", "service", "service1", "instance", "instance1"))
.value(300)
.build(),
Sample.builder()
.labels(of("le", "0.75", "service", "service1", "instance", "instance2"))
.value(122)
.build(),
Sample.builder()
.labels(of("le", String.valueOf(Integer.MAX_VALUE), "service", "service1", "instance",
"instance2"
))
.value(410)
.build()
).build()
);
Map<String, AvgHistogramPercentileFunction> actValues = new HashMap<>();
doAnswer(invocationOnMock -> {
AvgHistogramPercentileFunction actValue = (AvgHistogramPercentileFunction) invocationOnMock.getArgument(
0, AcceptableValue.class);
if (actValue.getSummation().hasKey("instance1:0")) {
actValues.put("instance1", actValue);
} else {
actValues.put("instance2", actValue);
}
return null;
}).when(meterSystem).doStreamingCalculation(any());
analyzer.analyse(input);
Assert.assertEquals(2, actValues.size());
String expServiceId = IDManager.ServiceID.buildId("service1", true);
IntList expRanks = new IntList(2) {
{
add(75);
add(99);
}
};
actValues.forEach((key, actValue) -> {
Assert.assertEquals(expServiceId, actValue.getEntityId());
Assert.assertThat(expRanks, is(actValue.getRanks()));
});
AvgHistogramPercentileFunction instance1 = actValues.get("instance1");
AvgHistogramPercentileFunction instance2 = actValues.get("instance2");
Assert.assertEquals(100L, instance1.getSummation().get("instance1:0"), 0.0);
Assert.assertEquals(178L, instance1.getSummation().get("instance1:750"), 0.0);
Assert.assertEquals(1L, instance1.getCount().get("instance1:0"), 0.0);
Assert.assertEquals(1L, instance1.getCount().get("instance1:750"), 0.0);
Assert.assertEquals(22L, instance2.getSummation().get("instance2:25"), 0.0);
Assert.assertEquals(110L, instance2.getSummation().get("instance2:1250"), 0.0);
Assert.assertEquals(1L, instance2.getCount().get("instance2:25"), 0.0);
Assert.assertEquals(1L, instance2.getCount().get("instance2:1250"), 0.0);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.meter.analyzer.dsl;
import com.google.common.collect.ImmutableMap;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import static com.google.common.collect.ImmutableMap.of;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail;
@Slf4j
@RunWith(Parameterized.class)
public class ScopeTest {
@Parameterized.Parameter
public String name;
@Parameterized.Parameter(1)
public ImmutableMap<String, SampleFamily> input;
@Parameterized.Parameter(2)
public String expression;
@Parameterized.Parameter(3)
public boolean isThrow;
@Parameterized.Parameter(4)
public Map<MeterEntity, Sample[]> want;
@Parameterized.Parameters(name = "{index}: {0}")
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][] {
{
"sum_service",
of("http_success_request", SampleFamilyBuilder.newBuilder(
Sample.builder().labels(of("idc", "t1")).value(50).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "svc", "catalog")).value(51).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "svc", "product")).value(50).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "instance", "10.0.0.1")).value(100).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "instance", "10.0.0.1")).value(3).build()
).build()),
"http_success_request.sum(['idc']).service(['idc'])",
false,
new HashMap<MeterEntity, Sample[]>() {
{
put(
MeterEntity.newService("t1"),
new Sample[] {Sample.builder().labels(of()).value(200).build()}
);
put(
MeterEntity.newService("t3"),
new Sample[] {Sample.builder().labels(of()).value(54).build()}
);
}
}
},
{
"sum_service_labels",
of("http_success_request", SampleFamilyBuilder.newBuilder(
Sample.builder().labels(of("idc", "t1")).value(50).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "svc", "catalog")).value(51).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "svc", "product")).value(50).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "instance", "10.0.0.1")).value(100).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "instance", "10.0.0.1")).value(3).build()
).build()),
"http_success_request.sum(['region', 'idc']).service(['idc'])",
false,
new HashMap<MeterEntity, Sample[]>() {
{
put(
MeterEntity.newService("t1"),
new Sample[] {
Sample.builder().labels(of("region", "")).value(50).build(),
Sample.builder().labels(of("region", "us")).value(150).build()
}
);
put(
MeterEntity.newService("t3"),
new Sample[] {Sample.builder().labels(of("region", "cn")).value(54).build()}
);
}
}
},
{
"sum_service_m",
of("http_success_request", SampleFamilyBuilder.newBuilder(
Sample.builder().labels(of("idc", "t1")).value(50).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "svc", "catalog")).value(51).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "svc", "product")).value(50).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "instance", "10.0.0.1")).value(100).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "instance", "10.0.0.1")).value(3).build()
).build()),
"http_success_request.sum(['idc', 'region']).service(['idc' , 'region'])",
false,
new HashMap<MeterEntity, Sample[]>() {
{
put(
MeterEntity.newService("t1.us"),
new Sample[] {Sample.builder().labels(of()).value(150).build()}
);
put(
MeterEntity.newService("t3.cn"),
new Sample[] {Sample.builder().labels(of()).value(54).build()}
);
put(
MeterEntity.newService("t1"),
new Sample[] {Sample.builder().labels(of()).value(50).build()}
);
}
}
},
{
"sum_service_endpiont",
of("http_success_request", SampleFamilyBuilder.newBuilder(
Sample.builder().labels(of("idc", "t1")).value(50).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "svc", "catalog")).value(51).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "svc", "product")).value(50).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "instance", "10.0.0.1")).value(100).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "instance", "10.0.0.1")).value(3).build()
).build()),
"http_success_request.sum(['region', 'idc']).endpoint(['idc'] , ['region'])",
false,
new HashMap<MeterEntity, Sample[]>() {
{
put(
MeterEntity.newEndpoint("t1", "us"),
new Sample[] {Sample.builder().labels(of()).value(150).build()}
);
put(
MeterEntity.newEndpoint("t3", "cn"),
new Sample[] {Sample.builder().labels(of()).value(54).build()}
);
put(
MeterEntity.newEndpoint("t1", ""),
new Sample[] {Sample.builder().labels(of()).value(50).build()}
);
}
}
},
{
"sum_service_endpiont_labels",
of("http_success_request", SampleFamilyBuilder.newBuilder(
Sample.builder().labels(of("idc", "t1")).value(50).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "svc", "catalog")).value(51).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "svc", "product")).value(50).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "instance", "10.0.0.1")).value(100).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "instance", "10.0.0.1")).value(3).build()
).build()),
"http_success_request.sum(['region', 'idc' , 'instance']).endpoint(['idc'] , ['region'])",
false,
new HashMap<MeterEntity, Sample[]>() {
{
put(
MeterEntity.newEndpoint("t1", "us"),
new Sample[] {
Sample.builder().labels(of("instance", "")).value(50).build(),
Sample.builder().labels(of("instance", "10.0.0.1")).value(100).build()
}
);
put(
MeterEntity.newEndpoint("t3", "cn"),
new Sample[] {
Sample.builder().labels(of("instance", "")).value(51).build(),
Sample.builder().labels(of("instance", "10.0.0.1")).value(3).build()
}
);
put(
MeterEntity.newEndpoint("t1", ""),
new Sample[] {Sample.builder().labels(of("instance", "")).value(50).build()}
);
}
}
},
{
"sum_service_endpiont_labels_m",
of("http_success_request", SampleFamilyBuilder.newBuilder(
Sample.builder().labels(of("idc", "t1")).value(50).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "svc", "product")).value(51).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "svc", "catalog")).value(50).build(),
Sample.builder()
.labels(of("idc", "t1", "region", "us", "svc", "catalog", "instance", "10.0.0.1"))
.value(100)
.build(),
Sample.builder()
.labels(of("idc", "t3", "region", "cn", "svc", "product", "instance", "10.0.0.1"))
.value(3)
.build()
).build()),
"http_success_request.sum(['region', 'idc' , 'svc' , 'instance']).endpoint(['idc'] , ['region','svc'])",
false,
new HashMap<MeterEntity, Sample[]>() {
{
put(
MeterEntity.newEndpoint("t1", "us.catalog"),
new Sample[] {
Sample.builder().labels(of("instance", "")).value(50).build(),
Sample.builder().labels(of("instance", "10.0.0.1")).value(100).build()
}
);
put(
MeterEntity.newEndpoint("t3", "cn.product"),
new Sample[] {
Sample.builder().labels(of("instance", "")).value(51).build(),
Sample.builder().labels(of("instance", "10.0.0.1")).value(3).build()
}
);
put(
MeterEntity.newEndpoint("t1", ""),
new Sample[] {Sample.builder().labels(of("instance", "")).value(50).build()}
);
}
}
},
{
"sum_service_instance",
of("http_success_request", SampleFamilyBuilder.newBuilder(
Sample.builder().labels(of("idc", "t1")).value(50).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "svc", "catalog")).value(51).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "svc", "product")).value(50).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "instance", "10.0.0.1")).value(100).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "instance", "10.0.0.1")).value(3).build()
).build()),
"http_success_request.sum(['region', 'idc']).instance(['idc'] , ['region'])",
false,
new HashMap<MeterEntity, Sample[]>() {
{
put(
MeterEntity.newServiceInstance("t1", "us"),
new Sample[] {Sample.builder().labels(of()).value(150).build()}
);
put(
MeterEntity.newServiceInstance("t3", "cn"),
new Sample[] {Sample.builder().labels(of()).value(54).build()}
);
put(
MeterEntity.newServiceInstance("t1", ""),
new Sample[] {Sample.builder().labels(of()).value(50).build()}
);
}
}
},
{
"sum_service_instance_labels",
of("http_success_request", SampleFamilyBuilder.newBuilder(
Sample.builder().labels(of("idc", "t1")).value(50).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "svc", "catalog")).value(51).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "svc", "product")).value(50).build(),
Sample.builder().labels(of("idc", "t1", "region", "us", "instance", "10.0.0.1")).value(100).build(),
Sample.builder().labels(of("idc", "t3", "region", "cn", "instance", "10.0.0.1")).value(3).build()
).build()),
"http_success_request.sum(['region', 'idc' , 'instance']).instance(['idc'] , ['region'])",
false,
new HashMap<MeterEntity, Sample[]>() {
{
put(
MeterEntity.newServiceInstance("t1", "us"),
new Sample[] {
Sample.builder().labels(of("instance", "")).value(50).build(),
Sample.builder().labels(of("instance", "10.0.0.1")).value(100).build()
}
);
put(
MeterEntity.newServiceInstance("t3", "cn"),
new Sample[] {
Sample.builder().labels(of("instance", "")).value(51).build(),
Sample.builder().labels(of("instance", "10.0.0.1")).value(3).build()
}
);
put(
MeterEntity.newServiceInstance("t1", ""),
new Sample[] {Sample.builder().labels(of("instance", "")).value(50).build()}
);
}
}
},
});
}
@Test
public void test() {
Expression e = DSL.parse(expression);
Result r = null;
try {
r = e.run(input);
} catch (Throwable t) {
if (isThrow) {
return;
}
log.error("Test failed", t);
fail("Should not throw anything");
}
if (isThrow) {
fail("Should throw something");
}
assertThat(r.isSuccess(), is(true));
Map<MeterEntity, Sample[]> meterSamplesR = r.getData().context.getMeterSamples();
meterSamplesR.forEach((meterEntity, samples) -> {
assertThat(samples, is(want.get(meterEntity)));
});
}
}