Unverified commit 439f843d authored by: Jiajing LU, committed by: GitHub

Banyandb OAP integration (#9064)

Parent: e02f7efd
......@@ -17,7 +17,8 @@
~
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>apm</artifactId>
<groupId>org.apache.skywalking</groupId>
......@@ -75,6 +76,7 @@
<awaitility.version>3.0.0</awaitility.version>
<httpcore.version>4.4.13</httpcore.version>
<commons-compress.version>1.21</commons-compress.version>
<banyandb-java-client.version>0.1.0-SNAPSHOT</banyandb-java-client.version>
</properties>
<dependencyManagement>
......@@ -383,6 +385,29 @@
<version>${jackson-module-afterburner.version}</version>
</dependency>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>banyandb-java-client</artifactId>
<version>${banyandb-java-client.version}</version>
<exclusions>
<exclusion>
<groupId>io.grpc</groupId>
<artifactId>grpc-netty</artifactId>
</exclusion>
<exclusion>
<groupId>io.grpc</groupId>
<artifactId>grpc-protobuf</artifactId>
</exclusion>
<exclusion>
<groupId>io.grpc</groupId>
<artifactId>grpc-stub</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty-tcnative-boringssl-static</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
......
......@@ -45,9 +45,9 @@ import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.NE
})
public class NetworkAddressAlias extends Metrics {
public static final String INDEX_NAME = "network_address_alias";
private static final String ADDRESS = "address";
private static final String REPRESENT_SERVICE_ID = "represent_service_id";
private static final String REPRESENT_SERVICE_INSTANCE_ID = "represent_service_instance_id";
public static final String ADDRESS = "address";
public static final String REPRESENT_SERVICE_ID = "represent_service_id";
public static final String REPRESENT_SERVICE_INSTANCE_ID = "represent_service_instance_id";
public static final String LAST_UPDATE_TIME_BUCKET = "last_update_time_bucket";
@Setter
......
......@@ -33,7 +33,7 @@ public class EBPFProcessProfilingDataDispatcher implements SourceDispatcher<EBPF
record.setStacksBinary(source.getStacksBinary());
record.setUploadTime(source.getUploadTime());
record.setStackDumpCount(source.getDumpCount());
record.setTimeBucket(TimeBucket.getMinuteTimeBucket(source.getUploadTime()));
record.setTimeBucket(TimeBucket.getRecordTimeBucket(source.getUploadTime()));
RecordStreamProcessor.getInstance().in(record);
}
}
\ No newline at end of file
......@@ -187,6 +187,11 @@
<artifactId>storage-tidb-plugin</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>storage-banyandb-plugin</artifactId>
<version>${project.version}</version>
</dependency>
<!-- storage module -->
<!-- queryBuild module -->
......
......@@ -241,6 +241,14 @@ storage:
oapAnalyzer: ${SW_STORAGE_ES_OAP_ANALYZER:"{\"analyzer\":{\"oap_analyzer\":{\"type\":\"stop\"}}}"} # the oap analyzer.
oapLogAnalyzer: ${SW_STORAGE_ES_OAP_LOG_ANALYZER:"{\"analyzer\":{\"oap_log_analyzer\":{\"type\":\"standard\"}}}"} # the oap log analyzer. It could be customized by the ES analyzer configuration to support more language log formats, such as Chinese log, Japanese log and etc.
advanced: ${SW_STORAGE_ES_ADVANCED:""}
banyandb:
host: ${SW_STORAGE_BANYANDB_HOST:127.0.0.1}
port: ${SW_STORAGE_BANYANDB_PORT:17912}
group: ${SW_STORAGE_BANYANDB_GROUP:default}
maxBulkSize: ${SW_STORAGE_BANYANDB_MAX_BULK_SIZE:5000}
flushInterval: ${SW_STORAGE_BANYANDB_FLUSH_INTERVAL:15}
concurrentWriteThreads: ${SW_STORAGE_BANYANDB_CONCURRENT_WRITE_THREADS:15}
fetchTaskLogMaxSize: ${SW_STORAGE_BANYANDB_FETCH_TASK_LOG_MAX_SIZE:1000} # the maximum number of profiling task logs fetched in a single request
agent-analyzer:
selector: ${SW_AGENT_ANALYZER:default}
......
......@@ -32,5 +32,6 @@
<module>storage-elasticsearch-plugin</module>
<module>storage-zipkin-elasticsearch-plugin</module>
<module>storage-tidb-plugin</module>
<module>storage-banyandb-plugin</module>
</modules>
</project>
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
~
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>server-storage-plugin</artifactId>
<groupId>org.apache.skywalking</groupId>
<version>9.1.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>storage-banyandb-plugin</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>server-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>library-client</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>library-datacarrier-queue</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>banyandb-java-client</artifactId>
</dependency>
</dependencies>
</project>
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.DataPoint;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.banyandb.v1.client.TimestampRange;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.Duration;
import org.apache.skywalking.oap.server.core.query.input.TopNCondition;
import org.apache.skywalking.oap.server.core.query.type.KeyValue;
import org.apache.skywalking.oap.server.core.query.type.SelectedRecord;
import org.apache.skywalking.oap.server.core.storage.query.IAggregationQueryDAO;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.util.ByteUtil;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Aggregation (top-N) query implementation backed by BanyanDB measures.
 * Groups data points by {@link Metrics#ENTITY_ID}, averages the requested value column
 * over the query duration, and returns the top-N (or bottom-N) entities.
 */
public class BanyanDBAggregationQueryDAO extends AbstractBanyanDBDAO implements IAggregationQueryDAO {
    /** Tags projected by every aggregation query. */
    private static final Set<String> TAGS = ImmutableSet.of(Metrics.ENTITY_ID, Metrics.TIME_BUCKET);

    public BanyanDBAggregationQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Runs a top-N aggregation over the measure named by {@code condition}.
     *
     * @param condition            model name, N, and sort order (DES selects topN, otherwise bottomN)
     * @param valueColumnName      the field column being averaged and ranked
     * @param duration             time window; both timestamp range and time-bucket bounds are applied
     * @param additionalConditions optional extra equality filters (may be null/empty)
     * @return ranked records keyed by entity id; empty list when nothing matched
     * @throws IOException when the model or its value column is not registered in the metadata registry
     */
    @Override
    public List<SelectedRecord> sortMetrics(TopNCondition condition, String valueColumnName, Duration duration, List<KeyValue> additionalConditions) throws IOException {
        final String measureName = condition.getName();
        final TimestampRange window = new TimestampRange(duration.getStartTimestamp(), duration.getEndTimestamp());
        final MeasureQueryResponse response = query(measureName, TAGS, Collections.singleton(valueColumnName),
                window, new QueryBuilder<MeasureQuery>() {
                    @Override
                    protected void apply(MeasureQuery query) {
                        // average per entity, restricted to the requested time-bucket range
                        query.meanBy(valueColumnName, ImmutableSet.of(Metrics.ENTITY_ID));
                        query.and(lte(Metrics.TIME_BUCKET, duration.getEndTimeBucket()));
                        query.and(gte(Metrics.TIME_BUCKET, duration.getStartTimeBucket()));
                        // DES -> largest values first; anything else -> smallest
                        if (condition.getOrder() == Order.DES) {
                            query.topN(condition.getTopN(), valueColumnName);
                        } else {
                            query.bottomN(condition.getTopN(), valueColumnName);
                        }
                        if (CollectionUtils.isNotEmpty(additionalConditions)) {
                            additionalConditions.forEach(additionalCondition -> query
                                    .and(eq(
                                            additionalCondition.getKey(),
                                            additionalCondition.getValue()
                                    )));
                        }
                    }
                });
        if (response.size() == 0) {
            return Collections.emptyList();
        }
        final MetadataRegistry.Schema schema = MetadataRegistry.INSTANCE.findMetadata(measureName);
        if (schema == null) {
            throw new IOException("schema is not registered");
        }
        final MetadataRegistry.ColumnSpec spec = schema.getSpec(valueColumnName);
        if (spec == null) {
            throw new IOException("field spec is not registered");
        }
        final List<SelectedRecord> results = new ArrayList<>();
        for (DataPoint dataPoint : response.getDataPoints()) {
            final SelectedRecord record = new SelectedRecord();
            record.setId(dataPoint.getTagValue(Metrics.ENTITY_ID));
            record.setValue(extractFieldValueAsString(spec, valueColumnName, dataPoint));
            results.add(record);
        }
        return results;
    }

    /**
     * Renders a data point's field value as a string, truncating numeric values to their
     * integral part (doubles are stored as binary and decoded before truncation).
     */
    private String extractFieldValueAsString(MetadataRegistry.ColumnSpec spec, String fieldName, DataPoint dataPoint) throws IOException {
        final Class<?> columnClass = spec.getColumnClass();
        if (String.class.equals(columnClass)) {
            return dataPoint.getFieldValue(fieldName);
        }
        if (double.class.equals(columnClass)) {
            return String.valueOf(ByteUtil.bytes2Double(dataPoint.getFieldValue(fieldName)).longValue());
        }
        return String.valueOf(((Number) dataPoint.getFieldValue(fieldName)).longValue());
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import org.apache.skywalking.banyandb.v1.client.MeasureBulkWriteProcessor;
import org.apache.skywalking.banyandb.v1.client.StreamBulkWriteProcessor;
import org.apache.skywalking.oap.server.core.storage.AbstractDAO;
import org.apache.skywalking.oap.server.core.storage.IBatchDAO;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
import org.apache.skywalking.oap.server.library.client.request.PrepareRequest;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBMeasureInsertRequest;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBMeasureUpdateRequest;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBStreamInsertRequest;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * {@link IBatchDAO} implementation that routes prepared insert/update requests to
 * BanyanDB bulk write processors (one for streams, one for measures).
 *
 * Processors are created lazily on first use rather than in the constructor, because the
 * client connection may not be established yet at DAO construction time.
 */
public class BanyanDBBatchDAO extends AbstractDAO<BanyanDBStorageClient> implements IBatchDAO {
    // volatile: written once inside the synchronized initializer, read lock-free afterwards
    private volatile StreamBulkWriteProcessor streamBulkWriteProcessor;
    private volatile MeasureBulkWriteProcessor measureBulkWriteProcessor;
    private final int maxBulkSize;
    private final int flushInterval;
    private final int concurrency;
    private final AtomicBoolean initialized = new AtomicBoolean(false);

    public BanyanDBBatchDAO(BanyanDBStorageClient client, int maxBulkSize, int flushInterval, int concurrency) {
        super(client);
        this.maxBulkSize = maxBulkSize;
        this.flushInterval = flushInterval;
        this.concurrency = concurrency;
    }

    /**
     * Lazily creates both bulk processors exactly once.
     *
     * Fix: the previous {@code compareAndSet(false, true)} guard marked the DAO initialized
     * before the processors were actually constructed, so a second concurrent caller could
     * observe {@code initialized == true} and dereference still-null processors (NPE).
     * Initialization is now done under a lock and the flag is flipped only after both
     * processors are fully built.
     */
    private void ensureProcessorsInitialized() {
        if (initialized.get()) {
            return;
        }
        synchronized (this) {
            if (!initialized.get()) {
                this.streamBulkWriteProcessor = getClient().createStreamBulkProcessor(maxBulkSize, flushInterval, concurrency);
                this.measureBulkWriteProcessor = getClient().createMeasureBulkProcessor(maxBulkSize, flushInterval, concurrency);
                initialized.set(true);
            }
        }
    }

    /**
     * Dispatches a single insert request to the matching bulk processor.
     * Requests of unknown types are silently ignored (matches prior behavior).
     */
    @Override
    public void insert(InsertRequest insertRequest) {
        ensureProcessorsInitialized();
        if (insertRequest instanceof BanyanDBStreamInsertRequest) {
            this.streamBulkWriteProcessor.add(((BanyanDBStreamInsertRequest) insertRequest).getStreamWrite());
        } else if (insertRequest instanceof BanyanDBMeasureInsertRequest) {
            this.measureBulkWriteProcessor.add(((BanyanDBMeasureInsertRequest) insertRequest).getMeasureWrite());
        }
    }

    /**
     * Hands every prepared request to its bulk processor. The returned future completes
     * immediately because the processors flush asynchronously on their own schedule.
     */
    @Override
    public CompletableFuture<Void> flush(List<PrepareRequest> prepareRequests) {
        ensureProcessorsInitialized();
        if (CollectionUtils.isNotEmpty(prepareRequests)) {
            for (final PrepareRequest r : prepareRequests) {
                if (r instanceof BanyanDBStreamInsertRequest) {
                    // TODO: return CompletableFuture<Void>
                    this.streamBulkWriteProcessor.add(((BanyanDBStreamInsertRequest) r).getStreamWrite());
                } else if (r instanceof BanyanDBMeasureInsertRequest) {
                    this.measureBulkWriteProcessor.add(((BanyanDBMeasureInsertRequest) r).getMeasureWrite());
                } else if (r instanceof BanyanDBMeasureUpdateRequest) {
                    this.measureBulkWriteProcessor.add(((BanyanDBMeasureUpdateRequest) r).getMeasureWrite());
                }
            }
        }
        return CompletableFuture.completedFuture(null);
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import com.google.gson.JsonObject;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.banyandb.model.v1.BanyandbModel;
import org.apache.skywalking.banyandb.v1.client.DataPoint;
import org.apache.skywalking.banyandb.v1.client.MeasureWrite;
import org.apache.skywalking.banyandb.v1.client.RowEntity;
import org.apache.skywalking.banyandb.v1.client.StreamWrite;
import org.apache.skywalking.banyandb.v1.client.TagAndValue;
import org.apache.skywalking.banyandb.v1.client.grpc.exception.BanyanDBException;
import org.apache.skywalking.banyandb.v1.client.metadata.Serializable;
import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
import org.apache.skywalking.oap.server.core.storage.type.StorageDataComplexObject;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.util.ByteUtil;
import java.util.List;
/**
 * Converters bridging SkyWalking's generic storage layer and the BanyanDB client model:
 * <ul>
 *   <li>{@link StorageToStream}/{@link StorageToMeasure} — read query results back into entities</li>
 *   <li>{@link StreamToStorage}/{@link MeasureToStorage} — serialize entities into write requests</li>
 * </ul>
 * Column metadata (tag vs. field, Java class) is resolved through {@link MetadataRegistry}.
 */
public class BanyanDBConverter {
    /**
     * Reads a stream query {@link RowEntity} back into a storage entity.
     */
    public static class StorageToStream implements Convert2Entity {
        private final MetadataRegistry.Schema schema;
        private final RowEntity rowEntity;
        public StorageToStream(String modelName, RowEntity rowEntity) {
            // NOTE(review): findMetadata may return null for an unregistered model,
            // which would surface later as an NPE in get() — confirm callers guarantee registration
            this.schema = MetadataRegistry.INSTANCE.findMetadata(modelName);
            this.rowEntity = rowEntity;
        }
        @Override
        public Object get(String fieldName) {
            MetadataRegistry.ColumnSpec spec = schema.getSpec(fieldName);
            // double columns are stored as binary tags (see buildTag), so decode them here
            if (double.class.equals(spec.getColumnClass())) {
                return ByteUtil.bytes2Double(rowEntity.getTagValue(fieldName));
            } else {
                return rowEntity.getTagValue(fieldName);
            }
        }
        @Override
        public byte[] getBytes(String fieldName) {
            return rowEntity.getTagValue(fieldName);
        }
    }
    /**
     * Serializes a storage entity into a BanyanDB {@link StreamWrite}.
     * All columns of a stream are written as tags; BanyanDBException from the client
     * is logged and swallowed, leaving that tag unset on the write request.
     */
    @Slf4j
    @RequiredArgsConstructor
    public static class StreamToStorage implements Convert2Storage<StreamWrite> {
        private final MetadataRegistry.Schema schema;
        private final StreamWrite streamWrite;
        @Override
        public void accept(String fieldName, Object fieldValue) {
            MetadataRegistry.ColumnSpec columnSpec = this.schema.getSpec(fieldName);
            if (columnSpec == null) {
                throw new IllegalArgumentException("fail to find field[" + fieldName + "]");
            }
            try {
                this.streamWrite.tag(fieldName, buildTag(fieldValue, columnSpec.getColumnClass()));
            } catch (BanyanDBException ex) {
                // best-effort: a failed tag is logged, not propagated
                log.error("fail to add tag", ex);
            }
        }
        @Override
        public void accept(String fieldName, byte[] fieldValue) {
            try {
                this.streamWrite.tag(fieldName, TagAndValue.binaryTagValue(fieldValue));
            } catch (BanyanDBException ex) {
                log.error("fail to add tag", ex);
            }
        }
        @Override
        public void accept(String fieldName, List<String> fieldValue) {
            try {
                this.streamWrite.tag(fieldName, TagAndValue.stringArrayTagValue(fieldValue));
            } catch (BanyanDBException ex) {
                log.error("fail to accept string array tag", ex);
            }
        }
        @Override
        public Object get(String fieldName) {
            // write-only converter; reads are a programming error
            throw new IllegalStateException("should not reach here");
        }
        @Override
        public StreamWrite obtain() {
            return this.streamWrite;
        }
    }
    /**
     * Serializes a storage entity into a BanyanDB {@link MeasureWrite}.
     * Unlike streams, measure columns are split into tags and fields according to the
     * column spec; client exceptions are logged and swallowed.
     */
    @Slf4j
    @RequiredArgsConstructor
    public static class MeasureToStorage implements Convert2Storage<MeasureWrite> {
        private final MetadataRegistry.Schema schema;
        private final MeasureWrite measureWrite;
        @Override
        public void accept(String fieldName, Object fieldValue) {
            MetadataRegistry.ColumnSpec columnSpec = this.schema.getSpec(fieldName);
            if (columnSpec == null) {
                throw new IllegalArgumentException("fail to find field[" + fieldName + "]");
            }
            try {
                if (columnSpec.getColumnType() == MetadataRegistry.ColumnType.TAG) {
                    this.measureWrite.tag(fieldName, buildTag(fieldValue, columnSpec.getColumnClass()));
                } else {
                    this.measureWrite.field(fieldName, buildField(fieldValue, columnSpec.getColumnClass()));
                }
            } catch (BanyanDBException ex) {
                log.error("fail to add tag", ex);
            }
        }
        // sets the identity tag of the measure row; not part of the Convert2Storage interface
        public void acceptID(String id) {
            try {
                this.measureWrite.setID(id);
            } catch (BanyanDBException ex) {
                log.error("fail to add ID tag", ex);
            }
        }
        @Override
        public void accept(String fieldName, byte[] fieldValue) {
            MetadataRegistry.ColumnSpec columnSpec = this.schema.getSpec(fieldName);
            // NOTE(review): unlike the Object overload, a null columnSpec here would NPE
            // instead of raising IllegalArgumentException — confirm whether that is intended
            try {
                if (columnSpec.getColumnType() == MetadataRegistry.ColumnType.TAG) {
                    this.measureWrite.tag(fieldName, TagAndValue.binaryTagValue(fieldValue));
                } else {
                    this.measureWrite.field(fieldName, TagAndValue.binaryFieldValue(fieldValue));
                }
            } catch (BanyanDBException ex) {
                log.error("fail to add binary tag/field", ex);
            }
        }
        @Override
        public void accept(String fieldName, List<String> fieldValue) {
            try {
                this.measureWrite.tag(fieldName, TagAndValue.stringArrayTagValue(fieldValue));
            } catch (BanyanDBException ex) {
                log.error("fail to accept string array tag", ex);
            }
        }
        @Override
        public Object get(String fieldName) {
            // write-only converter; reads are a programming error
            throw new IllegalStateException("should not reach here");
        }
        @Override
        public MeasureWrite obtain() {
            return this.measureWrite;
        }
    }
    /**
     * Maps a column value to a BanyanDB tag value based on its declared Java class.
     * Doubles have no native tag type and are encoded as 8-byte binary (see ByteUtil).
     */
    private static Serializable<BanyandbModel.TagValue> buildTag(Object value, final Class<?> clazz) {
        if (int.class.equals(clazz) || Integer.class.equals(clazz)) {
            return TagAndValue.longTagValue(((Number) value).longValue());
        } else if (Long.class.equals(clazz) || long.class.equals(clazz)) {
            return TagAndValue.longTagValue((Long) value);
        } else if (String.class.equals(clazz)) {
            return TagAndValue.stringTagValue((String) value);
        } else if (Double.class.equals(clazz) || double.class.equals(clazz)) {
            return TagAndValue.binaryTagValue(ByteUtil.double2Bytes((double) value));
        } else if (StorageDataComplexObject.class.isAssignableFrom(clazz)) {
            return TagAndValue.stringTagValue(((StorageDataComplexObject<?>) value).toStorageData());
        } else if (clazz.isEnum()) {
            // NOTE(review): assumes enum-typed columns arrive here already converted to Integer;
            // a raw enum instance would throw ClassCastException on this cast — confirm upstream conversion
            return TagAndValue.longTagValue((int) value);
        } else if (JsonObject.class.equals(clazz)) {
            return TagAndValue.stringTagValue((String) value);
        } else if (byte[].class.equals(clazz)) {
            // NOTE(review): byte[]-declared columns are cast to String here rather than using
            // binaryTagValue — looks suspicious; verify what callers actually pass for this case
            return TagAndValue.stringTagValue((String) value);
        }
        throw new IllegalStateException(clazz.getSimpleName() + " is not supported");
    }
    /**
     * Maps a column value to a BanyanDB field value; supports a narrower type set than tags
     * (no enum/JsonObject/byte[] handling — those fall through to IllegalStateException).
     */
    private static Serializable<BanyandbModel.FieldValue> buildField(Object value, final Class<?> clazz) {
        if (Integer.class.equals(clazz) || int.class.equals(clazz)) {
            return TagAndValue.longFieldValue(((Number) value).longValue());
        } else if (Long.class.equals(clazz) || long.class.equals(clazz)) {
            return TagAndValue.longFieldValue((Long) value);
        } else if (String.class.equals(clazz)) {
            return TagAndValue.stringFieldValue((String) value);
        } else if (Double.class.equals(clazz) || double.class.equals(clazz)) {
            return TagAndValue.binaryFieldValue(ByteUtil.double2Bytes((double) value));
        } else if (StorageDataComplexObject.class.isAssignableFrom(clazz)) {
            return TagAndValue.stringFieldValue(((StorageDataComplexObject<?>) value).toStorageData());
        }
        throw new IllegalStateException(clazz.getSimpleName() + " is not supported");
    }
    /**
     * Reads a measure query {@link DataPoint} back into a storage entity,
     * dispatching on whether the requested column is a tag or a field.
     */
    public static class StorageToMeasure implements Convert2Entity {
        private final MetadataRegistry.Schema schema;
        private final DataPoint dataPoint;
        public StorageToMeasure(String modelName, DataPoint dataPoint) {
            this.schema = MetadataRegistry.INSTANCE.findMetadata(modelName);
            this.dataPoint = dataPoint;
        }
        @Override
        public Object get(String fieldName) {
            MetadataRegistry.ColumnSpec spec = schema.getSpec(fieldName);
            switch (spec.getColumnType()) {
                case TAG:
                    // doubles are stored as binary (see buildTag) and must be decoded
                    if (double.class.equals(spec.getColumnClass())) {
                        return ByteUtil.bytes2Double(dataPoint.getTagValue(fieldName));
                    } else {
                        return dataPoint.getTagValue(fieldName);
                    }
                case FIELD:
                default:
                    if (double.class.equals(spec.getColumnClass())) {
                        return ByteUtil.bytes2Double(dataPoint.getFieldValue(fieldName));
                    } else {
                        return dataPoint.getFieldValue(fieldName);
                    }
            }
        }
        @Override
        public byte[] getBytes(String fieldName) {
            // TODO: double may be a field?
            // NOTE(review): unlike get(), this always reads from fields and never from tags —
            // confirm binary columns are always modeled as fields for measures
            return dataPoint.getFieldValue(fieldName);
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.banyandb.v1.client.BanyanDBClient;
import org.apache.skywalking.banyandb.v1.client.grpc.exception.BanyanDBException;
import org.apache.skywalking.banyandb.v1.client.metadata.Group;
import org.apache.skywalking.banyandb.v1.client.metadata.Measure;
import org.apache.skywalking.banyandb.v1.client.metadata.Stream;
import org.apache.skywalking.oap.server.core.storage.StorageException;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.storage.model.ModelInstaller;
import org.apache.skywalking.oap.server.library.client.Client;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import java.io.IOException;
@Slf4j
public class BanyanDBIndexInstaller extends ModelInstaller {
    public BanyanDBIndexInstaller(Client client, ModuleManager moduleManager) {
        super(client, moduleManager);
    }

    /**
     * Checks whether the schema for the given model already exists on the BanyanDB side.
     * As a side effect, the owning group is created if absent, and an existing remote
     * schema is registered into the local {@link MetadataRegistry}.
     *
     * @return true when the remote schema already exists, false when it must be created
     * @throws StorageException when the group cannot be created or the remote check fails
     */
    @Override
    protected boolean isExists(Model model) throws StorageException {
        final MetadataRegistry.SchemaMetadata metadata = MetadataRegistry.INSTANCE.parseMetadata(model);
        final BanyanDBClient banyanDBClient = ((BanyanDBStorageClient) this.client).client;
        try {
            // ensure the group exists before looking at the entity schema
            final Group group = metadata.getOrCreateGroup(banyanDBClient);
            if (group == null) {
                throw new StorageException("fail to create group " + metadata.getGroup());
            }
            log.info("group {} created", group.name());
            if (!metadata.findRemoteSchema(banyanDBClient).isPresent()) {
                return false;
            }
            MetadataRegistry.INSTANCE.registerModel(model);
            return true;
        } catch (BanyanDBException ex) {
            throw new StorageException("fail to check existence", ex);
        }
    }

    /**
     * Creates the BanyanDB schema for a model: records become streams, other
     * time-series models become measures, and non-time-series models (e.g. UITemplate)
     * are skipped because BanyanDB stores them as properties.
     *
     * @throws StorageException when the schema definition cannot be installed
     */
    @Override
    protected void createTable(Model model) throws StorageException {
        try {
            if (!model.isTimeSeries()) { // UITemplate
                log.info("skip property index {}", model.getName());
                return;
            }
            if (model.isRecord()) { // stream
                final Stream stream = (Stream) MetadataRegistry.INSTANCE.registerModel(model);
                if (stream != null) {
                    log.info("install stream schema {}", model.getName());
                    ((BanyanDBStorageClient) client).define(stream);
                }
            } else { // measure
                final Measure measure = (Measure) MetadataRegistry.INSTANCE.registerModel(model);
                if (measure != null) {
                    log.info("install measure schema {}", model.getName());
                    ((BanyanDBStorageClient) client).define(measure);
                }
            }
        } catch (IOException ex) {
            throw new StorageException("fail to install schema", ex);
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.oap.server.core.analysis.management.ManagementData;
import org.apache.skywalking.oap.server.core.storage.IManagementDAO;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import java.io.IOException;
/**
 * {@link IManagementDAO} for BanyanDB, responsible for persisting management data
 * such as UI templates.
 */
@RequiredArgsConstructor
@Slf4j
public class BanyanDBManagementDAO implements IManagementDAO {
    private final BanyanDBStorageClient client;
    private final StorageBuilder<ManagementData> storageBuilder;
    /**
     * Persists a management entity.
     *
     * NOTE(review): this is currently a stub — it only logs the model and never writes
     * anything through {@code client}/{@code storageBuilder}; confirm whether management
     * data persistence is intentionally deferred for the BanyanDB plugin at this stage.
     */
    @Override
    public void insert(Model model, ManagementData storageData) throws IOException {
        log.info("insert Management Model {}", model);
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.banyandb.v1.client.StreamWrite;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.config.NoneStream;
import org.apache.skywalking.oap.server.core.storage.AbstractDAO;
import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import java.io.IOException;
@Slf4j
public class BanyanDBNoneStreamDAO extends AbstractDAO<BanyanDBStorageClient> implements INoneStreamDAO {
    private final StorageBuilder<NoneStream> storageBuilder;

    public BanyanDBNoneStreamDAO(BanyanDBStorageClient client, StorageBuilder<NoneStream> storageBuilder) {
        super(client);
        this.storageBuilder = storageBuilder;
    }

    /**
     * Writes a none-stream entity (e.g. a profiling task) as a single BanyanDB stream element.
     *
     * @throws IOException when the model has not been registered in the metadata registry
     */
    @Override
    public void insert(Model model, NoneStream noneStream) throws IOException {
        final MetadataRegistry.Schema schema = MetadataRegistry.INSTANCE.findMetadata(model.getName());
        if (schema == null) {
            throw new IOException(model.getName() + " is not registered");
        }
        // element timestamp derives from the entity's time bucket at the model's downsampling unit
        final long timestamp = TimeBucket.getTimestamp(noneStream.getTimeBucket(), model.getDownsampling());
        final StreamWrite write = new StreamWrite(
                schema.getMetadata().getGroup(), // group name
                model.getName(),                 // index-name
                noneStream.id(),                 // identity
                timestamp);
        storageBuilder.entity2Storage(noneStream, new BanyanDBConverter.StreamToStorage(schema, write));
        getClient().write(write);
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import io.grpc.Status;
import org.apache.skywalking.banyandb.v1.client.BanyanDBClient;
import org.apache.skywalking.banyandb.v1.client.MeasureBulkWriteProcessor;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.banyandb.v1.client.StreamBulkWriteProcessor;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.banyandb.v1.client.StreamWrite;
import org.apache.skywalking.banyandb.v1.client.grpc.exception.BanyanDBException;
import org.apache.skywalking.banyandb.v1.client.metadata.Measure;
import org.apache.skywalking.banyandb.v1.client.metadata.Property;
import org.apache.skywalking.banyandb.v1.client.metadata.Stream;
import org.apache.skywalking.oap.server.library.client.Client;
import org.apache.skywalking.oap.server.library.client.healthcheck.DelegatedHealthChecker;
import org.apache.skywalking.oap.server.library.client.healthcheck.HealthCheckable;
import org.apache.skywalking.oap.server.library.util.HealthChecker;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
/**
* BanyanDBStorageClient is a simple wrapper for the underlying {@link BanyanDBClient},
* which implement {@link Client} and {@link HealthCheckable}.
*/
public class BanyanDBStorageClient implements Client, HealthCheckable {
    final BanyanDBClient client;
    private final DelegatedHealthChecker healthChecker = new DelegatedHealthChecker();

    public BanyanDBStorageClient(String host, int port) {
        this.client = new BanyanDBClient(host, port);
    }

    @Override
    public void connect() throws Exception {
        this.client.connect();
    }

    @Override
    public void shutdown() throws IOException {
        this.client.close();
    }

    /**
     * List all properties registered under the given group and container name.
     *
     * @return matching properties; an empty list when the container is not found
     * @throws IOException when the underlying call fails for any reason other than NOT_FOUND
     */
    public List<Property> listProperties(String group, String name) throws IOException {
        try {
            List<Property> properties = this.client.findProperties(group, name);
            this.healthChecker.health();
            return properties;
        } catch (BanyanDBException ex) {
            // NOT_FOUND is an expected outcome, not a health problem
            if (ex.getStatus().equals(Status.Code.NOT_FOUND)) {
                this.healthChecker.health();
                return Collections.emptyList();
            }
            healthChecker.unHealth(ex);
            throw new IOException("fail to list properties", ex);
        }
    }

    /**
     * Query a single property by id.
     *
     * @return the property, or {@code null} when it does not exist
     * @throws IOException when the underlying call fails for any reason other than NOT_FOUND
     */
    public Property queryProperty(String group, String name, String id) throws IOException {
        try {
            Property p = this.client.findProperty(group, name, id);
            this.healthChecker.health();
            return p;
        } catch (BanyanDBException ex) {
            // a missing property is reported as null, not as an error
            if (ex.getStatus().equals(Status.Code.NOT_FOUND)) {
                this.healthChecker.health();
                return null;
            }
            healthChecker.unHealth(ex);
            throw new IOException("fail to query property", ex);
        }
    }

    /** Execute a stream (record) query and report client health. */
    public StreamQueryResponse query(StreamQuery q) throws IOException {
        try {
            StreamQueryResponse response = this.client.query(q);
            this.healthChecker.health();
            return response;
        } catch (BanyanDBException ex) {
            healthChecker.unHealth(ex);
            throw new IOException("fail to query stream", ex);
        }
    }

    /** Execute a measure (metrics) query and report client health. */
    public MeasureQueryResponse query(MeasureQuery q) throws IOException {
        try {
            MeasureQueryResponse response = this.client.query(q);
            this.healthChecker.health();
            return response;
        } catch (BanyanDBException ex) {
            healthChecker.unHealth(ex);
            throw new IOException("fail to query measure", ex);
        }
    }

    /** Create or update a property (upsert semantics on the server side). */
    public void define(Property property) throws IOException {
        try {
            this.client.save(property);
            this.healthChecker.health();
        } catch (BanyanDBException ex) {
            healthChecker.unHealth(ex);
            throw new IOException("fail to define property", ex);
        }
    }

    /** Define a stream schema on the BanyanDB server. */
    public void define(Stream stream) throws IOException {
        try {
            this.client.define(stream);
            this.healthChecker.health();
        } catch (BanyanDBException ex) {
            healthChecker.unHealth(ex);
            throw new IOException("fail to define stream", ex);
        }
    }

    /** Define a measure schema on the BanyanDB server. */
    public void define(Measure measure) throws IOException {
        try {
            this.client.define(measure);
            this.healthChecker.health();
        } catch (BanyanDBException ex) {
            healthChecker.unHealth(ex);
            // fixed: previously reported "fail to define stream" for a measure definition
            throw new IOException("fail to define measure", ex);
        }
    }

    /** Fire-and-forget single stream write; bulk writers below are preferred for throughput. */
    public void write(StreamWrite streamWrite) {
        this.client.write(streamWrite);
    }

    public StreamBulkWriteProcessor createStreamBulkProcessor(int maxBulkSize, int flushInterval, int concurrency) {
        return this.client.buildStreamWriteProcessor(maxBulkSize, flushInterval, concurrency);
    }

    public MeasureBulkWriteProcessor createMeasureBulkProcessor(int maxBulkSize, int flushInterval, int concurrency) {
        return this.client.buildMeasureWriteProcessor(maxBulkSize, flushInterval, concurrency);
    }

    @Override
    public void registerChecker(HealthChecker healthChecker) {
        this.healthChecker.register(healthChecker);
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import lombok.Getter;
import lombok.Setter;
import org.apache.skywalking.oap.server.library.module.ModuleConfig;
@Getter
@Setter
public class BanyanDBStorageConfig extends ModuleConfig {
    /**
     * Host of the BanyanDB gRPC endpoint.
     */
    private String host = "127.0.0.1";
    /**
     * Port of the BanyanDB gRPC endpoint.
     */
    private int port = 17912;
    /**
     * Group of the schema in BanyanDB
     */
    private String group = "default";
    /**
     * The maximum size of write entities in a single batch write call.
     */
    private int maxBulkSize = 5000;
    /**
     * Period of flush interval. In the timeunit of seconds.
     */
    private int flushInterval = 15;
    /**
     * Concurrent consumer threads for batch writing.
     */
    private int concurrentWriteThreads = 2;
    /**
     * Max size of {@link org.apache.skywalking.oap.server.core.query.type.ProfileTaskLog} to be fetched
     * in a single request.
     * <p>NOTE(review): no default value is assigned, so this is 0 unless explicitly
     * configured — confirm the intended default against other storage plugins.</p>
     */
    private int fetchTaskLogMaxSize;
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.config.ConfigService;
import org.apache.skywalking.oap.server.core.storage.IBatchDAO;
import org.apache.skywalking.oap.server.core.storage.IHistoryDeleteDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilderFactory;
import org.apache.skywalking.oap.server.core.storage.StorageDAO;
import org.apache.skywalking.oap.server.core.storage.StorageModule;
import org.apache.skywalking.oap.server.core.storage.cache.INetworkAddressAliasDAO;
import org.apache.skywalking.oap.server.core.storage.management.UITemplateManagementDAO;
import org.apache.skywalking.oap.server.core.storage.model.ModelCreator;
import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IEBPFProfilingDataDAO;
import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IEBPFProfilingScheduleDAO;
import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IEBPFProfilingTaskDAO;
import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IServiceLabelDAO;
import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileTaskLogQueryDAO;
import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileTaskQueryDAO;
import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileThreadSnapshotQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IAggregationQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IAlarmQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IBrowserLogQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IEventQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IMetadataQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IMetricsQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.ITagAutoCompleteQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.ITopNRecordsQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.ITopologyQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.ITraceQueryDAO;
import org.apache.skywalking.oap.server.library.module.ModuleConfig;
import org.apache.skywalking.oap.server.library.module.ModuleDefine;
import org.apache.skywalking.oap.server.library.module.ModuleProvider;
import org.apache.skywalking.oap.server.library.module.ModuleStartException;
import org.apache.skywalking.oap.server.library.module.ServiceNotProvidedException;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBEventQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBMetadataQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBMetricsQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBNetworkAddressAliasDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBTagAutocompleteQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBProfileTaskQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBServiceLabelDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBTopologyQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBAlarmQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBBrowserLogQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBEBPFProfilingDataDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBEBPFProfilingScheduleQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBEBPFProfilingTaskDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBHistoryDeleteDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBLogQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBProfileTaskLogQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBProfileThreadSnapshotQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBStorageDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.BanyanDBTraceQueryDAO;
import org.apache.skywalking.oap.server.telemetry.TelemetryModule;
import org.apache.skywalking.oap.server.telemetry.api.HealthCheckMetrics;
import org.apache.skywalking.oap.server.telemetry.api.MetricsCreator;
import org.apache.skywalking.oap.server.telemetry.api.MetricsTag;
/**
 * Module provider wiring the OAP storage module to BanyanDB: registers all
 * DAO/service implementations in {@link #prepare()}, then connects the client
 * and installs schemas (via {@link BanyanDBIndexInstaller}) in {@link #start()}.
 */
public class BanyanDBStorageProvider extends ModuleProvider {
    private BanyanDBStorageConfig config;
    private BanyanDBStorageClient client;

    public BanyanDBStorageProvider() {
        this.config = new BanyanDBStorageConfig();
    }

    @Override
    public String name() {
        return "banyandb";
    }

    @Override
    public Class<? extends ModuleDefine> module() {
        return StorageModule.class;
    }

    @Override
    public ModuleConfig createConfigBeanIfAbsent() {
        return config;
    }

    @Override
    public void prepare() throws ServiceNotProvidedException, ModuleStartException {
        this.registerServiceImplementation(StorageBuilderFactory.class, new StorageBuilderFactory.Default());
        // config has been populated by the module machinery before prepare() is invoked
        this.client = new BanyanDBStorageClient(config.getHost(), config.getPort());

        // Stream
        this.registerServiceImplementation(IBatchDAO.class, new BanyanDBBatchDAO(client, config.getMaxBulkSize(), config.getFlushInterval(), config.getConcurrentWriteThreads()));
        this.registerServiceImplementation(StorageDAO.class, new BanyanDBStorageDAO(client));
        this.registerServiceImplementation(INetworkAddressAliasDAO.class, new BanyanDBNetworkAddressAliasDAO(client));
        this.registerServiceImplementation(ITraceQueryDAO.class, new BanyanDBTraceQueryDAO(client));
        this.registerServiceImplementation(IBrowserLogQueryDAO.class, new BanyanDBBrowserLogQueryDAO(client));
        this.registerServiceImplementation(IMetadataQueryDAO.class, new BanyanDBMetadataQueryDAO(client));
        this.registerServiceImplementation(IAlarmQueryDAO.class, new BanyanDBAlarmQueryDAO(client));
        this.registerServiceImplementation(ILogQueryDAO.class, new BanyanDBLogQueryDAO(client));
        this.registerServiceImplementation(IProfileTaskQueryDAO.class, new BanyanDBProfileTaskQueryDAO(client));
        this.registerServiceImplementation(IProfileTaskLogQueryDAO.class, new BanyanDBProfileTaskLogQueryDAO(client, this.config.getFetchTaskLogMaxSize()));
        this.registerServiceImplementation(IProfileThreadSnapshotQueryDAO.class, new BanyanDBProfileThreadSnapshotQueryDAO(client));
        this.registerServiceImplementation(UITemplateManagementDAO.class, new BanyanDBUITemplateManagementDAO(client));
        this.registerServiceImplementation(IEventQueryDAO.class, new BanyanDBEventQueryDAO(client));
        this.registerServiceImplementation(ITopologyQueryDAO.class, new BanyanDBTopologyQueryDAO(client));
        this.registerServiceImplementation(IEBPFProfilingTaskDAO.class, new BanyanDBEBPFProfilingTaskDAO(client));
        this.registerServiceImplementation(IEBPFProfilingDataDAO.class, new BanyanDBEBPFProfilingDataDAO(client));
        this.registerServiceImplementation(IEBPFProfilingScheduleDAO.class, new BanyanDBEBPFProfilingScheduleQueryDAO(client));
        this.registerServiceImplementation(IServiceLabelDAO.class, new BanyanDBServiceLabelDAO(client));
        this.registerServiceImplementation(ITagAutoCompleteQueryDAO.class, new BanyanDBTagAutocompleteQueryDAO(client));
        this.registerServiceImplementation(IHistoryDeleteDAO.class, new BanyanDBHistoryDeleteDAO());
        this.registerServiceImplementation(IMetricsQueryDAO.class, new BanyanDBMetricsQueryDAO(client));
        this.registerServiceImplementation(IAggregationQueryDAO.class, new BanyanDBAggregationQueryDAO(client));
        this.registerServiceImplementation(ITopNRecordsQueryDAO.class, new BanyanDBTopNRecordsQueryDAO(client));
    }

    @Override
    public void start() throws ServiceNotProvidedException, ModuleStartException {
        // removed: an unused ConfigService lookup — it was fetched but never read
        MetricsCreator metricCreator = getManager().find(TelemetryModule.NAME)
                                                   .provider()
                                                   .getService(MetricsCreator.class);
        HealthCheckMetrics healthChecker = metricCreator.createHealthCheckerGauge(
            "storage_banyandb", MetricsTag.EMPTY_KEY, MetricsTag.EMPTY_VALUE);
        this.client.registerChecker(healthChecker);
        try {
            this.client.connect();

            // install BanyanDB schemas lazily as the core module announces new models
            BanyanDBIndexInstaller installer = new BanyanDBIndexInstaller(client, getManager());
            getManager().find(CoreModule.NAME).provider().getService(ModelCreator.class).addModelListener(installer);
        } catch (Exception e) {
            throw new ModuleStartException(e.getMessage(), e);
        }
    }

    @Override
    public void notifyAfterCompleted() throws ServiceNotProvidedException, ModuleStartException {
    }

    @Override
    public String[] requiredModules() {
        return new String[]{CoreModule.NAME};
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.DataPoint;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.banyandb.v1.client.TimestampRange;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.topn.TopN;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.Duration;
import org.apache.skywalking.oap.server.core.query.input.TopNCondition;
import org.apache.skywalking.oap.server.core.query.type.SelectedRecord;
import org.apache.skywalking.oap.server.core.storage.query.ITopNRecordsQueryDAO;
import org.apache.skywalking.oap.server.library.util.StringUtil;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.util.ByteUtil;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Reads sampled TopN records (e.g. slow statements) from a BanyanDB measure.
 * Projects the tags listed in {@link #TAGS} plus the value column as the field.
 */
public class BanyanDBTopNRecordsQueryDAO extends AbstractBanyanDBDAO implements ITopNRecordsQueryDAO {
    // Tags projected from the measure.
    // NOTE(review): Metrics.ENTITY_ID is read from data points below but is NOT in
    // this projection set, so getTagValue(Metrics.ENTITY_ID) will presumably yield
    // null ids — confirm against the server-side schema before changing.
    private static final Set<String> TAGS = ImmutableSet.of(TopN.TIME_BUCKET, TopN.SERVICE_ID, TopN.STATEMENT, TopN.TRACE_ID);

    public BanyanDBTopNRecordsQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Query the top/bottom N sampled records within the given duration.
     *
     * @param condition       topology of the query: model name, optional parent service, order, N
     * @param valueColumnName the measure field holding the sampled value
     * @throws IOException when the model or its value column is not registered
     */
    @Override
    public List<SelectedRecord> readSampledRecords(TopNCondition condition, String valueColumnName, Duration duration) throws IOException {
        final String modelName = condition.getName();
        final TimestampRange timestampRange = new TimestampRange(duration.getStartTimestamp(), duration.getEndTimestamp());
        MeasureQueryResponse resp = query(modelName, TAGS,
                Collections.singleton(valueColumnName), timestampRange, new QueryBuilder<MeasureQuery>() {
                    @Override
                    protected void apply(MeasureQuery query) {
                        // optionally narrow to a single service
                        if (StringUtil.isNotEmpty(condition.getParentService())) {
                            final String serviceId =
                                    IDManager.ServiceID.buildId(condition.getParentService(), condition.isNormal());
                            query.and(eq(TopN.SERVICE_ID, serviceId));
                        }
                        // DES -> server-side topN; otherwise bottomN
                        if (condition.getOrder() == Order.DES) {
                            query.topN(condition.getTopN(), valueColumnName);
                        } else {
                            query.bottomN(condition.getTopN(), valueColumnName);
                        }
                        // restrict to the requested time-bucket window
                        query.and(gte(TopN.TIME_BUCKET, duration.getStartTimeBucketInSec()));
                        query.and(lte(TopN.TIME_BUCKET, duration.getEndTimeBucketInSec()));
                    }
                });
        if (resp.size() == 0) {
            return Collections.emptyList();
        }
        // the schema carries the value column's declared Java type, needed to decode the field
        MetadataRegistry.Schema schema = MetadataRegistry.INSTANCE.findMetadata(modelName);
        if (schema == null) {
            throw new IOException("schema is not registered");
        }
        MetadataRegistry.ColumnSpec spec = schema.getSpec(valueColumnName);
        if (spec == null) {
            throw new IOException("field spec is not registered");
        }
        List<SelectedRecord> results = new ArrayList<>(condition.getTopN());
        for (final DataPoint dataPoint : resp.getDataPoints()) {
            SelectedRecord record = new SelectedRecord();
            record.setName(dataPoint.getTagValue(TopN.STATEMENT));
            record.setRefId(dataPoint.getTagValue(TopN.TRACE_ID));
            record.setId(dataPoint.getTagValue(Metrics.ENTITY_ID));
            record.setValue(extractFieldValueAsString(spec, valueColumnName, dataPoint));
            results.add(record);
        }
        return results;
    }

    // Decode the field value to a string according to the column's declared Java type.
    // NOTE(review): double values are truncated to a long before formatting — the
    // fractional part is intentionally dropped? Confirm.
    private String extractFieldValueAsString(MetadataRegistry.ColumnSpec spec, String fieldName, DataPoint dataPoint) throws IOException {
        if (double.class.equals(spec.getColumnClass())) {
            // doubles are stored as binary in BanyanDB; decode then truncate
            return String.valueOf(ByteUtil.bytes2Double(dataPoint.getFieldValue(fieldName)).longValue());
        } else if (String.class.equals(spec.getColumnClass())) {
            return dataPoint.getFieldValue(fieldName);
        } else {
            return String.valueOf(((Number) dataPoint.getFieldValue(fieldName)).longValue());
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.banyandb.v1.client.TagAndValue;
import org.apache.skywalking.banyandb.v1.client.metadata.Property;
import org.apache.skywalking.oap.server.core.management.ui.template.UITemplate;
import org.apache.skywalking.oap.server.core.query.input.DashboardSetting;
import org.apache.skywalking.oap.server.core.query.type.DashboardConfiguration;
import org.apache.skywalking.oap.server.core.query.type.TemplateChangeStatus;
import org.apache.skywalking.oap.server.core.storage.management.UITemplateManagementDAO;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
@Slf4j
public class BanyanDBUITemplateManagementDAO extends AbstractBanyanDBDAO implements UITemplateManagementDAO {
private static final String GROUP = "sw";
public BanyanDBUITemplateManagementDAO(BanyanDBStorageClient client) {
super(client);
}
@Override
public DashboardConfiguration getTemplate(String id) throws IOException {
Property p = getClient().queryProperty(GROUP, UITemplate.INDEX_NAME, id);
if (p == null) {
return null;
}
return fromEntity(parse(p));
}
@Override
public List<DashboardConfiguration> getAllTemplates(Boolean includingDisabled) throws IOException {
List<Property> propertyList = getClient().listProperties(GROUP, UITemplate.INDEX_NAME);
return propertyList.stream().map(p -> fromEntity(parse(p)))
.filter(conf -> includingDisabled || !conf.isDisabled())
.collect(Collectors.toList());
}
@Override
public TemplateChangeStatus addTemplate(DashboardSetting setting) {
Property newTemplate = convert(setting.toEntity());
try {
this.getClient().define(newTemplate);
return TemplateChangeStatus.builder()
.status(true)
.id(newTemplate.id())
.build();
} catch (IOException ioEx) {
log.error("fail to add new template", ioEx);
return TemplateChangeStatus.builder().status(false).id(setting.getId()).message("Can't add a new template")
.build();
}
}
@Override
public TemplateChangeStatus changeTemplate(DashboardSetting setting) {
Property newTemplate = convert(setting.toEntity());
try {
this.getClient().define(newTemplate);
return TemplateChangeStatus.builder()
.status(true)
.id(newTemplate.id())
.build();
} catch (IOException ioEx) {
log.error("fail to modify the template", ioEx);
return TemplateChangeStatus.builder().status(false).id(setting.getId()).message("Can't change an existed template")
.build();
}
}
@Override
public TemplateChangeStatus disableTemplate(String id) throws IOException {
Property oldProperty = this.getClient().queryProperty(GROUP, UITemplate.INDEX_NAME, id);
if (oldProperty == null) {
return TemplateChangeStatus.builder().status(false).id(id).message("Can't find the template")
.build();
}
UITemplate uiTemplate = parse(oldProperty);
uiTemplate.setDisabled(BooleanUtils.FALSE);
try {
this.getClient().define(convert(uiTemplate));
return TemplateChangeStatus.builder()
.status(true)
.id(uiTemplate.id())
.build();
} catch (IOException ioEx) {
log.error("fail to disable the template", ioEx);
return TemplateChangeStatus.builder().status(false).id(uiTemplate.id()).message("Can't disable the template")
.build();
}
}
public DashboardConfiguration fromEntity(UITemplate uiTemplate) {
DashboardConfiguration conf = new DashboardConfiguration();
conf.fromEntity(uiTemplate);
return conf;
}
public UITemplate parse(Property property) {
UITemplate uiTemplate = new UITemplate();
uiTemplate.setTemplateId(property.id());
for (TagAndValue<?> tagAndValue : property.tags()) {
if (tagAndValue.getTagName().equals(UITemplate.CONFIGURATION)) {
uiTemplate.setConfiguration((String) tagAndValue.getValue());
} else if (tagAndValue.getTagName().equals(UITemplate.DISABLED)) {
uiTemplate.setDisabled(((Number) tagAndValue.getValue()).intValue());
} else if (tagAndValue.getTagName().equals(UITemplate.UPDATE_TIME)) {
uiTemplate.setUpdateTime(((Number) tagAndValue.getValue()).longValue());
}
}
return uiTemplate;
}
public Property convert(UITemplate uiTemplate) {
return Property.create(GROUP, UITemplate.INDEX_NAME, uiTemplate.id())
.addTag(TagAndValue.newStringTag(UITemplate.CONFIGURATION, uiTemplate.getConfiguration()))
.addTag(TagAndValue.newLongTag(UITemplate.DISABLED, uiTemplate.getDisabled()))
.addTag(TagAndValue.newLongTag(UITemplate.UPDATE_TIME, uiTemplate.getUpdateTime()))
.build();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb;
import com.google.gson.JsonObject;
import io.grpc.Status;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.Singular;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.banyandb.v1.client.BanyanDBClient;
import org.apache.skywalking.banyandb.v1.client.grpc.exception.BanyanDBException;
import org.apache.skywalking.banyandb.v1.client.metadata.Catalog;
import org.apache.skywalking.banyandb.v1.client.metadata.Duration;
import org.apache.skywalking.banyandb.v1.client.metadata.Group;
import org.apache.skywalking.banyandb.v1.client.metadata.IndexRule;
import org.apache.skywalking.banyandb.v1.client.metadata.Measure;
import org.apache.skywalking.banyandb.v1.client.metadata.NamedSchema;
import org.apache.skywalking.banyandb.v1.client.metadata.Stream;
import org.apache.skywalking.banyandb.v1.client.metadata.TagFamilySpec;
import org.apache.skywalking.oap.server.core.alarm.AlarmRecord;
import org.apache.skywalking.oap.server.core.analysis.DownSampling;
import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.analysis.metrics.IntList;
import org.apache.skywalking.oap.server.core.storage.annotation.ValueColumnMetadata;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.storage.model.ModelColumn;
import org.apache.skywalking.oap.server.library.util.StringUtil;
import javax.annotation.Nullable;
import java.lang.reflect.ParameterizedType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
@Slf4j
public enum MetadataRegistry {
INSTANCE;
private final Map<String, Schema> registry = new HashMap<>();
/**
 * Register a SkyWalking {@link Model} and build the matching BanyanDB schema:
 * a {@link Stream} for record-like models, a {@link Measure} for metrics.
 * The parsed {@link Schema} is cached in {@link #registry} keyed by model name
 * for later lookup via {@link #findMetadata(String)}.
 *
 * @param model the OAP storage model to map
 * @return the BanyanDB schema to be defined on the server side
 * @throws IllegalStateException when a stream model declares no sharding keys
 */
public NamedSchema<?> registerModel(Model model) {
    final SchemaMetadata schemaMetadata = parseMetadata(model);
    Schema.SchemaBuilder schemaBuilder = Schema.builder().metadata(schemaMetadata);
    // index model columns by their storage name for the lookups below
    Map<String, ModelColumn> modelColumnMap = model.getColumns().stream()
            .collect(Collectors.toMap(modelColumn -> modelColumn.getColumnName().getStorageName(), Function.identity()));
    // parse and set sharding keys
    List<String> entities = parseEntityNames(modelColumnMap);
    // parse tag metadata
    // this can be used to build both
    // 1) a list of TagFamilySpec,
    // 2) a list of IndexRule,
    List<TagMetadata> tags = parseTagMetadata(model, schemaBuilder);
    List<TagFamilySpec> tagFamilySpecs = schemaMetadata.extractTagFamilySpec(tags);
    // iterate over tagFamilySpecs to save tag names into the cached Schema
    for (final TagFamilySpec tagFamilySpec : tagFamilySpecs) {
        for (final TagFamilySpec.TagSpec tagSpec : tagFamilySpec.tagSpecs()) {
            schemaBuilder.tag(tagSpec.getTagName());
        }
    }
    // only tags that carry an index rule contribute to the index list
    List<IndexRule> indexRules = tags.stream()
            .map(TagMetadata::getIndexRule)
            .filter(Objects::nonNull)
            .collect(Collectors.toList());

    if (schemaMetadata.getKind() == Kind.STREAM) {
        final Stream.Builder builder = Stream.create(schemaMetadata.getGroup(), schemaMetadata.getName());
        if (entities.isEmpty()) {
            // streams must be sharded by explicit entity tags
            throw new IllegalStateException("sharding keys of model[stream." + model.getName() + "] must not be empty");
        }
        builder.setEntityRelativeTags(entities);
        builder.addTagFamilies(tagFamilySpecs);
        builder.addIndexes(indexRules);
        registry.put(model.getName(), schemaBuilder.build());
        return builder.build();
    } else {
        final Measure.Builder builder = Measure.create(schemaMetadata.getGroup(), schemaMetadata.getName(),
                downSamplingDuration(model.getDownsampling()));
        if (entities.isEmpty()) { // if shardingKeys is empty, for measure, we can use ID as a single sharding key.
            builder.setEntityRelativeTags(Measure.ID);
        } else {
            builder.setEntityRelativeTags(entities);
        }
        builder.addTagFamilies(tagFamilySpecs);
        builder.addIndexes(indexRules);
        // parse and set field: the model's declared value column becomes the single measure field
        Optional<ValueColumnMetadata.ValueColumn> valueColumnOpt = ValueColumnMetadata.INSTANCE
                .readValueColumnDefinition(model.getName());
        valueColumnOpt.ifPresent(valueColumn -> builder.addField(parseFieldSpec(modelColumnMap.get(valueColumn.getValueCName()), valueColumn)));
        valueColumnOpt.ifPresent(valueColumn -> schemaBuilder.field(valueColumn.getValueCName()));
        registry.put(model.getName(), schemaBuilder.build());
        return builder.build();
    }
}
/**
 * Look up the cached schema previously built by {@link #registerModel(Model)}.
 *
 * @param name the model name used at registration time
 * @return the cached schema, or {@code null} when the model has not been registered
 */
public Schema findMetadata(final String name) {
    return registry.getOrDefault(name, null);
}
/**
 * Build the measure field spec for a model's value column based on its Java type.
 * long/int are int fields with Gorilla encoding; DataTable and double are binary blobs.
 *
 * @param modelColumn the column backing the value column
 * @param valueColumn the value column definition from the model
 * @throws UnsupportedOperationException when the column type is not supported as a field
 */
private Measure.FieldSpec parseFieldSpec(ModelColumn modelColumn, ValueColumnMetadata.ValueColumn valueColumn) {
    if (String.class.equals(modelColumn.getType())) {
        // NOTE(review): a String-typed value column is registered as an *int* field —
        // confirm this is intentional (e.g. the strings hold numeric values)
        return Measure.FieldSpec.newIntField(valueColumn.getValueCName())
                .compressWithZSTD()
                .build();
    } else if (long.class.equals(modelColumn.getType()) || int.class.equals(modelColumn.getType())) {
        return Measure.FieldSpec.newIntField(valueColumn.getValueCName())
                .compressWithZSTD()
                .encodeWithGorilla()
                .build();
    } else if (DataTable.class.equals(modelColumn.getType())) {
        return Measure.FieldSpec.newBinaryField(valueColumn.getValueCName())
                .compressWithZSTD()
                .build();
    } else if (double.class.equals(modelColumn.getType())) {
        // TODO: natively support double/float in BanyanDB
        log.warn("Double is stored as binary");
        return Measure.FieldSpec.newBinaryField(valueColumn.getValueCName())
                .compressWithZSTD()
                .build();
    } else {
        throw new UnsupportedOperationException(modelColumn.getType().getSimpleName() + " is not supported for field");
    }
}
Duration downSamplingDuration(DownSampling downSampling) {
switch (downSampling) {
case Hour:
return Duration.ofHours(1);
case Minute:
return Duration.ofMinutes(1);
case Day:
return Duration.ofDays(1);
default:
throw new UnsupportedOperationException("unsupported downSampling interval");
}
}
IndexRule parseIndexRule(String tagName, ModelColumn modelColumn) {
// TODO: we need to add support index type in the OAP core
// Currently, we only register INVERTED type
// if it is null, it must be a user-defined tag
if (modelColumn == null) {
return IndexRule.create(tagName, IndexRule.IndexType.INVERTED, IndexRule.IndexLocation.SERIES);
}
if (modelColumn.getBanyanDBExtension().isGlobalIndexing()) {
return IndexRule.create(tagName, IndexRule.IndexType.INVERTED, IndexRule.IndexLocation.GLOBAL);
} else {
return IndexRule.create(tagName, IndexRule.IndexType.INVERTED, IndexRule.IndexLocation.SERIES);
}
}
/**
* Parse sharding keys from the {@link Model}
*
* @param modelColumnMap the mapping between column storageName and {@link ModelColumn}
* @return a list of column names in strict order
*/
List<String> parseEntityNames(Map<String, ModelColumn> modelColumnMap) {
List<ModelColumn> shardingColumns = new ArrayList<>();
for (final ModelColumn col : modelColumnMap.values()) {
if (col.getBanyanDBExtension().isShardingKey()) {
shardingColumns.add(col);
}
}
return shardingColumns.stream()
.sorted(Comparator.comparingInt(col -> col.getBanyanDBExtension().getShardingKeyIdx()))
.map(col -> col.getColumnName().getName())
.collect(Collectors.toList());
}
    /**
     * Walk the model's columns and build tag metadata for everything that is not
     * the metric value column. Side effects: every column (tag or field) is also
     * registered in the given {@link Schema.SchemaBuilder}.
     *
     * @param model   the model to parse
     * @param builder schema builder receiving a {@link ColumnSpec} per column
     * @return tag metadata (tag spec plus optional index rule) for all tag columns
     */
    List<TagMetadata> parseTagMetadata(Model model, Schema.SchemaBuilder builder) {
        List<TagMetadata> tagMetadataList = new ArrayList<>();
        // skip metric: the value column becomes a FIELD, not a tag
        Optional<ValueColumnMetadata.ValueColumn> valueColumnOpt = ValueColumnMetadata.INSTANCE
                .readValueColumnDefinition(model.getName());
        for (final ModelColumn col : model.getColumns()) {
            if (valueColumnOpt.isPresent() && valueColumnOpt.get().getValueCName().equals(col.getColumnName().getStorageName())) {
                builder.spec(col.getColumnName().getStorageName(), new ColumnSpec(ColumnType.FIELD, col.getType()));
                continue;
            }
            final TagFamilySpec.TagSpec tagSpec = parseTagSpec(col);
            if (tagSpec == null) {
                // column type could not be mapped to a tag; skip it
                continue;
            }
            builder.spec(col.getColumnName().getStorageName(), new ColumnSpec(ColumnType.TAG, col.getType()));
            if (col.shouldIndex()) {
                // build indexRule for indexed tags
                IndexRule indexRule = parseIndexRule(tagSpec.getTagName(), col);
                tagMetadataList.add(new TagMetadata(indexRule, tagSpec));
            } else {
                // non-indexed tag: no index rule attached
                tagMetadataList.add(new TagMetadata(null, tagSpec));
            }
        }
        return tagMetadataList;
    }
/**
* Extract extra tags from Configuration.
* They are for tags defined for {@link SegmentRecord}, {@link LogRecord} and {@link AlarmRecord}.
*
* @param tags a series of tags joint by comma
* @return a list of {@link org.apache.skywalking.banyandb.v1.client.metadata.TagFamilySpec.TagSpec} generated from input
*/
private List<TagMetadata> parseExtraTagSpecs(String tags, Schema.SchemaBuilder builder) {
if (StringUtil.isEmpty(tags)) {
return Collections.emptyList();
}
String[] tagsArray = tags.split(",");
if (tagsArray.length == 0) {
return Collections.emptyList();
}
List<TagMetadata> extraTagMetadataList = new ArrayList<>();
for (final String tagName : tagsArray) {
builder.spec(tagName, new ColumnSpec(ColumnType.TAG, String.class));
extraTagMetadataList.add(new TagMetadata(parseIndexRule(tagName, null),
TagFamilySpec.TagSpec.newStringTag(tagName)));
}
return extraTagMetadataList;
}
    /**
     * Parse TagSpec from {@link ModelColumn}.
     * Mapping: String/DataTable/JsonObject -> string tag; int/long/enum -> int tag;
     * byte[] and double -> binary tag; IntList -> int-array tag; List&lt;String&gt; -> string-array tag.
     *
     * NOTE(review): despite the {@code @Nullable} annotation, no path in this method
     * actually returns {@code null} — unsupported types throw instead. Callers such as
     * parseTagMetadata still null-check the result; confirm which contract is intended.
     *
     * @param modelColumn the column in the model to be parsed
     * @return a typed tag spec
     * @throws IllegalStateException when the column type cannot be mapped to a tag
     */
    @Nullable
    private TagFamilySpec.TagSpec parseTagSpec(ModelColumn modelColumn) {
        final Class<?> clazz = modelColumn.getType();
        final String colName = modelColumn.getColumnName().getStorageName();
        if (String.class.equals(clazz) || DataTable.class.equals(clazz) || JsonObject.class.equals(clazz)) {
            return TagFamilySpec.TagSpec.newStringTag(colName);
        } else if (int.class.equals(clazz) || long.class.equals(clazz)) {
            return TagFamilySpec.TagSpec.newIntTag(colName);
        } else if (byte[].class.equals(clazz)) {
            return TagFamilySpec.TagSpec.newBinaryTag(colName);
        } else if (clazz.isEnum()) {
            // enums are persisted by ordinal/int value
            return TagFamilySpec.TagSpec.newIntTag(colName);
        } else if (double.class.equals(clazz) || Double.class.equals(clazz)) {
            // serialize double as binary
            return TagFamilySpec.TagSpec.newBinaryTag(colName);
        } else if (IntList.class.isAssignableFrom(clazz)) {
            return TagFamilySpec.TagSpec.newIntArrayTag(colName);
        } else if (List.class.isAssignableFrom(clazz)) { // handle exceptions
            // only List<String> is supported; inspect the generic element type
            ParameterizedType t = (ParameterizedType) modelColumn.getGenericType();
            if (String.class.equals(t.getActualTypeArguments()[0])) {
                return TagFamilySpec.TagSpec.newStringArrayTag(colName);
            }
        }
        throw new IllegalStateException("type " + modelColumn.getType().toString() + " is not supported");
    }
public SchemaMetadata parseMetadata(Model model) {
if (model.isRecord()) {
String group = "stream-default";
if (model.isSuperDataset()) {
// for superDataset, we should use separate group
group = "stream-" + model.getName();
}
return new SchemaMetadata(group, model.getName(), Kind.STREAM);
}
return new SchemaMetadata("measure-default", model.getName(), Kind.MEASURE);
}
    /**
     * Location of a schema in BanyanDB: its group, its name, and whether it is
     * registered as a STREAM or a MEASURE.
     */
    @RequiredArgsConstructor
    @Data
    public static class SchemaMetadata {
        private final String group;
        private final String name;
        private final Kind kind;

        /**
         * Fetch the remote schema definition from BanyanDB, if present.
         *
         * @return the remote stream/measure schema, or empty when the server reports NOT_FOUND
         * @throws BanyanDBException for any other server error
         */
        public Optional<NamedSchema<?>> findRemoteSchema(BanyanDBClient client) throws BanyanDBException {
            try {
                switch (kind) {
                    case STREAM:
                        return Optional.ofNullable(client.findStream(this.group, this.name));
                    case MEASURE:
                        return Optional.ofNullable(client.findMeasure(this.group, this.name));
                    default:
                        throw new IllegalStateException("should not reach here");
                }
            } catch (BanyanDBException ex) {
                // NOT_FOUND is expected when the schema has not been created yet
                if (ex.getStatus().equals(Status.Code.NOT_FOUND)) {
                    return Optional.empty();
                }
                throw ex;
            }
        }

        // Split tags into families: indexed tags go to the index family, the rest to
        // the storage-only family (family names depend on stream vs. measure).
        private List<TagFamilySpec> extractTagFamilySpec(List<TagMetadata> tagMetadataList) {
            Map<String, List<TagMetadata>> tagMetadataMap = tagMetadataList.stream()
                    .collect(Collectors.groupingBy(tagMetadata -> tagMetadata.isIndex() ? SchemaMetadata.this.indexFamily() : SchemaMetadata.this.nonIndexFamily()));
            final List<TagFamilySpec> tagFamilySpecs = new ArrayList<>(tagMetadataMap.size());
            for (final Map.Entry<String, List<TagMetadata>> entry : tagMetadataMap.entrySet()) {
                final TagFamilySpec.Builder b = TagFamilySpec.create(entry.getKey())
                        .addTagSpecs(entry.getValue().stream().map(TagMetadata::getTagSpec).collect(Collectors.toList()));
                if (this.getKind() == Kind.MEASURE && entry.getKey().equals(this.indexFamily())) {
                    // append measure ID, but it should not generate an index in the client side.
                    // BanyanDB will take care of the ID index registration.
                    b.addIDTagSpec();
                }
                tagFamilySpecs.add(b.build());
            }
            return tagFamilySpecs;
        }

        /**
         * Return the existing BanyanDB group, creating it on first use.
         * NOTE(review): shard count 2, block intervals (0 for stream / 12 for measure)
         * and a 7-day TTL are hard-coded here — presumably defaults to be made
         * configurable; confirm against the module config.
         */
        public Group getOrCreateGroup(BanyanDBClient client) throws BanyanDBException {
            Group g = client.findGroup(this.group);
            if (g != null) {
                return g;
            }
            switch (kind) {
                case STREAM:
                    return client.define(Group.create(this.group, Catalog.STREAM, 2, 0, Duration.ofDays(7)));
                case MEASURE:
                    return client.define(Group.create(this.group, Catalog.MEASURE, 2, 12, Duration.ofDays(7)));
                default:
                    throw new IllegalStateException("should not reach here");
            }
        }

        // Name of the tag family holding indexed tags.
        public String indexFamily() {
            switch (kind) {
                case MEASURE:
                    return "default";
                case STREAM:
                    return "searchable";
                default:
                    throw new IllegalStateException("should not reach here");
            }
        }

        // Name of the tag family holding non-indexed (storage-only) tags.
        public String nonIndexFamily() {
            switch (kind) {
                case MEASURE:
                case STREAM:
                    return "storage-only";
                default:
                    throw new IllegalStateException("should not reach here");
            }
        }
    }
public enum Kind {
MEASURE, STREAM;
}
@RequiredArgsConstructor
@Getter
private static class TagMetadata {
private final IndexRule indexRule;
private final TagFamilySpec.TagSpec tagSpec;
boolean isIndex() {
return this.indexRule != null;
}
}
    /**
     * In-memory description of a registered model: its schema location plus the
     * per-column specs, the tag names and the field names.
     */
    @Builder
    @EqualsAndHashCode
    public static class Schema {
        @Getter
        private final SchemaMetadata metadata;
        // column storage name -> column spec, populated while the model is parsed
        @Singular
        private final Map<String, ColumnSpec> specs;

        @Getter
        @Singular
        private final Set<String> tags;

        @Getter
        @Singular
        private final Set<String> fields;

        /** @return the spec for the given column storage name, or {@code null} if unknown */
        public ColumnSpec getSpec(String columnName) {
            return this.specs.get(columnName);
        }
    }
    /**
     * Type information for a single column: whether it was mapped to a BanyanDB
     * tag or field, and the original Java class of the column.
     */
    @RequiredArgsConstructor
    @Getter
    public static class ColumnSpec {
        private final ColumnType columnType;
        private final Class<?> columnClass;
    }
public enum ColumnType {
TAG, FIELD;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.AbstractQuery;
import org.apache.skywalking.banyandb.v1.client.DataPoint;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.oap.server.core.profiling.ebpf.storage.EBPFProfilingScheduleRecord;
import org.apache.skywalking.oap.server.core.query.type.EBPFProfilingSchedule;
import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IEBPFProfilingScheduleDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * BanyanDB implementation of {@link IEBPFProfilingScheduleDAO}: reads eBPF
 * profiling schedules from the schedule measure.
 */
public class BanyanDBEBPFProfilingScheduleQueryDAO extends AbstractBanyanDBDAO implements IEBPFProfilingScheduleDAO {
    // tag projection for the schedule measure
    private static final Set<String> TAGS = ImmutableSet.of(
            EBPFProfilingScheduleRecord.START_TIME,
            EBPFProfilingScheduleRecord.TASK_ID,
            EBPFProfilingScheduleRecord.PROCESS_ID,
            EBPFProfilingScheduleRecord.END_TIME);

    public BanyanDBEBPFProfilingScheduleQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Query all schedules belonging to the given task, most recent first.
     */
    @Override
    public List<EBPFProfilingSchedule> querySchedules(String taskId) throws IOException {
        final MeasureQueryResponse response = query(
                EBPFProfilingScheduleRecord.INDEX_NAME,
                TAGS,
                Collections.emptySet(), new QueryBuilder<MeasureQuery>() {
                    @Override
                    protected void apply(MeasureQuery query) {
                        query.and(eq(EBPFProfilingScheduleRecord.TASK_ID, taskId));
                        // newest schedules first
                        query.setOrderBy(new AbstractQuery.OrderBy(
                                EBPFProfilingScheduleRecord.START_TIME, AbstractQuery.Sort.DESC));
                    }
                });
        return response.getDataPoints()
                       .stream()
                       .map(dataPoint -> buildEBPFProfilingSchedule(dataPoint))
                       .collect(Collectors.toList());
    }

    // Map one data point to the query-type schedule object.
    private EBPFProfilingSchedule buildEBPFProfilingSchedule(DataPoint dataPoint) {
        final EBPFProfilingSchedule schedule = new EBPFProfilingSchedule();
        // the measure ID is used as the schedule ID
        schedule.setScheduleId(dataPoint.getId());
        schedule.setTaskId(dataPoint.getTagValue(EBPFProfilingScheduleRecord.TASK_ID));
        schedule.setProcessId(dataPoint.getTagValue(EBPFProfilingScheduleRecord.PROCESS_ID));
        schedule.setStartTime(((Number) dataPoint.getTagValue(EBPFProfilingScheduleRecord.START_TIME)).longValue());
        schedule.setEndTime(((Number) dataPoint.getTagValue(EBPFProfilingScheduleRecord.END_TIME)).longValue());
        return schedule;
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.DataPoint;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.oap.server.core.analysis.Layer;
import org.apache.skywalking.oap.server.core.query.PaginationUtils;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.Duration;
import org.apache.skywalking.oap.server.core.query.type.event.EventQueryCondition;
import org.apache.skywalking.oap.server.core.query.type.event.EventType;
import org.apache.skywalking.oap.server.core.query.type.event.Events;
import org.apache.skywalking.oap.server.core.query.type.event.Source;
import org.apache.skywalking.oap.server.core.source.Event;
import org.apache.skywalking.oap.server.core.storage.query.IEventQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
import static com.google.common.base.Strings.isNullOrEmpty;
import static java.util.Objects.isNull;
/**
 * BanyanDB implementation of {@link IEventQueryDAO}. Filtering is pushed to the
 * server; ordering and pagination are applied in-memory after fetching.
 */
public class BanyanDBEventQueryDAO extends AbstractBanyanDBDAO implements IEventQueryDAO {
    // all tags projected when reading the event measure
    private static final Set<String> TAGS = ImmutableSet.of(
            Event.UUID, Event.SERVICE, Event.SERVICE_INSTANCE, Event.ENDPOINT, Event.NAME,
            Event.MESSAGE, Event.TYPE, Event.START_TIME, Event.END_TIME, Event.PARAMETERS, Event.LAYER);

    public BanyanDBEventQueryDAO(final BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Query events matching a single condition. Every non-empty condition part
     * becomes an equality/range predicate; results are sorted and paginated
     * in-memory by {@link #sortEvents}.
     */
    @Override
    public Events queryEvents(EventQueryCondition condition) throws Exception {
        MeasureQueryResponse resp = query(Event.INDEX_NAME, TAGS,
                Collections.emptySet(), new QueryBuilder<MeasureQuery>() {
                    @Override
                    protected void apply(MeasureQuery query) {
                        if (!isNullOrEmpty(condition.getUuid())) {
                            query.and(eq(Event.UUID, condition.getUuid()));
                        }
                        final Source source = condition.getSource();
                        if (source != null) {
                            if (!isNullOrEmpty(source.getService())) {
                                query.and(eq(Event.SERVICE, source.getService()));
                            }
                            if (!isNullOrEmpty(source.getServiceInstance())) {
                                query.and(eq(Event.SERVICE_INSTANCE, source.getServiceInstance()));
                            }
                            if (!isNullOrEmpty(source.getEndpoint())) {
                                query.and(eq(Event.ENDPOINT, source.getEndpoint()));
                            }
                        }
                        if (!isNullOrEmpty(condition.getName())) {
                            query.and(eq(Event.NAME, condition.getName()));
                        }
                        if (condition.getType() != null) {
                            query.and(eq(Event.TYPE, condition.getType().name()));
                        }
                        // time window: start >= from and end <= to, each bound optional
                        final Duration startTime = condition.getTime();
                        if (startTime != null) {
                            if (startTime.getStartTimestamp() > 0) {
                                query.and(gte(Event.START_TIME, startTime.getStartTimestamp()));
                            }
                            if (startTime.getEndTimestamp() > 0) {
                                query.and(lte(Event.END_TIME, startTime.getEndTimestamp()));
                            }
                        }
                        if (!isNullOrEmpty(condition.getLayer())) {
                            query.and(eq(Event.LAYER, Layer.valueOf(condition.getLayer()).value()));
                        }
                    }
                });
        Events events = new Events();
        if (resp.size() == 0) {
            return events;
        }
        events.setTotal(resp.size());
        for (final DataPoint dataPoint : resp.getDataPoints()) {
            events.getEvents().add(buildEventView(dataPoint));
        }
        sortEvents(events, condition);
        return events;
    }

    /**
     * Query events for several conditions by issuing one query per condition and
     * concatenating the results.
     */
    @Override
    public Events queryEvents(List<EventQueryCondition> conditionList) throws Exception {
        Events totalEvents = new Events();
        for (final EventQueryCondition cond : conditionList) {
            final Events singleEvents = this.queryEvents(cond);
            totalEvents.getEvents().addAll(singleEvents.getEvents());
            // TODO: a simple sum but may not be accurate
            totalEvents.setTotal(totalEvents.getTotal() + singleEvents.getTotal());
        }
        return totalEvents;
    }

    // Map a single data point into the GraphQL-facing event view.
    protected org.apache.skywalking.oap.server.core.query.type.event.Event buildEventView(
            final DataPoint dataPoint) {
        final org.apache.skywalking.oap.server.core.query.type.event.Event event =
                new org.apache.skywalking.oap.server.core.query.type.event.Event();
        event.setUuid(dataPoint.getTagValue(Event.UUID));
        String service = dataPoint.getTagValue(Event.SERVICE);
        String serviceInstance = dataPoint.getTagValue(Event.SERVICE_INSTANCE);
        String endpoint = dataPoint.getTagValue(Event.ENDPOINT);
        event.setSource(new Source(service, serviceInstance, endpoint));
        event.setName(dataPoint.getTagValue(Event.NAME));
        event.setType(EventType.parse(dataPoint.getTagValue(Event.TYPE)));
        event.setMessage(dataPoint.getTagValue(Event.MESSAGE));
        event.setParameters((String) dataPoint.getTagValue(Event.PARAMETERS));
        event.setStartTime(dataPoint.getTagValue(Event.START_TIME));
        event.setEndTime(dataPoint.getTagValue(Event.END_TIME));
        // layer is stored as an int tag; translate back to the layer name
        event.setLayer(Layer.valueOf(((Number) dataPoint.getTagValue(Event.LAYER)).intValue()).name());
        return event;
    }

    // In-memory sort (by start time, DESC by default) plus pagination.
    private void sortEvents(Events events, EventQueryCondition condition) {
        if (events.getEvents().isEmpty()) {
            return;
        }
        final Comparator<org.apache.skywalking.oap.server.core.query.type.event.Event> c =
                buildComparator(isNull(condition.getOrder()) ? Order.DES : condition.getOrder());
        final PaginationUtils.Page page = PaginationUtils.INSTANCE.exchange(condition.getPaging());
        events.setEvents(
                events.getEvents()
                        .stream()
                        .sorted(c)
                        .skip(page.getFrom())
                        .limit(page.getLimit())
                        .collect(Collectors.toList())
        );
    }

    // Comparator on event start time; reversed for descending order.
    private Comparator<org.apache.skywalking.oap.server.core.query.type.event.Event> buildComparator(Order queryOrder) {
        Comparator<org.apache.skywalking.oap.server.core.query.type.event.Event> c = Comparator.comparingLong(org.apache.skywalking.oap.server.core.query.type.event.Event::getStartTime);
        if (queryOrder == Order.DES) {
            c = c.reversed();
        }
        return c;
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.apache.skywalking.banyandb.v1.client.MeasureWrite;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
/**
 * Wraps a BanyanDB {@link MeasureWrite} so it can flow through the OAP batch
 * pipeline as an {@link InsertRequest}.
 */
@RequiredArgsConstructor
@Getter
public class BanyanDBMeasureInsertRequest implements InsertRequest {
    private final MeasureWrite measureWrite;
}
\ No newline at end of file
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.apache.skywalking.banyandb.v1.client.MeasureWrite;
import org.apache.skywalking.oap.server.library.client.request.UpdateRequest;
/**
 * Wraps a BanyanDB {@link MeasureWrite} so it can flow through the OAP batch
 * pipeline as an {@link UpdateRequest}.
 */
@RequiredArgsConstructor
@Getter
public class BanyanDBMeasureUpdateRequest implements UpdateRequest {
    private final MeasureWrite measureWrite;
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import org.apache.skywalking.banyandb.v1.client.DataPoint;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.Layer;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.manual.endpoint.EndpointTraffic;
import org.apache.skywalking.oap.server.core.analysis.manual.instance.InstanceTraffic;
import org.apache.skywalking.oap.server.core.analysis.manual.process.ProcessDetectType;
import org.apache.skywalking.oap.server.core.analysis.manual.process.ProcessTraffic;
import org.apache.skywalking.oap.server.core.analysis.manual.service.ServiceTraffic;
import org.apache.skywalking.oap.server.core.query.enumeration.Language;
import org.apache.skywalking.oap.server.core.query.enumeration.ProfilingSupportStatus;
import org.apache.skywalking.oap.server.core.query.type.Attribute;
import org.apache.skywalking.oap.server.core.query.type.Endpoint;
import org.apache.skywalking.oap.server.core.query.type.Process;
import org.apache.skywalking.oap.server.core.query.type.Service;
import org.apache.skywalking.oap.server.core.query.type.ServiceInstance;
import org.apache.skywalking.oap.server.core.storage.query.IMetadataQueryDAO;
import org.apache.skywalking.oap.server.library.util.StringUtil;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.apache.skywalking.oap.server.core.analysis.manual.instance.InstanceTraffic.PropertyUtil.LANGUAGE;
/**
 * BanyanDB implementation of {@link IMetadataQueryDAO}: reads service, instance,
 * endpoint and process metadata from their traffic measures.
 */
public class BanyanDBMetadataQueryDAO extends AbstractBanyanDBDAO implements IMetadataQueryDAO {
    // tag projections for each traffic measure; *_COMPACT_* variants are the
    // reduced sets used for single-entity lookups by ID
    private static final Set<String> SERVICE_TRAFFIC_TAGS = ImmutableSet.of(ServiceTraffic.NAME,
            ServiceTraffic.SHORT_NAME, ServiceTraffic.GROUP, ServiceTraffic.LAYER, ServiceTraffic.SERVICE_ID);

    private static final Set<String> INSTANCE_TRAFFIC_TAGS = ImmutableSet.of(InstanceTraffic.NAME,
            InstanceTraffic.PROPERTIES, InstanceTraffic.LAST_PING_TIME_BUCKET, InstanceTraffic.SERVICE_ID);

    private static final Set<String> INSTANCE_TRAFFIC_COMPACT_TAGS = ImmutableSet.of(InstanceTraffic.NAME,
            InstanceTraffic.PROPERTIES);

    private static final Set<String> ENDPOINT_TRAFFIC_TAGS = ImmutableSet.of(EndpointTraffic.NAME,
            EndpointTraffic.SERVICE_ID);

    private static final Set<String> PROCESS_TRAFFIC_TAGS = ImmutableSet.of(ProcessTraffic.NAME,
            ProcessTraffic.SERVICE_ID, ProcessTraffic.INSTANCE_ID, ProcessTraffic.AGENT_ID, ProcessTraffic.DETECT_TYPE,
            ProcessTraffic.PROPERTIES, ProcessTraffic.LABELS_JSON, ProcessTraffic.LAST_PING_TIME_BUCKET,
            ProcessTraffic.PROFILING_SUPPORT_STATUS);

    private static final Set<String> PROCESS_TRAFFIC_COMPACT_TAGS = ImmutableSet.of(ProcessTraffic.NAME,
            ProcessTraffic.SERVICE_ID, ProcessTraffic.INSTANCE_ID, ProcessTraffic.AGENT_ID, ProcessTraffic.DETECT_TYPE,
            ProcessTraffic.PROPERTIES, ProcessTraffic.LABELS_JSON);

    // Gson is thread-safe; shared for parsing the JSON properties tags
    private static final Gson GSON = new Gson();

    public BanyanDBMetadataQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }
@Override
public List<Service> listServices(String layer, String group) throws IOException {
MeasureQueryResponse resp = query(ServiceTraffic.INDEX_NAME,
SERVICE_TRAFFIC_TAGS,
Collections.emptySet(), new QueryBuilder<MeasureQuery>() {
@Override
protected void apply(MeasureQuery query) {
if (StringUtil.isNotEmpty(group)) {
query.and(eq(ServiceTraffic.GROUP, group));
}
if (StringUtil.isNotEmpty(layer)) {
query.and(eq(ServiceTraffic.LAYER, Layer.valueOf(layer).value()));
}
}
});
final List<Service> services = new ArrayList<>();
for (final DataPoint dataPoint : resp.getDataPoints()) {
services.add(buildService(dataPoint));
}
return services;
}
@Override
public List<Service> getServices(String serviceId) throws IOException {
MeasureQueryResponse resp = query(ServiceTraffic.INDEX_NAME,
SERVICE_TRAFFIC_TAGS,
Collections.emptySet(), new QueryBuilder<MeasureQuery>() {
@Override
protected void apply(MeasureQuery query) {
if (StringUtil.isNotEmpty(serviceId)) {
query.and(eq(ServiceTraffic.SERVICE_ID, serviceId));
}
}
});
final List<Service> services = new ArrayList<>();
for (final DataPoint dataPoint : resp.getDataPoints()) {
services.add(buildService(dataPoint));
}
return services;
}
    /**
     * List instances of a service that pinged after the window's start.
     *
     * NOTE(review): {@code endTimestamp} is not used — only a lower bound on
     * LAST_PING_TIME_BUCKET is applied. TODO confirm this is intended.
     */
    @Override
    public List<ServiceInstance> listInstances(long startTimestamp, long endTimestamp, String serviceId) throws IOException {
        MeasureQueryResponse resp = query(InstanceTraffic.INDEX_NAME,
                INSTANCE_TRAFFIC_TAGS,
                Collections.emptySet(),
                new QueryBuilder<MeasureQuery>() {
                    @Override
                    protected void apply(MeasureQuery query) {
                        if (StringUtil.isNotEmpty(serviceId)) {
                            query.and(eq(InstanceTraffic.SERVICE_ID, serviceId));
                        }
                        // instances are considered alive if they pinged after the window start
                        final long minuteTimeBucket = TimeBucket.getMinuteTimeBucket(startTimestamp);
                        query.and(gte(InstanceTraffic.LAST_PING_TIME_BUCKET, minuteTimeBucket));
                    }
                });
        final List<ServiceInstance> instances = new ArrayList<>();
        for (final DataPoint dataPoint : resp.getDataPoints()) {
            instances.add(buildInstance(dataPoint));
        }
        return instances;
    }
@Override
public ServiceInstance getInstance(String instanceId) throws IOException {
MeasureQueryResponse resp = query(InstanceTraffic.INDEX_NAME,
INSTANCE_TRAFFIC_COMPACT_TAGS,
Collections.emptySet(),
new QueryBuilder<MeasureQuery>() {
@Override
protected void apply(MeasureQuery query) {
if (StringUtil.isNotEmpty(instanceId)) {
query.andWithID(instanceId);
}
}
});
return resp.size() > 0 ? buildInstance(resp.getDataPoints().get(0)) : null;
}
@Override
public List<Endpoint> findEndpoint(String keyword, String serviceId, int limit) throws IOException {
MeasureQueryResponse resp = query(EndpointTraffic.INDEX_NAME,
ENDPOINT_TRAFFIC_TAGS,
Collections.emptySet(),
new QueryBuilder<MeasureQuery>() {
@Override
protected void apply(MeasureQuery query) {
if (StringUtil.isNotEmpty(serviceId)) {
query.and(eq(EndpointTraffic.SERVICE_ID, serviceId));
}
}
});
final List<Endpoint> endpoints = new ArrayList<>();
for (final DataPoint dataPoint : resp.getDataPoints()) {
endpoints.add(buildEndpoint(dataPoint));
}
if (StringUtil.isNotEmpty(serviceId)) {
return endpoints.stream().filter(e -> e.getName().contains(keyword)).collect(Collectors.toList());
}
return endpoints;
}
    /**
     * List processes matching the optional service/instance/agent filters, the
     * optional last-ping window, and the optional profiling-support status.
     * All filters are ANDed; empty/zero values are skipped.
     */
    @Override
    public List<Process> listProcesses(String serviceId, String instanceId, String agentId, ProfilingSupportStatus profilingSupportStatus, long lastPingStartTimeBucket, long lastPingEndTimeBucket) throws IOException {
        MeasureQueryResponse resp = query(ProcessTraffic.INDEX_NAME,
                PROCESS_TRAFFIC_TAGS,
                Collections.emptySet(),
                new QueryBuilder<MeasureQuery>() {
                    @Override
                    protected void apply(MeasureQuery query) {
                        if (StringUtil.isNotEmpty(serviceId)) {
                            query.and(eq(ProcessTraffic.SERVICE_ID, serviceId));
                        }
                        if (StringUtil.isNotEmpty(instanceId)) {
                            query.and(eq(ProcessTraffic.INSTANCE_ID, instanceId));
                        }
                        if (StringUtil.isNotEmpty(agentId)) {
                            query.and(eq(ProcessTraffic.AGENT_ID, agentId));
                        }
                        // last-ping window bounds are each optional
                        if (lastPingStartTimeBucket > 0) {
                            query.and(gte(ProcessTraffic.LAST_PING_TIME_BUCKET, lastPingStartTimeBucket));
                        }
                        if (lastPingEndTimeBucket > 0) {
                            query.and(lte(ProcessTraffic.LAST_PING_TIME_BUCKET, lastPingEndTimeBucket));
                        }
                        if (profilingSupportStatus != null) {
                            query.and(eq(ProcessTraffic.PROFILING_SUPPORT_STATUS, profilingSupportStatus.value()));
                        }
                    }
                });
        final List<Process> processes = new ArrayList<>();
        for (final DataPoint dataPoint : resp.getDataPoints()) {
            processes.add(buildProcess(dataPoint));
        }
        return processes;
    }
@Override
public long getProcessesCount(String serviceId, String instanceId, String agentId, ProfilingSupportStatus profilingSupportStatus, long lastPingStartTimeBucket, long lastPingEndTimeBucket) throws IOException {
MeasureQueryResponse resp = query(ProcessTraffic.INDEX_NAME,
PROCESS_TRAFFIC_TAGS,
Collections.emptySet(),
new QueryBuilder<MeasureQuery>() {
@Override
protected void apply(MeasureQuery query) {
if (StringUtil.isNotEmpty(serviceId)) {
query.and(eq(ProcessTraffic.SERVICE_ID, serviceId));
}
if (StringUtil.isNotEmpty(instanceId)) {
query.and(eq(ProcessTraffic.INSTANCE_ID, instanceId));
}
if (StringUtil.isNotEmpty(agentId)) {
query.and(eq(ProcessTraffic.AGENT_ID, instanceId));
}
if (lastPingStartTimeBucket > 0) {
query.and(gte(ProcessTraffic.LAST_PING_TIME_BUCKET, lastPingStartTimeBucket));
}
if (lastPingEndTimeBucket > 0) {
query.and(lte(ProcessTraffic.LAST_PING_TIME_BUCKET, lastPingEndTimeBucket));
}
if (profilingSupportStatus != null) {
query.and(eq(ProcessTraffic.PROFILING_SUPPORT_STATUS, profilingSupportStatus.value()));
}
}
});
return resp.getDataPoints()
.stream()
.collect(Collectors.groupingBy((Function<DataPoint, String>) dataPoint -> dataPoint.getTagValue(ProcessTraffic.PROPERTIES)))
.size();
}
@Override
public Process getProcess(String processId) throws IOException {
MeasureQueryResponse resp = query(ProcessTraffic.INDEX_NAME,
PROCESS_TRAFFIC_COMPACT_TAGS,
Collections.emptySet(),
new QueryBuilder<MeasureQuery>() {
@Override
protected void apply(MeasureQuery query) {
if (StringUtil.isNotEmpty(processId)) {
query.andWithID(processId);
}
}
});
return resp.size() > 0 ? buildProcess(resp.getDataPoints().get(0)) : null;
}
private Service buildService(DataPoint dataPoint) {
Service service = new Service();
service.setId(dataPoint.getTagValue(ServiceTraffic.SERVICE_ID));
service.setName(dataPoint.getTagValue(ServiceTraffic.NAME));
service.setShortName(dataPoint.getTagValue(ServiceTraffic.SHORT_NAME));
service.setGroup(dataPoint.getTagValue(ServiceTraffic.GROUP));
service.getLayers().add(Layer.valueOf(((Number) dataPoint.getTagValue(ServiceTraffic.LAYER)).intValue()).name());
return service;
}
/**
 * Converts an InstanceTraffic data point into a {@link ServiceInstance}.
 *
 * <p>The JSON properties tag is unpacked into attributes; the special
 * {@code language} key sets the instance language. An instance with no
 * parseable properties is reported as {@link Language#UNKNOWN}.
 */
private ServiceInstance buildInstance(DataPoint dataPoint) {
    final ServiceInstance instance = new ServiceInstance();
    instance.setId(dataPoint.getId());
    instance.setName(dataPoint.getTagValue(InstanceTraffic.NAME));
    instance.setInstanceUUID(dataPoint.getId());

    final String propString = dataPoint.getTagValue(InstanceTraffic.PROPERTIES);
    JsonObject properties = null;
    if (StringUtil.isNotEmpty(propString)) {
        properties = GSON.fromJson(propString, JsonObject.class);
    }
    // Early return keeps the happy path flat.
    if (properties == null) {
        instance.setLanguage(Language.UNKNOWN);
        return instance;
    }
    for (Map.Entry<String, JsonElement> property : properties.entrySet()) {
        final String key = property.getKey();
        final String value = property.getValue().getAsString();
        if (LANGUAGE.equals(key)) {
            instance.setLanguage(Language.value(value));
        } else {
            instance.getAttributes().add(new Attribute(key, value));
        }
    }
    return instance;
}
/**
 * Converts an EndpointTraffic data point into an {@link Endpoint}.
 */
private Endpoint buildEndpoint(DataPoint dataPoint) {
    final Endpoint entity = new Endpoint();
    entity.setId(dataPoint.getId());
    entity.setName(dataPoint.getTagValue(EndpointTraffic.NAME));
    return entity;
}
/**
 * Converts a ProcessTraffic data point into a {@link Process}, resolving the
 * human-readable service/instance names from their encoded IDs and unpacking
 * the JSON-encoded properties and labels tags.
 */
private Process buildProcess(DataPoint dataPoint) {
    final Process process = new Process();
    process.setId(dataPoint.getId());
    process.setName(dataPoint.getTagValue(ProcessTraffic.NAME));

    final String serviceId = dataPoint.getTagValue(ProcessTraffic.SERVICE_ID);
    process.setServiceId(serviceId);
    process.setServiceName(IDManager.ServiceID.analysisId(serviceId).getName());

    final String instanceId = dataPoint.getTagValue(ProcessTraffic.INSTANCE_ID);
    process.setInstanceId(instanceId);
    process.setInstanceName(IDManager.ServiceInstanceID.analysisId(instanceId).getName());

    process.setAgentId(dataPoint.getTagValue(ProcessTraffic.AGENT_ID));
    // The detect type is stored numerically; map it back to the enum name.
    final Number detectType = dataPoint.getTagValue(ProcessTraffic.DETECT_TYPE);
    process.setDetectType(ProcessDetectType.valueOf(detectType.intValue()).name());

    // Properties are stored as a JSON object: every entry becomes an attribute.
    final String propString = dataPoint.getTagValue(ProcessTraffic.PROPERTIES);
    if (!Strings.isNullOrEmpty(propString)) {
        final JsonObject properties = GSON.fromJson(propString, JsonObject.class);
        for (Map.Entry<String, JsonElement> property : properties.entrySet()) {
            process.getAttributes().add(new Attribute(property.getKey(), property.getValue().getAsString()));
        }
    }
    // Labels are stored as a JSON string array.
    final String labelJson = dataPoint.getTagValue(ProcessTraffic.LABELS_JSON);
    if (!Strings.isNullOrEmpty(labelJson)) {
        final List<String> labels = GSON.<List<String>>fromJson(labelJson, ArrayList.class);
        process.getLabels().addAll(labels);
    }
    return process;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.banyandb.v1.client.DataPoint;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.banyandb.v1.client.MeasureWrite;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
import org.apache.skywalking.oap.server.library.client.request.UpdateRequest;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBConverter;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.MetadataRegistry;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@Slf4j
public class BanyanDBMetricsDAO extends AbstractBanyanDBDAO implements IMetricsDAO {
    private final StorageBuilder<Metrics> storageBuilder;

    public BanyanDBMetricsDAO(BanyanDBStorageClient client, StorageBuilder<Metrics> storageBuilder) {
        super(client);
        this.storageBuilder = storageBuilder;
    }

    /**
     * Loads the stored rows for the given (usually cache-missed) metrics.
     *
     * <p>Issues one ID-based query per metric; metrics with no stored row are
     * simply absent from the result.
     *
     * @throws IOException when the model has no registered BanyanDB schema
     */
    @Override
    public List<Metrics> multiGet(Model model, List<Metrics> metrics) throws IOException {
        log.info("multiGet {} from BanyanDB", model.getName());
        MetadataRegistry.Schema schema = findSchema(model);
        // TODO: add time range
        List<Metrics> metricsInStorage = new ArrayList<>(metrics.size());
        for (final Metrics missCachedMetric : metrics) {
            MeasureQueryResponse resp = query(model.getName(), schema.getTags(), schema.getFields(), new QueryBuilder<MeasureQuery>() {
                @Override
                protected void apply(MeasureQuery query) {
                    query.andWithID(missCachedMetric.id());
                }
            });
            if (resp.size() == 0) {
                continue;
            }
            for (final DataPoint dataPoint : resp.getDataPoints()) {
                metricsInStorage.add(storageBuilder.storage2Entity(new BanyanDBConverter.StorageToMeasure(model.getName(), dataPoint)));
            }
        }
        return metricsInStorage;
    }

    /** Builds an insert request carrying a fully populated measure write. */
    @Override
    public InsertRequest prepareBatchInsert(Model model, Metrics metrics) throws IOException {
        log.info("prepare to insert {}", model.getName());
        return new BanyanDBMeasureInsertRequest(buildMeasureWrite(model, metrics));
    }

    /** Builds an update request carrying a fully populated measure write. */
    @Override
    public UpdateRequest prepareBatchUpdate(Model model, Metrics metrics) throws IOException {
        log.info("prepare to update {}", model.getName());
        return new BanyanDBMeasureUpdateRequest(buildMeasureWrite(model, metrics));
    }

    /** Resolves the registered schema for the model, or fails with IOException. */
    private MetadataRegistry.Schema findSchema(Model model) throws IOException {
        MetadataRegistry.Schema schema = MetadataRegistry.INSTANCE.findMetadata(model.getName());
        if (schema == null) {
            throw new IOException(model.getName() + " is not registered");
        }
        return schema;
    }

    /**
     * Shared by insert and update: converts the metrics entity into a
     * {@link MeasureWrite} keyed by group, measure name, and the timestamp
     * derived from the metric's time bucket and the model's downsampling.
     * (Previously this logic was duplicated verbatim in both prepare methods.)
     */
    private MeasureWrite buildMeasureWrite(Model model, Metrics metrics) throws IOException {
        final MetadataRegistry.Schema schema = findSchema(model);
        final MeasureWrite measureWrite = new MeasureWrite(schema.getMetadata().getGroup(), // group name
                model.getName(), // index-name
                TimeBucket.getTimestamp(metrics.getTimeBucket(), model.getDownsampling())); // timestamp
        final BanyanDBConverter.MeasureToStorage toStorage = new BanyanDBConverter.MeasureToStorage(schema, measureWrite);
        storageBuilder.entity2Storage(metrics, toStorage);
        toStorage.acceptID(metrics.id());
        return toStorage.obtain();
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.DataPoint;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.banyandb.v1.client.TimestampRange;
import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
import org.apache.skywalking.oap.server.core.analysis.metrics.HistogramMetrics;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.query.PointOfTime;
import org.apache.skywalking.oap.server.core.query.input.Duration;
import org.apache.skywalking.oap.server.core.query.input.MetricsCondition;
import org.apache.skywalking.oap.server.core.query.sql.Function;
import org.apache.skywalking.oap.server.core.query.type.HeatMap;
import org.apache.skywalking.oap.server.core.query.type.IntValues;
import org.apache.skywalking.oap.server.core.query.type.KVInt;
import org.apache.skywalking.oap.server.core.query.type.MetricsValues;
import org.apache.skywalking.oap.server.core.storage.annotation.ValueColumnMetadata;
import org.apache.skywalking.oap.server.core.storage.query.IMetricsQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.MetadataRegistry;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.util.ByteUtil;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Metrics read path backed by BanyanDB measures: single aggregated values,
 * per-time-bucket series, labeled series, and heat maps.
 *
 * <p>All series-style reads go through {@link #queryIDs}, which issues one
 * ID-based query per time bucket.
 */
public class BanyanDBMetricsQueryDAO extends AbstractBanyanDBDAO implements IMetricsQueryDAO {
    public BanyanDBMetricsQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Reads one aggregated value for the condition over the duration.
     * Returns the column's registered default when nothing matches.
     */
    @Override
    public long readMetricsValue(MetricsCondition condition, String valueColumnName, Duration duration) throws IOException {
        int defaultValue = ValueColumnMetadata.INSTANCE.getDefaultValue(condition.getName());
        Function function = ValueColumnMetadata.INSTANCE.getValueFunction(condition.getName());
        // "Latest" is not aggregated here; delegate to the per-bucket series
        // and take its most recent value.
        if (function == Function.Latest) {
            return readMetricsValues(condition, valueColumnName, duration)
                .getValues().latestValue(defaultValue);
        }
        final String modelName = condition.getName();
        TimestampRange timestampRange = new TimestampRange(duration.getStartTimestamp(), duration.getEndTimestamp());
        final MeasureQueryResponse resp = query(modelName,
                ImmutableSet.of(Metrics.ENTITY_ID),
                ImmutableSet.of(valueColumnName),
                timestampRange,
                new QueryBuilder<MeasureQuery>() {
                    @Override
                    protected void apply(MeasureQuery query) {
                        buildAggregationQuery(query, valueColumnName, function);
                        if (condition.getEntity().buildId() != null) {
                            query.and(eq(Metrics.ENTITY_ID, condition.getEntity().buildId()));
                        }
                    }
                });
        // Only the first data point of the aggregated response is consumed.
        // NOTE(review): unlike extractFieldValue below, this cast does not
        // handle double-typed fields — confirm aggregated columns are integral.
        for (DataPoint dataPoint : resp.getDataPoints()) {
            return ((Number) dataPoint.getFieldValue(valueColumnName)).longValue();
        }
        return defaultValue;
    }

    /** Applies server-side aggregation: SUM for Sum, MEAN for Avg and everything else. */
    private void buildAggregationQuery(MeasureQuery query, String valueColumnName, Function function) {
        switch (function) {
            case Sum:
                query.sumBy(valueColumnName, Collections.singleton(Metrics.ENTITY_ID));
                break;
            case Avg:
            default:
                query.meanBy(valueColumnName, Collections.singleton(Metrics.ENTITY_ID));
                break;
        }
    }

    /**
     * Reads a value per time bucket of the duration. Buckets with no stored
     * row are filled with the column's registered default value.
     *
     * @throws IOException when the model has no registered schema or the
     *                     value column is unknown to the schema
     */
    @Override
    public MetricsValues readMetricsValues(MetricsCondition condition, String valueColumnName, Duration duration) throws IOException {
        String modelName = condition.getName();
        MetadataRegistry.Schema schema = MetadataRegistry.INSTANCE.findMetadata(modelName);
        if (schema == null) {
            throw new IOException("schema is not registered");
        }
        // One row ID per time bucket: <bucket>_<entity id>.
        final List<PointOfTime> pointOfTimes = duration.assembleDurationPoints();
        final List<String> ids = pointOfTimes.stream().map(pointOfTime -> {
            String id = pointOfTime.id(condition.getEntity().buildId());
            return id;
        }).collect(Collectors.toList());
        MetricsValues metricsValues = new MetricsValues();
        // NOTE(review): queryIDs issues one query per bucket ID (N+1 pattern).
        Map<String, DataPoint> idMap = queryIDs(modelName, valueColumnName, ids);
        if (!idMap.isEmpty()) {
            // Label is null, because in readMetricsValues, no label parameter.
            IntValues intValues = metricsValues.getValues();
            for (String id : ids) {
                KVInt kvInt = new KVInt();
                kvInt.setId(id);
                // NOTE(review): this 0 is always overwritten by one of the
                // branches below; it is effectively dead.
                kvInt.setValue(0);
                if (idMap.containsKey(id)) {
                    DataPoint dataPoint = idMap.get(id);
                    kvInt.setValue(extractFieldValue(schema, valueColumnName, dataPoint));
                } else {
                    kvInt.setValue(ValueColumnMetadata.INSTANCE.getDefaultValue(condition.getName()));
                }
                intValues.addKVInt(kvInt);
            }
        }
        // Re-order the collected values to follow the bucket order, padding
        // missing buckets with the default value.
        metricsValues.setValues(
            Util.sortValues(
                metricsValues.getValues(), ids, ValueColumnMetadata.INSTANCE.getDefaultValue(condition.getName()))
        );
        return metricsValues;
    }

    /**
     * Reads the field as a long, decoding double-typed columns from their
     * byte representation first.
     *
     * @throws IOException when the field is not part of the schema
     */
    private long extractFieldValue(MetadataRegistry.Schema schema, String fieldName, DataPoint dataPoint) throws IOException {
        MetadataRegistry.ColumnSpec spec = schema.getSpec(fieldName);
        if (spec == null) {
            throw new IOException("field is not registered");
        }
        if (double.class.equals(spec.getColumnClass())) {
            return ByteUtil.bytes2Double(dataPoint.getFieldValue(fieldName)).longValue();
        } else {
            return ((Number) dataPoint.getFieldValue(fieldName)).longValue();
        }
    }

    /**
     * Reads a labeled (DataTable-valued) series per time bucket and fans it
     * out into one MetricsValues per requested label.
     */
    @Override
    public List<MetricsValues> readLabeledMetricsValues(MetricsCondition condition, String valueColumnName, List<String> labels, Duration duration) throws IOException {
        final List<PointOfTime> pointOfTimes = duration.assembleDurationPoints();
        String modelName = condition.getName();
        List<String> ids = new ArrayList<>(pointOfTimes.size());
        pointOfTimes.forEach(pointOfTime -> {
            String id = pointOfTime.id(condition.getEntity().buildId());
            ids.add(id);
        });
        Map<String, DataPoint> idMap = queryIDs(modelName, valueColumnName, ids);
        Map<String, DataTable> dataTableMap = new HashMap<>(idMap.size());
        for (final Map.Entry<String, DataPoint> entry : idMap.entrySet()) {
            // The stored field is the DataTable's serialized string form.
            dataTableMap.put(
                entry.getKey(),
                new DataTable(entry.getValue().getFieldValue(valueColumnName))
            );
        }
        return Util.composeLabelValue(condition, labels, ids, dataTableMap);
    }

    /**
     * Reads histogram buckets per time bucket and assembles a heat map,
     * padding missing columns with the default value.
     */
    @Override
    public HeatMap readHeatMap(MetricsCondition condition, String valueColumnName, Duration duration) throws IOException {
        final List<PointOfTime> pointOfTimes = duration.assembleDurationPoints();
        String modelName = condition.getName();
        List<String> ids = new ArrayList<>(pointOfTimes.size());
        pointOfTimes.forEach(pointOfTime -> {
            String id = pointOfTime.id(condition.getEntity().buildId());
            ids.add(id);
        });
        HeatMap heatMap = new HeatMap();
        Map<String, DataPoint> idMap = queryIDs(modelName, valueColumnName, ids);
        if (idMap.isEmpty()) {
            return heatMap;
        }
        final int defaultValue = ValueColumnMetadata.INSTANCE.getDefaultValue(condition.getName());
        for (String id : ids) {
            DataPoint dataPoint = idMap.get(id);
            if (dataPoint != null) {
                // NOTE(review): reads the fixed HistogramMetrics.DATASET field,
                // not valueColumnName — presumably intentional for heat maps.
                String value = dataPoint.getFieldValue(HistogramMetrics.DATASET);
                heatMap.buildColumn(id, value, defaultValue);
            }
        }
        heatMap.fixMissingColumns(ids, defaultValue);
        return heatMap;
    }

    /**
     * Fetches one data point per row ID. IDs with no stored row are simply
     * absent from the returned map.
     *
     * <p>NOTE(review): this issues one query per ID; a batched/IN query would
     * reduce round trips if the client supports it.
     */
    private Map<String, DataPoint> queryIDs(String modelName, String valueColumnName, List<String> measureIDs) throws IOException {
        Map<String, DataPoint> map = new HashMap<>(measureIDs.size());
        for (final String id : measureIDs) {
            MeasureQueryResponse resp = query(modelName, Collections.emptySet(), ImmutableSet.of(valueColumnName), new QueryBuilder<MeasureQuery>() {
                @Override
                protected void apply(MeasureQuery query) {
                    query.andWithID(id);
                }
            });
            if (resp.size() > 0) {
                map.putIfAbsent(resp.getDataPoints().get(0).getId(), resp.getDataPoints().get(0));
            }
        }
        return map;
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.oap.server.core.analysis.manual.networkalias.NetworkAddressAlias;
import org.apache.skywalking.oap.server.core.storage.cache.INetworkAddressAliasDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBConverter.StorageToMeasure;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
@Slf4j
public class BanyanDBNetworkAddressAliasDAO extends AbstractBanyanDBDAO implements INetworkAddressAliasDAO {
    private final NetworkAddressAlias.Builder builder = new NetworkAddressAlias.Builder();

    // All tags required to rebuild a NetworkAddressAlias entity.
    private static final Set<String> TAGS = ImmutableSet.of(NetworkAddressAlias.ADDRESS,
            NetworkAddressAlias.TIME_BUCKET, NetworkAddressAlias.LAST_UPDATE_TIME_BUCKET,
            NetworkAddressAlias.REPRESENT_SERVICE_ID, NetworkAddressAlias.REPRESENT_SERVICE_INSTANCE_ID);

    public BanyanDBNetworkAddressAliasDAO(final BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Loads every alias whose last-update time bucket is at or after the
     * given bucket. Query failures are logged and reported as an empty list.
     */
    @Override
    public List<NetworkAddressAlias> loadLastUpdate(long timeBucket) {
        // Condition: LAST_UPDATE_TIME_BUCKET >= timeBucket.
        final QueryBuilder<MeasureQuery> updatedSince = new QueryBuilder<MeasureQuery>() {
            @Override
            protected void apply(final MeasureQuery query) {
                query.and(gte(NetworkAddressAlias.LAST_UPDATE_TIME_BUCKET, timeBucket));
            }
        };
        try {
            final MeasureQueryResponse resp =
                query(NetworkAddressAlias.INDEX_NAME, TAGS, Collections.emptySet(), updatedSince);
            return resp.getDataPoints()
                       .stream()
                       .map(point -> new StorageToMeasure(NetworkAddressAlias.INDEX_NAME, point))
                       .map(builder::storage2Entity)
                       .collect(Collectors.toList());
        } catch (IOException e) {
            log.error(e.getMessage(), e);
            return Collections.emptyList();
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.oap.server.core.analysis.manual.process.ServiceLabelRecord;
import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IServiceLabelDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
public class BanyanDBServiceLabelDAO extends AbstractBanyanDBDAO implements IServiceLabelDAO {
    // Tags needed to list labels per service.
    private static final Set<String> TAGS = ImmutableSet.of(ServiceLabelRecord.LABEL, ServiceLabelRecord.SERVICE_ID);

    public BanyanDBServiceLabelDAO(final BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Returns every label recorded for the given service.
     */
    @Override
    public List<String> queryAllLabels(String serviceId) throws IOException {
        // Condition: SERVICE_ID == serviceId.
        final QueryBuilder<MeasureQuery> byServiceId = new QueryBuilder<MeasureQuery>() {
            @Override
            protected void apply(final MeasureQuery query) {
                query.and(eq(ServiceLabelRecord.SERVICE_ID, serviceId));
            }
        };
        return query(ServiceLabelRecord.INDEX_NAME, TAGS, Collections.emptySet(), byServiceId)
                .getDataPoints()
                .stream()
                .map(point -> (String) point.getTagValue(ServiceLabelRecord.LABEL))
                .collect(Collectors.toList());
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.DataPoint;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.banyandb.v1.client.TimestampRange;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.TagAutocompleteData;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.TagType;
import org.apache.skywalking.oap.server.core.storage.query.ITagAutoCompleteQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
public class BanyanDBTagAutocompleteQueryDAO extends AbstractBanyanDBDAO implements ITagAutoCompleteQueryDAO {
    // Projection for key-only autocomplete lookups.
    private static final Set<String> TAGS_KEY = ImmutableSet.of(TagAutocompleteData.TAG_TYPE,
            TagAutocompleteData.TAG_KEY);
    // Projection for key/value autocomplete lookups.
    private static final Set<String> TAGS_KV = ImmutableSet.of(TagAutocompleteData.TAG_TYPE,
            TagAutocompleteData.TAG_KEY, TagAutocompleteData.TAG_VALUE);

    public BanyanDBTagAutocompleteQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Returns the distinct tag keys recorded for the given tag type within
     * the optional second-precision time-bucket range.
     */
    @Override
    public Set<String> queryTagAutocompleteKeys(TagType tagType, long startSecondTB, long endSecondTB) throws IOException {
        final MeasureQueryResponse resp = query(TagAutocompleteData.INDEX_NAME,
                TAGS_KEY, Collections.emptySet(),
                rangeOf(startSecondTB, endSecondTB),
                new QueryBuilder<MeasureQuery>() {
                    @Override
                    protected void apply(MeasureQuery query) {
                        query.groupBy(ImmutableSet.of(TagAutocompleteData.TAG_KEY));
                        query.and(eq(TagAutocompleteData.TAG_TYPE, tagType.name()));
                    }
                }
        );
        return collectTag(resp, TagAutocompleteData.TAG_KEY);
    }

    /**
     * Returns up to {@code limit} distinct values recorded for the given tag
     * type and key within the optional time-bucket range.
     */
    @Override
    public Set<String> queryTagAutocompleteValues(TagType tagType, String tagKey, int limit, long startSecondTB, long endSecondTB) throws IOException {
        final MeasureQueryResponse resp = query(TagAutocompleteData.INDEX_NAME,
                TAGS_KV, Collections.emptySet(),
                rangeOf(startSecondTB, endSecondTB),
                new QueryBuilder<MeasureQuery>() {
                    @Override
                    protected void apply(MeasureQuery query) {
                        query.groupBy(ImmutableSet.of(TagAutocompleteData.TAG_VALUE));
                        query.setLimit(limit);
                        query.and(eq(TagAutocompleteData.TAG_TYPE, tagType.name()));
                        query.and(eq(TagAutocompleteData.TAG_KEY, tagKey));
                    }
                }
        );
        return collectTag(resp, TagAutocompleteData.TAG_VALUE);
    }

    /** Builds the timestamp range, or null when either bound is unset (<= 0). */
    private static TimestampRange rangeOf(long startSecondTB, long endSecondTB) {
        if (startSecondTB > 0 && endSecondTB > 0) {
            return new TimestampRange(TimeBucket.getTimestamp(startSecondTB), TimeBucket.getTimestamp(endSecondTB));
        }
        return null;
    }

    /** Collects the given tag's value from every returned data point. */
    private static Set<String> collectTag(MeasureQueryResponse resp, String tagName) {
        if (resp.size() == 0) {
            return Collections.emptySet();
        }
        final Set<String> result = new HashSet<>();
        for (final DataPoint dp : resp.getDataPoints()) {
            result.add(dp.getTagValue(tagName));
        }
        return result;
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.measure;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.banyandb.v1.client.TimestampRange;
import org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.manual.relation.endpoint.EndpointRelationServerSideMetrics;
import org.apache.skywalking.oap.server.core.analysis.manual.relation.instance.ServiceInstanceRelationServerSideMetrics;
import org.apache.skywalking.oap.server.core.analysis.manual.relation.service.ServiceRelationClientSideMetrics;
import org.apache.skywalking.oap.server.core.analysis.manual.relation.service.ServiceRelationServerSideMetrics;
import org.apache.skywalking.oap.server.core.query.type.Call;
import org.apache.skywalking.oap.server.core.source.DetectPoint;
import org.apache.skywalking.oap.server.core.storage.query.ITopologyQueryDAO;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.stream.AbstractBanyanDBDAO;
public class BanyanDBTopologyQueryDAO extends AbstractBanyanDBDAO implements ITopologyQueryDAO {
/** Creates the topology query DAO on top of the shared BanyanDB client. */
public BanyanDBTopologyQueryDAO(final BanyanDBStorageClient client) {
    super(client);
}
/**
 * Loads server-side detected service relations touching any of the given
 * services within the time-bucket range.
 *
 * @throws UnexpectedException when no service IDs are supplied
 */
@Override
public List<Call.CallDetail> loadServiceRelationsDetectedAtServerSide(long startTB, long endTB, List<String> serviceIds) throws IOException {
    if (CollectionUtils.isEmpty(serviceIds)) {
        throw new UnexpectedException("Service id is empty");
    }
    return queryServiceRelation(startTB, endTB, buildServiceRelationsQueries(serviceIds), DetectPoint.SERVER);
}
/**
 * Loads client-side detected service relations touching any of the given
 * services within the time-bucket range.
 *
 * @throws UnexpectedException when no service IDs are supplied
 */
@Override
public List<Call.CallDetail> loadServiceRelationDetectedAtClientSide(long startTB, long endTB, List<String> serviceIds) throws IOException {
    if (CollectionUtils.isEmpty(serviceIds)) {
        throw new UnexpectedException("Service id is empty");
    }
    return queryServiceRelation(startTB, endTB, buildServiceRelationsQueries(serviceIds), DetectPoint.CLIENT);
}
/** Unfiltered variant: loads all server-side detected service relations in range. */
@Override
public List<Call.CallDetail> loadServiceRelationsDetectedAtServerSide(long startTB, long endTB) throws IOException {
    return queryServiceRelation(startTB, endTB, Collections.singletonList(emptyMeasureQuery()), DetectPoint.SERVER);
}
/** Unfiltered variant: loads all client-side detected service relations in range. */
@Override
public List<Call.CallDetail> loadServiceRelationDetectedAtClientSide(long startTB, long endTB) throws IOException {
    return queryServiceRelation(startTB, endTB, Collections.singletonList(emptyMeasureQuery()), DetectPoint.CLIENT);
}
/**
 * Builds two query conditions per service: one matching relations where the
 * service is the source, one where it is the destination.
 */
private List<QueryBuilder<MeasureQuery>> buildServiceRelationsQueries(List<String> serviceIds) {
    final List<QueryBuilder<MeasureQuery>> queries = new ArrayList<>(serviceIds.size() * 2);
    for (final String serviceId : serviceIds) {
        // service as the caller side
        queries.add(new QueryBuilder<MeasureQuery>() {
            @Override
            protected void apply(MeasureQuery query) {
                query.and(eq(ServiceRelationServerSideMetrics.SOURCE_SERVICE_ID, serviceId));
            }
        });
        // service as the callee side
        queries.add(new QueryBuilder<MeasureQuery>() {
            @Override
            protected void apply(MeasureQuery query) {
                query.and(eq(ServiceRelationServerSideMetrics.DEST_SERVICE_ID, serviceId));
            }
        });
    }
    return queries;
}
/**
 * Runs each relation query and converts the results into de-duplicated
 * service-level call details.
 *
 * <p>NOTE(review): this always queries the CLIENT-side index
 * ({@code ServiceRelationClientSideMetrics.INDEX_NAME}) even when
 * {@code detectPoint} is SERVER — confirm whether the server-side index
 * should be used for SERVER detection.
 *
 * <p>NOTE(review): only the FIRST data point of each response becomes a
 * call detail; for the unfiltered (empty) query this drops all but one
 * relation — confirm intended.
 */
List<Call.CallDetail> queryServiceRelation(long startTB, long endTB, List<QueryBuilder<MeasureQuery>> queryBuilderList, DetectPoint detectPoint) throws IOException {
    // Bound the scan by time only when both buckets are set.
    TimestampRange timestampRange = null;
    if (startTB > 0 && endTB > 0) {
        timestampRange = new TimestampRange(TimeBucket.getTimestamp(startTB), TimeBucket.getTimestamp(endTB));
    }
    // Keyed by entity ID so the same relation is only reported once.
    final Map<String, Call.CallDetail> callMap = new HashMap<>();
    for (final QueryBuilder<MeasureQuery> q : queryBuilderList) {
        MeasureQueryResponse resp = query(ServiceRelationClientSideMetrics.INDEX_NAME,
                ImmutableSet.of(ServiceRelationClientSideMetrics.COMPONENT_ID,
                        ServiceRelationClientSideMetrics.SOURCE_SERVICE_ID,
                        ServiceRelationClientSideMetrics.DEST_SERVICE_ID,
                        ServiceRelationClientSideMetrics.ENTITY_ID),
                Collections.emptySet(), timestampRange, q);
        if (resp.size() == 0) {
            continue;
        }
        final Call.CallDetail call = new Call.CallDetail();
        final String entityId = resp.getDataPoints().get(0).getTagValue(ServiceRelationClientSideMetrics.ENTITY_ID);
        final int componentId = ((Number) resp.getDataPoints().get(0).getTagValue(ServiceRelationClientSideMetrics.COMPONENT_ID)).intValue();
        call.buildFromServiceRelation(entityId, componentId, detectPoint);
        callMap.putIfAbsent(entityId, call);
    }
    return new ArrayList<>(callMap.values());
}
/**
 * Loads server-side detected instance relations between the two services
 * (both call directions) within the time-bucket range.
 */
@Override
public List<Call.CallDetail> loadInstanceRelationDetectedAtServerSide(String clientServiceId, String serverServiceId, long startTB, long endTB) throws IOException {
    return queryInstanceRelation(startTB, endTB, buildInstanceRelationsQueries(clientServiceId, serverServiceId), DetectPoint.SERVER);
}
/**
 * Loads client-side detected instance relations between the two services
 * (both call directions) within the time-bucket range.
 */
@Override
public List<Call.CallDetail> loadInstanceRelationDetectedAtClientSide(String clientServiceId, String serverServiceId, long startTB, long endTB) throws IOException {
    return queryInstanceRelation(startTB, endTB, buildInstanceRelationsQueries(clientServiceId, serverServiceId), DetectPoint.CLIENT);
}
/**
 * Builds the two directional conditions between a pair of services:
 * client→server and server→client.
 */
private List<QueryBuilder<MeasureQuery>> buildInstanceRelationsQueries(String clientServiceId, String serverServiceId) {
    final List<QueryBuilder<MeasureQuery>> queries = new ArrayList<>(2);
    queries.add(instanceRelationQuery(clientServiceId, serverServiceId));
    queries.add(instanceRelationQuery(serverServiceId, clientServiceId));
    return queries;
}

/** Builds one directional condition: SOURCE == sourceServiceId AND DEST == destServiceId. */
private QueryBuilder<MeasureQuery> instanceRelationQuery(final String sourceServiceId, final String destServiceId) {
    return new QueryBuilder<MeasureQuery>() {
        @Override
        protected void apply(MeasureQuery query) {
            query.and(eq(ServiceInstanceRelationServerSideMetrics.SOURCE_SERVICE_ID, sourceServiceId))
                 .and(eq(ServiceInstanceRelationServerSideMetrics.DEST_SERVICE_ID, destServiceId));
        }
    };
}
List<Call.CallDetail> queryInstanceRelation(long startTB, long endTB, List<QueryBuilder<MeasureQuery>> queryBuilderList, DetectPoint detectPoint) throws IOException {
TimestampRange timestampRange = null;
if (startTB > 0 && endTB > 0) {
timestampRange = new TimestampRange(TimeBucket.getTimestamp(startTB), TimeBucket.getTimestamp(endTB));
}
final Map<String, Call.CallDetail> callMap = new HashMap<>();
for (final QueryBuilder<MeasureQuery> q : queryBuilderList) {
MeasureQueryResponse resp = query(ServiceInstanceRelationServerSideMetrics.INDEX_NAME,
ImmutableSet.of(ServiceInstanceRelationServerSideMetrics.COMPONENT_ID,
ServiceInstanceRelationServerSideMetrics.SOURCE_SERVICE_ID,
ServiceInstanceRelationServerSideMetrics.DEST_SERVICE_ID,
ServiceInstanceRelationServerSideMetrics.ENTITY_ID),
Collections.emptySet(), timestampRange, q);
if (resp.size() == 0) {
continue;
}
final Call.CallDetail call = new Call.CallDetail();
final String entityId = resp.getDataPoints().get(0).getTagValue(ServiceInstanceRelationServerSideMetrics.ENTITY_ID);
final int componentId = ((Number) resp.getDataPoints().get(0).getTagValue(ServiceRelationClientSideMetrics.COMPONENT_ID)).intValue();
call.buildFromInstanceRelation(entityId, componentId, detectPoint);
callMap.putIfAbsent(entityId, call);
}
return new ArrayList<>(callMap.values());
}
@Override
public List<Call.CallDetail> loadEndpointRelation(long startTB, long endTB, String destEndpointId) throws IOException {
List<QueryBuilder<MeasureQuery>> queryBuilderList = buildEndpointRelationsQueries(destEndpointId);
return queryEndpointRelation(startTB, endTB, queryBuilderList, DetectPoint.SERVER);
}
private List<QueryBuilder<MeasureQuery>> buildEndpointRelationsQueries(String destEndpointId) {
List<QueryBuilder<MeasureQuery>> queryBuilderList = new ArrayList<>(2);
queryBuilderList.add(new QueryBuilder<MeasureQuery>() {
@Override
protected void apply(MeasureQuery query) {
query.and(eq(EndpointRelationServerSideMetrics.SOURCE_ENDPOINT, destEndpointId));
}
});
queryBuilderList.add(new QueryBuilder<MeasureQuery>() {
@Override
protected void apply(MeasureQuery query) {
query.and(eq(EndpointRelationServerSideMetrics.DEST_ENDPOINT, destEndpointId));
}
});
return queryBuilderList;
}
List<Call.CallDetail> queryEndpointRelation(long startTB, long endTB, List<QueryBuilder<MeasureQuery>> queryBuilderList, DetectPoint detectPoint) throws IOException {
TimestampRange timestampRange = null;
if (startTB > 0 && endTB > 0) {
timestampRange = new TimestampRange(TimeBucket.getTimestamp(startTB), TimeBucket.getTimestamp(endTB));
}
final Map<String, Call.CallDetail> callMap = new HashMap<>();
for (final QueryBuilder<MeasureQuery> q : queryBuilderList) {
MeasureQueryResponse resp = query(EndpointRelationServerSideMetrics.INDEX_NAME,
ImmutableSet.of(EndpointRelationServerSideMetrics.DEST_ENDPOINT,
EndpointRelationServerSideMetrics.SOURCE_ENDPOINT,
EndpointRelationServerSideMetrics.ENTITY_ID),
Collections.emptySet(), timestampRange, q);
if (resp.size() == 0) {
continue;
}
final Call.CallDetail call = new Call.CallDetail();
final String entityId = resp.getDataPoints().get(0).getTagValue(EndpointRelationServerSideMetrics.ENTITY_ID);
call.buildFromEndpointRelation(entityId, detectPoint);
callMap.putIfAbsent(entityId, call);
}
return new ArrayList<>(callMap.values());
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import org.apache.skywalking.banyandb.v1.client.AbstractQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQuery;
import org.apache.skywalking.banyandb.v1.client.MeasureQueryResponse;
import org.apache.skywalking.banyandb.v1.client.PairQueryCondition;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.banyandb.v1.client.TimestampRange;
import org.apache.skywalking.oap.server.core.storage.AbstractDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.MetadataRegistry;
import java.io.IOException;
import java.time.Instant;
import java.util.List;
import java.util.Set;
/**
 * Base class for all BanyanDB DAOs. Provides stream/measure query helpers that resolve
 * the registered schema for a model name and a small condition-building DSL
 * ({@link QueryBuilder}) on top of {@link PairQueryCondition}.
 */
public abstract class AbstractBanyanDBDAO extends AbstractDAO<BanyanDBStorageClient> {
    // Widest time range accepted by the client; used when the caller gives no explicit bound.
    private static final Instant MAX_TIME = Instant.ofEpochSecond(0, Long.MAX_VALUE);
    private static final TimestampRange FULL_RANGE = new TimestampRange(0, MAX_TIME.toEpochMilli());

    protected AbstractBanyanDBDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Run a stream query with no time bound.
     */
    protected StreamQueryResponse query(String modelName, Set<String> tags, QueryBuilder<StreamQuery> builder) throws IOException {
        return this.query(modelName, tags, null, builder);
    }

    /**
     * Run a stream query against the registered schema of {@code modelName}.
     *
     * @param timestampRange optional; {@code null} falls back to the widest possible range
     * @throws IllegalStateException when the model is not registered in {@link MetadataRegistry}
     */
    protected StreamQueryResponse query(String modelName, Set<String> tags, TimestampRange timestampRange,
                                        QueryBuilder<StreamQuery> builder) throws IOException {
        final MetadataRegistry.Schema schema = MetadataRegistry.INSTANCE.findMetadata(modelName);
        if (schema == null) {
            throw new IllegalStateException("schema is not registered");
        }
        final TimestampRange range = timestampRange == null ? FULL_RANGE : timestampRange;
        final StreamQuery query = new StreamQuery(schema.getMetadata().getGroup(), schema.getMetadata().getName(), range, tags);
        builder.apply(query);
        return getClient().query(query);
    }

    /**
     * Run a measure query with no time bound.
     */
    protected MeasureQueryResponse query(String modelName, Set<String> tags, Set<String> fields,
                                         QueryBuilder<MeasureQuery> builder) throws IOException {
        return this.query(modelName, tags, fields, null, builder);
    }

    /**
     * Run a measure query against the registered schema of {@code modelName}.
     *
     * @param timestampRange optional; {@code null} falls back to the widest possible range
     * @throws IllegalStateException when the model is not registered in {@link MetadataRegistry}
     */
    protected MeasureQueryResponse query(String modelName, Set<String> tags, Set<String> fields,
                                         TimestampRange timestampRange, QueryBuilder<MeasureQuery> builder) throws IOException {
        final MetadataRegistry.Schema schema = MetadataRegistry.INSTANCE.findMetadata(modelName);
        if (schema == null) {
            throw new IllegalStateException("schema is not registered");
        }
        final TimestampRange range = timestampRange == null ? FULL_RANGE : timestampRange;
        final MeasureQuery query = new MeasureQuery(schema.getMetadata().getGroup(), schema.getMetadata().getName(), range, tags, fields);
        builder.apply(query);
        return getClient().query(query);
    }

    /**
     * @return a builder that adds no conditions at all.
     */
    protected static QueryBuilder<MeasureQuery> emptyMeasureQuery() {
        return new QueryBuilder<MeasureQuery>() {
            @Override
            protected void apply(MeasureQuery query) {
                // intentionally empty: no conditions
            }
        };
    }

    /**
     * Condition-building callback applied to a query before it is sent.
     * The helper methods wrap {@link PairQueryCondition} factories.
     */
    protected abstract static class QueryBuilder<T extends AbstractQuery<? extends com.google.protobuf.GeneratedMessageV3>> {
        protected abstract void apply(final T query);

        protected PairQueryCondition<Long> eq(String name, long value) {
            return PairQueryCondition.LongQueryCondition.eq(name, value);
        }

        protected PairQueryCondition<List<String>> having(String name, List<String> value) {
            return PairQueryCondition.StringArrayQueryCondition.having(name, value);
        }

        // Exposed with inclusive names; the client API names them le/ge.
        protected PairQueryCondition<Long> lte(String name, long value) {
            return PairQueryCondition.LongQueryCondition.le(name, value);
        }

        protected PairQueryCondition<Long> gte(String name, long value) {
            return PairQueryCondition.LongQueryCondition.ge(name, value);
        }

        protected PairQueryCondition<Long> gt(String name, long value) {
            return PairQueryCondition.LongQueryCondition.gt(name, value);
        }

        protected PairQueryCondition<String> eq(String name, String value) {
            return PairQueryCondition.StringQueryCondition.eq(name, value);
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.RowEntity;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.banyandb.v1.client.TimestampRange;
import org.apache.skywalking.oap.server.core.alarm.AlarmRecord;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.enumeration.Scope;
import org.apache.skywalking.oap.server.core.query.type.AlarmMessage;
import org.apache.skywalking.oap.server.core.query.type.Alarms;
import org.apache.skywalking.oap.server.core.storage.query.IAlarmQueryDAO;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBConverter;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
/**
 * Alarm query implementation backed by BanyanDB.
 * {@link org.apache.skywalking.oap.server.core.alarm.AlarmRecord} is stored as a stream;
 * each row is decoded into an {@link org.apache.skywalking.oap.server.core.query.type.AlarmMessage}.
 */
public class BanyanDBAlarmQueryDAO extends AbstractBanyanDBDAO implements IAlarmQueryDAO {
    // Tags projected from the AlarmRecord stream; every tag read below must be listed here.
    private static final Set<String> TAGS = ImmutableSet.of(AlarmRecord.SCOPE,
            AlarmRecord.NAME, AlarmRecord.ID0, AlarmRecord.ID1, AlarmRecord.ALARM_MESSAGE, AlarmRecord.START_TIME,
            AlarmRecord.TIME_BUCKET, AlarmRecord.RULE_NAME, AlarmRecord.TAGS, AlarmRecord.TAGS_RAW_DATA);

    public BanyanDBAlarmQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Query alarm messages filtered by scope, tags and time range, with offset/limit paging.
     *
     * NOTE(review): the {@code keyword} parameter is accepted but never applied to the query
     * in this implementation — presumably full-text matching is not supported here; confirm
     * against the other storage implementations.
     *
     * @param scopeId optional; when non-null, restricts results to one scope
     * @param startTB start time bucket; the range is only applied when both bounds are positive
     * @param endTB   end time bucket
     * @param tags    optional searchable tags; each must match ("having" semantics)
     * @throws IOException when the underlying BanyanDB query fails
     */
    @Override
    public Alarms getAlarm(Integer scopeId, String keyword, int limit, int from, long startTB, long endTB, List<Tag> tags) throws IOException {
        TimestampRange tsRange = null;
        if (startTB > 0 && endTB > 0) {
            tsRange = new TimestampRange(TimeBucket.getTimestamp(startTB), TimeBucket.getTimestamp(endTB));
        }
        StreamQueryResponse resp = query(AlarmRecord.INDEX_NAME, TAGS,
                tsRange,
                new QueryBuilder<StreamQuery>() {
                    @Override
                    public void apply(StreamQuery query) {
                        if (Objects.nonNull(scopeId)) {
                            // SCOPE is stored as a numeric tag; widen the boxed Integer to long.
                            query.and(eq(AlarmRecord.SCOPE, (long) scopeId));
                        }
                        if (CollectionUtils.isNotEmpty(tags)) {
                            // Tags are matched against their serialized "key=value" string form.
                            List<String> tagsConditions = new ArrayList<>(tags.size());
                            for (final Tag tag : tags) {
                                tagsConditions.add(tag.toString());
                            }
                            query.and(having(AlarmRecord.TAGS, tagsConditions));
                        }
                        query.setLimit(limit);
                        query.setOffset(from);
                    }
                });
        Alarms alarms = new Alarms();
        // NOTE(review): "total" is the size of the returned page, not the full match count.
        alarms.setTotal(resp.size());
        for (final RowEntity rowEntity : resp.getElements()) {
            // Rebuild the storage entity from the row, then map it to the query-layer message.
            AlarmRecord.Builder builder = new AlarmRecord.Builder();
            AlarmRecord alarmRecord = builder.storage2Entity(
                    new BanyanDBConverter.StorageToStream(AlarmRecord.INDEX_NAME, rowEntity)
            );
            AlarmMessage message = new AlarmMessage();
            message.setId(String.valueOf(alarmRecord.getId0()));
            message.setId1(String.valueOf(alarmRecord.getId1()));
            message.setMessage(alarmRecord.getAlarmMessage());
            message.setStartTime(alarmRecord.getStartTime());
            message.setScope(Scope.Finder.valueOf(alarmRecord.getScope()));
            message.setScopeId(alarmRecord.getScope());
            if (!CollectionUtils.isEmpty(alarmRecord.getTagsRawData())) {
                // Decode the raw tag payload into display key/values
                // (helper presumably inherited from IAlarmQueryDAO).
                parserDataBinary(alarmRecord.getTagsRawData(), message.getTags());
            }
            alarms.getMsgs().add(message);
        }
        return alarms;
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import com.google.common.collect.ImmutableSet;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.skywalking.banyandb.v1.client.RowEntity;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.banyandb.v1.client.TimestampRange;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.browser.manual.errorlog.BrowserErrorLogRecord;
import org.apache.skywalking.oap.server.core.browser.source.BrowserErrorCategory;
import org.apache.skywalking.oap.server.core.query.type.BrowserErrorLog;
import org.apache.skywalking.oap.server.core.query.type.BrowserErrorLogs;
import org.apache.skywalking.oap.server.core.query.type.ErrorCategory;
import org.apache.skywalking.oap.server.core.storage.query.IBrowserLogQueryDAO;
import org.apache.skywalking.oap.server.library.util.StringUtil;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import java.io.IOException;
import java.util.Objects;
import java.util.Set;
/**
* {@link org.apache.skywalking.oap.server.core.browser.manual.errorlog.BrowserErrorLogRecord} is a stream
*/
public class BanyanDBBrowserLogQueryDAO extends AbstractBanyanDBDAO implements IBrowserLogQueryDAO {
private static final Set<String> TAGS = ImmutableSet.of(BrowserErrorLogRecord.SERVICE_ID,
BrowserErrorLogRecord.SERVICE_VERSION_ID, BrowserErrorLogRecord.PAGE_PATH_ID,
BrowserErrorLogRecord.ERROR_CATEGORY, BrowserErrorLogRecord.DATA_BINARY);
public BanyanDBBrowserLogQueryDAO(BanyanDBStorageClient client) {
super(client);
}
@Override
public BrowserErrorLogs queryBrowserErrorLogs(String serviceId, String serviceVersionId, String pagePathId, BrowserErrorCategory category, long startSecondTB, long endSecondTB, int limit, int from) throws IOException {
TimestampRange tsRange = null;
if (startSecondTB > 0 && endSecondTB > 0) {
tsRange = new TimestampRange(TimeBucket.getTimestamp(startSecondTB), TimeBucket.getTimestamp(endSecondTB));
}
StreamQueryResponse resp = query(BrowserErrorLogRecord.INDEX_NAME, TAGS,
tsRange, new QueryBuilder<StreamQuery>() {
@Override
public void apply(StreamQuery query) {
if (StringUtil.isNotEmpty(serviceId)) {
query.and(eq(BrowserErrorLogRecord.SERVICE_ID, serviceId));
}
if (StringUtil.isNotEmpty(serviceVersionId)) {
query.and(eq(BrowserErrorLogRecord.SERVICE_VERSION_ID, serviceVersionId));
}
if (StringUtil.isNotEmpty(pagePathId)) {
query.and(eq(BrowserErrorLogRecord.PAGE_PATH_ID, pagePathId));
}
if (Objects.nonNull(category)) {
query.and(eq(BrowserErrorLogRecord.ERROR_CATEGORY, category.getValue()));
}
query.setOffset(from);
query.setLimit(limit);
}
});
BrowserErrorLogs logs = new BrowserErrorLogs();
logs.setTotal(resp.size());
for (final RowEntity rowEntity : resp.getElements()) {
final byte[] dataBinary = rowEntity.getTagValue(BrowserErrorLogRecord.DATA_BINARY);
if (dataBinary != null && dataBinary.length > 0) {
BrowserErrorLog log = parserDataBinary(dataBinary);
logs.getLogs().add(log);
}
}
return logs;
}
/**
* TODO: merge the default method in the interface
*/
private BrowserErrorLog parserDataBinary(
byte[] dataBinary) {
try {
BrowserErrorLog log = new BrowserErrorLog();
org.apache.skywalking.apm.network.language.agent.v3.BrowserErrorLog browserErrorLog = org.apache.skywalking.apm.network.language.agent.v3.BrowserErrorLog
.parseFrom(dataBinary);
log.setService(browserErrorLog.getService());
log.setServiceVersion(browserErrorLog.getServiceVersion());
log.setTime(browserErrorLog.getTime());
log.setPagePath(browserErrorLog.getPagePath());
log.setCategory(ErrorCategory.valueOf(browserErrorLog.getCategory().name().toUpperCase()));
log.setGrade(browserErrorLog.getGrade());
log.setMessage(browserErrorLog.getMessage());
log.setLine(browserErrorLog.getLine());
log.setCol(browserErrorLog.getCol());
log.setStack(browserErrorLog.getStack());
log.setErrorUrl(browserErrorLog.getErrorUrl());
log.setFirstReportedError(browserErrorLog.getFirstReportedError());
return log;
} catch (InvalidProtocolBufferException e) {
throw new RuntimeException(e);
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.RowEntity;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.oap.server.core.profiling.ebpf.storage.EBPFProfilingDataRecord;
import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IEBPFProfilingDataDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBConverter;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * eBPF profiling data query implementation backed by BanyanDB.
 * {@link EBPFProfilingDataRecord} is stored as a stream.
 */
public class BanyanDBEBPFProfilingDataDAO extends AbstractBanyanDBDAO implements IEBPFProfilingDataDAO {
    // Tags projected from the stream; every field the converter reads must be listed here.
    private static final Set<String> TAGS = ImmutableSet.of(EBPFProfilingDataRecord.UPLOAD_TIME,
            EBPFProfilingDataRecord.SCHEDULE_ID,
            EBPFProfilingDataRecord.STACK_DUMP_COUNT,
            EBPFProfilingDataRecord.STACK_ID_LIST,
            EBPFProfilingDataRecord.STACKS_BINARY,
            EBPFProfilingDataRecord.TASK_ID,
            EBPFProfilingDataRecord.TIME_BUCKET);

    public BanyanDBEBPFProfilingDataDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Fetch profiling data for every given schedule id, bounded by upload time
     * (inclusive on both ends). One query is issued per schedule id.
     */
    @Override
    public List<EBPFProfilingDataRecord> queryData(List<String> scheduleIdList, long beginTime, long endTime) throws IOException {
        final List<EBPFProfilingDataRecord> result = new ArrayList<>();
        for (final String scheduleId : scheduleIdList) {
            final StreamQueryResponse response = query(EBPFProfilingDataRecord.INDEX_NAME,
                    TAGS,
                    new QueryBuilder<StreamQuery>() {
                        @Override
                        protected void apply(StreamQuery query) {
                            query.and(eq(EBPFProfilingDataRecord.SCHEDULE_ID, scheduleId));
                            query.and(gte(EBPFProfilingDataRecord.UPLOAD_TIME, beginTime));
                            query.and(lte(EBPFProfilingDataRecord.UPLOAD_TIME, endTime));
                        }
                    }
            );
            for (final RowEntity row : response.getElements()) {
                result.add(buildDataRecord(row));
            }
        }
        return result;
    }

    /**
     * Convert one stream row back into a storage entity.
     */
    private EBPFProfilingDataRecord buildDataRecord(RowEntity rowEntity) {
        return new EBPFProfilingDataRecord.Builder()
                .storage2Entity(new BanyanDBConverter.StorageToStream(EBPFProfilingDataRecord.INDEX_NAME, rowEntity));
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import com.google.common.collect.ImmutableSet;
import com.google.gson.Gson;
import org.apache.skywalking.banyandb.v1.client.AbstractQuery;
import org.apache.skywalking.banyandb.v1.client.RowEntity;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.profiling.ebpf.storage.EBPFProfilingTargetType;
import org.apache.skywalking.oap.server.core.profiling.ebpf.storage.EBPFProfilingTaskRecord;
import org.apache.skywalking.oap.server.core.profiling.ebpf.storage.EBPFProfilingTriggerType;
import org.apache.skywalking.oap.server.core.query.type.EBPFProfilingTask;
import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IEBPFProfilingTaskDAO;
import org.apache.skywalking.oap.server.library.util.StringUtil;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBConverter;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * eBPF profiling task query implementation backed by BanyanDB.
 * {@link EBPFProfilingTaskRecord} is stored as a stream; rows are converted into
 * query-layer {@link EBPFProfilingTask} objects.
 */
public class BanyanDBEBPFProfilingTaskDAO extends AbstractBanyanDBDAO implements IEBPFProfilingTaskDAO {
    // Tags projected from the stream; every field buildTask reads must be listed here.
    private static final Set<String> TAGS = ImmutableSet.of(EBPFProfilingTaskRecord.SERVICE_ID,
            EBPFProfilingTaskRecord.PROCESS_LABELS_JSON,
            EBPFProfilingTaskRecord.TRIGGER_TYPE,
            EBPFProfilingTaskRecord.START_TIME,
            EBPFProfilingTaskRecord.FIXED_TRIGGER_DURATION,
            EBPFProfilingTaskRecord.TARGET_TYPE,
            EBPFProfilingTaskRecord.CREATE_TIME,
            EBPFProfilingTaskRecord.LAST_UPDATE_TIME,
            EBPFProfilingTaskRecord.TIME_BUCKET);
    // Gson is thread-safe; shared for parsing the process-labels JSON array.
    private static final Gson GSON = new Gson();

    public BanyanDBEBPFProfilingTaskDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Query profiling tasks per service, optionally filtered by target type,
     * earliest start time, and latest update time.
     *
     * NOTE(review): results are ordered by create time (desc) within each service's
     * query only; the merged list across services is not re-sorted here.
     *
     * @param taskStartTime    when positive, only tasks starting at/after this time
     * @param latestUpdateTime when positive, only tasks updated strictly after this time
     * @throws IOException when the underlying BanyanDB query fails
     */
    @Override
    public List<EBPFProfilingTask> queryTasks(List<String> serviceIdList, EBPFProfilingTargetType targetType, long taskStartTime, long latestUpdateTime) throws IOException {
        List<EBPFProfilingTask> tasks = new ArrayList<>();
        for (final String serviceId : serviceIdList) {
            StreamQueryResponse resp = query(EBPFProfilingTaskRecord.INDEX_NAME, TAGS,
                    new QueryBuilder<StreamQuery>() {
                        @Override
                        protected void apply(StreamQuery query) {
                            query.and(eq(EBPFProfilingTaskRecord.SERVICE_ID, serviceId));
                            if (targetType != null) {
                                query.and(eq(EBPFProfilingTaskRecord.TARGET_TYPE, targetType.value()));
                            }
                            if (taskStartTime > 0) {
                                query.and(gte(EBPFProfilingTaskRecord.START_TIME, taskStartTime));
                            }
                            if (latestUpdateTime > 0) {
                                query.and(gt(EBPFProfilingTaskRecord.LAST_UPDATE_TIME, latestUpdateTime));
                            }
                            query.setOrderBy(new AbstractQuery.OrderBy(EBPFProfilingTaskRecord.CREATE_TIME, AbstractQuery.Sort.DESC));
                        }
                    });
            tasks.addAll(resp.getElements().stream().map(this::buildTask).collect(Collectors.toList()));
        }
        return tasks;
    }

    /**
     * Convert one stream row into a query-layer task. The record's numeric
     * trigger/target codes are mapped back to their enum values.
     */
    private EBPFProfilingTask buildTask(final RowEntity rowEntity) {
        final EBPFProfilingTaskRecord.Builder builder = new EBPFProfilingTaskRecord.Builder();
        final EBPFProfilingTaskRecord record = builder.storage2Entity(new BanyanDBConverter.StorageToStream(
                EBPFProfilingTaskRecord.INDEX_NAME,
                rowEntity));
        final EBPFProfilingTask task = new EBPFProfilingTask();
        task.setTaskId(record.id());
        task.setServiceId(record.getServiceId());
        task.setServiceName(IDManager.ServiceID.analysisId(record.getServiceId()).getName());
        if (StringUtil.isNotEmpty(record.getProcessLabelsJson())) {
            // PROCESS_LABELS_JSON is a JSON array of strings; fall back to an empty list when blank.
            task.setProcessLabels(GSON.<List<String>>fromJson(record.getProcessLabelsJson(), ArrayList.class));
        } else {
            task.setProcessLabels(Collections.emptyList());
        }
        task.setTaskStartTime(record.getStartTime());
        task.setTriggerType(EBPFProfilingTriggerType.valueOf(record.getTriggerType()));
        task.setFixedTriggerDuration(record.getFixedTriggerDuration());
        task.setTargetType(EBPFProfilingTargetType.valueOf(record.getTargetType()));
        task.setCreateTime(record.getCreateTime());
        task.setLastUpdateTime(record.getLastUpdateTime());
        return task;
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import org.apache.skywalking.oap.server.core.storage.IHistoryDeleteDAO;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import java.io.IOException;
/**
 * History deletion is intentionally a no-op for BanyanDB: data expiry is expected to be
 * handled by the storage itself rather than by the OAP deletion loop.
 */
public class BanyanDBHistoryDeleteDAO implements IHistoryDeleteDAO {
    @Override
    public void deleteHistory(Model model, String timeBucketColumnName, int ttl) throws IOException {
        // do nothing since we currently do not support deletion
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import com.google.common.collect.ImmutableSet;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.skywalking.apm.network.logging.v3.LogTags;
import org.apache.skywalking.banyandb.v1.client.RowEntity;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.banyandb.v1.client.TimestampRange;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.query.type.KeyValue;
import org.apache.skywalking.oap.server.core.query.type.Log;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.library.util.StringUtil;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
/**
 * Log query implementation backed by BanyanDB.
 * {@link org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord} is stored
 * as a stream; each row is decoded into a query-layer {@link Log}.
 */
public class BanyanDBLogQueryDAO extends AbstractBanyanDBDAO implements ILogQueryDAO {
    // Tags projected from the stream; every tag read below must be listed here.
    private static final Set<String> TAGS = ImmutableSet.of(AbstractLogRecord.SERVICE_ID,
            AbstractLogRecord.SERVICE_INSTANCE_ID,
            AbstractLogRecord.ENDPOINT_ID,
            AbstractLogRecord.TRACE_ID,
            AbstractLogRecord.TRACE_SEGMENT_ID,
            AbstractLogRecord.SPAN_ID,
            AbstractLogRecord.TIMESTAMP,
            AbstractLogRecord.CONTENT_TYPE,
            AbstractLogRecord.CONTENT,
            AbstractLogRecord.TAGS,
            AbstractLogRecord.TAGS_RAW_DATA);

    public BanyanDBLogQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Query logs filtered by service/instance/endpoint, related trace, tags and time range.
     *
     * NOTE(review): {@code queryOrder}, {@code from}, {@code limit}, {@code keywordsOfContent}
     * and {@code excludingKeywordsOfContent} are accepted but not applied to the query in this
     * implementation — confirm against the other storage implementations whether this is a
     * known BanyanDB limitation.
     *
     * @param startTB start time bucket; the range is only applied when both bounds are positive
     * @param endTB   end time bucket
     * @throws IOException when the underlying BanyanDB query fails
     */
    @Override
    public Logs queryLogs(String serviceId, String serviceInstanceId, String endpointId,
                          TraceScopeCondition relatedTrace, Order queryOrder, int from, int limit,
                          long startTB, long endTB, List<Tag> tags, List<String> keywordsOfContent,
                          List<String> excludingKeywordsOfContent) throws IOException {
        final QueryBuilder<StreamQuery> query = new QueryBuilder<StreamQuery>() {
            @Override
            public void apply(StreamQuery query) {
                // Every filter is optional; an absent value simply widens the query.
                if (StringUtil.isNotEmpty(serviceId)) {
                    query.and(eq(AbstractLogRecord.SERVICE_ID, serviceId));
                }
                if (StringUtil.isNotEmpty(serviceInstanceId)) {
                    query.and(eq(AbstractLogRecord.SERVICE_INSTANCE_ID, serviceInstanceId));
                }
                if (StringUtil.isNotEmpty(endpointId)) {
                    query.and(eq(AbstractLogRecord.ENDPOINT_ID, endpointId));
                }
                if (Objects.nonNull(relatedTrace)) {
                    // Narrow to a trace, then optionally to a segment and span within it.
                    if (StringUtil.isNotEmpty(relatedTrace.getTraceId())) {
                        query.and(eq(AbstractLogRecord.TRACE_ID, relatedTrace.getTraceId()));
                    }
                    if (StringUtil.isNotEmpty(relatedTrace.getSegmentId())) {
                        query.and(eq(AbstractLogRecord.TRACE_SEGMENT_ID, relatedTrace.getSegmentId()));
                    }
                    if (Objects.nonNull(relatedTrace.getSpanId())) {
                        query.and(eq(AbstractLogRecord.SPAN_ID, (long) relatedTrace.getSpanId()));
                    }
                }
                if (CollectionUtils.isNotEmpty(tags)) {
                    // Tags are matched against their serialized "key=value" string form.
                    List<String> tagsConditions = new ArrayList<>(tags.size());
                    for (final Tag tag : tags) {
                        tagsConditions.add(tag.toString());
                    }
                    query.and(having(LogRecord.TAGS, tagsConditions));
                }
            }
        };
        TimestampRange tsRange = null;
        if (startTB > 0 && endTB > 0) {
            tsRange = new TimestampRange(TimeBucket.getTimestamp(startTB), TimeBucket.getTimestamp(endTB));
        }
        StreamQueryResponse resp = query(LogRecord.INDEX_NAME, TAGS, tsRange, query);
        Logs logs = new Logs();
        // NOTE(review): "total" is the size of the returned page, not the full match count.
        logs.setTotal(resp.size());
        for (final RowEntity rowEntity : resp.getElements()) {
            Log log = new Log();
            log.setServiceId(rowEntity.getTagValue(AbstractLogRecord.SERVICE_ID));
            log.setServiceInstanceId(
                    rowEntity.getTagValue(AbstractLogRecord.SERVICE_INSTANCE_ID));
            log.setEndpointId(
                    rowEntity.getTagValue(AbstractLogRecord.ENDPOINT_ID));
            if (log.getEndpointId() != null) {
                // Endpoint ids encode the endpoint name; decode it for display.
                log.setEndpointName(
                        IDManager.EndpointID.analysisId(log.getEndpointId()).getEndpointName());
            }
            log.setTraceId(rowEntity.getTagValue(AbstractLogRecord.TRACE_ID));
            log.setTimestamp(((Number) rowEntity.getTagValue(AbstractLogRecord.TIMESTAMP)).longValue());
            log.setContentType(ContentType.instanceOf(
                    ((Number) rowEntity.getTagValue(AbstractLogRecord.CONTENT_TYPE)).intValue()));
            log.setContent(rowEntity.getTagValue(AbstractLogRecord.CONTENT));
            byte[] dataBinary = rowEntity.getTagValue(AbstractLogRecord.TAGS_RAW_DATA);
            if (dataBinary != null && dataBinary.length > 0) {
                parserDataBinary(dataBinary, log.getTags());
            }
            logs.getLogs().add(log);
        }
        return logs;
    }

    /**
     * Parse the raw protobuf tag payload into display key/values.
     * TODO: merge default method
     *
     * @throws RuntimeException wrapping {@link InvalidProtocolBufferException} on a corrupt payload
     */
    private void parserDataBinary(byte[] dataBinary, List<KeyValue> tags) {
        try {
            LogTags logTags = LogTags.parseFrom(dataBinary);
            logTags.getDataList().forEach(pair -> tags.add(new KeyValue(pair.getKey(), pair.getValue())));
        } catch (InvalidProtocolBufferException e) {
            throw new RuntimeException(e);
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.RowEntity;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.oap.server.core.profiling.trace.ProfileTaskLogRecord;
import org.apache.skywalking.oap.server.core.query.type.ProfileTaskLog;
import org.apache.skywalking.oap.server.core.query.type.ProfileTaskLogOperationType;
import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileTaskLogQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
/**
 * Stream-model read DAO for {@link ProfileTaskLogRecord}: fetches profiling-task
 * operation logs from BanyanDB and maps each row to a {@link ProfileTaskLog} view.
 */
public class BanyanDBProfileTaskLogQueryDAO extends AbstractBanyanDBDAO implements IProfileTaskLogQueryDAO {
    // Only the tags required to build a ProfileTaskLog are projected from the stream.
    private static final Set<String> TAGS = ImmutableSet.of(ProfileTaskLogRecord.OPERATION_TIME,
            ProfileTaskLogRecord.INSTANCE_ID, ProfileTaskLogRecord.TASK_ID, ProfileTaskLogRecord.OPERATION_TYPE);

    // Upper bound on the number of log rows fetched in a single query.
    private final int queryMaxSize;

    public BanyanDBProfileTaskLogQueryDAO(BanyanDBStorageClient client, int queryMaxSize) {
        super(client);
        this.queryMaxSize = queryMaxSize;
    }

    /**
     * Loads up to {@code queryMaxSize} profiling-task logs, without any filter condition.
     *
     * @return decoded task logs, or an empty list when the stream holds none
     */
    @Override
    public List<ProfileTaskLog> getTaskLogList() throws IOException {
        final StreamQueryResponse response = query(ProfileTaskLogRecord.INDEX_NAME, TAGS,
                new QueryBuilder<StreamQuery>() {
                    @Override
                    public void apply(StreamQuery query) {
                        // No filter: simply cap the result size.
                        query.setLimit(queryMaxSize);
                    }
                });
        if (response.size() == 0) {
            return Collections.emptyList();
        }
        final List<ProfileTaskLog> taskLogs = new LinkedList<>();
        response.getElements().forEach(row -> taskLogs.add(convert(row)));
        return taskLogs;
    }

    /** Maps a single BanyanDB row to the query-layer {@link ProfileTaskLog} type. */
    private ProfileTaskLog convert(RowEntity row) {
        return ProfileTaskLog.builder()
                .id(row.getId())
                .taskId(row.getTagValue(ProfileTaskLogRecord.TASK_ID))
                .instanceId(row.getTagValue(ProfileTaskLogRecord.INSTANCE_ID))
                .operationType(ProfileTaskLogOperationType.parse(
                        ((Number) row.getTagValue(ProfileTaskLogRecord.OPERATION_TYPE)).intValue()))
                .operationTime(((Number) row.getTagValue(ProfileTaskLogRecord.OPERATION_TIME)).longValue())
                .build();
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.RowEntity;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.oap.server.core.profiling.trace.ProfileTaskRecord;
import org.apache.skywalking.oap.server.core.query.type.ProfileTask;
import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileTaskQueryDAO;
import org.apache.skywalking.oap.server.library.util.StringUtil;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Stream-model read DAO for {@link ProfileTaskRecord}: lists configured profiling tasks
 * and looks a single task up by its row identifier.
 */
public class BanyanDBProfileTaskQueryDAO extends AbstractBanyanDBDAO implements IProfileTaskQueryDAO {
    // Tags projected from the stream; exactly the fields needed to build a ProfileTask.
    private static final Set<String> TAGS = ImmutableSet.of(
            ProfileTaskRecord.SERVICE_ID,
            ProfileTaskRecord.ENDPOINT_NAME,
            ProfileTaskRecord.START_TIME,
            ProfileTaskRecord.CREATE_TIME,
            ProfileTaskRecord.DURATION,
            ProfileTaskRecord.MIN_DURATION_THRESHOLD,
            ProfileTaskRecord.DUMP_PERIOD,
            ProfileTaskRecord.MAX_SAMPLING_COUNT
    );

    public BanyanDBProfileTaskQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Lists profiling tasks matching the supplied filters; every parameter is optional
     * and is only attached to the query when present.
     */
    @Override
    public List<ProfileTask> getTaskList(String serviceId, String endpointName, Long startTimeBucket, Long endTimeBucket, Integer limit) throws IOException {
        final StreamQueryResponse response = query(ProfileTaskRecord.INDEX_NAME, TAGS,
                new QueryBuilder<StreamQuery>() {
                    @Override
                    protected void apply(StreamQuery query) {
                        // Attach only the conditions the caller actually supplied.
                        if (StringUtil.isNotEmpty(serviceId)) {
                            query.and(eq(ProfileTaskRecord.SERVICE_ID, serviceId));
                        }
                        if (StringUtil.isNotEmpty(endpointName)) {
                            query.and(eq(ProfileTaskRecord.ENDPOINT_NAME, endpointName));
                        }
                        if (startTimeBucket != null) {
                            query.and(gte(ProfileTaskRecord.TIME_BUCKET, startTimeBucket));
                        }
                        if (endTimeBucket != null) {
                            query.and(lte(ProfileTaskRecord.TIME_BUCKET, endTimeBucket));
                        }
                        if (limit != null) {
                            query.setLimit(limit);
                        }
                    }
                });
        if (response.size() == 0) {
            return Collections.emptyList();
        }
        final List<ProfileTask> tasks = new ArrayList<>(response.size());
        response.getElements().forEach(row -> tasks.add(convert(row)));
        return tasks;
    }

    /**
     * Finds a single task by row ID. The match currently happens on the client side
     * because the server cannot filter by ID yet.
     */
    @Override
    public ProfileTask getById(String id) throws IOException {
        final StreamQueryResponse response = query(ProfileTaskRecord.INDEX_NAME, TAGS,
                new QueryBuilder<StreamQuery>() {
                    @Override
                    protected void apply(StreamQuery query) {
                        // TODO: push the ID match down to the server once supported.
                    }
                });
        if (response.size() == 0) {
            return null;
        }
        return response.getElements().stream()
                .filter(row -> id.equals(row.getId()))
                .findFirst()
                .map(this::convert)
                .orElse(null);
    }

    /** Maps a single BanyanDB row to the query-layer {@link ProfileTask} type. */
    private ProfileTask convert(RowEntity row) {
        return ProfileTask.builder()
                .id(row.getId())
                .serviceId(row.getTagValue(ProfileTaskRecord.SERVICE_ID))
                .endpointName(row.getTagValue(ProfileTaskRecord.ENDPOINT_NAME))
                .startTime(((Number) row.getTagValue(ProfileTaskRecord.START_TIME)).longValue())
                .createTime(((Number) row.getTagValue(ProfileTaskRecord.CREATE_TIME)).longValue())
                .duration(((Number) row.getTagValue(ProfileTaskRecord.DURATION)).intValue())
                .minDurationThreshold(((Number) row.getTagValue(ProfileTaskRecord.MIN_DURATION_THRESHOLD)).intValue())
                .dumpPeriod(((Number) row.getTagValue(ProfileTaskRecord.DUMP_PERIOD)).intValue())
                .maxSamplingCount(((Number) row.getTagValue(ProfileTaskRecord.MAX_SAMPLING_COUNT)).intValue())
                .build();
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.RowEntity;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.profiling.trace.ProfileThreadSnapshotRecord;
import org.apache.skywalking.oap.server.core.query.type.BasicTrace;
import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileThreadSnapshotQueryDAO;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBConverter;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Stream-model read DAO for {@link ProfileThreadSnapshotRecord}: serves the profiled
 * segment listing and the thread-snapshot lookups used by the trace-profiling feature.
 */
public class BanyanDBProfileThreadSnapshotQueryDAO extends AbstractBanyanDBDAO implements IProfileThreadSnapshotQueryDAO {
    // Minimal snapshot tag set for locating segments (no stack payload).
    private static final Set<String> TAGS_BASIC = ImmutableSet.of(ProfileThreadSnapshotRecord.TASK_ID, ProfileThreadSnapshotRecord.SEGMENT_ID,
            ProfileThreadSnapshotRecord.DUMP_TIME, ProfileThreadSnapshotRecord.SEQUENCE);
    // Full snapshot tag set, including the binary stack dump, required by the record builder.
    private static final Set<String> TAGS_ALL = ImmutableSet.of(ProfileThreadSnapshotRecord.TASK_ID,
            ProfileThreadSnapshotRecord.SEGMENT_ID,
            ProfileThreadSnapshotRecord.DUMP_TIME,
            ProfileThreadSnapshotRecord.SEQUENCE,
            ProfileThreadSnapshotRecord.TIME_BUCKET,
            ProfileThreadSnapshotRecord.STACK_BINARY);
    // Segment tags needed to render a BasicTrace entry.
    private static final Set<String> TAGS_TRACE = ImmutableSet.of(SegmentRecord.TRACE_ID,
            SegmentRecord.IS_ERROR,
            SegmentRecord.SERVICE_ID,
            SegmentRecord.SERVICE_INSTANCE_ID,
            SegmentRecord.ENDPOINT_ID,
            SegmentRecord.LATENCY,
            SegmentRecord.START_TIME);
    // Segment tags including the raw payload, needed to rebuild a full SegmentRecord.
    private static final Set<String> TAGS_TRACE_ALL = ImmutableSet.of(SegmentRecord.TRACE_ID,
            SegmentRecord.IS_ERROR,
            SegmentRecord.SERVICE_ID,
            SegmentRecord.SERVICE_INSTANCE_ID,
            SegmentRecord.ENDPOINT_ID,
            SegmentRecord.LATENCY,
            SegmentRecord.START_TIME,
            SegmentRecord.TIME_BUCKET,
            SegmentRecord.DATA_BINARY);

    // Converter between BanyanDB rows and ProfileThreadSnapshotRecord entities.
    // NOTE(review): shared across threads — assumed stateless; confirm against the builder impl.
    protected final ProfileThreadSnapshotRecord.Builder builder =
            new ProfileThreadSnapshotRecord.Builder();

    public BanyanDBProfileThreadSnapshotQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Lists the traces profiled by the given task, newest first.
     * Profiled segments are identified by snapshot rows with sequence 0, then each
     * matching {@link SegmentRecord} is fetched individually.
     *
     * @param taskId profiling task identifier
     * @return basic trace views sorted by start time descending; empty when none match
     */
    @Override
    public List<BasicTrace> queryProfiledSegments(String taskId) throws IOException {
        StreamQueryResponse resp = query(ProfileThreadSnapshotRecord.INDEX_NAME,
                TAGS_BASIC,
                new QueryBuilder<StreamQuery>() {
                    @Override
                    public void apply(StreamQuery query) {
                        // Sequence 0 marks the first snapshot of a profiled segment.
                        query.and(eq(ProfileThreadSnapshotRecord.TASK_ID, taskId))
                                .and(eq(ProfileThreadSnapshotRecord.SEQUENCE, 0L));
                    }
                });
        if (resp.getElements().isEmpty()) {
            return Collections.emptyList();
        }
        final List<String> segmentIds = new LinkedList<>();
        for (final RowEntity rowEntity : resp.getElements()) {
            segmentIds.add(rowEntity.getTagValue(ProfileThreadSnapshotRecord.SEGMENT_ID));
        }
        // One query per segment ID, because the server lacks an `IN`/`OR` operator.
        // TODO: support `IN` or `OR` logic operation in BanyanDB
        List<BasicTrace> basicTraces = new ArrayList<>();
        for (String segmentID : segmentIds) {
            final StreamQueryResponse segmentRecordResp = query(SegmentRecord.INDEX_NAME,
                    TAGS_TRACE,
                    new QueryBuilder<StreamQuery>() {
                        @Override
                        public void apply(StreamQuery traceQuery) {
                            traceQuery.and(eq(SegmentRecord.SEGMENT_ID, segmentID));
                        }
                    });
            for (final RowEntity row : segmentRecordResp.getElements()) {
                BasicTrace basicTrace = new BasicTrace();
                basicTrace.setSegmentId(row.getId());
                basicTrace.setStart(String.valueOf((Number) row.getTagValue(SegmentRecord.START_TIME)));
                basicTrace.getEndpointNames().add(IDManager.EndpointID.analysisId(
                        row.getTagValue(SegmentRecord.ENDPOINT_ID)
                ).getEndpointName());
                basicTrace.setDuration(((Number) row.getTagValue(SegmentRecord.LATENCY)).intValue());
                basicTrace.setError(BooleanUtils.valueToBoolean(
                        ((Number) row.getTagValue(SegmentRecord.IS_ERROR)).intValue()
                ));
                basicTrace.getTraceIds().add(row.getTagValue(SegmentRecord.TRACE_ID));
                basicTraces.add(basicTrace);
            }
        }
        // Client-side descending sort on start time.
        // TODO: Sort in DB with DESC
        basicTraces = basicTraces.stream()
                .sorted(Comparator.comparing((Function<BasicTrace, Long>) basicTrace -> Long.parseLong(basicTrace.getStart()))
                        .reversed())
                .collect(Collectors.toList());
        return basicTraces;
    }

    /**
     * @return the smallest snapshot sequence in [start, end]; {@link Integer#MAX_VALUE}
     * when no snapshot falls in the range (sentinel kept for caller compatibility)
     */
    @Override
    public int queryMinSequence(String segmentId, long start, long end) throws IOException {
        return querySequenceWithAgg(AggType.MIN, segmentId, start, end);
    }

    /**
     * @return the largest snapshot sequence in [start, end]; {@link Integer#MIN_VALUE}
     * when no snapshot falls in the range (sentinel kept for caller compatibility)
     */
    @Override
    public int queryMaxSequence(String segmentId, long start, long end) throws IOException {
        return querySequenceWithAgg(AggType.MAX, segmentId, start, end);
    }

    /**
     * Loads full snapshot records of a segment whose sequence lies in
     * [minSequence, maxSequence].
     */
    @Override
    public List<ProfileThreadSnapshotRecord> queryRecords(String segmentId, int minSequence, int maxSequence) throws IOException {
        StreamQueryResponse resp = query(ProfileThreadSnapshotRecord.INDEX_NAME,
                TAGS_ALL,
                new QueryBuilder<StreamQuery>() {
                    @Override
                    public void apply(StreamQuery query) {
                        query.and(eq(ProfileThreadSnapshotRecord.SEGMENT_ID, segmentId))
                                .and(lte(ProfileThreadSnapshotRecord.SEQUENCE, maxSequence))
                                .and(gte(ProfileThreadSnapshotRecord.SEQUENCE, minSequence));
                    }
                });
        // Guard the initial capacity: when maxSequence < minSequence (e.g. the
        // MAX_VALUE/MIN_VALUE sentinels from the empty-range sequence queries) the raw
        // difference is negative and `new ArrayList<>(negative)` would throw.
        List<ProfileThreadSnapshotRecord> result = new ArrayList<>(Math.max(0, maxSequence - minSequence));
        for (final RowEntity rowEntity : resp.getElements()) {
            ProfileThreadSnapshotRecord record = this.builder.storage2Entity(
                    new BanyanDBConverter.StorageToStream(ProfileThreadSnapshotRecord.INDEX_NAME, rowEntity));
            result.add(record);
        }
        return result;
    }

    /**
     * Loads the full {@link SegmentRecord} (including the binary payload) of one
     * profiled segment.
     *
     * @return the segment, or {@code null} when the ID is unknown
     */
    @Override
    public SegmentRecord getProfiledSegment(String segmentId) throws IOException {
        StreamQueryResponse resp = query(SegmentRecord.INDEX_NAME,
                TAGS_TRACE_ALL,
                new QueryBuilder<StreamQuery>() {
                    @Override
                    public void apply(StreamQuery query) {
                        query.and(eq(SegmentRecord.SEGMENT_ID, segmentId));
                    }
                });
        if (resp.size() == 0) {
            return null;
        }
        final RowEntity rowEntity = resp.getElements().iterator().next();
        return new SegmentRecord.Builder().storage2Entity(
                new BanyanDBConverter.StorageToStream(SegmentRecord.INDEX_NAME, rowEntity));
    }

    /**
     * Computes the min/max snapshot sequence of a segment within the dump-time range
     * [start, end] by aggregating on the client side.
     * Returns {@link Integer#MAX_VALUE} (MIN) / {@link Integer#MIN_VALUE} (MAX) when
     * no snapshot matches — existing callers rely on these sentinels.
     */
    private int querySequenceWithAgg(AggType aggType, String segmentId, long start, long end) throws IOException {
        StreamQueryResponse resp = query(ProfileThreadSnapshotRecord.INDEX_NAME,
                TAGS_ALL,
                new QueryBuilder<StreamQuery>() {
                    @Override
                    public void apply(StreamQuery query) {
                        query.and(eq(ProfileThreadSnapshotRecord.SEGMENT_ID, segmentId))
                                .and(lte(ProfileThreadSnapshotRecord.DUMP_TIME, end))
                                .and(gte(ProfileThreadSnapshotRecord.DUMP_TIME, start));
                    }
                });
        List<ProfileThreadSnapshotRecord> records = new ArrayList<>();
        for (final RowEntity rowEntity : resp.getElements()) {
            ProfileThreadSnapshotRecord record = this.builder.storage2Entity(
                    new BanyanDBConverter.StorageToStream(ProfileThreadSnapshotRecord.INDEX_NAME, rowEntity));
            records.add(record);
        }
        switch (aggType) {
            case MIN:
                int minValue = Integer.MAX_VALUE;
                for (final ProfileThreadSnapshotRecord record : records) {
                    int sequence = record.getSequence();
                    minValue = Math.min(minValue, sequence);
                }
                return minValue;
            case MAX:
                int maxValue = Integer.MIN_VALUE;
                for (ProfileThreadSnapshotRecord record : records) {
                    int sequence = record.getSequence();
                    maxValue = Math.max(maxValue, sequence);
                }
                return maxValue;
            default:
                throw new IllegalArgumentException("should not reach this line");
        }
    }

    /** Client-side aggregation mode for {@link #querySequenceWithAgg}. */
    enum AggType {
        MIN, MAX
    }
}
\ No newline at end of file
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import lombok.RequiredArgsConstructor;
import org.apache.skywalking.banyandb.v1.client.StreamWrite;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBConverter;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.MetadataRegistry;
import java.io.IOException;
/**
 * Write DAO for record (stream) models: turns an OAP {@link Record} into a BanyanDB
 * {@link StreamWrite} wrapped in an {@link InsertRequest} for batch processing.
 */
@RequiredArgsConstructor
public class BanyanDBRecordDAO implements IRecordDAO {
    // Converts the OAP record entity into storage key/value form.
    private final StorageBuilder<Record> storageBuilder;

    /**
     * Prepares a single stream-write request for the given record.
     *
     * @throws IOException when the model has not been registered in the metadata registry
     */
    @Override
    public InsertRequest prepareBatchInsert(Model model, Record record) throws IOException {
        final MetadataRegistry.Schema schema = MetadataRegistry.INSTANCE.findMetadata(model.getName());
        if (schema == null) {
            throw new IOException(model.getName() + " is not registered");
        }
        final StreamWrite write = new StreamWrite(
                schema.getMetadata().getGroup(),                                       // group name
                model.getName(),                                                       // index-name
                record.id(),                                                           // identity
                TimeBucket.getTimestamp(record.getTimeBucket(), model.getDownsampling())); // timestamp
        final Convert2Storage<StreamWrite> converter = new BanyanDBConverter.StreamToStorage(schema, write);
        storageBuilder.entity2Storage(record, converter);
        return new BanyanDBStreamInsertRequest(converter.obtain());
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.oap.server.core.analysis.config.NoneStream;
import org.apache.skywalking.oap.server.core.analysis.management.ManagementData;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.storage.AbstractDAO;
import org.apache.skywalking.oap.server.core.storage.IManagementDAO;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.StorageDAO;
import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBManagementDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.measure.BanyanDBMetricsDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBNoneStreamDAO;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
/**
 * Factory of the BanyanDB-backed DAOs: hands out the metrics, record, none-stream and
 * management implementations, binding each to the shared storage client.
 */
@Slf4j
public class BanyanDBStorageDAO extends AbstractDAO<BanyanDBStorageClient> implements StorageDAO {
    public BanyanDBStorageDAO(BanyanDBStorageClient client) {
        super(client);
    }

    @Override
    @SuppressWarnings("unchecked")
    public IMetricsDAO newMetricsDao(StorageBuilder storageBuilder) {
        // StorageDAO's raw-typed contract forces the cast to the concrete builder type.
        return new BanyanDBMetricsDAO(getClient(), (StorageBuilder<Metrics>) storageBuilder);
    }

    @Override
    @SuppressWarnings("unchecked")
    public IRecordDAO newRecordDao(StorageBuilder storageBuilder) {
        return new BanyanDBRecordDAO((StorageBuilder<Record>) storageBuilder);
    }

    @Override
    @SuppressWarnings("unchecked")
    public INoneStreamDAO newNoneStreamDao(StorageBuilder storageBuilder) {
        return new BanyanDBNoneStreamDAO(getClient(), (StorageBuilder<NoneStream>) storageBuilder);
    }

    @Override
    @SuppressWarnings("unchecked")
    public IManagementDAO newManagementDao(StorageBuilder storageBuilder) {
        return new BanyanDBManagementDAO(getClient(), (StorageBuilder<ManagementData>) storageBuilder);
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.apache.skywalking.banyandb.v1.client.StreamWrite;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
/**
 * {@link InsertRequest} implementation that simply carries a prepared BanyanDB
 * {@link StreamWrite}, to be flushed later by the batch DAO.
 * Lombok-free equivalent of the generated constructor + getter.
 */
public class BanyanDBStreamInsertRequest implements InsertRequest {
    private final StreamWrite streamWrite;

    public BanyanDBStreamInsertRequest(final StreamWrite streamWrite) {
        this.streamWrite = streamWrite;
    }

    public StreamWrite getStreamWrite() {
        return streamWrite;
    }
}
\ No newline at end of file
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.stream;
import com.google.common.collect.ImmutableSet;
import org.apache.skywalking.banyandb.v1.client.AbstractQuery;
import org.apache.skywalking.banyandb.v1.client.RowEntity;
import org.apache.skywalking.banyandb.v1.client.StreamQuery;
import org.apache.skywalking.banyandb.v1.client.StreamQueryResponse;
import org.apache.skywalking.banyandb.v1.client.TimestampRange;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.query.type.BasicTrace;
import org.apache.skywalking.oap.server.core.query.type.QueryOrder;
import org.apache.skywalking.oap.server.core.query.type.Span;
import org.apache.skywalking.oap.server.core.query.type.TraceBrief;
import org.apache.skywalking.oap.server.core.query.type.TraceState;
import org.apache.skywalking.oap.server.core.storage.query.ITraceQueryDAO;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.library.util.StringUtil;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBConverter;
import org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageClient;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Trace query DAO on top of the BanyanDB stream model: {@link SegmentRecord} rows back
 * both the basic trace listing and the full segment lookup by trace ID.
 */
public class BanyanDBTraceQueryDAO extends AbstractBanyanDBDAO implements ITraceQueryDAO {
    // Tags needed to render a BasicTrace entry in the trace list.
    private static final Set<String> BASIC_TAGS = ImmutableSet.of(SegmentRecord.TRACE_ID,
            SegmentRecord.IS_ERROR,
            SegmentRecord.SERVICE_ID,
            SegmentRecord.SERVICE_INSTANCE_ID,
            SegmentRecord.ENDPOINT_ID,
            SegmentRecord.LATENCY,
            SegmentRecord.START_TIME,
            SegmentRecord.TAGS
    );
    // Tags including the raw segment payload, needed to rebuild full SegmentRecords.
    private static final Set<String> TAGS = ImmutableSet.of(SegmentRecord.TRACE_ID,
            SegmentRecord.IS_ERROR,
            SegmentRecord.SERVICE_ID,
            SegmentRecord.SERVICE_INSTANCE_ID,
            SegmentRecord.ENDPOINT_ID,
            SegmentRecord.LATENCY,
            SegmentRecord.START_TIME,
            SegmentRecord.TIME_BUCKET,
            SegmentRecord.DATA_BINARY);

    public BanyanDBTraceQueryDAO(BanyanDBStorageClient client) {
        super(client);
    }

    /**
     * Lists basic traces matching the supplied filters; each filter parameter is
     * optional (0 / empty means "no condition").
     * <p>
     * Fix: the {@code traceId} argument was previously accepted but never applied to
     * the query; it is now an equality condition like in the other storage plugins.
     */
    @Override
    public TraceBrief queryBasicTraces(long startSecondTB, long endSecondTB, long minDuration, long maxDuration, String serviceId, String serviceInstanceId, String endpointId, String traceId, int limit, int from, TraceState traceState, QueryOrder queryOrder, List<Tag> tags) throws IOException {
        final QueryBuilder<StreamQuery> q = new QueryBuilder<StreamQuery>() {
            @Override
            public void apply(StreamQuery query) {
                if (StringUtil.isNotEmpty(traceId)) {
                    query.and(eq(SegmentRecord.TRACE_ID, traceId));
                }
                if (minDuration != 0) {
                    // duration >= minDuration
                    query.and(gte(SegmentRecord.LATENCY, minDuration));
                }
                if (maxDuration != 0) {
                    // duration <= maxDuration
                    query.and(lte(SegmentRecord.LATENCY, maxDuration));
                }
                if (StringUtil.isNotEmpty(serviceId)) {
                    query.and(eq(SegmentRecord.SERVICE_ID, serviceId));
                }
                if (StringUtil.isNotEmpty(serviceInstanceId)) {
                    query.and(eq(SegmentRecord.SERVICE_INSTANCE_ID, serviceInstanceId));
                }
                if (StringUtil.isNotEmpty(endpointId)) {
                    query.and(eq(SegmentRecord.ENDPOINT_ID, endpointId));
                }
                switch (traceState) {
                    case ERROR:
                        query.and(eq(SegmentRecord.IS_ERROR, BooleanUtils.TRUE));
                        break;
                    case SUCCESS:
                        query.and(eq(SegmentRecord.IS_ERROR, BooleanUtils.FALSE));
                        break;
                    // ALL: no error condition.
                }
                switch (queryOrder) {
                    case BY_START_TIME:
                        query.setOrderBy(new StreamQuery.OrderBy(SegmentRecord.START_TIME, AbstractQuery.Sort.DESC));
                        break;
                    case BY_DURATION:
                        query.setOrderBy(new StreamQuery.OrderBy(SegmentRecord.LATENCY, AbstractQuery.Sort.DESC));
                        break;
                }
                if (CollectionUtils.isNotEmpty(tags)) {
                    // Tag conditions are matched against the serialized key=value strings.
                    List<String> tagsConditions = new ArrayList<>(tags.size());
                    for (final Tag tag : tags) {
                        tagsConditions.add(tag.toString());
                    }
                    query.and(having(SegmentRecord.TAGS, tagsConditions));
                }
                query.setLimit(limit);
                query.setOffset(from);
            }
        };
        // Restrict by time range only when both bounds are given.
        TimestampRange tsRange = null;
        if (startSecondTB > 0 && endSecondTB > 0) {
            tsRange = new TimestampRange(TimeBucket.getTimestamp(startSecondTB), TimeBucket.getTimestamp(endSecondTB));
        }
        StreamQueryResponse resp = query(SegmentRecord.INDEX_NAME,
                BASIC_TAGS,
                tsRange, q);
        TraceBrief traceBrief = new TraceBrief();
        traceBrief.setTotal(resp.size());
        if (resp.size() == 0) {
            return traceBrief;
        }
        for (final RowEntity row : resp.getElements()) {
            BasicTrace basicTrace = new BasicTrace();
            basicTrace.setSegmentId(row.getId());
            basicTrace.setStart(String.valueOf((Number) row.getTagValue(SegmentRecord.START_TIME)));
            basicTrace.getEndpointNames().add(IDManager.EndpointID.analysisId(
                    row.getTagValue(SegmentRecord.ENDPOINT_ID)
            ).getEndpointName());
            basicTrace.setDuration(((Number) row.getTagValue(SegmentRecord.LATENCY)).intValue());
            basicTrace.setError(BooleanUtils.valueToBoolean(
                    ((Number) row.getTagValue(SegmentRecord.IS_ERROR)).intValue()
            ));
            basicTrace.getTraceIds().add(row.getTagValue(SegmentRecord.TRACE_ID));
            traceBrief.getTraces().add(basicTrace);
        }
        return traceBrief;
    }

    /**
     * Loads every segment (including binary payloads) belonging to one trace.
     */
    @Override
    public List<SegmentRecord> queryByTraceId(String traceId) throws IOException {
        StreamQueryResponse resp = query(SegmentRecord.INDEX_NAME, TAGS,
                new QueryBuilder<StreamQuery>() {
                    @Override
                    public void apply(StreamQuery query) {
                        query.and(eq(SegmentRecord.TRACE_ID, traceId));
                    }
                });
        List<SegmentRecord> segmentRecords = new ArrayList<>(resp.getElements().size());
        for (final RowEntity rowEntity : resp.getElements()) {
            SegmentRecord segmentRecord = new SegmentRecord.Builder().storage2Entity(
                    new BanyanDBConverter.StorageToStream(SegmentRecord.INDEX_NAME, rowEntity));
            segmentRecords.add(segmentRecord);
        }
        return segmentRecords;
    }

    /**
     * Secondary trace lookup for third-party span sources; not supported by this
     * storage, so always empty.
     */
    @Override
    public List<Span> doFlexibleTraceQuery(String traceId) throws IOException {
        return Collections.emptyList();
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.util;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
/**
 * Converts between {@code double} values and their 8-byte big-endian IEEE-754 encoding.
 * <p>
 * Fix: the previous implementation returned the backing array of a thread-local Netty
 * {@code ByteBuf}, so every {@code double2Bytes} call on the same thread returned the
 * SAME array and silently overwrote previously returned results. Each call now returns
 * a fresh array, with the identical big-endian byte layout, using only the JDK.
 */
public class ByteUtil {
    /**
     * Decodes the first 8 bytes of {@code bytes} as a big-endian IEEE-754 double.
     *
     * @param bytes at least 8 bytes of encoded data
     * @return the decoded value
     * @throws IndexOutOfBoundsException if fewer than 8 bytes are supplied
     */
    public static Double bytes2Double(byte[] bytes) {
        long bits = 0L;
        for (int i = 0; i < Long.BYTES; i++) {
            bits = (bits << 8) | (bytes[i] & 0xFFL);
        }
        return Double.longBitsToDouble(bits);
    }

    /**
     * Encodes a double as a freshly allocated 8-byte big-endian array.
     *
     * @param number value to encode
     * @return a new 8-byte array owned by the caller (never shared or reused)
     */
    public static byte[] double2Bytes(double number) {
        final long bits = Double.doubleToLongBits(number);
        final byte[] encoded = new byte[Long.BYTES];
        for (int i = 0; i < Long.BYTES; i++) {
            // Most-significant byte first (big-endian), matching the previous encoding.
            encoded[i] = (byte) (bits >>> (8 * (Long.BYTES - 1 - i)));
        }
        return encoded;
    }
}
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
org.apache.skywalking.oap.server.storage.plugin.banyandb.BanyanDBStorageProvider
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.banyandb.util;
import org.junit.Assert;
import org.junit.Test;
/**
 * Round-trip tests for {@link ByteUtil}: encoding a double and decoding it back must
 * reproduce the value, and the encoded form is always 8 bytes.
 */
public class ByteUtilTest {
    private static final double DELTA = 0.00001;

    @Test
    public void testConvertDoubleAndBackOnce() {
        assertRoundTrip(3.14159);
    }

    @Test
    public void testConvertDoubleAndBackTwice() {
        // Two consecutive conversions on the same thread must both round-trip.
        assertRoundTrip(3.14159);
        assertRoundTrip(2.71828);
    }

    /** Encodes {@code value}, checks the 8-byte length, and decodes it back. */
    private void assertRoundTrip(double value) {
        final byte[] encoded = ByteUtil.double2Bytes(value);
        Assert.assertEquals(8, encoded.length);
        Assert.assertEquals(value, ByteUtil.bytes2Double(encoded), DELTA);
    }
}
......@@ -12,6 +12,7 @@ armeria-graphql-1.14.1.jar
armeria-graphql-protocol-1.14.1.jar
armeria-protobuf-1.14.1.jar
audience-annotations-0.5.0.jar
banyandb-java-client-0.1.0-SNAPSHOT.jar
bcpkix-jdk15on-1.69.jar
bcprov-ext-jdk15on-1.69.jar
bcprov-jdk15on-1.69.jar
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册