Unverified commit 5d47d4d2, authored by Zhenxu Ke and committed by GitHub

Remove state / isError and collect exception stacktrace in error logs (#6275)

Parent 85083adf
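The agent-side part of this change makes the toolkit gRPC log appender interceptors (log4j 1.x, log4j 2.x, and logback 1.x, shown in the hunks below) append the log event's exception to the reported text: each new `transformLogText` method concatenates the formatted message with `ThrowableTransformer.INSTANCE.convert2String(throwable, 2048)`, i.e. the stack trace truncated to 2048 characters. The standalone sketch below illustrates that pattern using only plain JDK APIs; `appendThrowable` and its truncation logic are illustrative stand-ins, not the agent's actual `ThrowableTransformer`.

```java
import java.io.PrintWriter;
import java.io.StringWriter;

public class LogTextSketch {
    // Illustrative stand-in for ThrowableTransformer.INSTANCE.convert2String(throwable, 2048):
    // render the stack trace and cap it at maxLength characters.
    static String appendThrowable(String message, Throwable throwable, int maxLength) {
        if (throwable == null) {
            return message;                 // no exception attached to the log event
        }
        StringWriter buffer = new StringWriter();
        throwable.printStackTrace(new PrintWriter(buffer));
        String stackTrace = buffer.toString();
        if (stackTrace.length() > maxLength) {
            stackTrace = stackTrace.substring(0, maxLength);
        }
        return message + "\n" + stackTrace; // same "message\nstacktrace" layout as transformLogText
    }

    public static void main(String[] args) {
        System.out.println(appendThrowable("request failed",
                                           new IllegalStateException("boom"), 2048));
    }
}
```

In the real interceptors, the logback variant additionally checks that the event's `IThrowableProxy` is a `ThrowableProxy` before extracting the `Throwable`, and falls back to the plain formatted message otherwise.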
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Cancel
on:
workflow_run:
workflows: [CI AND IT]
types:
- requested
jobs:
# Cancel outdated builds in the repo, including all action tasks of all commits.
# It runs for all workflow types
cancel-outdated-builds:
runs-on: ubuntu-18.04
timeout-minutes: 10
steps:
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds ci-it.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: ci-it.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds docker-ci.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: docker-ci.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.cluster.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.cluster.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.go.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.go.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.istio.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.istio.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.jdk-versions.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.jdk-versions.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.js.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.js.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.kafka.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.kafka.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.php.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.php.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.profiling.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.profiling.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.python.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.python.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.storages.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.storages.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.ttl.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.ttl.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds plugins-jdk14-test.0.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: plugins-jdk14-test.0.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds plugins-test.0.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: plugins-test.0.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds plugins-test.1.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: plugins-test.1.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds plugins-test.2.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: plugins-test.2.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds plugins-test.3.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: plugins-test.3.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.log.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.log.yaml
\ No newline at end of file
......@@ -23,6 +23,7 @@ on:
- '!**.md'
schedule:
- cron: '0 18 * * *'
env:
SW_AGENT_JDK_VERSION: 8
......
......@@ -81,6 +81,7 @@ Release Notes.
* Chore: Remove duplicate code in the Envoy ALS handler.
* Remove the strict rule on the OAL disable statement parameter.
* Fix an adoption bug for legal metric queries; global-level metric queries are not supported.
* Remove unused log query parameters.
#### UI
* Fix un-removed tags in trace query.
......
......@@ -24,6 +24,7 @@ import java.util.Objects;
import org.apache.skywalking.apm.agent.core.boot.ServiceManager;
import org.apache.skywalking.apm.agent.core.conf.Config;
import org.apache.skywalking.apm.agent.core.context.ContextManager;
import org.apache.skywalking.apm.agent.core.context.util.ThrowableTransformer;
import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor;
import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.MethodInterceptResult;
......@@ -92,7 +93,7 @@ public class GRPCLogAppenderInterceptor implements InstanceMethodsAroundIntercep
.setKey("thread").setValue(event.getThreadName()).build())
.build())
.setBody(LogDataBody.newBuilder().setType(LogDataBody.ContentCase.TEXT.name())
.setText(TextLog.newBuilder().setText(event.getMessage()).build()).build());
.setText(TextLog.newBuilder().setText(transformLogText(event)).build()).build());
return -1 == ContextManager.getSpanId() ? builder.build()
: builder.setTraceContext(TraceContext.newBuilder()
.setTraceId(ContextManager.getGlobalTraceId())
......@@ -100,4 +101,8 @@ public class GRPCLogAppenderInterceptor implements InstanceMethodsAroundIntercep
.setTraceSegmentId(ContextManager.getSegmentId())
.build()).build();
}
private String transformLogText(final LoggingEvent event) {
return event.getMessage() + "\n" + ThrowableTransformer.INSTANCE.convert2String(event.getThrowable(), 2048);
}
}
......@@ -25,6 +25,7 @@ import org.apache.logging.log4j.core.LogEvent;
import org.apache.skywalking.apm.agent.core.boot.ServiceManager;
import org.apache.skywalking.apm.agent.core.conf.Config;
import org.apache.skywalking.apm.agent.core.context.ContextManager;
import org.apache.skywalking.apm.agent.core.context.util.ThrowableTransformer;
import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor;
import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.MethodInterceptResult;
......@@ -87,7 +88,7 @@ public class GRPCLogAppenderInterceptor implements InstanceMethodsAroundIntercep
.setKey("thread").setValue(event.getThreadName()).build())
.build())
.setBody(LogDataBody.newBuilder().setType(LogDataBody.ContentCase.TEXT.name())
.setText(TextLog.newBuilder().setText(event.getMessage().getFormattedMessage()).build())
.setText(TextLog.newBuilder().setText(transformLogText(event)).build())
.build());
return -1 == ContextManager.getSpanId() ? builder.build()
: builder.setTraceContext(TraceContext.newBuilder()
......@@ -96,4 +97,8 @@ public class GRPCLogAppenderInterceptor implements InstanceMethodsAroundIntercep
.setTraceSegmentId(ContextManager.getSegmentId())
.build()).build();
}
private String transformLogText(final LogEvent event) {
return event.getMessage().getFormattedMessage() + "\n" + ThrowableTransformer.INSTANCE.convert2String(event.getThrown(), 2048);
}
}
......@@ -18,12 +18,15 @@
package org.apache.skywalking.apm.toolkit.activation.log.logback.v1.x.log;
import ch.qos.logback.classic.spi.IThrowableProxy;
import ch.qos.logback.classic.spi.ThrowableProxy;
import java.lang.reflect.Method;
import java.util.Objects;
import org.apache.skywalking.apm.agent.core.boot.ServiceManager;
import org.apache.skywalking.apm.agent.core.conf.Config;
import org.apache.skywalking.apm.agent.core.context.ContextManager;
import org.apache.skywalking.apm.agent.core.context.util.ThrowableTransformer;
import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor;
import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.MethodInterceptResult;
......@@ -88,7 +91,7 @@ public class GRPCLogAppenderInterceptor implements InstanceMethodsAroundIntercep
.setKey("thread").setValue(event.getThreadName()).build())
.build())
.setBody(LogDataBody.newBuilder().setType(LogDataBody.ContentCase.TEXT.name())
.setText(TextLog.newBuilder().setText(event.getFormattedMessage()).build()).build());
.setText(TextLog.newBuilder().setText(transformLogText(event)).build()).build());
return -1 == ContextManager.getSpanId() ? builder.build()
: builder.setTraceContext(TraceContext.newBuilder()
.setTraceId(ContextManager.getGlobalTraceId())
......@@ -96,4 +99,13 @@ public class GRPCLogAppenderInterceptor implements InstanceMethodsAroundIntercep
.setTraceSegmentId(ContextManager.getSegmentId())
.build()).build();
}
private String transformLogText(final ILoggingEvent event) {
final IThrowableProxy throwableProxy = event.getThrowableProxy();
if (!(throwableProxy instanceof ThrowableProxy)) {
return event.getFormattedMessage();
}
final Throwable throwable = ((ThrowableProxy) throwableProxy).getThrowable();
return event.getFormattedMessage() + "\n" + ThrowableTransformer.INSTANCE.convert2String(throwable, 2048);
}
}
......@@ -30,7 +30,7 @@ core|default|role|Option values, `Mixed/Receiver/Aggregator`. **Receiver** mode
| - | - | instanceNameMaxLength| Max length limitation of service instance name. The max length of service + instance names should be less than 200.|SW_INSTANCE_NAME_MAX_LENGTH|70|
| - | - | endpointNameMaxLength| Max length limitation of endpoint name. The max length of service + endpoint names should be less than 240.|SW_ENDPOINT_NAME_MAX_LENGTH|150|
| - | - | searchableTracesTags | Define the set of span tag keys that are searchable through GraphQL. Multiple values are separated by commas. | SW_SEARCHABLE_TAG_KEYS | http.method,status_code,db.type,db.instance,mq.queue,mq.topic,mq.broker|
| - | - | searchableLogsTags | Define the set of log tag keys that are searchable through GraphQL. Multiple values are separated by commas. | SW_SEARCHABLE_LOGS_TAG_KEYS | level,logger,thread |
| - | - | searchableLogsTags | Define the set of log tag keys that are searchable through GraphQL. Multiple values are separated by commas. | SW_SEARCHABLE_LOGS_TAG_KEYS | level |
| - | - | gRPCThreadPoolSize|Pool size of gRPC server| SW_CORE_GRPC_THREAD_POOL_SIZE | CPU core * 4|
| - | - | gRPCThreadPoolQueueSize| The queue size of gRPC server| SW_CORE_GRPC_POOL_QUEUE_SIZE | 10000|
| - | - | maxConcurrentCallsPerConnection | The maximum number of concurrent calls permitted for each incoming connection. Defaults to no limit. | SW_CORE_GRPC_MAX_CONCURRENT_CALL | - |
......
......@@ -101,7 +101,7 @@ core:
# Define the set of span tag keys, which should be searchable through the GraphQL.
searchableTracesTags: ${SW_SEARCHABLE_TAG_KEYS:http.method,status_code,db.type,db.instance,mq.queue,mq.topic,mq.broker}
# Define the set of log tag keys, which should be searchable through the GraphQL.
searchableLogsTags: ${SW_SEARCHABLE_LOGS_TAG_KEYS:level,logger,thread}
searchableLogsTags: ${SW_SEARCHABLE_LOGS_TAG_KEYS:level}
storage:
selector: ${SW_STORAGE:h2}
elasticsearch:
......
......@@ -42,7 +42,6 @@ public abstract class AbstractLogRecord extends Record {
public static final String TRACE_ID = "trace_id";
public static final String TRACE_SEGMENT_ID = "trace_segment_id";
public static final String SPAN_ID = "span_id";
public static final String IS_ERROR = "is_error";
public static final String CONTENT_TYPE = "content_type";
public static final String CONTENT = "content";
public static final String TAGS_RAW_DATA = "tags_raw_data";
......@@ -79,10 +78,6 @@ public abstract class AbstractLogRecord extends Record {
private int spanId;
@Setter
@Getter
@Column(columnName = IS_ERROR)
private int isError;
@Setter
@Getter
@Column(columnName = CONTENT_TYPE, storageOnly = true)
private int contentType = ContentType.NONE.value();
@Setter
......@@ -129,7 +124,6 @@ public abstract class AbstractLogRecord extends Record {
map.put(TRACE_ID, record.getTraceId());
map.put(TRACE_SEGMENT_ID, record.getTraceSegmentId());
map.put(SPAN_ID, record.getSpanId());
map.put(IS_ERROR, record.getIsError());
map.put(TIME_BUCKET, record.getTimeBucket());
map.put(CONTENT_TYPE, record.getContentType());
map.put(CONTENT, record.getContent());
......@@ -150,7 +144,6 @@ public abstract class AbstractLogRecord extends Record {
record.setTraceId((String) dbMap.get(TRACE_ID));
record.setTraceSegmentId((String) dbMap.get(TRACE_SEGMENT_ID));
record.setSpanId(((Number) dbMap.get(SPAN_ID)).intValue());
record.setIsError(((Number) dbMap.get(IS_ERROR)).intValue());
record.setContentType(((Number) dbMap.get(CONTENT_TYPE)).intValue());
record.setContent((String) dbMap.get(CONTENT));
record.setTimestamp(((Number) dbMap.get(TIMESTAMP)).longValue());
......
......@@ -26,7 +26,6 @@ import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.query.type.Pagination;
import org.apache.skywalking.oap.server.core.storage.StorageModule;
......@@ -61,7 +60,6 @@ public class LogQueryService implements Service {
String endpointId,
String endpointName,
TraceScopeCondition relatedTrace,
LogState state,
Pagination paging,
Order queryOrder,
final long startTB,
......@@ -87,7 +85,6 @@ public class LogQueryService implements Service {
endpointId,
endpointName,
relatedTrace,
state,
queryOrder,
page.getFrom(), page.getLimit(),
startTB, endTB, tags,
......
......@@ -23,7 +23,6 @@ import lombok.Getter;
import lombok.Setter;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Pagination;
@Getter
......@@ -35,7 +34,6 @@ public class LogQueryCondition {
private String endpointName;
private TraceScopeCondition relatedTrace;
private Duration queryDuration;
private LogState state;
private Pagination paging;
private List<Tag> tags;
private List<String> keywordsOfContent;
......
......@@ -34,8 +34,6 @@ public class Log {
private String endpointName;
private String traceId;
private String timestamp;
private boolean isError;
private String statusCode;
private ContentType contentType = ContentType.NONE;
private String content;
private final List<KeyValue> tags;
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.query.type;
public enum LogState {
ALL, SUCCESS, ERROR
}
......@@ -41,6 +41,7 @@ public abstract class AbstractLog extends Source {
private String content;
private byte[] tagsRawData;
private List<Tag> tags = new ArrayList<>();
private boolean error = false;
@Override
public String getEntityId() {
......
......@@ -27,7 +27,6 @@ import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.KeyValue;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.library.module.Service;
......@@ -42,7 +41,6 @@ public interface ILogQueryDAO extends Service {
String endpointId,
String endpointName,
TraceScopeCondition relatedTrace,
LogState state,
Order queryOrder,
int from,
int limit,
......
......@@ -68,7 +68,6 @@ public class LogQuery implements GraphQLQueryResolver {
condition.getEndpointId(),
condition.getEndpointName(),
condition.getRelatedTrace(),
condition.getState(),
condition.getPaging(),
queryOrder,
startSecondTB, endSecondTB,
......
Subproject commit 98d2dfd8b4cc549d64d7541c45b6808de907c42b
Subproject commit 3d91ce7e2704e2b4bf4b63d3f6a26dd19a59caa6
......@@ -29,11 +29,9 @@ import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.query.type.Log;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.EsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.MatchCNameBuilder;
......@@ -65,7 +63,6 @@ public class LogQueryEsDAO extends EsDAO implements ILogQueryDAO {
final String endpointId,
final String endpointName,
final TraceScopeCondition relatedTrace,
final LogState state,
final Order queryOrder,
final int from,
final int limit,
......@@ -111,17 +108,6 @@ public class LogQueryEsDAO extends EsDAO implements ILogQueryDAO {
QueryBuilders.termQuery(AbstractLogRecord.SPAN_ID, relatedTrace.getSpanId()));
}
}
if (LogState.ERROR.equals(state)) {
boolQueryBuilder.must()
.add(
QueryBuilders.termQuery(AbstractLogRecord.IS_ERROR, BooleanUtils.booleanToValue(true)));
} else if (LogState.SUCCESS.equals(state)) {
boolQueryBuilder.must()
.add(QueryBuilders.termQuery(
AbstractLogRecord.IS_ERROR,
BooleanUtils.booleanToValue(false)
));
}
if (CollectionUtils.isNotEmpty(tags)) {
BoolQueryBuilder tagMatchQuery = QueryBuilders.boolQuery();
......@@ -162,8 +148,6 @@ public class LogQueryEsDAO extends EsDAO implements ILogQueryDAO {
log.setEndpointName((String) searchHit.getSourceAsMap().get(AbstractLogRecord.ENDPOINT_NAME));
log.setTraceId((String) searchHit.getSourceAsMap().get(AbstractLogRecord.TRACE_ID));
log.setTimestamp(searchHit.getSourceAsMap().get(AbstractLogRecord.TIMESTAMP).toString());
log.setError(BooleanUtils.valueToBoolean(((Number) searchHit.getSourceAsMap()
.get(AbstractLogRecord.IS_ERROR)).intValue()));
log.setContentType(ContentType.instanceOf(((Number) searchHit.getSourceAsMap()
.get(
AbstractLogRecord.CONTENT_TYPE)).intValue()));
......
......@@ -29,11 +29,9 @@ import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.query.type.Log;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.EsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.MatchCNameBuilder;
......@@ -65,7 +63,6 @@ public class LogQueryEs7DAO extends EsDAO implements ILogQueryDAO {
final String endpointId,
final String endpointName,
final TraceScopeCondition relatedTrace,
final LogState state,
final Order queryOrder,
final int from,
final int limit,
......@@ -114,18 +111,6 @@ public class LogQueryEs7DAO extends EsDAO implements ILogQueryDAO {
}
}
if (LogState.ERROR.equals(state)) {
boolQueryBuilder.must()
.add(
QueryBuilders.termQuery(AbstractLogRecord.IS_ERROR, BooleanUtils.booleanToValue(true)));
} else if (LogState.SUCCESS.equals(state)) {
boolQueryBuilder.must()
.add(QueryBuilders.termQuery(
AbstractLogRecord.IS_ERROR,
BooleanUtils.booleanToValue(false)
));
}
if (CollectionUtils.isNotEmpty(tags)) {
BoolQueryBuilder tagMatchQuery = QueryBuilders.boolQuery();
tags.forEach(tag -> tagMatchQuery.must(QueryBuilders.termQuery(AbstractLogRecord.TAGS, tag.toString())));
......@@ -164,8 +149,6 @@ public class LogQueryEs7DAO extends EsDAO implements ILogQueryDAO {
log.setEndpointName((String) searchHit.getSourceAsMap().get(AbstractLogRecord.ENDPOINT_NAME));
log.setTraceId((String) searchHit.getSourceAsMap().get(AbstractLogRecord.TRACE_ID));
log.setTimestamp(searchHit.getSourceAsMap().get(AbstractLogRecord.TIMESTAMP).toString());
log.setError(BooleanUtils.valueToBoolean(((Number) searchHit.getSourceAsMap()
.get(AbstractLogRecord.IS_ERROR)).intValue()));
log.setContentType(ContentType.instanceOf(((Number) searchHit.getSourceAsMap()
.get(
AbstractLogRecord.CONTENT_TYPE)).intValue()));
......
......@@ -30,11 +30,9 @@ import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.query.type.Log;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.core.storage.type.StorageDataComplexObject;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.InfluxClient;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.InfluxConstants;
......@@ -50,7 +48,6 @@ import static java.util.Objects.nonNull;
import static org.apache.skywalking.apm.util.StringUtil.isNotEmpty;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.ENDPOINT_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.ENDPOINT_NAME;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.IS_ERROR;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SERVICE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SERVICE_INSTANCE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SPAN_ID;
......@@ -78,7 +75,6 @@ public class LogQuery implements ILogQueryDAO {
final String endpointId,
final String endpointName,
final TraceScopeCondition relatedTrace,
final LogState state,
final Order queryOrder,
final int from,
final int limit,
......@@ -119,17 +115,6 @@ public class LogQuery implements ILogQueryDAO {
recallQuery.and(eq(SPAN_ID, relatedTrace.getSpanId()));
}
}
switch (state) {
case ERROR: {
recallQuery.and(eq(IS_ERROR, true));
break;
}
case SUCCESS: {
recallQuery.and(eq(IS_ERROR, false));
break;
}
}
if (startTB != 0 && endTB != 0) {
recallQuery.and(gte(AbstractLogRecord.TIME_BUCKET, startTB))
.and(lte(AbstractLogRecord.TIME_BUCKET, endTB));
......@@ -182,7 +167,6 @@ public class LogQuery implements ILogQueryDAO {
log.setEndpointName((String) data.get(ENDPOINT_NAME));
log.setTraceId((String) data.get(TRACE_ID));
log.setTimestamp(data.get(TIMESTAMP).toString());
log.setError(BooleanUtils.valueToBoolean(((Number) data.get(IS_ERROR)).intValue()));
log.setContentType(
ContentType.instanceOf(((Number) data.get(AbstractLogRecord.CONTENT_TYPE)).intValue()));
log.setContent((String) data.get(AbstractLogRecord.CONTENT));
......
......@@ -37,12 +37,10 @@ import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.query.type.Log;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.elasticsearch.search.sort.SortOrder;
......@@ -51,7 +49,6 @@ import static org.apache.skywalking.oap.server.core.analysis.manual.log.Abstract
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.CONTENT_TYPE;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.ENDPOINT_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.ENDPOINT_NAME;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.IS_ERROR;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SERVICE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SERVICE_INSTANCE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SPAN_ID;
......@@ -83,7 +80,6 @@ public class H2LogQueryDAO implements ILogQueryDAO {
String endpointId,
String endpointName,
TraceScopeCondition relatedTrace,
LogState state,
Order queryOrder,
int from,
int limit,
......@@ -144,14 +140,6 @@ public class H2LogQueryDAO implements ILogQueryDAO {
}
}
if (LogState.ERROR.equals(state)) {
sql.append(" and ").append(AbstractLogRecord.IS_ERROR).append(" = ?");
parameters.add(BooleanUtils.booleanToValue(true));
} else if (LogState.SUCCESS.equals(state)) {
sql.append(" and ").append(AbstractLogRecord.IS_ERROR).append(" = ?");
parameters.add(BooleanUtils.booleanToValue(false));
}
if (CollectionUtils.isNotEmpty(tags)) {
for (final Tag tag : tags) {
final int foundIdx = searchableTagKeys.indexOf(tag.getKey());
......@@ -199,7 +187,6 @@ public class H2LogQueryDAO implements ILogQueryDAO {
log.setEndpointName(resultSet.getString(ENDPOINT_NAME));
log.setTraceId(resultSet.getString(TRACE_ID));
log.setTimestamp(resultSet.getString(TIMESTAMP));
log.setError(BooleanUtils.valueToBoolean(resultSet.getInt(IS_ERROR)));
log.setContentType(ContentType.instanceOf(resultSet.getInt(CONTENT_TYPE)));
log.setContent(resultSet.getString(CONTENT));
String dataBinaryBase64 = resultSet.getString(TAGS_RAW_DATA);
......
......@@ -27,7 +27,6 @@ import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.IS_ERROR;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TAGS_RAW_DATA;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.CONTENT;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.CONTENT_TYPE;
......@@ -61,7 +60,6 @@ public class H2LogRecordBuilder extends AbstractSearchTagBuilder<Record> {
record.setTraceId((String) dbMap.get(TRACE_ID));
record.setTraceSegmentId((String) dbMap.get(TRACE_SEGMENT_ID));
record.setSpanId(((Number) dbMap.get(SPAN_ID)).intValue());
record.setIsError(((Number) dbMap.get(IS_ERROR)).intValue());
record.setContentType(((Number) dbMap.get(CONTENT_TYPE)).intValue());
record.setContent((String) dbMap.get(CONTENT));
record.setTimestamp(((Number) dbMap.get(TIMESTAMP)).longValue());
......@@ -87,7 +85,6 @@ public class H2LogRecordBuilder extends AbstractSearchTagBuilder<Record> {
map.put(TRACE_ID, storageData.getTraceId());
map.put(TRACE_SEGMENT_ID, storageData.getTraceSegmentId());
map.put(SPAN_ID, storageData.getSpanId());
map.put(IS_ERROR, storageData.getIsError());
map.put(TIME_BUCKET, storageData.getTimeBucket());
map.put(CONTENT_TYPE, storageData.getContentType());
map.put(CONTENT, storageData.getContent());
......
......@@ -396,15 +396,12 @@ public class SimpleQueryClient {
final String queryString = Resources.readLines(queryFileUrl, StandardCharsets.UTF_8)
.stream().filter(it -> !it.startsWith("#"))
.collect(Collectors.joining())
.replace("{state}", query.state())
.replace("{serviceId}", query.serviceId())
.replace("{endpointId}", query.endpointId())
.replace("{endpointName}", query.endpointName())
.replace("{start}", query.start())
.replace("{end}", query.end())
.replace("{step}", query.step())
.replace("{tagKey}", query.tagKey())
.replace("{tagValue}", query.tagValue())
.replace("{pageNum}", query.pageNum())
.replace("{pageSize}", query.pageSize())
.replace("{needTotal}", query.needTotal())
......
......@@ -34,7 +34,6 @@ public class Log {
private String endpointId;
private String traceId;
private String timestamp;
private boolean isError;
private String contentType;
private String content;
private List<KeyValue> tags;
......
......@@ -17,7 +17,6 @@
package org.apache.skywalking.e2e.log;
import com.google.common.base.Strings;
import java.util.List;
import lombok.EqualsAndHashCode;
import lombok.Getter;
......@@ -43,7 +42,6 @@ public class LogMatcher extends AbstractMatcher<Log> {
private String endpointId;
private String traceId;
private String timestamp;
private String isError;
private String contentType;
private String content;
private List<KeyValueMatcher> tags;
......@@ -74,9 +72,6 @@ public class LogMatcher extends AbstractMatcher<Log> {
if (nonNull(getTimestamp())) {
doVerify(getTimestamp(), log.getTimestamp());
}
if (nonNull(getIsError())) {
doVerify(getIsError(), Strings.nullToEmpty(String.valueOf(log.isError())));
}
if (nonNull(getContentType())) {
doVerify(getContentType(), log.getContentType());
}
......
......@@ -21,27 +21,15 @@ import org.apache.skywalking.e2e.AbstractQuery;
public class LogsQuery extends AbstractQuery<LogsQuery> {
private String state = "ALL";
private String serviceId;
private String endpointId = "";
private String endpointName = "";
private String tagKey;
private String tagValue;
private String pageNum = "1";
private String pageSize = "15";
private String needTotal = "true";
private String keywordsOfContent = "";
private String excludingKeywordsOfContent = "";
public String state() {
return state;
}
public LogsQuery state(String state) {
this.state = state;
return this;
}
public String serviceId() {
return serviceId;
}
......@@ -69,20 +57,6 @@ public class LogsQuery extends AbstractQuery<LogsQuery> {
return this;
}
public String tagKey() {
return tagKey;
}
public String tagValue() {
return tagValue;
}
public LogsQuery tag(String key, String value) {
this.tagKey = key;
this.tagValue = value;
return this;
}
public String pageNum() {
return pageNum;
}
......
......@@ -25,7 +25,6 @@
endpointId,
traceId,
timestamp,
isError,
contentType,
content,
tags{
......@@ -36,7 +35,6 @@
}",
"variables": {
"condition": {
"state": "{state}",
"serviceId": "{serviceId}",
"endpointId": "{endpointId}",
"endpointName": "{endpointName}",
......@@ -51,12 +49,6 @@
"excludingKeywordsOfContent": [
{excludingKeywordsOfContent}
],
"tags": [
{
"key": "{tagKey}",
"value": "{tagValue}"
}
],
"paging": {
"pageNum": {pageNum},
"pageSize": {pageSize},
......@@ -64,4 +56,4 @@
}
}
}
}
\ No newline at end of file
}
......@@ -52,7 +52,6 @@ public class TestLogsMatcher {
.setEndpointId("ZTJl.1_L3RyYWZmaWM=")
.setTraceId("ac81b308-0d66-4c69-a7af-a023a536bd3e")
.setTimestamp("1609665785987")
.setError(false)
.setContentType("TEXT")
.setContent("log")
.setTags(
......@@ -65,7 +64,6 @@ public class TestLogsMatcher {
.setEndpointId("ZTJl.1_L3RyYWZmaWM=")
.setTraceId("ac81b308-0d66-4c69-a7af-a023a536bd3e")
.setTimestamp("1609665785987")
.setError(false)
.setContentType("TEXT")
.setContent("log")
.setTags(
......
......@@ -22,9 +22,8 @@ logs:
endpointId: not null
traceId: "ac81b308-0d66-4c69-a7af-a023a536bd3e"
timestamp: not null
isError: false
contentType: TEXT
content: log
tags:
- key: not null
value: not null
\ No newline at end of file
value: not null
......@@ -96,7 +96,6 @@ public class KafkaLogE2E extends SkyWalkingTestAdapter {
@RetryableTest
public void verifyLog() throws Exception {
LogsQuery logsQuery = new LogsQuery().serviceId("WW91cl9BcHBsaWNhdGlvbk5hbWU=.1")
.tag("logger", "org.apache.skywalking.e2e.controller.LogController")
.start(startTime)
.end(Times.now());
if (graphql.supportQueryLogsByKeywords()) {
......
......@@ -91,7 +91,6 @@ public class LogE2E extends SkyWalkingTestAdapter {
@RetryableTest
public void verifyLog() throws Exception {
LogsQuery logsQuery = new LogsQuery().serviceId("WW91cl9BcHBsaWNhdGlvbk5hbWU=.1")
.tag("logger", "org.apache.skywalking.e2e.controller.LogController")
.start(startTime)
.end(Times.now());
if (graphql.supportQueryLogsByKeywords()) {
......
......@@ -20,9 +20,8 @@ logs:
serviceInstanceId: not null
traceId: not null
timestamp: not null
isError: false
contentType: TEXT
content: not null
tags:
- key: level
value: INFO
\ No newline at end of file
value: INFO