未验证 提交 15452025 编写于 作者: Z zhang-wei 提交者: GitHub

Log collecting and query implementation (#6120)

上级 9dcad4a0
......@@ -169,3 +169,10 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: plugins-test.3.yaml
- uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738
name: Cancel Outdated Builds e2e.log.yaml
with:
cancelMode: allDuplicates
token: ${{ secrets.GITHUB_TOKEN }}
sourceRunId: ${{ github.event.workflow_run.id }}
workflowFileName: e2e.log.yaml
\ No newline at end of file
......@@ -116,3 +116,31 @@ jobs:
with:
name: logs
path: logs
log:
name: Kafka(log)
runs-on: ubuntu-latest
timeout-minutes: 90
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Compile and Build
run: make docker
- name: Copy dist package
run: cp -R dist test/e2e/
- name: Kafka Log
run: ./mvnw --batch-mode -f test/e2e/pom.xml -am -DfailIfNoTests=false verify -Dit.test=org.apache.skywalking.e2e.kafka.KafkaLogE2E
- name: Report Coverage
run: bash -x tools/coverage/report.sh
- uses: actions/upload-artifact@v1
if: failure()
with:
name: logs
path: logs
\ No newline at end of file
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: E2E
on:
pull_request:
paths:
- '**'
- '!**.md'
schedule:
- cron: '0 18 * * *'
env:
SKIP_TEST: true
jobs:
Log:
name: Log
runs-on: ubuntu-latest
strategy:
matrix:
storage: ['h2', 'mysql', 'es6', 'es7', 'influxdb']
env:
SW_STORAGE: ${{ matrix.storage }}
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Compile and Build
run: make docker && ES_VERSION=es7 TAG=latest-es7 make docker
- name: Copy dist package
run: cp -R dist test/e2e/
- name: Log Receiver ${{ matrix.storage }}
run: ./mvnw --batch-mode -f test/e2e/pom.xml -am -DfailIfNoTests=false verify -Dit.test=org.apache.skywalking.e2e.log.LogE2E
- uses: actions/upload-artifact@v1
if: failure()
with:
name: logs
path: logs
......@@ -61,6 +61,8 @@ Release Notes.
* Fix `timeBucket` not taking effect in EqualsAndHashCode annotation of some relationship metrics.
* Fix `SharingServerConfig`'s property is not correct in the `application.yml`, contextPath -> restConnextPath.
* Istio control plane: remove redundant metrics and polish panel layout.
* Fix receiver analysis error count metrics
* Log collecting and query implementation
#### UI
* Fix un-removed tags in trace query.
......
Subproject commit 8c10f757a9088fef06d6d8b986b8a0650b7fa106
Subproject commit ea906c1ace2b5eaf19b1c36ead0fd6e1489feaeb
......@@ -26,7 +26,7 @@ public class DistributedTraceIds {
private LinkedList<DistributedTraceId> relatedGlobalTraces;
public DistributedTraceIds() {
relatedGlobalTraces = new LinkedList<DistributedTraceId>();
relatedGlobalTraces = new LinkedList<>();
}
public List<DistributedTraceId> getRelatedGlobalTraces() {
......
......@@ -32,7 +32,7 @@
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafk-clients.version}</version>
<version>${kafka-clients.version}</version>
</dependency>
</dependencies>
......
......@@ -41,7 +41,7 @@
<ant-contrib.version>1.0b3</ant-contrib.version>
<ant-nodeps.version>1.8.1</ant-nodeps.version>
<kafk-clients.version>2.4.1</kafk-clients.version>
<kafka-clients.version>2.4.1</kafka-clients.version>
<spring-kafka-test.version>2.4.6.RELEASE</spring-kafka-test.version>
</properties>
......
# Log Data Protocol
Report log data via protocol.
[gRPC service define](https://github.com/apache/skywalking-data-collect-protocol/blob/master/logging/Logging.proto)
......@@ -29,6 +29,8 @@ SkyWalking javaagent begins to support this since 8.0.0.
[SkyWalking Trace Data Protocol v3](Trace-Data-Protocol-v3.md) defines the communication way and format between agent and backend.
[SkyWalking Log Data Protocol](Log-Data-Protocol.md) defines the communication way and format between agent and backend.
### Browser probe protocol
The browser probe, such as [skywalking-client-js](https://github.com/apache/skywalking-client-js) could use this protocol to send data to the backend. This service is provided by gRPC.
......
......@@ -16,6 +16,7 @@ We have following receivers, and `default` implementors are provided in our Apac
1. **receiver-otel**. See [details](#opentelemetry-receiver).
1. **receiver-meter**. See [details](backend-meter.md).
1. **receiver-browser**. gRPC services to accept browser performance data and error log.
1. **receiver-log**. gRPC services to accept log data.
The sample settings of these receivers should be already in default `application.yml`, and also list here
```yaml
......@@ -61,6 +62,10 @@ receiver-browser:
selector: ${SW_RECEIVER_BROWSER:default}
default:
sampleRate: ${SW_RECEIVER_BROWSER_SAMPLE_RATE:10000}
receiver-log:
selector: ${SW_RECEIVER_LOG:default}
default:
```
## gRPC/HTTP server for receiver
......
......@@ -30,6 +30,7 @@ core|default|role|Option values, `Mixed/Receiver/Aggregator`. **Receiver** mode
| - | - | instanceNameMaxLength| Max length limitation of service instance name. The max length of service + instance names should be less than 200.|SW_INSTANCE_NAME_MAX_LENGTH|70|
| - | - | endpointNameMaxLength| Max length limitation of endpoint name. The max length of service + endpoint names should be less than 240.|SW_ENDPOINT_NAME_MAX_LENGTH|150|
| - | - | searchableTracesTags | Define the set of span tag keys, which should be searchable through the GraphQL. Multiple values should be separated through the comma. | SW_SEARCHABLE_TAG_KEYS | http.method,status_code,db.type,db.instance,mq.queue,mq.topic,mq.broker|
| - | - | searchableLogsTags | Define the set of log tag keys, which should be searchable through the GraphQL. Multiple values should be separated through the comma. | SW_SEARCHABLE_LOGS_TAG_KEYS | - |
| - | - | gRPCThreadPoolSize|Pool size of gRPC server| SW_CORE_GRPC_THREAD_POOL_SIZE | CPU core * 4|
| - | - | gRPCThreadPoolQueueSize| The queue size of gRPC server| SW_CORE_GRPC_POOL_QUEUE_SIZE | 10000|
| - | - | maxConcurrentCallsPerConnection | The maximum number of concurrent calls permitted for each incoming connection. Defaults to no limit. | SW_CORE_GRPC_MAX_CONCURRENT_CALL | - |
......@@ -204,6 +205,7 @@ core|default|role|Option values, `Mixed/Receiver/Aggregator`. **Receiver** mode
| - | - | createTopicIfNotExist | If true, create the Kafka topic when it does not exist. | - | true |
| - | - | partitions | The number of partitions for the topic being created. | SW_KAFKA_FETCHER_PARTITIONS | 3 |
| - | - | enableMeterSystem | To enable to fetch and handle [Meter System](backend-meter.md) data. | SW_KAFKA_FETCHER_ENABLE_METER_SYSTEM | false |
| - | - | enableLog | To enable to fetch and handle log data. | SW_KAFKA_FETCHER_ENABLE_LOG | false |
| - | - | replicationFactor | The replication factor for each partition in the topic being created. | SW_KAFKA_FETCHER_PARTITIONS_FACTOR | 2 |
| - | - | kafkaHandlerThreadPoolSize | Pool size of kafka message handler executor. | SW_KAFKA_HANDLER_THREAD_POOL_SIZE | CPU core * 2 |
| - | - | kafkaHandlerThreadPoolQueueSize | The queue size of kafka message handler executor. | SW_KAFKA_HANDLER_THREAD_POOL_QUEUE_SIZE | 10000 |
......@@ -212,12 +214,13 @@ core|default|role|Option values, `Mixed/Receiver/Aggregator`. **Receiver** mode
| - | - | topicNameOfProfiling | Specifying Kafka topic name for Profiling data. | - | skywalking-profilings |
| - | - | topicNameOfTracingSegments | Specifying Kafka topic name for Tracing data. | - | skywalking-segments |
| - | - | topicNameOfManagements | Specifying Kafka topic name for service instance reporting and registering. | - | skywalking-managements |
| - | - | topicNameOfLogs | Specifying Kafka topic name for log data. | - | skywalking-logs |
| receiver-browser | default | Read [receiver doc](backend-receivers.md) for more details | - | - | - |
| - | - | sampleRate | Sampling rate for receiving trace. The precision is 1/10000. 10000 means 100% sample in default. | SW_RECEIVER_BROWSER_SAMPLE_RATE | 10000 |
| query | graphql | - | GraphQL query implementation | - |
| - | - | path | Root path of GraphQL query and mutation. | SW_QUERY_GRAPHQL_PATH | /graphql|
| alarm | default | - | Read [alarm doc](backend-alarm.md) for more details. | - |
| telemetry | - | - | Read [telemetry doc](backend-telemetry.md) for more details. | - |
| telemetry | - | - | Read [telemetry doc](backend-telemetry.md) for more details. | - |
| - | none| - | No op implementation | - |
| - | prometheus| host | Binding host for Prometheus server fetching data| SW_TELEMETRY_PROMETHEUS_HOST|0.0.0.0|
| - | - | port| Binding port for Prometheus server fetching data|SW_TELEMETRY_PROMETHEUS_PORT|1234|
......@@ -239,7 +242,7 @@ core|default|role|Option values, `Mixed/Receiver/Aggregator`. **Receiver** mode
| - | etcd| clusterName| Service name used for SkyWalking cluster. |SW_CONFIG_ETCD_CLUSTER_NAME|default|
| - | - | serverAddr| hosts and ports used of etcd cluster.| SW_CONFIG_ETCD_SERVER_ADDR|localhost:2379|
| - | - | group |Additional prefix of the configuration key| SW_CONFIG_ETCD_GROUP | skywalking|
| - | - | period | The period of data sync. Unit is second. | SW_CONFIG_ZK_PERIOD | 60
| - | - | period | The period of data sync. Unit is second. | SW_CONFIG_ZK_PERIOD | 60
| - | consul | hostPort| hosts and ports used of Consul cluster.| SW_CONFIG_CONSUL_HOST_AND_PORTS|localhost:8500|
| - | - | aclToken| ALC Token of Consul. Empty string means `without ALC token`.| SW_CONFIG_CONSUL_ACL_TOKEN | - |
| - | - | period | The period of data sync. Unit is second. | SW_CONFIG_CONSUL_PERIOD | 60 |
......
......@@ -18,6 +18,7 @@
package org.apache.skywalking.oap.server.analyzer.provider;
import java.util.List;
import lombok.Getter;
import org.apache.skywalking.oap.server.analyzer.module.AnalyzerModule;
import org.apache.skywalking.oap.server.analyzer.provider.meter.config.MeterConfig;
......@@ -46,8 +47,6 @@ import org.apache.skywalking.oap.server.library.module.ModuleStartException;
import org.apache.skywalking.oap.server.library.module.ServiceNotProvidedException;
import org.apache.skywalking.oap.server.telemetry.TelemetryModule;
import java.util.List;
public class AnalyzerModuleProvider extends ModuleProvider {
@Getter
private final AnalyzerModuleConfig moduleConfig;
......@@ -102,7 +101,8 @@ public class AnalyzerModuleProvider extends ModuleProvider {
segmentParserService = new SegmentParserServiceImpl(getManager(), moduleConfig);
this.registerServiceImplementation(ISegmentParserService.class, segmentParserService);
meterConfigs = MeterConfigs.loadConfig(moduleConfig.getConfigPath(), moduleConfig.meterAnalyzerActiveFileNames());
meterConfigs = MeterConfigs.loadConfig(
moduleConfig.getConfigPath(), moduleConfig.meterAnalyzerActiveFileNames());
processService = new MeterProcessService(getManager());
this.registerServiceImplementation(IMeterProcessService.class, processService);
}
......
......@@ -25,7 +25,6 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.apm.network.language.agent.v3.SegmentObject;
import org.apache.skywalking.apm.network.language.agent.v3.SpanObject;
import org.apache.skywalking.apm.network.language.agent.v3.SpanType;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import org.apache.skywalking.oap.server.analyzer.provider.AnalyzerModuleConfig;
import org.apache.skywalking.oap.server.analyzer.provider.trace.parser.listener.AnalysisListener;
import org.apache.skywalking.oap.server.analyzer.provider.trace.parser.listener.EntryAnalysisListener;
......@@ -33,6 +32,7 @@ import org.apache.skywalking.oap.server.analyzer.provider.trace.parser.listener.
import org.apache.skywalking.oap.server.analyzer.provider.trace.parser.listener.FirstAnalysisListener;
import org.apache.skywalking.oap.server.analyzer.provider.trace.parser.listener.LocalAnalysisListener;
import org.apache.skywalking.oap.server.analyzer.provider.trace.parser.listener.SegmentListener;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
@Slf4j
@RequiredArgsConstructor
......@@ -49,30 +49,26 @@ public class TraceAnalyzer {
createSpanListeners();
try {
notifySegmentListener(segmentObject);
segmentObject.getSpansList().forEach(spanObject -> {
if (spanObject.getSpanId() == 0) {
notifyFirstListener(spanObject, segmentObject);
}
if (SpanType.Exit.equals(spanObject.getSpanType())) {
notifyExitListener(spanObject, segmentObject);
} else if (SpanType.Entry.equals(spanObject.getSpanType())) {
notifyEntryListener(spanObject, segmentObject);
} else if (SpanType.Local.equals(spanObject.getSpanType())) {
notifyLocalListener(spanObject, segmentObject);
} else {
log.error("span type value was unexpected, span type name: {}", spanObject.getSpanType()
.name());
}
});
notifyListenerToBuild();
} catch (Throwable e) {
log.error(e.getMessage(), e);
}
notifySegmentListener(segmentObject);
segmentObject.getSpansList().forEach(spanObject -> {
if (spanObject.getSpanId() == 0) {
notifyFirstListener(spanObject, segmentObject);
}
if (SpanType.Exit.equals(spanObject.getSpanType())) {
notifyExitListener(spanObject, segmentObject);
} else if (SpanType.Entry.equals(spanObject.getSpanType())) {
notifyEntryListener(spanObject, segmentObject);
} else if (SpanType.Local.equals(spanObject.getSpanType())) {
notifyLocalListener(spanObject, segmentObject);
} else {
log.error("span type value was unexpected, span type name: {}", spanObject.getSpanType()
.name());
}
});
notifyListenerToBuild();
}
private void notifyListenerToBuild() {
......
......@@ -35,7 +35,7 @@ import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.NodeType;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.config.ConfigService;
import org.apache.skywalking.oap.server.core.config.NamingControl;
import org.apache.skywalking.oap.server.core.source.Segment;
......@@ -155,10 +155,10 @@ public class SegmentAnalysisListener implements FirstAnalysisListener, EntryAnal
}
private void appendSearchableTags(SpanObject span) {
HashSet<SpanTag> segmentTags = new HashSet<>();
HashSet<Tag> segmentTags = new HashSet<>();
span.getTagsList().forEach(tag -> {
if (searchableTagKeys.contains(tag.getKey())) {
final SpanTag spanTag = new SpanTag(tag.getKey(), tag.getValue());
final Tag spanTag = new Tag(tag.getKey(), tag.getValue());
if (!segmentTags.contains(spanTag)) {
segmentTags.add(spanTag);
}
......
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>analyzer</artifactId>
<groupId>org.apache.skywalking</groupId>
<version>8.4.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>log-analyzer</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>server-core</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</project>
\ No newline at end of file
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.module;
import org.apache.skywalking.oap.log.analyzer.provider.log.ILogAnalyzerService;
import org.apache.skywalking.oap.server.library.module.ModuleDefine;
/**
 * Module definition of the log analyzer. Exposes {@link ILogAnalyzerService}
 * so that receivers (gRPC, Kafka, ...) can feed collected log data into the
 * analysis pipeline.
 */
public class LogAnalyzerModule extends ModuleDefine {
    public static final String NAME = "log-analyzer";

    public LogAnalyzerModule() {
        super(NAME);
    }

    @Override
    public Class[] services() {
        return new Class[] {ILogAnalyzerService.class};
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.provider;
import org.apache.skywalking.oap.server.library.module.ModuleConfig;
/**
 * Configuration bean of the log analyzer module. It currently declares no
 * tunable options; the class exists so the module framework can bind the
 * (empty) {@code log-analyzer} section of {@code application.yml}.
 */
public class LogAnalyzerModuleConfig extends ModuleConfig {
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.provider;
import org.apache.skywalking.oap.log.analyzer.module.LogAnalyzerModule;
import org.apache.skywalking.oap.log.analyzer.provider.log.ILogAnalyzerService;
import org.apache.skywalking.oap.log.analyzer.provider.log.LogAnalyzerServiceImpl;
import org.apache.skywalking.oap.log.analyzer.provider.log.listener.RecordAnalysisListener;
import org.apache.skywalking.oap.log.analyzer.provider.log.listener.TrafficAnalysisListener;
import org.apache.skywalking.oap.server.configuration.api.ConfigurationModule;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.library.module.ModuleConfig;
import org.apache.skywalking.oap.server.library.module.ModuleDefine;
import org.apache.skywalking.oap.server.library.module.ModuleProvider;
import org.apache.skywalking.oap.server.library.module.ModuleStartException;
import org.apache.skywalking.oap.server.library.module.ServiceNotProvidedException;
/**
 * Default provider of {@link LogAnalyzerModule}: creates the analyzer service,
 * registers it with the module framework, and installs the built-in analysis
 * listener factories at start-up.
 */
public class LogAnalyzerModuleProvider extends ModuleProvider {
    private final LogAnalyzerModuleConfig config = new LogAnalyzerModuleConfig();

    private LogAnalyzerServiceImpl analyzerService;

    @Override
    public String name() {
        return "default";
    }

    @Override
    public Class<? extends ModuleDefine> module() {
        return LogAnalyzerModule.class;
    }

    @Override
    public ModuleConfig createConfigBeanIfAbsent() {
        return config;
    }

    @Override
    public void prepare() throws ServiceNotProvidedException, ModuleStartException {
        // The same instance serves both as the public analyzer service and as
        // the internal registry of listener factories.
        analyzerService = new LogAnalyzerServiceImpl(getManager(), config);
        this.registerServiceImplementation(ILogAnalyzerService.class, analyzerService);
    }

    @Override
    public void start() throws ServiceNotProvidedException, ModuleStartException {
        // Record listener persists log records; traffic listener maintains
        // service/instance/endpoint traffic derived from logs.
        analyzerService.addListenerFactory(new RecordAnalysisListener.Factory(getManager(), config));
        analyzerService.addListenerFactory(new TrafficAnalysisListener.Factory(getManager(), config));
    }

    @Override
    public void notifyAfterCompleted() throws ServiceNotProvidedException, ModuleStartException {
    }

    @Override
    public String[] requiredModules() {
        return new String[] {
            CoreModule.NAME,
            ConfigurationModule.NAME
        };
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.provider.log;
import java.util.List;
import org.apache.skywalking.oap.log.analyzer.provider.log.listener.LogAnalysisListenerFactory;
import org.apache.skywalking.oap.server.library.module.Service;
/**
 * Registry of {@link LogAnalysisListenerFactory} instances. Factories are
 * registered during module start-up and later used to create a fresh set of
 * listeners for each analyzed piece of log data.
 */
public interface ILogAnalysisListenerFactoryManager extends Service {
    /**
     * Register a listener factory.
     */
    void addListenerFactory(LogAnalysisListenerFactory factory);

    /**
     * @return all listener factories registered so far.
     */
    List<LogAnalysisListenerFactory> getLogAnalysisListenerFactories();
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.provider.log;
import org.apache.skywalking.apm.network.logging.v3.LogData;
import org.apache.skywalking.oap.server.library.module.Service;
/**
 * Analyze the collected log data. Entry service of the log analyzer module,
 * used by the receivers to push log data into the analysis pipeline.
 */
public interface ILogAnalyzerService extends Service {
    /**
     * Analyze a mutable log-data builder. Implementations may fill in missing
     * fields (e.g. the timestamp) before dispatching to the listeners.
     */
    void doAnalysis(LogData.Builder log);

    /**
     * Convenience overload for an immutable {@link LogData}; delegates to
     * {@link #doAnalysis(LogData.Builder)} via {@code toBuilder()}.
     */
    default void doAnalysis(LogData logData) {
        doAnalysis(logData.toBuilder());
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.provider.log;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.apm.network.logging.v3.LogData;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.log.analyzer.provider.LogAnalyzerModuleConfig;
import org.apache.skywalking.oap.log.analyzer.provider.log.listener.LogAnalysisListener;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
/**
 * Analyze the collected log data, is the entry point for log analysis.
 * For every log entry a fresh set of listeners is created from the registered
 * factories; each listener first parses the data, then builds its result.
 */
@Slf4j
@RequiredArgsConstructor
public class LogAnalyzer {
    private final ModuleManager moduleManager;

    private final LogAnalyzerModuleConfig moduleConfig;

    private final ILogAnalysisListenerFactoryManager factoryManager;

    // Listeners live only for the duration of a single doAnalysis() call.
    private final List<LogAnalysisListener> listeners = new ArrayList<>();

    public void doAnalysis(LogData.Builder builder) {
        if (StringUtil.isEmpty(builder.getService())) {
            // A log without a service name cannot be attributed to anything; drop it.
            log.debug("The log is ignored because the Service name is empty");
            return;
        }

        createListeners();

        if (builder.getTimestamp() == 0) {
            // No probe-side timestamp: use the time the OAP server received the log.
            builder.setTimestamp(System.currentTimeMillis());
        }

        notifyListener(builder);
        notifyListenerToBuild();
    }

    private void notifyListener(LogData.Builder builder) {
        for (final LogAnalysisListener listener : listeners) {
            listener.parse(builder);
        }
    }

    private void notifyListenerToBuild() {
        for (final LogAnalysisListener listener : listeners) {
            listener.build();
        }
    }

    private void createListeners() {
        for (final LogAnalysisListenerFactory factory : factoryManager.getLogAnalysisListenerFactories()) {
            listeners.add(factory.create(moduleManager, moduleConfig));
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.provider.log;
import java.util.ArrayList;
import java.util.List;
import lombok.RequiredArgsConstructor;
import org.apache.skywalking.apm.network.logging.v3.LogData;
import org.apache.skywalking.oap.log.analyzer.provider.LogAnalyzerModuleConfig;
import org.apache.skywalking.oap.log.analyzer.provider.log.listener.LogAnalysisListenerFactory;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
/**
 * Default implementation of {@link ILogAnalyzerService}. It also serves as the
 * registry of listener factories; a brand-new {@link LogAnalyzer} is created
 * for every incoming log so per-log listener state is never shared.
 */
@RequiredArgsConstructor
public class LogAnalyzerServiceImpl implements ILogAnalyzerService, ILogAnalysisListenerFactoryManager {
    private final ModuleManager moduleManager;

    private final LogAnalyzerModuleConfig moduleConfig;

    private final List<LogAnalysisListenerFactory> factories = new ArrayList<>();

    @Override
    public void doAnalysis(final LogData.Builder log) {
        new LogAnalyzer(moduleManager, moduleConfig, this).doAnalysis(log);
    }

    @Override
    public void addListenerFactory(final LogAnalysisListenerFactory factory) {
        factories.add(factory);
    }

    @Override
    public List<LogAnalysisListenerFactory> getLogAnalysisListenerFactories() {
        return factories;
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.provider.log.listener;
import org.apache.skywalking.apm.network.logging.v3.LogData;
/**
 * LogAnalysisListener represents the callback when OAP does the log data analysis.
 * A new listener instance is created per analyzed log entry (via its factory),
 * so implementations may keep mutable state between {@link #parse} and
 * {@link #build}.
 */
public interface LogAnalysisListener {
    /**
     * The last step of the analysis process. Typically, the implementations forward the analysis results to the source
     * receiver.
     */
    void build();

    /**
     * Parse the raw data from the probe.
     */
    void parse(LogData.Builder logData);
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.provider.log.listener;
import org.apache.skywalking.oap.log.analyzer.provider.LogAnalyzerModuleConfig;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
/**
* LogAnalysisListenerFactory implementation creates the listener instance when required.
* Every LogAnalysisListener could have its own creation factory.
*/
public interface LogAnalysisListenerFactory {
/**
 * Create a new {@link LogAnalysisListener} instance.
 *
 * @param moduleManager the OAP module manager, through which implementations may look up required services
 * @param moduleConfig  the log analyzer module configuration
 * @return a newly created listener, ready to {@code parse} incoming log data
 */
LogAnalysisListener create(ModuleManager moduleManager, LogAnalyzerModuleConfig moduleConfig);
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.provider.log.listener;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.apache.skywalking.apm.network.logging.v3.LogData;
import org.apache.skywalking.apm.network.logging.v3.LogDataBody;
import org.apache.skywalking.apm.network.logging.v3.TraceContext;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.log.analyzer.provider.LogAnalyzerModuleConfig;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.NodeType;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.config.ConfigService;
import org.apache.skywalking.oap.server.core.config.NamingControl;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.source.Log;
import org.apache.skywalking.oap.server.core.source.SourceReceiver;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
/**
* RecordAnalysisListener forwards the log data to the persistence layer with the query required conditions.
*/
@RequiredArgsConstructor
public class RecordAnalysisListener implements LogAnalysisListener {
    private final SourceReceiver sourceReceiver;
    private final NamingControl namingControl;
    /**
     * Tag keys configured as searchable; only tags with these keys are indexed for query.
     */
    private final List<String> searchableTagKeys;
    private final Log log = new Log();

    /**
     * Forward the analyzed {@link Log} source to the receiver for persistence.
     */
    @Override
    public void build() {
        sourceReceiver.receive(log);
    }

    /**
     * Copy the raw probe data into the {@link Log} source, building service/instance/endpoint IDs
     * according to the naming rules of the core module.
     *
     * @param logData raw log data reported by the probe
     */
    @Override
    public void parse(final LogData.Builder logData) {
        LogDataBody body = logData.getBody();
        // Random ID: a log record has no natural unique identity of its own.
        log.setUniqueId(UUID.randomUUID().toString().replace("-", ""));
        // timestamp
        log.setTimestamp(logData.getTimestamp());
        log.setTimeBucket(TimeBucket.getRecordTimeBucket(logData.getTimestamp()));
        // service
        String serviceName = namingControl.formatServiceName(logData.getService());
        String serviceId = IDManager.ServiceID.buildId(serviceName, NodeType.Normal);
        log.setServiceId(serviceId);
        // service instance
        if (StringUtil.isNotEmpty(logData.getServiceInstance())) {
            log.setServiceInstanceId(IDManager.ServiceInstanceID.buildId(
                serviceId,
                namingControl.formatInstanceName(logData.getServiceInstance())
            ));
        }
        // endpoint
        if (StringUtil.isNotEmpty(logData.getEndpoint())) {
            String endpointName = namingControl.formatEndpointName(serviceName, logData.getEndpoint());
            log.setEndpointId(IDManager.EndpointID.buildId(serviceId, endpointName));
            log.setEndpointName(endpointName);
        }
        // trace context, optional: only set when the probe reported one
        TraceContext traceContext = logData.getTraceContext();
        if (StringUtil.isNotEmpty(traceContext.getTraceId())) {
            log.setTraceId(traceContext.getTraceId());
        }
        if (StringUtil.isNotEmpty(traceContext.getTraceSegmentId())) {
            log.setTraceSegmentId(traceContext.getTraceSegmentId());
            log.setSpanId(traceContext.getSpanId());
        }
        // content: at most one of text/yaml/json is expected to be present
        if (body.hasText()) {
            log.setContentType(ContentType.TEXT);
            log.setContent(body.getText().getText());
        } else if (body.hasYaml()) {
            log.setContentType(ContentType.YAML);
            log.setContent(body.getYaml().getYaml());
        } else if (body.hasJson()) {
            log.setContentType(ContentType.JSON);
            log.setContent(body.getJson().getJson());
        }
        // Keep the raw tag bytes so storages can rebuild the full tag structures when needed.
        if (logData.getTags().getDataCount() > 0) {
            log.setTagsRawData(logData.getTags().toByteArray());
        }
        log.getTags().addAll(appendSearchableTags(logData));
    }

    /**
     * Collect the tags whose keys are configured as searchable, de-duplicated.
     */
    private Collection<Tag> appendSearchableTags(LogData.Builder logData) {
        HashSet<Tag> logTags = new HashSet<>();
        logData.getTags().getDataList().forEach(tag -> {
            if (searchableTagKeys.contains(tag.getKey())) {
                // HashSet#add is a no-op for duplicates; no explicit contains() check is required.
                logTags.add(new Tag(tag.getKey(), tag.getValue()));
            }
        });
        return logTags;
    }

    /**
     * Factory of {@link RecordAnalysisListener}; resolves the required core-module services once
     * at construction and shares them across all created listeners.
     */
    public static class Factory implements LogAnalysisListenerFactory {
        private final SourceReceiver sourceReceiver;
        private final NamingControl namingControl;
        private final List<String> searchableTagKeys;

        public Factory(ModuleManager moduleManager, LogAnalyzerModuleConfig moduleConfig) {
            this.sourceReceiver = moduleManager.find(CoreModule.NAME)
                                               .provider()
                                               .getService(SourceReceiver.class);
            this.namingControl = moduleManager.find(CoreModule.NAME)
                                              .provider()
                                              .getService(NamingControl.class);
            ConfigService configService = moduleManager.find(CoreModule.NAME)
                                                       .provider()
                                                       .getService(ConfigService.class);
            this.searchableTagKeys = Arrays.asList(configService.getSearchableLogsTags().split(Const.COMMA));
        }

        @Override
        public LogAnalysisListener create(final ModuleManager moduleManager,
                                          final LogAnalyzerModuleConfig moduleConfig) {
            return new RecordAnalysisListener(sourceReceiver, namingControl, searchableTagKeys);
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.log.analyzer.provider.log.listener;
import lombok.RequiredArgsConstructor;
import org.apache.skywalking.apm.network.logging.v3.LogData;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.log.analyzer.provider.LogAnalyzerModuleConfig;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.analysis.DownSampling;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.NodeType;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.config.NamingControl;
import org.apache.skywalking.oap.server.core.source.EndpointMeta;
import org.apache.skywalking.oap.server.core.source.ServiceInstanceUpdate;
import org.apache.skywalking.oap.server.core.source.ServiceMeta;
import org.apache.skywalking.oap.server.core.source.SourceReceiver;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import static java.util.Objects.nonNull;
/**
* Generate service, service instance and endpoint traffic by log data.
*/
@RequiredArgsConstructor
public class TrafficAnalysisListener implements LogAnalysisListener {
    private final SourceReceiver sourceReceiver;
    private final NamingControl namingControl;
    // Metadata sources built by parse(); any of them may stay null when the log data
    // doesn't carry the corresponding field.
    private ServiceMeta serviceMeta;
    private ServiceInstanceUpdate instanceMeta;
    private EndpointMeta endpointMeta;

    /**
     * Forward whichever traffic sources were built during {@link #parse} to the receiver.
     */
    @Override
    public void build() {
        if (nonNull(serviceMeta)) {
            sourceReceiver.receive(serviceMeta);
        }
        if (nonNull(instanceMeta)) {
            sourceReceiver.receive(instanceMeta);
        }
        if (nonNull(endpointMeta)) {
            sourceReceiver.receive(endpointMeta);
        }
    }

    /**
     * Generate service, service instance and endpoint traffic from the log data.
     *
     * @param logData raw log data reported by the probe
     */
    @Override
    public void parse(final LogData.Builder logData) {
        // Traffic is bucketed by receive time, not by the log's own timestamp.
        final long timeBucket = TimeBucket.getTimeBucket(System.currentTimeMillis(), DownSampling.Minute);
        // to service traffic
        String serviceName = namingControl.formatServiceName(logData.getService());
        String serviceId = IDManager.ServiceID.buildId(serviceName, NodeType.Normal);
        serviceMeta = new ServiceMeta();
        // Reuse the already-formatted name rather than formatting the raw service name twice.
        serviceMeta.setName(serviceName);
        serviceMeta.setNodeType(NodeType.Normal);
        serviceMeta.setTimeBucket(timeBucket);
        // to service instance traffic
        if (StringUtil.isNotEmpty(logData.getServiceInstance())) {
            instanceMeta = new ServiceInstanceUpdate();
            instanceMeta.setServiceId(serviceId);
            instanceMeta.setName(namingControl.formatInstanceName(logData.getServiceInstance()));
            instanceMeta.setTimeBucket(timeBucket);
        }
        // to endpoint traffic
        if (StringUtil.isNotEmpty(logData.getEndpoint())) {
            endpointMeta = new EndpointMeta();
            endpointMeta.setServiceName(serviceName);
            endpointMeta.setServiceNodeType(NodeType.Normal);
            endpointMeta.setEndpoint(namingControl.formatEndpointName(serviceName, logData.getEndpoint()));
            endpointMeta.setTimeBucket(timeBucket);
        }
    }

    /**
     * Factory of {@link TrafficAnalysisListener}; resolves core-module services once at construction.
     */
    public static class Factory implements LogAnalysisListenerFactory {
        private final SourceReceiver sourceReceiver;
        private final NamingControl namingControl;

        public Factory(ModuleManager moduleManager, LogAnalyzerModuleConfig moduleConfig) {
            this.sourceReceiver = moduleManager.find(CoreModule.NAME)
                                               .provider()
                                               .getService(SourceReceiver.class);
            this.namingControl = moduleManager.find(CoreModule.NAME)
                                              .provider()
                                              .getService(NamingControl.class);
        }

        @Override
        public LogAnalysisListener create(final ModuleManager moduleManager,
                                          final LogAnalyzerModuleConfig moduleConfig) {
            return new TrafficAnalysisListener(sourceReceiver, namingControl);
        }
    }
}
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
......@@ -12,13 +13,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
FROM adoptopenjdk/openjdk8:alpine
WORKDIR /h2
VOLUME /h2/data
ADD https://repo.maven.apache.org/maven2/com/h2database/h2/1.4.200/h2-1.4.200.jar /h2
CMD ["sh", "-c", "java -cp /h2/*.jar org.h2.tools.Server -tcp -tcpAllowOthers -tcpPort 1521 -ifNotExists -baseDir /h2/data"]
\ No newline at end of file
org.apache.skywalking.oap.log.analyzer.module.LogAnalyzerModule
\ No newline at end of file
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
org.apache.skywalking.oap.log.analyzer.provider.LogAnalyzerModuleProvider
\ No newline at end of file
......@@ -30,6 +30,7 @@
<modules>
<module>agent-analyzer</module>
<module>log-analyzer</module>
<module>meter-analyzer</module>
</modules>
......
......@@ -126,6 +126,11 @@
<artifactId>skywalking-browser-receiver-plugin</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>skywalking-log-recevier-plugin</artifactId>
<version>${project.version}</version>
</dependency>
<!-- receiver module -->
<!-- fetcher module -->
......
......@@ -100,6 +100,8 @@ core:
endpointNameMaxLength: ${SW_ENDPOINT_NAME_MAX_LENGTH:150}
# Define the set of span tag keys, which should be searchable through the GraphQL.
searchableTracesTags: ${SW_SEARCHABLE_TAG_KEYS:http.method,status_code,db.type,db.instance,mq.queue,mq.topic,mq.broker}
# Define the set of log tag keys, which should be searchable through the GraphQL.
searchableLogsTags: ${SW_SEARCHABLE_LOGS_TAG_KEYS:""}
storage:
selector: ${SW_STORAGE:h2}
elasticsearch:
......@@ -209,6 +211,10 @@ agent-analyzer:
slowTraceSegmentThreshold: ${SW_SLOW_TRACE_SEGMENT_THRESHOLD:-1} # Setting this threshold about the latency would make the slow trace segments sampled if they cost more time, even the sampling mechanism activated. The default value is `-1`, which means would not sample slow traces. Unit, millisecond.
meterAnalyzerActiveFiles: ${SW_METER_ANALYZER_ACTIVE_FILES:spring-sleuth.yaml} # Which files could be meter analyzed, files split by ","
log-analyzer:
selector: ${SW_LOG_ANALYZER:default}
default:
receiver-sharing-server:
selector: ${SW_RECEIVER_SHARING_SERVER:default}
default:
......@@ -279,6 +285,7 @@ kafka-fetcher:
partitions: ${SW_KAFKA_FETCHER_PARTITIONS:3}
replicationFactor: ${SW_KAFKA_FETCHER_PARTITIONS_FACTOR:2}
enableMeterSystem: ${SW_KAFKA_FETCHER_ENABLE_METER_SYSTEM:false}
enableLog: ${SW_KAFKA_FETCHER_ENABLE_LOG:false}
isSharding: ${SW_KAFKA_FETCHER_IS_SHARDING:false}
consumePartitions: ${SW_KAFKA_FETCHER_CONSUME_PARTITIONS:""}
kafkaHandlerThreadPoolSize: ${SW_KAFKA_HANDLER_THREAD_POOL_SIZE:-1}
......@@ -318,6 +325,10 @@ receiver-browser:
# The sample rate precision is 1/10000. 10000 means 100% sample in default.
sampleRate: ${SW_RECEIVER_BROWSER_SAMPLE_RATE:10000}
receiver-log:
selector: ${SW_RECEIVER_LOG:default}
default:
query:
selector: ${SW_QUERY:graphql}
graphql:
......
......@@ -125,6 +125,14 @@ public class CoreModuleConfig extends ModuleConfig {
@Setter
@Getter
private String searchableTracesTags = DEFAULT_SEARCHABLE_TAG_KEYS;
/**
* Define the set of log tag keys, which should be searchable through the GraphQL.
*
* @since 8.4.0
*/
@Setter
@Getter
private String searchableLogsTags = "";
public CoreModuleConfig() {
this.downsampling = new ArrayList<>();
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.server.core.analysis.manual.endpoint;
import org.apache.skywalking.oap.server.core.analysis.SourceDispatcher;
import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
import org.apache.skywalking.oap.server.core.source.EndpointMeta;
/**
 * Turns an {@link EndpointMeta} source into an {@link EndpointTraffic} metric
 * and feeds it into the metrics stream processor.
 */
public class EndpointMetaDispatcher implements SourceDispatcher<EndpointMeta> {
    @Override
    public void dispatch(final EndpointMeta source) {
        final EndpointTraffic endpointTraffic = new EndpointTraffic();
        endpointTraffic.setServiceId(source.getServiceId());
        endpointTraffic.setName(source.getEndpoint());
        endpointTraffic.setTimeBucket(source.getTimeBucket());
        MetricsStreamProcessor.getInstance().in(endpointTraffic);
    }
}
......@@ -18,15 +18,20 @@
package org.apache.skywalking.oap.server.core.analysis.manual.log;
import java.util.HashMap;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import lombok.Getter;
import lombok.Setter;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
public abstract class AbstractLogRecord extends Record {
......@@ -35,88 +40,127 @@ public abstract class AbstractLogRecord extends Record {
public static final String ENDPOINT_NAME = "endpoint_name";
public static final String ENDPOINT_ID = "endpoint_id";
public static final String TRACE_ID = "trace_id";
public static final String TRACE_SEGMENT_ID = "trace_segment_id";
public static final String SPAN_ID = "span_id";
public static final String IS_ERROR = "is_error";
public static final String STATUS_CODE = "status_code";
public static final String CONTENT_TYPE = "content_type";
public static final String CONTENT = "content";
public static final String TAGS_RAW_DATA = "tags_raw_data";
public static final String TIMESTAMP = "timestamp";
public static final String TAGS = "tags";
@Setter
@Getter
@Column(columnName = SERVICE_ID)
private int serviceId;
private String serviceId;
@Setter
@Getter
@Column(columnName = SERVICE_INSTANCE_ID)
private int serviceInstanceId;
private String serviceInstanceId;
@Setter
@Getter
@Column(columnName = ENDPOINT_ID)
private String endpointId;
@Setter
@Getter
@Column(columnName = ENDPOINT_NAME)
@Column(columnName = ENDPOINT_NAME, matchQuery = true)
private String endpointName;
@Setter
@Getter
@Column(columnName = TRACE_ID)
@Column(columnName = TRACE_ID, length = 150)
private String traceId;
@Setter
@Getter
@Column(columnName = IS_ERROR)
private int isError;
@Column(columnName = TRACE_SEGMENT_ID, length = 150)
private String traceSegmentId;
@Setter
@Getter
@Column(columnName = STATUS_CODE)
private String statusCode;
@Column(columnName = SPAN_ID)
private int spanId;
@Setter
@Getter
@Column(columnName = CONTENT_TYPE)
@Column(columnName = IS_ERROR)
private int isError;
@Setter
@Getter
@Column(columnName = CONTENT_TYPE, storageOnly = true)
private int contentType = ContentType.NONE.value();
@Setter
@Getter
@Column(columnName = CONTENT)
@Column(columnName = CONTENT, length = 1_000_000, matchQuery = true)
private String content;
@Setter
@Getter
@Column(columnName = TIMESTAMP)
private long timestamp;
/**
* All tag binary data.
*/
@Setter
@Getter
@Column(columnName = TAGS_RAW_DATA, storageOnly = true)
private byte[] tagsRawData;
@Setter
@Getter
@Column(columnName = TAGS)
private List<String> tagsInString;
/**
* tags is a duplicate field of {@link #tagsInString}. Some storage implementations don't support array values in a single
* column. Then, those implementations could use this raw data to generate necessary data structures.
*/
@Setter
@Getter
private List<Tag> tags;
@Override
public String id() {
throw new UnexpectedException("AbstractLogRecord doesn't provide id()");
}
public static abstract class Builder<T extends AbstractLogRecord> implements StorageBuilder<T> {
protected void map2Data(T record, Map<String, Object> dbMap) {
record.setServiceId(((Number) dbMap.get(SERVICE_ID)).intValue());
record.setServiceInstanceId(((Number) dbMap.get(SERVICE_INSTANCE_ID)).intValue());
record.setEndpointId((String) dbMap.get(ENDPOINT_ID));
record.setEndpointName((String) dbMap.get(ENDPOINT_NAME));
record.setIsError(((Number) dbMap.get(IS_ERROR)).intValue());
record.setTraceId((String) dbMap.get(TRACE_ID));
record.setStatusCode((String) dbMap.get(STATUS_CODE));
record.setContentType(((Number) dbMap.get(CONTENT_TYPE)).intValue());
record.setContent((String) dbMap.get(CONTENT));
record.setTimestamp(((Number) dbMap.get(TIMESTAMP)).longValue());
record.setTimeBucket(((Number) dbMap.get(TIME_BUCKET)).longValue());
}
@Override
public Map<String, Object> data2Map(AbstractLogRecord record) {
Map<String, Object> map = new HashMap<>();
protected void data2Map(Map<String, Object> map, AbstractLogRecord record) {
map.put(SERVICE_ID, record.getServiceId());
map.put(SERVICE_INSTANCE_ID, record.getServiceInstanceId());
map.put(ENDPOINT_ID, record.getEndpointId());
map.put(ENDPOINT_NAME, record.getEndpointName());
map.put(TRACE_ID, record.getTraceId());
map.put(TRACE_SEGMENT_ID, record.getTraceSegmentId());
map.put(SPAN_ID, record.getSpanId());
map.put(IS_ERROR, record.getIsError());
map.put(STATUS_CODE, record.getStatusCode());
map.put(TIME_BUCKET, record.getTimeBucket());
map.put(CONTENT_TYPE, record.getContentType());
map.put(CONTENT, record.getContent());
map.put(TIMESTAMP, record.getTimestamp());
return map;
if (CollectionUtils.isEmpty(record.getTagsRawData())) {
map.put(TAGS_RAW_DATA, Const.EMPTY_STRING);
} else {
map.put(TAGS_RAW_DATA, new String(Base64.getEncoder().encode(record.getTagsRawData())));
}
map.put(TAGS, record.getTagsInString());
}
protected void map2Data(T record, Map<String, Object> dbMap) {
record.setServiceId((String) dbMap.get(SERVICE_ID));
record.setServiceInstanceId((String) dbMap.get(SERVICE_INSTANCE_ID));
record.setEndpointId((String) dbMap.get(ENDPOINT_ID));
record.setEndpointName((String) dbMap.get(ENDPOINT_NAME));
record.setTraceId((String) dbMap.get(TRACE_ID));
record.setTraceSegmentId((String) dbMap.get(TRACE_SEGMENT_ID));
record.setSpanId(((Number) dbMap.get(SPAN_ID)).intValue());
record.setIsError(((Number) dbMap.get(IS_ERROR)).intValue());
record.setContentType(((Number) dbMap.get(CONTENT_TYPE)).intValue());
record.setContent((String) dbMap.get(CONTENT));
record.setTimestamp(((Number) dbMap.get(TIMESTAMP)).longValue());
if (StringUtil.isEmpty((String) dbMap.get(TAGS_RAW_DATA))) {
record.setTagsRawData(new byte[] {});
} else {
// Don't read the tags, as they have already been included in the raw data binary.
record.setTagsRawData(Base64.getDecoder().decode((String) dbMap.get(TAGS_RAW_DATA)));
}
record.setTimeBucket(((Number) dbMap.get(TIME_BUCKET)).longValue());
}
}
}
......@@ -6,36 +6,62 @@
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.analysis.manual.log;
import java.util.HashMap;
import java.util.Map;
import lombok.Getter;
import lombok.Setter;
import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
import org.apache.skywalking.oap.server.core.storage.annotation.Column;
import org.apache.skywalking.oap.server.core.storage.annotation.SuperDataset;
@SuperDataset
@Stream(name = LogRecord.INDEX_NAME, scopeId = DefaultScopeDefine.LOG, builder = LogRecord.Builder.class, processor = RecordStreamProcessor.class)
public class LogRecord extends AbstractLogRecord {
public static final String INDEX_NAME = "log";
import static org.apache.skywalking.oap.server.core.analysis.manual.log.HTTPAccessLogRecord.INDEX_NAME;
public static final String UNIQUE_ID = "unique_id";
@Stream(name = INDEX_NAME, scopeId = DefaultScopeDefine.HTTP_ACCESS_LOG, builder = HTTPAccessLogRecord.Builder.class, processor = RecordStreamProcessor.class)
public class HTTPAccessLogRecord extends AbstractLogRecord {
@Setter
@Getter
@Column(columnName = UNIQUE_ID)
private String uniqueId;
@Override
public String id() {
return uniqueId;
}
public static final String INDEX_NAME = "http_access_log";
public static class Builder extends AbstractLogRecord.Builder<LogRecord> {
public static class Builder extends AbstractLogRecord.Builder<HTTPAccessLogRecord> {
@Override
public HTTPAccessLogRecord map2Data(Map<String, Object> dbMap) {
HTTPAccessLogRecord record = new HTTPAccessLogRecord();
super.map2Data(record, dbMap);
public LogRecord map2Data(final Map<String, Object> dbMap) {
LogRecord record = new LogRecord();
map2Data(record, dbMap);
record.setUniqueId((String) dbMap.get(UNIQUE_ID));
return record;
}
@Override
public Map<String, Object> data2Map(final LogRecord record) {
Map<String, Object> dbMap = new HashMap<>();
data2Map(dbMap, record);
dbMap.put(UNIQUE_ID, record.getUniqueId());
return dbMap;
}
}
}
......@@ -13,20 +13,21 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.analysis.manual.log;
import org.apache.skywalking.oap.server.core.analysis.SourceDispatcher;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.source.HTTPAccessLog;
import org.apache.skywalking.oap.server.core.source.Log;
public class HTTPAccessLogDispatcher implements SourceDispatcher<HTTPAccessLog> {
public class LogRecordDispatcher implements SourceDispatcher<Log> {
@Override
public void dispatch(HTTPAccessLog source) {
HTTPAccessLogRecord record = new HTTPAccessLogRecord();
public void dispatch(final Log source) {
LogRecord record = new LogRecord();
record.setUniqueId(source.getUniqueId());
record.setTimestamp(source.getTimestamp());
record.setTimeBucket(source.getTimeBucket());
record.setServiceId(source.getServiceId());
......@@ -34,10 +35,13 @@ public class HTTPAccessLogDispatcher implements SourceDispatcher<HTTPAccessLog>
record.setEndpointId(source.getEndpointId());
record.setEndpointName(source.getEndpointName());
record.setTraceId(source.getTraceId());
record.setIsError(source.getIsError());
record.setStatusCode(source.getStatusCode());
record.setTraceSegmentId(source.getTraceSegmentId());
record.setSpanId(source.getSpanId());
record.setContentType(source.getContentType().value());
record.setContent(source.getContent());
record.setTagsRawData(source.getTagsRawData());
record.setTagsInString(Tag.Util.toStringList(source.getTags()));
record.setTags(source.getTags());
RecordStreamProcessor.getInstance().in(record);
}
......
......@@ -13,14 +13,13 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.analysis.manual.segment;
package org.apache.skywalking.oap.server.core.analysis.manual.searchtag;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
......@@ -33,7 +32,7 @@ import org.apache.skywalking.oap.server.library.util.CollectionUtils;
@EqualsAndHashCode
@NoArgsConstructor
@AllArgsConstructor
public class SpanTag {
public class Tag {
private String key;
private String value;
......@@ -43,14 +42,11 @@ public class SpanTag {
}
public static class Util {
public static List<String> toStringList(List<SpanTag> list) {
public static List<String> toStringList(List<Tag> list) {
if (CollectionUtils.isEmpty(list)) {
return Collections.emptyList();
}
List<String> result = new ArrayList<>(list.size());
list.forEach(e -> result.add(e.toString()));
return result;
return list.stream().map(Tag::toString).collect(Collectors.toList());
}
}
}
......@@ -19,6 +19,7 @@
package org.apache.skywalking.oap.server.core.analysis.manual.segment;
import org.apache.skywalking.oap.server.core.analysis.SourceDispatcher;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
import org.apache.skywalking.oap.server.core.source.Segment;
......@@ -41,7 +42,7 @@ public class SegmentDispatcher implements SourceDispatcher<Segment> {
segment.setTimeBucket(source.getTimeBucket());
segment.setVersion(source.getVersion());
segment.setTagsRawData(source.getTags());
segment.setTags(SpanTag.Util.toStringList(source.getTags()));
segment.setTags(Tag.Util.toStringList(source.getTags()));
RecordStreamProcessor.getInstance().in(segment);
}
......
......@@ -28,6 +28,7 @@ import lombok.Setter;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.analysis.topn.TopN;
import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
......@@ -118,7 +119,7 @@ public class SegmentRecord extends Record {
*/
@Setter
@Getter
private List<SpanTag> tagsRawData;
private List<Tag> tagsRawData;
@Override
public String id() {
......
......@@ -27,10 +27,12 @@ public class ConfigService implements Service {
private final String gRPCHost;
private final int gRPCPort;
private final String searchableTracesTags;
private final String searchableLogsTags;
public ConfigService(CoreModuleConfig moduleConfig) {
this.gRPCHost = moduleConfig.getGRPCHost();
this.gRPCPort = moduleConfig.getGRPCPort();
this.searchableTracesTags = moduleConfig.getSearchableTracesTags();
this.searchableLogsTags = moduleConfig.getSearchableLogsTags();
}
}
......@@ -19,8 +19,12 @@
package org.apache.skywalking.oap.server.core.query;
import java.io.IOException;
import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.query.type.Pagination;
......@@ -45,21 +49,38 @@ public class LogQueryService implements Service {
return logQueryDAO;
}
public Logs queryLogs(final String metricName,
int serviceId,
int serviceInstanceId,
public boolean supportQueryLogsByKeywords() {
return getLogQueryDAO().supportQueryLogsByKeywords();
}
public Logs queryLogs(String metricName,
String serviceId,
String serviceInstanceId,
String endpointId,
String traceId,
String endpointName,
TraceScopeCondition relatedTrace,
LogState state,
String stateCode,
Pagination paging,
Order queryOrder,
final long startTB,
final long endTB) throws IOException {
final long endTB,
final List<Tag> tags,
final List<String> keywordsOfContent,
final List<String> excludingKeywordsOfContent) throws IOException {
PaginationUtils.Page page = PaginationUtils.INSTANCE.exchange(paging);
Logs logs = getLogQueryDAO().queryLogs(
metricName, serviceId, serviceInstanceId, endpointId, traceId, state, stateCode, paging, page
.getFrom(), page.getLimit(), startTB, endTB);
Logs logs = getLogQueryDAO().queryLogs(metricName,
serviceId,
serviceInstanceId,
endpointId,
endpointName,
relatedTrace,
state,
queryOrder,
page.getFrom(), page.getLimit(),
startTB, endTB, tags,
keywordsOfContent, excludingKeywordsOfContent
);
logs.getLogs().forEach(log -> {
if (StringUtil.isNotEmpty(log.getServiceId())) {
final IDManager.ServiceID.ServiceIDDefinition serviceIDDefinition = IDManager.ServiceID.analysisId(
......@@ -71,7 +92,6 @@ public class LogQueryService implements Service {
.analysisId(log.getServiceInstanceId());
log.setServiceInstanceName(instanceIDDefinition.getName());
}
log.setEndpointId(log.getEndpointId());
});
return logs;
}
......
......@@ -26,8 +26,8 @@ import java.util.List;
import org.apache.skywalking.apm.network.language.agent.v3.SegmentObject;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.config.IComponentLibraryCatalogService;
import org.apache.skywalking.oap.server.core.query.type.KeyValue;
import org.apache.skywalking.oap.server.core.query.type.LogEntity;
......@@ -85,7 +85,7 @@ public class TraceQueryService implements Service {
final Pagination paging,
final long startTB,
final long endTB,
final List<SpanTag> tags) throws IOException {
final List<Tag> tags) throws IOException {
PaginationUtils.Page page = PaginationUtils.INSTANCE.exchange(paging);
return getTraceQueryDAO().queryBasicTraces(
......
......@@ -18,8 +18,11 @@
package org.apache.skywalking.oap.server.core.query.input;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Pagination;
......@@ -27,12 +30,16 @@ import org.apache.skywalking.oap.server.core.query.type.Pagination;
@Setter
public class LogQueryCondition {
private String metricName;
private int serviceId;
private int serviceInstanceId;
private String serviceId;
private String serviceInstanceId;
private String endpointId;
private String traceId;
private LogState state;
private String stateCode;
private String endpointName;
private TraceScopeCondition relatedTrace;
private Duration queryDuration;
private LogState state;
private Pagination paging;
private List<Tag> tags;
private List<String> keywordsOfContent;
private List<String> excludingKeywordsOfContent;
private Order queryOrder;
}
......@@ -21,7 +21,7 @@ package org.apache.skywalking.oap.server.core.query.input;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.type.Pagination;
import org.apache.skywalking.oap.server.core.query.type.QueryOrder;
import org.apache.skywalking.oap.server.core.query.type.TraceState;
......@@ -40,5 +40,5 @@ public class TraceQueryCondition {
private TraceState traceState;
private QueryOrder queryOrder;
private Pagination paging;
private List<SpanTag> tags;
private List<Tag> tags;
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.server.core.query.input;
import lombok.Getter;
import lombok.Setter;
/**
 * Query condition scoping a log query to a trace context: a trace id,
 * optionally narrowed to one segment and one span inside that trace.
 */
@Setter
@Getter
public class TraceScopeCondition {
    // The trace the queried logs should relate to.
    private String traceId;
    // Segment within the trace; presumably null when not narrowing — TODO confirm against query layer.
    private String segmentId;
    // Span within the segment; boxed Integer so "unset" is representable as null.
    private Integer spanId;
}
......@@ -21,7 +21,7 @@ package org.apache.skywalking.oap.server.core.query.type;
import org.apache.skywalking.oap.server.core.UnexpectedException;
public enum ContentType {
TEXT(1), JSON(2), NONE(0);
NONE(0), TEXT(1), JSON(2), YAML(3);
private int value;
......@@ -35,12 +35,14 @@ public enum ContentType {
public static ContentType instanceOf(int value) {
switch (value) {
case 0:
return NONE;
case 1:
return TEXT;
case 2:
return JSON;
case 0:
return NONE;
case 3:
return YAML;
default:
throw new UnexpectedException("unexpected value=" + value);
}
......
......@@ -18,6 +18,8 @@
package org.apache.skywalking.oap.server.core.query.type;
import java.util.ArrayList;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
......@@ -36,4 +38,9 @@ public class Log {
private String statusCode;
private ContentType contentType = ContentType.NONE;
private String content;
private final List<KeyValue> tags;
public Log() {
tags = new ArrayList<>();
}
}
......@@ -18,25 +18,29 @@
package org.apache.skywalking.oap.server.core.source;
import java.util.ArrayList;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
import org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
@Setter
@Getter
public abstract class AbstractLog extends Source {
private long timeBucket;
private long timestamp;
private int serviceId;
private int serviceInstanceId;
private String serviceId;
private String serviceInstanceId;
private String endpointId;
private String endpointName;
private String traceId;
private int isError;
private String statusCode;
private String traceSegmentId;
private int spanId;
private ContentType contentType = ContentType.NONE;
private String content;
private byte[] tagsRawData;
private List<Tag> tags = new ArrayList<>();
@Override
public String getEntityId() {
......
......@@ -78,6 +78,9 @@ public class DefaultScopeDefine {
public static final int BROWSER_APP_SINGLE_VERSION_TRAFFIC = 39;
public static final int BROWSER_APP_PAGE_TRAFFIC = 40;
public static final int LOG = 41;
public static final int ENDPOINT_META = 42;
/**
* Catalog of scope, the metrics processor could use this to group all generated metrics by oal rt.
*/
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.server.core.source;
import lombok.Getter;
import lombok.Setter;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.NodeType;
/**
 * Source carrying endpoint metadata (service name/type plus endpoint name),
 * registered under the {@code ENDPOINT_META} scope. Its entity id is the
 * endpoint id built from the derived service id and the endpoint name.
 */
@ScopeDeclaration(id = DefaultScopeDefine.ENDPOINT_META, name = "EndpointMeta")
@ScopeDefaultColumn.VirtualColumnDefinition(fieldName = "entityId", columnName = "entity_id", isID = true, type = String.class)
public class EndpointMeta extends Source {
    // Derived in prepare() from serviceName + serviceNodeType; intentionally getter-only.
    @Getter
    private String serviceId;
    @Getter
    @Setter
    private String serviceName;
    @Getter
    @Setter
    private NodeType serviceNodeType;
    @Getter
    @Setter
    private String endpoint;

    @Override
    public int scope() {
        return DefaultScopeDefine.ENDPOINT_META;
    }

    @Override
    public String getEntityId() {
        // Endpoint id is composed of the (derived) service id and the endpoint name.
        return IDManager.EndpointID.buildId(serviceId, endpoint);
    }

    @Override
    public void prepare() {
        // Compute serviceId before the source is consumed so getEntityId() is valid.
        this.serviceId = IDManager.ServiceID.buildId(serviceName, serviceNodeType);
    }
}
......@@ -13,17 +13,27 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.source;
import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.HTTP_ACCESS_LOG;
import lombok.Getter;
import lombok.Setter;
@Setter
@Getter
@ScopeDeclaration(id = DefaultScopeDefine.LOG, name = "Log")
public class Log extends AbstractLog {
private String uniqueId;
@Override
public String getEntityId() {
return uniqueId;
}
@ScopeDeclaration(id = HTTP_ACCESS_LOG, name = "HTTPAccessLog")
public class HTTPAccessLog extends AbstractLog {
@Override
public int scope() {
return HTTP_ACCESS_LOG;
return DefaultScopeDefine.LOG;
}
}
......@@ -22,7 +22,7 @@ import java.util.ArrayList;
import java.util.List;
import lombok.Getter;
import lombok.Setter;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.SEGMENT;
......@@ -77,5 +77,5 @@ public class Segment extends Source {
private int version;
@Setter
@Getter
private List<SpanTag> tags = new ArrayList<>();
private List<Tag> tags = new ArrayList<>();
}
......@@ -18,14 +18,50 @@
package org.apache.skywalking.oap.server.core.storage.query;
import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.util.Base64;
import java.util.List;
import org.apache.skywalking.apm.network.logging.v3.LogTags;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.KeyValue;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.query.type.Pagination;
import org.apache.skywalking.oap.server.library.module.Service;
public interface ILogQueryDAO extends Service {
Logs queryLogs(final String metricName, int serviceId, int serviceInstanceId, String endpointId, String traceId,
LogState state, String stateCode, Pagination paging, int from, int limit, final long startTB,
final long endTB) throws IOException;
/**
 * Whether this storage implementation supports querying logs by content
 * keywords. Defaults to {@code false}; implementations override to opt in.
 */
default boolean supportQueryLogsByKeywords() {
    return false;
}
Logs queryLogs(String metricName,
String serviceId,
String serviceInstanceId,
String endpointId,
String endpointName,
TraceScopeCondition relatedTrace,
LogState state,
Order queryOrder,
int from,
int limit,
final long startTB,
final long endTB,
final List<Tag> tags,
final List<String> keywordsOfContent,
final List<String> excludingKeywordsOfContent) throws IOException;
/**
 * Parse the base64-encoded, protobuf-serialized {@code LogTags} raw data and
 * append each key/value pair, in order, to the given tag list.
 *
 * @param dataBinaryBase64 base64 string holding a serialized {@code LogTags} message
 * @param tags             destination list the parsed pairs are appended to
 * @throws RuntimeException wrapping the protobuf parse failure when the payload is invalid
 */
default void parserDataBinary(String dataBinaryBase64, List<KeyValue> tags) {
    try {
        LogTags logTags = LogTags.parseFrom(Base64.getDecoder().decode(dataBinaryBase64));
        logTags.getDataList().forEach(pair -> tags.add(new KeyValue(pair.getKey(), pair.getValue())));
    } catch (InvalidProtocolBufferException e) {
        // Preserve the cause and add context rather than rethrowing bare.
        throw new RuntimeException("Failed to parse LogTags from base64 data binary", e);
    }
}
}
......@@ -20,8 +20,8 @@ package org.apache.skywalking.oap.server.core.storage.query;
import java.io.IOException;
import java.util.List;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.query.type.QueryOrder;
import org.apache.skywalking.oap.server.core.query.type.Span;
import org.apache.skywalking.oap.server.core.query.type.TraceBrief;
......@@ -43,7 +43,7 @@ public interface ITraceQueryDAO extends Service {
int from,
TraceState traceState,
QueryOrder queryOrder,
final List<SpanTag> tags) throws IOException;
final List<Tag> tags) throws IOException;
List<SegmentRecord> queryByTraceId(String traceId) throws IOException;
......
......@@ -13,23 +13,22 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.analysis.manual.segment;
package org.apache.skywalking.oap.server.core.analysis.manual.searchtag;
import org.junit.Assert;
import org.junit.Test;
public class SpanTagTest {
public class TagTest {
@Test
public void testEqual() {
final SpanTag spanTag = new SpanTag("tag1", "value1");
final SpanTag spanTag1 = new SpanTag("tag1", "value2");
final SpanTag spanTag2 = new SpanTag("tag2", "value3");
final SpanTag spanTag3 = new SpanTag("tag1", "value1");
Assert.assertEquals(spanTag, spanTag3);
Assert.assertNotEquals(spanTag, spanTag1);
Assert.assertNotEquals(spanTag, spanTag2);
final Tag tag = new Tag("tag1", "value1");
final Tag tag1 = new Tag("tag1", "value2");
final Tag tag2 = new Tag("tag2", "value3");
final Tag tag3 = new Tag("tag1", "value1");
Assert.assertEquals(tag, tag3);
Assert.assertNotEquals(tag, tag1);
Assert.assertNotEquals(tag, tag2);
}
}
......@@ -34,6 +34,11 @@
<artifactId>agent-analyzer</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>log-analyzer</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
......
......@@ -69,6 +69,8 @@ public class KafkaFetcherConfig extends ModuleConfig {
private boolean enableMeterSystem = false;
private boolean enableLog = false;
private String configPath = "meter-analyzer-config";
private String topicNameOfMetrics = "skywalking-metrics";
......@@ -81,6 +83,8 @@ public class KafkaFetcherConfig extends ModuleConfig {
private String topicNameOfMeters = "skywalking-meters";
private String topicNameOfLogs = "skywalking-logs";
private int kafkaHandlerThreadPoolSize;
private int kafkaHandlerThreadPoolQueueSize;
......
......@@ -19,16 +19,17 @@
package org.apache.skywalking.oap.server.analyzer.agent.kafka.provider;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.oap.log.analyzer.module.LogAnalyzerModule;
import org.apache.skywalking.oap.server.analyzer.agent.kafka.KafkaFetcherHandlerRegister;
import org.apache.skywalking.oap.server.analyzer.agent.kafka.module.KafkaFetcherConfig;
import org.apache.skywalking.oap.server.analyzer.agent.kafka.module.KafkaFetcherModule;
import org.apache.skywalking.oap.server.analyzer.agent.kafka.provider.handler.JVMMetricsHandler;
import org.apache.skywalking.oap.server.analyzer.agent.kafka.provider.handler.LogHandler;
import org.apache.skywalking.oap.server.analyzer.agent.kafka.provider.handler.MeterServiceHandler;
import org.apache.skywalking.oap.server.analyzer.agent.kafka.provider.handler.ProfileTaskHandler;
import org.apache.skywalking.oap.server.analyzer.agent.kafka.provider.handler.ServiceManagementHandler;
import org.apache.skywalking.oap.server.analyzer.agent.kafka.provider.handler.TraceSegmentHandler;
import org.apache.skywalking.oap.server.analyzer.module.AnalyzerModule;
import org.apache.skywalking.oap.server.analyzer.provider.meter.process.IMeterProcessService;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.library.module.ModuleConfig;
import org.apache.skywalking.oap.server.library.module.ModuleDefine;
......@@ -41,8 +42,6 @@ public class KafkaFetcherProvider extends ModuleProvider {
private KafkaFetcherHandlerRegister handlerRegister;
private KafkaFetcherConfig config;
private IMeterProcessService processService;
public KafkaFetcherProvider() {
config = new KafkaFetcherConfig();
}
......@@ -75,9 +74,11 @@ public class KafkaFetcherProvider extends ModuleProvider {
handlerRegister.register(new ProfileTaskHandler(getManager(), config));
if (config.isEnableMeterSystem()) {
processService = getManager().find(AnalyzerModule.NAME).provider().getService(IMeterProcessService.class);
handlerRegister.register(new MeterServiceHandler(getManager(), config));
}
if (config.isEnableLog()) {
handlerRegister.register(new LogHandler(getManager(), config));
}
handlerRegister.start();
}
......@@ -89,6 +90,7 @@ public class KafkaFetcherProvider extends ModuleProvider {
public String[] requiredModules() {
return new String[] {
AnalyzerModule.NAME,
LogAnalyzerModule.NAME,
CoreModule.NAME
};
}
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.server.analyzer.agent.kafka.provider.handler;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.utils.Bytes;
import org.apache.skywalking.apm.network.logging.v3.LogData;
import org.apache.skywalking.oap.log.analyzer.module.LogAnalyzerModule;
import org.apache.skywalking.oap.log.analyzer.provider.log.ILogAnalyzerService;
import org.apache.skywalking.oap.server.analyzer.agent.kafka.module.KafkaFetcherConfig;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import org.apache.skywalking.oap.server.telemetry.TelemetryModule;
import org.apache.skywalking.oap.server.telemetry.api.CounterMetrics;
import org.apache.skywalking.oap.server.telemetry.api.HistogramMetrics;
import org.apache.skywalking.oap.server.telemetry.api.MetricsCreator;
import org.apache.skywalking.oap.server.telemetry.api.MetricsTag;
/**
 * Kafka handler that consumes agent log records from the configured log topic
 * and forwards each one to the log analyzer service, while recording process
 * latency and analysis-error telemetry.
 */
@Slf4j
public class LogHandler implements KafkaHandler {
    private final KafkaFetcherConfig config;
    private final ILogAnalyzerService logAnalyzerService;
    private final HistogramMetrics histogram;
    private final CounterMetrics errorCounter;

    public LogHandler(final ModuleManager moduleManager,
                      final KafkaFetcherConfig config) {
        this.config = config;
        // Resolve the analysis service every consumed log record is handed to.
        this.logAnalyzerService = moduleManager.find(LogAnalyzerModule.NAME)
                                               .provider()
                                               .getService(ILogAnalyzerService.class);
        final MetricsCreator creator = moduleManager.find(TelemetryModule.NAME)
                                                    .provider()
                                                    .getService(MetricsCreator.class);
        this.histogram = creator.createHistogramMetric(
            "log_in_latency", "The process latency of log",
            new MetricsTag.Keys("protocol"), new MetricsTag.Values("kafka-fetcher")
        );
        this.errorCounter = creator.createCounter(
            "log_analysis_error_count", "The error number of log analysis",
            new MetricsTag.Keys("protocol"), new MetricsTag.Values("kafka-fetcher")
        );
    }

    @Override
    public String getConsumePartitions() {
        return config.getConsumePartitions();
    }

    @Override
    public String getTopic() {
        return config.getTopicNameOfLogs();
    }

    @Override
    public void handle(final ConsumerRecord<String, Bytes> record) {
        final HistogramMetrics.Timer timer = histogram.createTimer();
        try {
            final LogData logData = LogData.parseFrom(record.value().get());
            logAnalyzerService.doAnalysis(logData);
        } catch (Exception e) {
            // Count and log the failure, but keep consuming subsequent records.
            errorCounter.inc();
            log.error(e.getMessage(), e);
        } finally {
            timer.finish();
        }
    }
}
......@@ -86,6 +86,7 @@ public class TraceSegmentHandler implements KafkaHandler {
segmentParserService.send(segment);
} catch (Exception e) {
errorCounter.inc();
log.error(e.getMessage(), e);
} finally {
timer.finish();
}
......
......@@ -21,11 +21,14 @@ package org.apache.skywalking.oap.query.graphql.resolver;
import com.coxautodev.graphql.tools.GraphQLQueryResolver;
import java.io.IOException;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.query.LogQueryService;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.LogQueryCondition;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import static java.util.Objects.isNull;
import static java.util.Objects.nonNull;
public class LogQuery implements GraphQLQueryResolver {
......@@ -43,18 +46,36 @@ public class LogQuery implements GraphQLQueryResolver {
return logQueryService;
}
/**
 * Exposes to the GraphQL layer whether the backing storage supports
 * keyword-based log queries; delegates to the core log query service.
 */
public boolean supportQueryLogsByKeywords() {
    return getQueryService().supportQueryLogsByKeywords();
}
public Logs queryLogs(LogQueryCondition condition) throws IOException {
if (isNull(condition.getQueryDuration()) && isNull(condition.getRelatedTrace())) {
throw new UnexpectedException("The condition must contains either queryDuration or relatedTrace.");
}
long startSecondTB = 0;
long endSecondTB = 0;
if (nonNull(condition.getQueryDuration())) {
startSecondTB = condition.getQueryDuration().getStartTimeBucketInSec();
endSecondTB = condition.getQueryDuration().getEndTimeBucketInSec();
}
Order queryOrder = isNull(condition.getQueryOrder()) ? Order.DES : condition.getQueryOrder();
return getQueryService().queryLogs(
condition.getMetricName(), condition.getServiceId(), condition.getServiceInstanceId(), condition
.getEndpointId(), condition.getTraceId(), condition.getState(), condition.getStateCode(),
condition.getPaging(), startSecondTB, endSecondTB
condition.getMetricName(),
condition.getServiceId(),
condition.getServiceInstanceId(),
condition.getEndpointId(),
condition.getEndpointName(),
condition.getRelatedTrace(),
condition.getState(),
condition.getPaging(),
queryOrder,
startSecondTB, endSecondTB,
condition.getTags(),
condition.getKeywordsOfContent(),
condition.getExcludingKeywordsOfContent()
);
}
}
Subproject commit d3608e86f7c9aaf54aa4ad2c8d2afd09d0680dd6
Subproject commit 154b7c41b53376b7161bee3c4e716330cc752254
......@@ -42,6 +42,7 @@
<module>otel-receiver-plugin</module>
<module>skywalking-meter-receiver-plugin</module>
<module>skywalking-browser-receiver-plugin</module>
<module>skywalking-log-recevier-plugin</module>
</modules>
<dependencies>
......
......@@ -38,18 +38,14 @@ public class PerfDataAnalyzer {
public void doAnalysis(BrowserPerfData browserPerfData) {
createAnalysisListeners();
try {
BrowserPerfDataDecorator decorator = new BrowserPerfDataDecorator(browserPerfData);
// Use the server side current time.
long nowMillis = System.currentTimeMillis();
decorator.setTime(nowMillis);
BrowserPerfDataDecorator decorator = new BrowserPerfDataDecorator(browserPerfData);
// Use the server side current time.
long nowMillis = System.currentTimeMillis();
decorator.setTime(nowMillis);
notifyListener(decorator);
notifyListener(decorator);
notifyListenerToBuild();
} catch (Throwable e) {
log.error(e.getMessage(), e);
}
notifyListenerToBuild();
}
private void notifyListener(BrowserPerfDataDecorator decorator) {
......
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>server-receiver-plugin</artifactId>
<groupId>org.apache.skywalking</groupId>
<version>8.4.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>skywalking-log-recevier-plugin</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>log-analyzer</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>skywalking-sharing-server-plugin</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</project>
\ No newline at end of file
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.server.recevier.log.module;
import org.apache.skywalking.oap.server.library.module.ModuleDefine;
/**
 * Module definition of the log receiver ("receiver-log"), which accepts log
 * data reported by agents. It exposes no services of its own.
 */
public class LogModule extends ModuleDefine {
    /**
     * Module name as a public constant, consistent with sibling modules
     * (e.g. CoreModule.NAME, LogAnalyzerModule.NAME) so other modules can
     * reference it instead of repeating the literal.
     */
    public static final String NAME = "receiver-log";

    public LogModule() {
        super(NAME);
    }

    @Override
    public Class[] services() {
        // This module provides no services to other modules.
        return new Class[0];
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.server.recevier.log.provider;
import org.apache.skywalking.oap.log.analyzer.module.LogAnalyzerModule;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.server.GRPCHandlerRegister;
import org.apache.skywalking.oap.server.library.module.ModuleConfig;
import org.apache.skywalking.oap.server.library.module.ModuleDefine;
import org.apache.skywalking.oap.server.library.module.ModuleProvider;
import org.apache.skywalking.oap.server.library.module.ModuleStartException;
import org.apache.skywalking.oap.server.library.module.ServiceNotProvidedException;
import org.apache.skywalking.oap.server.receiver.sharing.server.SharingServerModule;
import org.apache.skywalking.oap.server.recevier.log.module.LogModule;
import org.apache.skywalking.oap.server.recevier.log.provider.handler.LogReportServiceHandler;
import org.apache.skywalking.oap.server.telemetry.TelemetryModule;
/**
 * Default provider of the log receiver module: wires the gRPC log report
 * handler onto the sharing server so agents can push log data.
 */
public class LogModuleProvider extends ModuleProvider {
    @Override
    public String name() {
        return "default";
    }

    @Override
    public Class<? extends ModuleDefine> module() {
        return LogModule.class;
    }

    @Override
    public ModuleConfig createConfigBeanIfAbsent() {
        // This receiver exposes no configuration of its own.
        return null;
    }

    @Override
    public void prepare() throws ServiceNotProvidedException, ModuleStartException {
        // Nothing to set up before start.
    }

    @Override
    public void start() throws ServiceNotProvidedException, ModuleStartException {
        // Attach the log report handler to the shared gRPC server.
        final GRPCHandlerRegister registry = getManager().find(SharingServerModule.NAME)
                                                         .provider()
                                                         .getService(GRPCHandlerRegister.class);
        registry.addHandler(new LogReportServiceHandler(getManager()));
    }

    @Override
    public void notifyAfterCompleted() throws ServiceNotProvidedException, ModuleStartException {
        // No post-start work required.
    }

    @Override
    public String[] requiredModules() {
        return new String[] {
            TelemetryModule.NAME,
            CoreModule.NAME,
            LogAnalyzerModule.NAME,
            SharingServerModule.NAME
        };
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.server.recevier.log.provider.handler;
import io.grpc.stub.StreamObserver;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.apm.network.common.v3.Commands;
import org.apache.skywalking.apm.network.logging.v3.LogData;
import org.apache.skywalking.apm.network.logging.v3.LogReportServiceGrpc;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.log.analyzer.module.LogAnalyzerModule;
import org.apache.skywalking.oap.log.analyzer.provider.log.ILogAnalyzerService;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import org.apache.skywalking.oap.server.library.server.grpc.GRPCHandler;
import org.apache.skywalking.oap.server.telemetry.TelemetryModule;
import org.apache.skywalking.oap.server.telemetry.api.CounterMetrics;
import org.apache.skywalking.oap.server.telemetry.api.HistogramMetrics;
import org.apache.skywalking.oap.server.telemetry.api.MetricsCreator;
import org.apache.skywalking.oap.server.telemetry.api.MetricsTag;
/**
 * gRPC handler that collects {@code LogData} streamed from agents and routes
 * every record to the log analyzer service. Telemetry tracks per-record
 * processing latency and the number of analysis failures.
 */
@Slf4j
public class LogReportServiceHandler extends LogReportServiceGrpc.LogReportServiceImplBase implements GRPCHandler {
    private final ModuleManager moduleManager;
    // Latency of handling a single log record, labeled protocol=grpc.
    private final HistogramMetrics histogram;
    // Count of records whose analysis threw, labeled protocol=grpc.
    private final CounterMetrics errorCounter;
    private final ILogAnalyzerService logAnalyzerService;

    public LogReportServiceHandler(final ModuleManager moduleManager) {
        this.moduleManager = moduleManager;
        MetricsCreator metricsCreator = moduleManager.find(TelemetryModule.NAME)
                                                     .provider()
                                                     .getService(MetricsCreator.class);
        this.logAnalyzerService = moduleManager.find(LogAnalyzerModule.NAME)
                                               .provider()
                                               .getService(ILogAnalyzerService.class);
        histogram = metricsCreator.createHistogramMetric(
            "log_in_latency", "The process latency of log",
            new MetricsTag.Keys("protocol"), new MetricsTag.Values("grpc")
        );
        errorCounter = metricsCreator.createCounter("log_analysis_error_count", "The error number of log analysis",
                                                    new MetricsTag.Keys("protocol"), new MetricsTag.Values("grpc")
        );
    }

    @Override
    public StreamObserver<LogData> collect(final StreamObserver<Commands> responseObserver) {
        return new StreamObserver<LogData>() {
            // First non-empty service name seen on this stream; cached so it can
            // be back-filled into later records that omit the field.
            private String serviceName;

            /**
             * If this is not the first element of the streaming,
             * use the previous not-null name as the service name:
             * cache the first non-empty service name, and copy it into any
             * subsequent record whose own service field is empty.
             */
            private void setServiceName(LogData.Builder builder) {
                if (StringUtil.isEmpty(serviceName) && StringUtil.isNotEmpty(builder.getService())) {
                    serviceName = builder.getService();
                } else if (StringUtil.isNotEmpty(serviceName)) {
                    builder.setService(serviceName);
                }
            }

            @Override
            public void onNext(final LogData logData) {
                if (log.isDebugEnabled()) {
                    log.debug("received log in streaming");
                }

                // Time every record, and count (but do not propagate) analysis
                // failures so one bad record does not break the stream.
                HistogramMetrics.Timer timer = histogram.createTimer();
                try {
                    LogData.Builder builder = logData.toBuilder();
                    setServiceName(builder);
                    logAnalyzerService.doAnalysis(builder);
                } catch (Exception e) {
                    errorCounter.inc();
                    log.error(e.getMessage(), e);
                } finally {
                    timer.finish();
                }
            }

            @Override
            public void onError(final Throwable throwable) {
                log.error(throwable.getMessage(), throwable);
                responseObserver.onCompleted();
            }

            @Override
            public void onCompleted() {
                // Acknowledge the whole stream with an (empty) Commands reply.
                responseObserver.onNext(Commands.newBuilder().build());
                responseObserver.onCompleted();
            }
        };
    }
}
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
org.apache.skywalking.oap.server.recevier.log.module.LogModule
\ No newline at end of file
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
org.apache.skywalking.oap.server.recevier.log.provider.LogModuleProvider
\ No newline at end of file
......@@ -74,6 +74,7 @@ public class TraceSegmentReportServiceHandler extends TraceSegmentReportServiceG
segmentParserService.send(segment);
} catch (Exception e) {
errorCounter.inc();
log.error(e.getMessage(), e);
} finally {
timer.finish();
}
......@@ -105,6 +106,7 @@ public class TraceSegmentReportServiceHandler extends TraceSegmentReportServiceG
segmentParserService.send(segment);
} catch (Exception e) {
errorCounter.inc();
log.error(e.getMessage(), e);
} finally {
timer.finish();
}
......
......@@ -18,30 +18,36 @@
package org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query;
import com.google.common.base.Strings;
import java.io.IOException;
import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.query.type.Log;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.query.type.Pagination;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.EsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.MatchCNameBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TRACE_ID;
import static java.util.Objects.nonNull;
import static org.apache.skywalking.apm.util.StringUtil.isNotEmpty;
public class LogQueryEsDAO extends EsDAO implements ILogQueryDAO {
public LogQueryEsDAO(ElasticSearchClient client) {
......@@ -49,9 +55,26 @@ public class LogQueryEsDAO extends EsDAO implements ILogQueryDAO {
}
@Override
public Logs queryLogs(String metricName, int serviceId, int serviceInstanceId, String endpointId, String traceId,
LogState state, String stateCode, Pagination paging, int from, int limit, long startSecondTB,
long endSecondTB) throws IOException {
public boolean supportQueryLogsByKeywords() {
return true;
}
@Override
public Logs queryLogs(String metricName,
final String serviceId,
final String serviceInstanceId,
final String endpointId,
final String endpointName,
final TraceScopeCondition relatedTrace,
final LogState state,
final Order queryOrder,
final int from,
final int limit,
final long startSecondTB,
final long endSecondTB,
final List<Tag> tags,
final List<String> keywordsOfContent,
final List<String> excludingKeywordsOfContent) throws IOException {
SearchSourceBuilder sourceBuilder = SearchSourceBuilder.searchSource();
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
......@@ -61,22 +84,33 @@ public class LogQueryEsDAO extends EsDAO implements ILogQueryDAO {
if (startSecondTB != 0 && endSecondTB != 0) {
mustQueryList.add(QueryBuilders.rangeQuery(Record.TIME_BUCKET).gte(startSecondTB).lte(endSecondTB));
}
if (serviceId != Const.NONE) {
if (isNotEmpty(serviceId)) {
boolQueryBuilder.must().add(QueryBuilders.termQuery(AbstractLogRecord.SERVICE_ID, serviceId));
}
if (serviceInstanceId != Const.NONE) {
if (isNotEmpty(serviceInstanceId)) {
boolQueryBuilder.must()
.add(QueryBuilders.termQuery(AbstractLogRecord.SERVICE_INSTANCE_ID, serviceInstanceId));
}
if (StringUtil.isNotEmpty(endpointId)) {
if (isNotEmpty(endpointId)) {
boolQueryBuilder.must().add(QueryBuilders.termQuery(AbstractLogRecord.ENDPOINT_ID, endpointId));
}
if (!Strings.isNullOrEmpty(stateCode)) {
boolQueryBuilder.must().add(QueryBuilders.termQuery(AbstractLogRecord.STATUS_CODE, stateCode));
if (isNotEmpty(endpointName)) {
String matchCName = MatchCNameBuilder.INSTANCE.build(AbstractLogRecord.ENDPOINT_NAME);
mustQueryList.add(QueryBuilders.matchPhraseQuery(matchCName, endpointName));
}
if (!Strings.isNullOrEmpty(traceId)) {
boolQueryBuilder.must().add(QueryBuilders.termQuery(TRACE_ID, traceId));
if (nonNull(relatedTrace)) {
if (isNotEmpty(relatedTrace.getTraceId())) {
boolQueryBuilder.must()
.add(QueryBuilders.termQuery(AbstractLogRecord.TRACE_ID, relatedTrace.getTraceId()));
}
if (isNotEmpty(relatedTrace.getSegmentId())) {
boolQueryBuilder.must().add(
QueryBuilders.termQuery(AbstractLogRecord.TRACE_SEGMENT_ID, relatedTrace.getSegmentId()));
}
if (nonNull(relatedTrace.getSpanId())) {
boolQueryBuilder.must().add(
QueryBuilders.termQuery(AbstractLogRecord.SPAN_ID, relatedTrace.getSpanId()));
}
}
if (LogState.ERROR.equals(state)) {
boolQueryBuilder.must()
......@@ -90,6 +124,28 @@ public class LogQueryEsDAO extends EsDAO implements ILogQueryDAO {
));
}
if (CollectionUtils.isNotEmpty(tags)) {
BoolQueryBuilder tagMatchQuery = QueryBuilders.boolQuery();
tags.forEach(tag -> tagMatchQuery.must(QueryBuilders.termQuery(AbstractLogRecord.TAGS, tag.toString())));
mustQueryList.add(tagMatchQuery);
}
if (CollectionUtils.isNotEmpty(keywordsOfContent)) {
mustQueryList.add(
QueryBuilders.matchPhraseQuery(
MatchCNameBuilder.INSTANCE.build(AbstractLogRecord.CONTENT),
String.join(Const.SPACE, keywordsOfContent)
));
}
if (CollectionUtils.isNotEmpty(excludingKeywordsOfContent)) {
boolQueryBuilder.mustNot(QueryBuilders.matchPhraseQuery(
MatchCNameBuilder.INSTANCE.build(AbstractLogRecord.CONTENT),
String.join(Const.SPACE, excludingKeywordsOfContent)
));
}
sourceBuilder.sort(LogRecord.TIMESTAMP, Order.DES.equals(queryOrder) ? SortOrder.DESC : SortOrder.ASC);
sourceBuilder.size(limit);
sourceBuilder.from(from);
......@@ -105,17 +161,20 @@ public class LogQueryEsDAO extends EsDAO implements ILogQueryDAO {
.get(AbstractLogRecord.SERVICE_INSTANCE_ID));
log.setEndpointId((String) searchHit.getSourceAsMap().get(AbstractLogRecord.ENDPOINT_ID));
log.setEndpointName((String) searchHit.getSourceAsMap().get(AbstractLogRecord.ENDPOINT_NAME));
log.setTraceId((String) searchHit.getSourceAsMap().get(AbstractLogRecord.TRACE_ID));
log.setTimestamp(searchHit.getSourceAsMap().get(AbstractLogRecord.TIMESTAMP).toString());
log.setError(BooleanUtils.valueToBoolean(((Number) searchHit.getSourceAsMap()
.get(AbstractLogRecord.IS_ERROR)).intValue()));
log.setStatusCode((String) searchHit.getSourceAsMap().get(AbstractLogRecord.STATUS_CODE));
log.setContentType(ContentType.instanceOf(((Number) searchHit.getSourceAsMap()
.get(
AbstractLogRecord.CONTENT_TYPE)).intValue()));
log.setContent((String) searchHit.getSourceAsMap().get(AbstractLogRecord.CONTENT));
String dataBinaryBase64 = (String) searchHit.getSourceAsMap().get(AbstractLogRecord.TAGS_RAW_DATA);
if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
parserDataBinary(dataBinaryBase64, log.getTags());
}
logs.getLogs().add(log);
}
return logs;
}
}
......@@ -25,8 +25,8 @@ import java.util.Base64;
import java.util.Collections;
import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.query.type.BasicTrace;
import org.apache.skywalking.oap.server.core.query.type.QueryOrder;
import org.apache.skywalking.oap.server.core.query.type.Span;
......@@ -71,7 +71,7 @@ public class TraceQueryEsDAO extends EsDAO implements ITraceQueryDAO {
int from,
TraceState traceState,
QueryOrder queryOrder,
final List<SpanTag> tags) throws IOException {
final List<Tag> tags) throws IOException {
SearchSourceBuilder sourceBuilder = SearchSourceBuilder.searchSource();
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
......@@ -126,9 +126,7 @@ public class TraceQueryEsDAO extends EsDAO implements ITraceQueryDAO {
}
if (CollectionUtils.isNotEmpty(tags)) {
BoolQueryBuilder tagMatchQuery = QueryBuilders.boolQuery();
tags.forEach(tag -> {
tagMatchQuery.must(QueryBuilders.termQuery(SegmentRecord.TAGS, tag.toString()));
});
tags.forEach(tag -> tagMatchQuery.must(QueryBuilders.termQuery(SegmentRecord.TAGS, tag.toString())));
mustQueryList.add(tagMatchQuery);
}
sourceBuilder.size(limit);
......
......@@ -18,30 +18,36 @@
package org.apache.skywalking.oap.server.storage.plugin.elasticsearch7.query;
import com.google.common.base.Strings;
import java.io.IOException;
import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.query.type.Log;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.query.type.Pagination;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.EsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.MatchCNameBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TRACE_ID;
import static java.util.Objects.nonNull;
import static org.apache.skywalking.apm.util.StringUtil.isNotEmpty;
public class LogQueryEs7DAO extends EsDAO implements ILogQueryDAO {
public LogQueryEs7DAO(ElasticSearchClient client) {
......@@ -49,9 +55,26 @@ public class LogQueryEs7DAO extends EsDAO implements ILogQueryDAO {
}
@Override
public Logs queryLogs(String metricName, int serviceId, int serviceInstanceId, String endpointId, String traceId,
LogState state, String stateCode, Pagination paging, int from, int limit, long startSecondTB,
long endSecondTB) throws IOException {
public boolean supportQueryLogsByKeywords() {
return true;
}
@Override
public Logs queryLogs(String metricName,
final String serviceId,
final String serviceInstanceId,
final String endpointId,
final String endpointName,
final TraceScopeCondition relatedTrace,
final LogState state,
final Order queryOrder,
final int from,
final int limit,
final long startSecondTB,
final long endSecondTB,
final List<Tag> tags,
final List<String> keywordsOfContent,
final List<String> excludingKeywordsOfContent) throws IOException {
SearchSourceBuilder sourceBuilder = SearchSourceBuilder.searchSource();
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
......@@ -62,22 +85,36 @@ public class LogQueryEs7DAO extends EsDAO implements ILogQueryDAO {
mustQueryList.add(QueryBuilders.rangeQuery(Record.TIME_BUCKET).gte(startSecondTB).lte(endSecondTB));
}
if (serviceId != Const.NONE) {
boolQueryBuilder.must().add(QueryBuilders.termQuery(AbstractLogRecord.SERVICE_ID, serviceId));
if (isNotEmpty(serviceId)) {
boolQueryBuilder.must()
.add(QueryBuilders.termQuery(AbstractLogRecord.SERVICE_ID, serviceId));
}
if (serviceInstanceId != Const.NONE) {
if (isNotEmpty(serviceInstanceId)) {
boolQueryBuilder.must()
.add(QueryBuilders.termQuery(AbstractLogRecord.SERVICE_INSTANCE_ID, serviceInstanceId));
}
if (StringUtil.isNotEmpty(endpointId)) {
if (isNotEmpty(endpointId)) {
boolQueryBuilder.must().add(QueryBuilders.termQuery(AbstractLogRecord.ENDPOINT_ID, endpointId));
}
if (!Strings.isNullOrEmpty(stateCode)) {
boolQueryBuilder.must().add(QueryBuilders.termQuery(AbstractLogRecord.STATUS_CODE, stateCode));
if (isNotEmpty(endpointName)) {
String matchCName = MatchCNameBuilder.INSTANCE.build(AbstractLogRecord.ENDPOINT_NAME);
mustQueryList.add(QueryBuilders.matchPhraseQuery(matchCName, endpointName));
}
if (!Strings.isNullOrEmpty(traceId)) {
boolQueryBuilder.must().add(QueryBuilders.termQuery(TRACE_ID, traceId));
if (nonNull(relatedTrace)) {
if (isNotEmpty(relatedTrace.getTraceId())) {
boolQueryBuilder.must()
.add(QueryBuilders.termQuery(AbstractLogRecord.TRACE_ID, relatedTrace.getTraceId()));
}
if (isNotEmpty(relatedTrace.getSegmentId())) {
boolQueryBuilder.must().add(
QueryBuilders.termQuery(AbstractLogRecord.TRACE_SEGMENT_ID, relatedTrace.getSegmentId()));
}
if (nonNull(relatedTrace.getSpanId())) {
boolQueryBuilder.must().add(
QueryBuilders.termQuery(AbstractLogRecord.SPAN_ID, relatedTrace.getSpanId()));
}
}
if (LogState.ERROR.equals(state)) {
boolQueryBuilder.must()
.add(
......@@ -90,6 +127,28 @@ public class LogQueryEs7DAO extends EsDAO implements ILogQueryDAO {
));
}
if (CollectionUtils.isNotEmpty(tags)) {
BoolQueryBuilder tagMatchQuery = QueryBuilders.boolQuery();
tags.forEach(tag -> tagMatchQuery.must(QueryBuilders.termQuery(AbstractLogRecord.TAGS, tag.toString())));
mustQueryList.add(tagMatchQuery);
}
if (CollectionUtils.isNotEmpty(keywordsOfContent)) {
mustQueryList.add(
QueryBuilders.matchPhraseQuery(
MatchCNameBuilder.INSTANCE.build(AbstractLogRecord.CONTENT),
String.join(Const.SPACE, keywordsOfContent)
));
}
if (CollectionUtils.isNotEmpty(excludingKeywordsOfContent)) {
boolQueryBuilder.mustNot(QueryBuilders.matchPhraseQuery(
MatchCNameBuilder.INSTANCE.build(AbstractLogRecord.CONTENT),
String.join(Const.SPACE, excludingKeywordsOfContent)
));
}
sourceBuilder.sort(LogRecord.TIMESTAMP, Order.DES.equals(queryOrder) ? SortOrder.DESC : SortOrder.ASC);
sourceBuilder.size(limit);
sourceBuilder.from(from);
......@@ -104,17 +163,20 @@ public class LogQueryEs7DAO extends EsDAO implements ILogQueryDAO {
log.setServiceInstanceId((String) searchHit.getSourceAsMap().get(AbstractLogRecord.SERVICE_INSTANCE_ID));
log.setEndpointId((String) searchHit.getSourceAsMap().get(AbstractLogRecord.ENDPOINT_ID));
log.setEndpointName((String) searchHit.getSourceAsMap().get(AbstractLogRecord.ENDPOINT_NAME));
log.setTraceId((String) searchHit.getSourceAsMap().get(AbstractLogRecord.TRACE_ID));
log.setTimestamp(searchHit.getSourceAsMap().get(AbstractLogRecord.TIMESTAMP).toString());
log.setError(BooleanUtils.valueToBoolean(((Number) searchHit.getSourceAsMap()
.get(AbstractLogRecord.IS_ERROR)).intValue()));
log.setStatusCode((String) searchHit.getSourceAsMap().get(AbstractLogRecord.STATUS_CODE));
log.setContentType(ContentType.instanceOf(((Number) searchHit.getSourceAsMap()
.get(
AbstractLogRecord.CONTENT_TYPE)).intValue()));
log.setContent((String) searchHit.getSourceAsMap().get(AbstractLogRecord.CONTENT));
String dataBinaryBase64 = (String) searchHit.getSourceAsMap().get(AbstractLogRecord.TAGS_RAW_DATA);
if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
parserDataBinary(dataBinaryBase64, log.getTags());
}
logs.getLogs().add(log);
}
return logs;
}
}
......@@ -22,8 +22,8 @@ import com.google.common.base.Strings;
import java.io.IOException;
import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.query.type.BasicTrace;
import org.apache.skywalking.oap.server.core.query.type.QueryOrder;
import org.apache.skywalking.oap.server.core.query.type.TraceBrief;
......@@ -63,7 +63,7 @@ public class TraceQueryEs7DAO extends TraceQueryEsDAO {
int from,
TraceState traceState,
QueryOrder queryOrder,
final List<SpanTag> tags) throws IOException {
final List<Tag> tags) throws IOException {
SearchSourceBuilder sourceBuilder = SearchSourceBuilder.searchSource();
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
......@@ -125,7 +125,8 @@ public class TraceQueryEs7DAO extends TraceQueryEsDAO {
}
sourceBuilder.size(limit);
sourceBuilder.from(from);
SearchResponse response = getClient().search(new TimeRangeIndexNameMaker(SegmentRecord.INDEX_NAME, startSecondTB, endSecondTB), sourceBuilder);
SearchResponse response = getClient().search(
new TimeRangeIndexNameMaker(SegmentRecord.INDEX_NAME, startSecondTB, endSecondTB), sourceBuilder);
TraceBrief traceBrief = new TraceBrief();
traceBrief.setTotal((int) response.getHits().getTotalHits().value);
......
......@@ -21,6 +21,7 @@ package org.apache.skywalking.oap.server.storage.plugin.influxdb.base;
import com.google.common.collect.Maps;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.core.storage.StorageData;
......@@ -41,7 +42,7 @@ public class InfluxInsertRequest implements InsertRequest, UpdateRequest {
public <T extends StorageData> InfluxInsertRequest(Model model, T storageData, StorageBuilder<T> storageBuilder) {
final Map<String, Object> objectMap = storageBuilder.data2Map(storageData);
if (SegmentRecord.INDEX_NAME.equals(model.getName())) {
if (SegmentRecord.INDEX_NAME.equals(model.getName()) || LogRecord.INDEX_NAME.equals(model.getName())) {
objectMap.remove(SegmentRecord.TAGS);
}
......
......@@ -25,8 +25,9 @@ import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.skywalking.apm.commons.datacarrier.common.AtomicRangeInteger;
import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
......@@ -34,6 +35,8 @@ import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.TableMetaInfo;
import static java.util.Objects.nonNull;
public class RecordDAO implements IRecordDAO {
private static final int PADDING_SIZE = 1_000_000;
private static final AtomicRangeInteger SUFFIX = new AtomicRangeInteger(0, PADDING_SIZE);
......@@ -54,15 +57,19 @@ public class RecordDAO implements IRecordDAO {
.time(timestamp, TimeUnit.NANOSECONDS);
TableMetaInfo.get(model.getName()).getStorageAndTagMap().forEach(request::addFieldAsTag);
List<Tag> rawTags = null;
if (SegmentRecord.INDEX_NAME.equals(model.getName())) {
Map<String, List<SpanTag>> collect = ((SegmentRecord) record).getTagsRawData()
.stream()
.collect(
Collectors.groupingBy(SpanTag::getKey));
rawTags = ((SegmentRecord) record).getTagsRawData();
} else if (LogRecord.INDEX_NAME.equals(model.getName())) {
rawTags = ((LogRecord) record).getTags();
}
if (nonNull(rawTags)) {
Map<String, List<Tag>> collect = rawTags.stream()
.collect(
Collectors.groupingBy(Tag::getKey));
collect.forEach((key, value) -> request.tag(
key,
"'" + Joiner.on("'").join(value.stream().map(SpanTag::getValue).collect(Collectors.toSet())) + "'"
"'" + Joiner.on("'").join(value.stream().map(Tag::getValue).collect(Collectors.toSet())) + "'"
));
}
return request;
......
......@@ -23,36 +23,41 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.query.type.Log;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.query.type.Pagination;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.core.storage.type.StorageDataComplexObject;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.InfluxClient;
import org.apache.skywalking.oap.server.storage.plugin.influxdb.InfluxConstants;
import org.elasticsearch.common.Strings;
import org.influxdb.dto.Query;
import org.influxdb.dto.QueryResult;
import org.influxdb.querybuilder.SelectQueryImpl;
import org.influxdb.querybuilder.WhereNested;
import org.influxdb.querybuilder.WhereQueryImpl;
import org.influxdb.querybuilder.clauses.ConjunctionClause;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.CONTENT;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.CONTENT_TYPE;
import static java.util.Objects.nonNull;
import static org.apache.skywalking.apm.util.StringUtil.isNotEmpty;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.ENDPOINT_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.ENDPOINT_NAME;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.IS_ERROR;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SERVICE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SERVICE_INSTANCE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.STATUS_CODE;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TIMESTAMP;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SPAN_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TRACE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TRACE_SEGMENT_ID;
import static org.apache.skywalking.oap.server.core.browser.manual.errorlog.BrowserErrorLogRecord.TIMESTAMP;
import static org.apache.skywalking.oap.server.storage.plugin.influxdb.InfluxConstants.ALL_FIELDS;
import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.contains;
import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.eq;
import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.gte;
import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.lte;
......@@ -67,24 +72,54 @@ public class LogQuery implements ILogQueryDAO {
}
@Override
public Logs queryLogs(String metricName, int serviceId, int serviceInstanceId, String endpointId, String traceId,
LogState state, String stateCode, Pagination paging, int from, int limit,
long startTB, long endTB) throws IOException {
public Logs queryLogs(String metricName,
final String serviceId,
final String serviceInstanceId,
final String endpointId,
final String endpointName,
final TraceScopeCondition relatedTrace,
final LogState state,
final Order queryOrder,
final int from,
final int limit,
final long startTB,
final long endTB,
final List<Tag> tags,
final List<String> keywordsOfContent,
final List<String> excludingKeywordsOfContent) throws IOException {
WhereQueryImpl<SelectQueryImpl> recallQuery = select().raw(ALL_FIELDS)
.function(
Order.DES.equals(
queryOrder) ? InfluxConstants.SORT_DES : InfluxConstants.SORT_ASC,
AbstractLogRecord.TIMESTAMP, limit + from
)
.from(client.getDatabase(), metricName)
.where();
if (serviceId != Const.NONE) {
recallQuery.and(eq(InfluxConstants.TagName.SERVICE_ID, String.valueOf(serviceId)));
if (isNotEmpty(serviceId)) {
recallQuery.and(eq(InfluxConstants.TagName.SERVICE_ID, serviceId));
}
if (serviceInstanceId != Const.NONE) {
if (isNotEmpty(serviceInstanceId)) {
recallQuery.and(eq(SERVICE_INSTANCE_ID, serviceInstanceId));
}
if (StringUtil.isNotEmpty(endpointId)) {
if (isNotEmpty(endpointId)) {
recallQuery.and(eq(ENDPOINT_ID, endpointId));
}
if (!Strings.isNullOrEmpty(traceId)) {
recallQuery.and(eq(TRACE_ID, traceId));
if (isNotEmpty(endpointName)) {
recallQuery.and(contains(ENDPOINT_NAME, endpointName.replaceAll("/", "\\\\/")));
}
if (nonNull(relatedTrace)) {
if (isNotEmpty(relatedTrace.getTraceId())) {
recallQuery.and(eq(TRACE_ID, relatedTrace.getTraceId()));
}
if (isNotEmpty(relatedTrace.getSegmentId())) {
recallQuery.and(eq(TRACE_SEGMENT_ID, relatedTrace.getSegmentId()));
}
if (nonNull(relatedTrace.getSpanId())) {
recallQuery.and(eq(SPAN_ID, relatedTrace.getSpanId()));
}
}
switch (state) {
case ERROR: {
recallQuery.and(eq(IS_ERROR, true));
......@@ -95,16 +130,17 @@ public class LogQuery implements ILogQueryDAO {
break;
}
}
if (!Strings.isNullOrEmpty(stateCode)) {
recallQuery.and(eq(STATUS_CODE, stateCode));
if (startTB != 0 && endTB != 0) {
recallQuery.and(gte(AbstractLogRecord.TIME_BUCKET, startTB))
.and(lte(AbstractLogRecord.TIME_BUCKET, endTB));
}
recallQuery.and(gte(AbstractLogRecord.TIME_BUCKET, startTB))
.and(lte(AbstractLogRecord.TIME_BUCKET, endTB));
if (from > Const.NONE) {
recallQuery.limit(limit, from);
} else {
recallQuery.limit(limit);
if (CollectionUtils.isNotEmpty(tags)) {
WhereNested<WhereQueryImpl<SelectQueryImpl>> nested = recallQuery.andNested();
for (final Tag tag : tags) {
nested.and(contains(tag.getKey(), "'" + tag.getValue() + "'"));
}
nested.close();
}
SelectQueryImpl countQuery = select().count(ENDPOINT_ID).from(client.getDatabase(), metricName);
......@@ -140,19 +176,20 @@ public class LogQuery implements ILogQueryDAO {
}
data.put(columns.get(i), value);
}
log.setContent((String) data.get(CONTENT));
log.setContentType(ContentType.instanceOf(((Number) data.get(CONTENT_TYPE)).intValue()));
log.setServiceId((String) data.get(SERVICE_ID));
log.setServiceInstanceId((String) data.get(SERVICE_INSTANCE_ID));
log.setEndpointId((String) data.get(ENDPOINT_ID));
log.setEndpointName((String) data.get(ENDPOINT_NAME));
log.setTraceId((String) data.get(TRACE_ID));
log.setTimestamp((String) data.get(TIMESTAMP));
log.setStatusCode((String) data.get(STATUS_CODE));
log.setServiceId((String) data.get(SERVICE_ID));
log.setServiceInstanceId((String) data.get(SERVICE_INSTANCE_ID));
log.setTimestamp(data.get(TIMESTAMP).toString());
log.setError(BooleanUtils.valueToBoolean(((Number) data.get(IS_ERROR)).intValue()));
log.setContentType(
ContentType.instanceOf(((Number) data.get(AbstractLogRecord.CONTENT_TYPE)).intValue()));
log.setContent((String) data.get(AbstractLogRecord.CONTENT));
String dataBinaryBase64 = (String) data.get(AbstractLogRecord.TAGS_RAW_DATA);
if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
parserDataBinary(dataBinaryBase64, log.getTags());
}
logs.getLogs().add(log);
});
});
......
......@@ -25,8 +25,8 @@ import java.util.Collections;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.query.type.BasicTrace;
import org.apache.skywalking.oap.server.core.query.type.QueryOrder;
import org.apache.skywalking.oap.server.core.query.type.Span;
......@@ -73,7 +73,7 @@ public class TraceQuery implements ITraceQueryDAO {
int from,
TraceState traceState,
QueryOrder queryOrder,
final List<SpanTag> tags)
final List<Tag> tags)
throws IOException {
String orderBy = SegmentRecord.START_TIME;
......@@ -127,7 +127,7 @@ public class TraceQuery implements ITraceQueryDAO {
}
if (CollectionUtils.isNotEmpty(tags)) {
WhereNested<WhereQueryImpl<SelectQueryImpl>> nested = recallQuery.andNested();
for (final SpanTag tag : tags) {
for (final Tag tag : tags) {
nested.and(contains(tag.getKey(), "'" + tag.getValue() + "'"));
}
nested.close();
......
......@@ -29,8 +29,8 @@ import java.util.Collections;
import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.query.type.BasicTrace;
import org.apache.skywalking.oap.server.core.query.type.KeyValue;
import org.apache.skywalking.oap.server.core.query.type.LogEntity;
......@@ -91,7 +91,7 @@ public class JaegerTraceQueryEsDAO extends EsDAO implements ITraceQueryDAO {
int from,
TraceState traceState,
QueryOrder queryOrder,
final List<SpanTag> tags) throws IOException {
final List<Tag> tags) throws IOException {
SearchSourceBuilder sourceBuilder = SearchSourceBuilder.searchSource();
......
......@@ -17,7 +17,8 @@
~
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>server-storage-plugin</artifactId>
<groupId>org.apache.skywalking</groupId>
......
......@@ -139,7 +139,15 @@ public class H2StorageProvider extends ModuleProvider {
this.registerServiceImplementation(
IHistoryDeleteDAO.class, new H2HistoryDeleteDAO(h2Client));
this.registerServiceImplementation(ITopNRecordsQueryDAO.class, new H2TopNRecordsQueryDAO(h2Client));
this.registerServiceImplementation(ILogQueryDAO.class, new H2LogQueryDAO(h2Client));
this.registerServiceImplementation(
ILogQueryDAO.class,
new H2LogQueryDAO(
h2Client,
getManager(),
config.getMaxSizeOfArrayColumn(),
config.getNumOfSearchableValuesPerTag()
)
);
this.registerServiceImplementation(IProfileTaskQueryDAO.class, new H2ProfileTaskQueryDAO(h2Client));
this.registerServiceImplementation(IProfileTaskLogQueryDAO.class, new H2ProfileTaskLogQueryDAO(h2Client));
......@@ -153,9 +161,16 @@ public class H2StorageProvider extends ModuleProvider {
final ConfigService configService = getManager().find(CoreModule.NAME)
.provider()
.getService(ConfigService.class);
final int numOfSearchableTags = configService.getSearchableTracesTags().split(Const.COMMA).length;
if (numOfSearchableTags * config.getNumOfSearchableValuesPerTag() > config.getMaxSizeOfArrayColumn()) {
throw new ModuleStartException("Size of searchableTracesTags[" + numOfSearchableTags
final int numOfSearchableTracesTags = configService.getSearchableTracesTags().split(Const.COMMA).length;
if (numOfSearchableTracesTags * config.getNumOfSearchableValuesPerTag() > config.getMaxSizeOfArrayColumn()) {
throw new ModuleStartException("Size of searchableTracesTags[" + numOfSearchableTracesTags
+ "] * numOfSearchableValuesPerTag[" + config.getNumOfSearchableValuesPerTag()
+ "] > maxSizeOfArrayColumn[" + config.getMaxSizeOfArrayColumn()
+ "]. Potential out of bound in the runtime.");
}
final int numOfSearchableLogsTags = configService.getSearchableLogsTags().split(Const.COMMA).length;
if (numOfSearchableLogsTags * config.getNumOfSearchableValuesPerTag() > config.getMaxSizeOfArrayColumn()) {
throw new ModuleStartException("Size of searchableLogsTags[" + numOfSearchableLogsTags
+ "] * numOfSearchableValuesPerTag[" + config.getNumOfSearchableValuesPerTag()
+ "] > maxSizeOfArrayColumn[" + config.getMaxSizeOfArrayColumn()
+ "]. Potential out of bound in the runtime.");
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.server.storage.plugin.jdbc.h2.dao;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
/**
 * Base storage builder which maps the searchable tags of a record into flat physical columns.
 * <p>
 * Each searchable tag key owns a group of {@code numOfSearchableValuesPerTag} physical columns
 * named {@code tagColumn + "_" + n}. A tag value is written into the first free slot of its
 * key's group; a value already present in the group is ignored, and values arriving after the
 * group is full are dropped.
 */
public abstract class AbstractSearchTagBuilder<T extends Record> implements StorageBuilder<T> {
    private final int numOfSearchableValuesPerTag;
    // Searchable keys; a key's position in this list selects its physical column group.
    private final List<String> searchTagKeys;
    // Logical column prefix, e.g. "tags" -> physical columns tags_0, tags_1, ...
    private final String tagColumn;

    /**
     * @param maxSizeOfArrayColumn       total number of physical columns reserved for tags
     * @param numOfSearchableValuesPerTag number of slots (columns) per searchable key
     * @param searchTagKeys              configured searchable keys, truncated if they exceed capacity
     * @param tagColumn                  logical column name prefix for the physical tag columns
     */
    public AbstractSearchTagBuilder(final int maxSizeOfArrayColumn,
                                    final int numOfSearchableValuesPerTag,
                                    final List<String> searchTagKeys,
                                    final String tagColumn) {
        this.numOfSearchableValuesPerTag = numOfSearchableValuesPerTag;
        // Only maxSizeOfArrayColumn physical columns exist; cap the number of indexed keys so
        // the last key's group never extends past the final column.
        final int maxNumOfTags = maxSizeOfArrayColumn / numOfSearchableValuesPerTag;
        if (searchTagKeys.size() > maxNumOfTags) {
            this.searchTagKeys = searchTagKeys.subList(0, maxNumOfTags);
        } else {
            this.searchTagKeys = searchTagKeys;
        }
        this.tagColumn = tagColumn;
    }

    /**
     * Distributes {@code rawTags} into the physical tag columns of {@code dbMap}.
     * Tags whose key is not configured as searchable are skipped.
     */
    protected void analysisSearchTag(List<Tag> rawTags, Map<String, Object> dbMap) {
        rawTags.forEach(tag -> {
            final int index = searchTagKeys.indexOf(tag.getKey());
            if (index < 0) {
                // The key is not searchable: there is no column group to write into.
                return;
            }
            boolean shouldAdd = true;
            int tagInx = 0;
            final String tagExpression = tag.toString();
            for (int i = 0; i < numOfSearchableValuesPerTag; i++) {
                // Slots of this key's group are index * perTag ... index * perTag + perTag - 1.
                // (The index must be scaled by multiplication, not added to, the slot count.)
                tagInx = index * numOfSearchableValuesPerTag + i;
                // Inspect the physical slot column, not the bare prefix, so duplicates are seen.
                final String previousValue = (String) dbMap.get(tagColumn + "_" + tagInx);
                if (previousValue == null) {
                    // Still have at least one available slot, add directly.
                    shouldAdd = true;
                    break;
                }
                // If the value duplicates an already-added one, ignore it.
                if (previousValue.equals(tagExpression)) {
                    shouldAdd = false;
                    break;
                }
                // Every slot of the group is occupied; drop the value.
                if (i == numOfSearchableValuesPerTag - 1) {
                    shouldAdd = false;
                }
            }
            if (shouldAdd) {
                dbMap.put(tagColumn + "_" + tagInx, tagExpression);
            }
        });
    }
}
......@@ -24,40 +24,84 @@ import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.config.ConfigService;
import org.apache.skywalking.oap.server.core.query.enumeration.Order;
import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
import org.apache.skywalking.oap.server.core.query.type.ContentType;
import org.apache.skywalking.oap.server.core.query.type.Log;
import org.apache.skywalking.oap.server.core.query.type.LogState;
import org.apache.skywalking.oap.server.core.query.type.Logs;
import org.apache.skywalking.oap.server.core.query.type.Pagination;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import org.apache.skywalking.oap.server.library.util.BooleanUtils;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import org.elasticsearch.search.sort.SortOrder;
import static java.util.Objects.nonNull;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.CONTENT;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.CONTENT_TYPE;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.ENDPOINT_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.ENDPOINT_NAME;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.IS_ERROR;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SERVICE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SERVICE_INSTANCE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.STATUS_CODE;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.SPAN_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TAGS_RAW_DATA;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TIMESTAMP;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TRACE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TRACE_SEGMENT_ID;
public class H2LogQueryDAO implements ILogQueryDAO {
private JDBCHikariCPClient h2Client;
private final JDBCHikariCPClient h2Client;
private final ModuleManager manager;
private final int maxSizeOfArrayColumn;
private final int numOfSearchValuesPerTag;
private List<String> searchableTagKeys;
public H2LogQueryDAO(JDBCHikariCPClient h2Client) {
public H2LogQueryDAO(final JDBCHikariCPClient h2Client,
final ModuleManager manager,
final int maxSizeOfArrayColumn,
final int numOfSearchValuesPerTag) {
this.h2Client = h2Client;
this.manager = manager;
this.maxSizeOfArrayColumn = maxSizeOfArrayColumn;
this.numOfSearchValuesPerTag = numOfSearchValuesPerTag;
}
@Override
public Logs queryLogs(String metricName, int serviceId, int serviceInstanceId, String endpointId, String traceId,
LogState state, String stateCode, Pagination paging, int from, int limit, long startSecondTB,
long endSecondTB) throws IOException {
public Logs queryLogs(String metricName,
String serviceId,
String serviceInstanceId,
String endpointId,
String endpointName,
TraceScopeCondition relatedTrace,
LogState state,
Order queryOrder,
int from,
int limit,
final long startSecondTB,
final long endSecondTB,
final List<Tag> tags,
final List<String> keywordsOfContent,
final List<String> excludingKeywordsOfContent) throws IOException {
if (searchableTagKeys == null) {
final ConfigService configService = manager.find(CoreModule.NAME)
.provider()
.getService(ConfigService.class);
searchableTagKeys = Arrays.asList(configService.getSearchableLogsTags().split(Const.COMMA));
if (searchableTagKeys.size() > maxSizeOfArrayColumn) {
searchableTagKeys = searchableTagKeys.subList(0, maxSizeOfArrayColumn);
}
}
StringBuilder sql = new StringBuilder();
List<Object> parameters = new ArrayList<>(10);
......@@ -70,11 +114,11 @@ public class H2LogQueryDAO implements ILogQueryDAO {
parameters.add(endSecondTB);
}
if (serviceId != Const.NONE) {
if (StringUtil.isNotEmpty(serviceId)) {
sql.append(" and ").append(SERVICE_ID).append(" = ?");
parameters.add(serviceId);
}
if (serviceInstanceId != Const.NONE) {
if (StringUtil.isNotEmpty(serviceInstanceId)) {
sql.append(" and ").append(AbstractLogRecord.SERVICE_INSTANCE_ID).append(" = ?");
parameters.add(serviceInstanceId);
}
......@@ -82,14 +126,25 @@ public class H2LogQueryDAO implements ILogQueryDAO {
sql.append(" and ").append(AbstractLogRecord.ENDPOINT_ID).append(" = ?");
parameters.add(endpointId);
}
if (!Strings.isNullOrEmpty(stateCode)) {
sql.append(" and ").append(AbstractLogRecord.STATUS_CODE).append(" = ?");
parameters.add(stateCode);
if (StringUtil.isNotEmpty(endpointName)) {
sql.append(" and ").append(ENDPOINT_NAME).append(" like concat('%',?,'%')");
parameters.add(endpointName);
}
if (!Strings.isNullOrEmpty(traceId)) {
sql.append(" and ").append(TRACE_ID).append(" = ?");
parameters.add(traceId);
if (nonNull(relatedTrace)) {
if (StringUtil.isNotEmpty(relatedTrace.getTraceId())) {
sql.append(" and ").append(TRACE_ID).append(" = ?");
parameters.add(relatedTrace.getTraceId());
}
if (StringUtil.isNotEmpty(relatedTrace.getSegmentId())) {
sql.append(" and ").append(TRACE_SEGMENT_ID).append(" = ?");
parameters.add(relatedTrace.getSegmentId());
}
if (nonNull(relatedTrace.getSpanId())) {
sql.append(" and ").append(SPAN_ID).append(" = ?");
parameters.add(relatedTrace.getSpanId());
}
}
if (LogState.ERROR.equals(state)) {
sql.append(" and ").append(AbstractLogRecord.IS_ERROR).append(" = ?");
parameters.add(BooleanUtils.booleanToValue(true));
......@@ -98,6 +153,31 @@ public class H2LogQueryDAO implements ILogQueryDAO {
parameters.add(BooleanUtils.booleanToValue(false));
}
if (CollectionUtils.isNotEmpty(tags)) {
for (final Tag tag : tags) {
final int foundIdx = searchableTagKeys.indexOf(tag.getKey());
if (foundIdx > -1) {
sql.append(" and (");
for (int i = 0; i < numOfSearchValuesPerTag; i++) {
final String physicalColumn = LogRecord.TAGS + "_" + (foundIdx * numOfSearchValuesPerTag + i);
sql.append(physicalColumn).append(" = ? ");
parameters.add(tag.toString());
if (i != numOfSearchValuesPerTag - 1) {
sql.append(" or ");
}
}
sql.append(")");
} else {
return new Logs();
}
}
}
sql.append(" order by ")
.append(TIMESTAMP)
.append(" ")
.append(Order.DES.equals(queryOrder) ? SortOrder.DESC : SortOrder.ASC);
Logs logs = new Logs();
try (Connection connection = h2Client.getConnection()) {
......@@ -120,9 +200,13 @@ public class H2LogQueryDAO implements ILogQueryDAO {
log.setEndpointName(resultSet.getString(ENDPOINT_NAME));
log.setTraceId(resultSet.getString(TRACE_ID));
log.setTimestamp(resultSet.getString(TIMESTAMP));
log.setStatusCode(resultSet.getString(STATUS_CODE));
log.setError(BooleanUtils.valueToBoolean(resultSet.getInt(IS_ERROR)));
log.setContentType(ContentType.instanceOf(resultSet.getInt(CONTENT_TYPE)));
log.setContent(resultSet.getString(CONTENT));
String dataBinaryBase64 = resultSet.getString(TAGS_RAW_DATA);
if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
parserDataBinary(dataBinaryBase64, log.getTags());
}
logs.getLogs().add(log);
}
}
......
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.oap.server.storage.plugin.jdbc.h2.dao;
import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.IS_ERROR;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TAGS_RAW_DATA;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.CONTENT;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.CONTENT_TYPE;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.ENDPOINT_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.ENDPOINT_NAME;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.SERVICE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.SERVICE_INSTANCE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.SPAN_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.TIMESTAMP;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.TRACE_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.TRACE_SEGMENT_ID;
import static org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord.UNIQUE_ID;
import static org.apache.skywalking.oap.server.core.analysis.record.Record.TIME_BUCKET;
/**
 * H2/MySQL storage builder for {@link LogRecord}: converts between the record object and its
 * database column map, flattening searchable tags into per-slot physical columns.
 */
public class H2LogRecordBuilder extends AbstractSearchTagBuilder<Record> {

    public H2LogRecordBuilder(final int maxSizeOfArrayColumn,
                              final int numOfSearchableValuesPerTag,
                              final List<String> searchTagKeys) {
        super(maxSizeOfArrayColumn, numOfSearchableValuesPerTag, searchTagKeys, LogRecord.TAGS);
    }

    /** Restores a {@link LogRecord} from its database column map. */
    @Override
    public Record map2Data(final Map<String, Object> dbMap) {
        final LogRecord logRecord = new LogRecord();
        logRecord.setUniqueId((String) dbMap.get(UNIQUE_ID));
        logRecord.setServiceId((String) dbMap.get(SERVICE_ID));
        logRecord.setServiceInstanceId((String) dbMap.get(SERVICE_INSTANCE_ID));
        logRecord.setEndpointId((String) dbMap.get(ENDPOINT_ID));
        logRecord.setEndpointName((String) dbMap.get(ENDPOINT_NAME));
        logRecord.setTraceId((String) dbMap.get(TRACE_ID));
        logRecord.setTraceSegmentId((String) dbMap.get(TRACE_SEGMENT_ID));
        logRecord.setSpanId(((Number) dbMap.get(SPAN_ID)).intValue());
        logRecord.setIsError(((Number) dbMap.get(IS_ERROR)).intValue());
        logRecord.setContentType(((Number) dbMap.get(CONTENT_TYPE)).intValue());
        logRecord.setContent((String) dbMap.get(CONTENT));
        logRecord.setTimestamp(((Number) dbMap.get(TIMESTAMP)).longValue());
        logRecord.setTimeBucket(((Number) dbMap.get(TIME_BUCKET)).longValue());
        final String encodedRawData = (String) dbMap.get(TAGS_RAW_DATA);
        if (StringUtil.isEmpty(encodedRawData)) {
            logRecord.setTagsRawData(new byte[] {});
        } else {
            // The individual tags are not restored here: they are already part of the binary payload.
            logRecord.setTagsRawData(Base64.getDecoder().decode(encodedRawData));
        }
        return logRecord;
    }

    /** Flattens a {@link LogRecord} into its database column map, including searchable tag slots. */
    @Override
    public Map<String, Object> data2Map(final Record record) {
        final LogRecord logRecord = (LogRecord) record;
        final Map<String, Object> dbMap = new HashMap<>();
        dbMap.put(UNIQUE_ID, logRecord.getUniqueId());
        dbMap.put(SERVICE_ID, logRecord.getServiceId());
        dbMap.put(SERVICE_INSTANCE_ID, logRecord.getServiceInstanceId());
        dbMap.put(ENDPOINT_ID, logRecord.getEndpointId());
        dbMap.put(ENDPOINT_NAME, logRecord.getEndpointName());
        dbMap.put(TRACE_ID, logRecord.getTraceId());
        dbMap.put(TRACE_SEGMENT_ID, logRecord.getTraceSegmentId());
        dbMap.put(SPAN_ID, logRecord.getSpanId());
        dbMap.put(IS_ERROR, logRecord.getIsError());
        dbMap.put(TIME_BUCKET, logRecord.getTimeBucket());
        dbMap.put(CONTENT_TYPE, logRecord.getContentType());
        dbMap.put(CONTENT, logRecord.getContent());
        dbMap.put(TIMESTAMP, logRecord.getTimestamp());
        if (CollectionUtils.isEmpty(logRecord.getTagsRawData())) {
            dbMap.put(TAGS_RAW_DATA, Const.EMPTY_STRING);
        } else {
            // Base64 output is pure ASCII, so encodeToString is byte-equivalent to new String(encode(...)).
            dbMap.put(TAGS_RAW_DATA, Base64.getEncoder().encodeToString(logRecord.getTagsRawData()));
        }
        analysisSearchTag(logRecord.getTags(), dbMap);
        return dbMap;
    }
}
......@@ -24,6 +24,7 @@ import java.util.Map;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.config.ConfigService;
......@@ -60,6 +61,18 @@ public class H2RecordDAO extends H2SQLExecutor implements IRecordDAO {
numOfSearchableValuesPerTag,
Arrays.asList(configService.getSearchableTracesTags().split(Const.COMMA))
);
} else if (LogRecord.class.equals(
storageBuilder.getClass().getMethod("map2Data", Map.class).getReturnType())) {
this.maxSizeOfArrayColumn = maxSizeOfArrayColumn;
final ConfigService configService = manager.find(CoreModule.NAME)
.provider()
.getService(ConfigService.class);
this.storageBuilder = new H2LogRecordBuilder(
maxSizeOfArrayColumn,
numOfSearchableValuesPerTag,
Arrays.asList(configService.getSearchableLogsTags()
.split(Const.COMMA))
);
} else {
this.maxSizeOfArrayColumn = 1;
this.storageBuilder = storageBuilder;
......
......@@ -28,7 +28,6 @@ import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.analysis.topn.TopN;
import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;
import static org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord.DATA_BINARY;
......@@ -50,20 +49,12 @@ import static org.apache.skywalking.oap.server.core.analysis.manual.segment.Segm
* H2/MySQL is different from standard {@link org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord.Builder},
* this maps the tags into multiple columns.
*/
public class H2SegmentRecordBuilder implements StorageBuilder<Record> {
private int numOfSearchableValuesPerTag;
private final List<String> searchTagKeys;
public class H2SegmentRecordBuilder extends AbstractSearchTagBuilder<Record> {
public H2SegmentRecordBuilder(final int maxSizeOfArrayColumn,
final int numOfSearchableValuesPerTag,
final List<String> searchTagKeys) {
this.numOfSearchableValuesPerTag = numOfSearchableValuesPerTag;
final int maxNumOfTags = maxSizeOfArrayColumn / numOfSearchableValuesPerTag;
if (searchTagKeys.size() > maxNumOfTags) {
this.searchTagKeys = searchTagKeys.subList(0, maxNumOfTags);
} else {
this.searchTagKeys = searchTagKeys;
}
super(maxSizeOfArrayColumn, numOfSearchableValuesPerTag, searchTagKeys, TAGS);
}
@Override
......@@ -92,33 +83,7 @@ public class H2SegmentRecordBuilder implements StorageBuilder<Record> {
map.put(DATA_BINARY, new String(Base64.getEncoder().encode(storageData.getDataBinary())));
}
map.put(VERSION, storageData.getVersion());
storageData.getTagsRawData().forEach(spanTag -> {
final int index = searchTagKeys.indexOf(spanTag.getKey());
boolean shouldAdd = true;
int tagIdx = 0;
final String tagExpression = spanTag.toString();
for (int i = 0; i < numOfSearchableValuesPerTag; i++) {
tagIdx = index * numOfSearchableValuesPerTag + i;
final String previousValue = (String) map.get(TAGS + "_" + tagIdx);
if (previousValue == null) {
// Still have at least one available slot, add directly.
shouldAdd = true;
break;
}
// If value is duplicated with added one, ignore.
if (previousValue.equals(tagExpression)) {
shouldAdd = false;
break;
}
// Reach the end of tag
if (i == numOfSearchableValuesPerTag - 1) {
shouldAdd = false;
}
}
if (shouldAdd) {
map.put(TAGS + "_" + tagIdx, tagExpression);
}
});
analysisSearchTag(storageData.getTagsRawData(), map);
return map;
}
......
......@@ -27,7 +27,6 @@ import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.oap.server.core.analysis.NodeType;
import org.apache.skywalking.oap.server.core.storage.StorageException;
import org.apache.skywalking.oap.server.core.storage.model.ColumnName;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.storage.model.ModelColumn;
import org.apache.skywalking.oap.server.core.storage.model.ModelInstaller;
......@@ -76,7 +75,6 @@ public class H2TableInstaller extends ModelInstaller {
tableCreateSQL.appendLine("id VARCHAR(512) PRIMARY KEY, ");
for (int i = 0; i < model.getColumns().size(); i++) {
ModelColumn column = model.getColumns().get(i);
ColumnName name = column.getColumnName();
tableCreateSQL.appendLine(
getColumn(column) + (i != model.getColumns().size() - 1 ? "," : ""));
}
......
......@@ -31,8 +31,8 @@ import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.config.ConfigService;
import org.apache.skywalking.oap.server.core.query.type.BasicTrace;
import org.apache.skywalking.oap.server.core.query.type.QueryOrder;
......@@ -77,7 +77,7 @@ public class H2TraceQueryDAO implements ITraceQueryDAO {
int from,
TraceState traceState,
QueryOrder queryOrder,
final List<SpanTag> tags) throws IOException {
final List<Tag> tags) throws IOException {
if (searchableTagKeys == null) {
final ConfigService configService = manager.find(CoreModule.NAME)
.provider()
......@@ -130,7 +130,7 @@ public class H2TraceQueryDAO implements ITraceQueryDAO {
parameters.add(traceId);
}
if (CollectionUtils.isNotEmpty(tags)) {
for (final SpanTag tag : tags) {
for (final Tag tag : tags) {
final int foundIdx = searchableTagKeys.indexOf(tag.getKey());
if (foundIdx > -1) {
sql.append(" and (");
......
......@@ -19,12 +19,15 @@
package org.apache.skywalking.oap.server.storage.plugin.jdbc.mysql;
import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import org.apache.skywalking.oap.server.storage.plugin.jdbc.h2.dao.H2LogQueryDAO;
public class MySQLLogQueryDAO extends H2LogQueryDAO {
public MySQLLogQueryDAO(JDBCHikariCPClient h2Client) {
super(h2Client);
public MySQLLogQueryDAO(final JDBCHikariCPClient h2Client,
final ModuleManager manager,
final int maxSizeOfArrayColumn, final int numOfSearchValuesPerTag) {
super(h2Client, manager, maxSizeOfArrayColumn, numOfSearchValuesPerTag);
}
@Override
......
......@@ -126,7 +126,15 @@ public class MySQLStorageProvider extends ModuleProvider {
this.registerServiceImplementation(
IHistoryDeleteDAO.class, new H2HistoryDeleteDAO(mysqlClient));
this.registerServiceImplementation(ITopNRecordsQueryDAO.class, new H2TopNRecordsQueryDAO(mysqlClient));
this.registerServiceImplementation(ILogQueryDAO.class, new MySQLLogQueryDAO(mysqlClient));
this.registerServiceImplementation(
ILogQueryDAO.class,
new MySQLLogQueryDAO(
mysqlClient,
getManager(),
config.getMaxSizeOfArrayColumn(),
config.getNumOfSearchableValuesPerTag()
)
);
this.registerServiceImplementation(IProfileTaskQueryDAO.class, new H2ProfileTaskQueryDAO(mysqlClient));
this.registerServiceImplementation(IProfileTaskLogQueryDAO.class, new H2ProfileTaskLogQueryDAO(mysqlClient));
......@@ -147,6 +155,13 @@ public class MySQLStorageProvider extends ModuleProvider {
+ "] > maxSizeOfArrayColumn[" + config.getMaxSizeOfArrayColumn()
+ "]. Potential out of bound in the runtime.");
}
final int numOfSearchableLogsTags = configService.getSearchableLogsTags().split(Const.COMMA).length;
if (numOfSearchableLogsTags * config.getNumOfSearchableValuesPerTag() > config.getMaxSizeOfArrayColumn()) {
throw new ModuleStartException("Size of searchableLogsTags[" + numOfSearchableLogsTags
+ "] * numOfSearchableValuesPerTag[" + config.getNumOfSearchableValuesPerTag()
+ "] > maxSizeOfArrayColumn[" + config.getMaxSizeOfArrayColumn()
+ "]. Potential out of bound in the runtime.");
}
try {
mysqlClient.connect();
......
......@@ -75,7 +75,7 @@ public class MySQLTableInstaller extends H2TableInstaller {
Model model) throws JDBCClientException {
int indexSeq = 0;
for (final ModelColumn modelColumn : model.getColumns()) {
if (!modelColumn.isStorageOnly()) {
if (!modelColumn.isStorageOnly() && modelColumn.getLength() < 256) {
final Class<?> type = modelColumn.getType();
if (List.class.isAssignableFrom(type)) {
for (int i = 0; i < maxSizeOfArrayColumn; i++) {
......
......@@ -129,7 +129,15 @@ public class TiDBStorageProvider extends ModuleProvider {
this.registerServiceImplementation(
IHistoryDeleteDAO.class, new H2HistoryDeleteDAO(mysqlClient));
this.registerServiceImplementation(ITopNRecordsQueryDAO.class, new H2TopNRecordsQueryDAO(mysqlClient));
this.registerServiceImplementation(ILogQueryDAO.class, new MySQLLogQueryDAO(mysqlClient));
this.registerServiceImplementation(
ILogQueryDAO.class,
new MySQLLogQueryDAO(
mysqlClient,
getManager(),
config.getMaxSizeOfArrayColumn(),
config.getNumOfSearchableValuesPerTag()
)
);
this.registerServiceImplementation(IProfileTaskQueryDAO.class, new H2ProfileTaskQueryDAO(mysqlClient));
this.registerServiceImplementation(IProfileTaskLogQueryDAO.class, new H2ProfileTaskLogQueryDAO(mysqlClient));
......@@ -152,6 +160,13 @@ public class TiDBStorageProvider extends ModuleProvider {
+ "] > maxSizeOfArrayColumn[" + config.getMaxSizeOfArrayColumn()
+ "]. Potential out of bound in the runtime.");
}
final int numOfSearchableLogsTags = configService.getSearchableLogsTags().split(Const.COMMA).length;
if (numOfSearchableLogsTags * config.getNumOfSearchableValuesPerTag() > config.getMaxSizeOfArrayColumn()) {
throw new ModuleStartException("Size of searchableLogsTags[" + numOfSearchableLogsTags
+ "] * numOfSearchableValuesPerTag[" + config.getNumOfSearchableValuesPerTag()
+ "] > maxSizeOfArrayColumn[" + config.getMaxSizeOfArrayColumn()
+ "]. Potential out of bound in the runtime.");
}
try {
mysqlClient.connect();
......
......@@ -26,8 +26,8 @@ import java.util.Collections;
import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.query.type.BasicTrace;
import org.apache.skywalking.oap.server.core.query.type.KeyValue;
import org.apache.skywalking.oap.server.core.query.type.LogEntity;
......@@ -88,7 +88,7 @@ public class ZipkinTraceQueryEsDAO extends EsDAO implements ITraceQueryDAO {
int from,
TraceState traceState,
QueryOrder queryOrder,
final List<SpanTag> tags) throws IOException {
final List<Tag> tags) throws IOException {
SearchSourceBuilder sourceBuilder = SearchSourceBuilder.searchSource();
......
......@@ -23,8 +23,8 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.skywalking.apm.network.language.agent.v3.SegmentObject;
import org.apache.skywalking.apm.network.language.agent.v3.SpanObject;
import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
import org.apache.skywalking.oap.server.core.analysis.manual.segment.SpanTag;
import org.apache.skywalking.oap.server.core.query.type.QueryOrder;
import org.apache.skywalking.oap.server.core.query.type.Span;
import org.apache.skywalking.oap.server.core.query.type.TraceBrief;
......@@ -52,7 +52,7 @@ public class ProfileTraceDAO implements ITraceQueryDAO {
int from,
TraceState traceState,
QueryOrder queryOrder,
final List<SpanTag> tags) throws IOException {
final List<Tag> tags) throws IOException {
return null;
}
......
......@@ -31,4 +31,9 @@ import lombok.experimental.Accessors;
/**
 * Immutable host/port pair for addressing a network endpoint.
 */
public final class HostAndPort {
    private final String host;
    private final int port;

    /**
     * Renders this address in the conventional {@code host:port} form.
     *
     * @return the host and port joined by a colon
     */
    @Override
    public String toString() {
        return String.format("%s:%d", host, port);
    }
}
......@@ -19,6 +19,12 @@
package org.apache.skywalking.e2e;
import com.google.common.io.Resources;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.e2e.alarm.AlarmQuery;
import org.apache.skywalking.e2e.alarm.GetAlarm;
......@@ -26,6 +32,10 @@ import org.apache.skywalking.e2e.alarm.GetAlarmData;
import org.apache.skywalking.e2e.browser.BrowserErrorLog;
import org.apache.skywalking.e2e.browser.BrowserErrorLogQuery;
import org.apache.skywalking.e2e.browser.BrowserErrorLogsData;
import org.apache.skywalking.e2e.log.Log;
import org.apache.skywalking.e2e.log.LogData;
import org.apache.skywalking.e2e.log.LogsQuery;
import org.apache.skywalking.e2e.log.SupportQueryLogsByKeywords;
import org.apache.skywalking.e2e.metrics.Metrics;
import org.apache.skywalking.e2e.metrics.MetricsData;
import org.apache.skywalking.e2e.metrics.MetricsQuery;
......@@ -57,13 +67,6 @@ import org.springframework.http.RequestEntity;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@SuppressWarnings("UnstableApiUsage")
@Slf4j
public class SimpleQueryClient {
......@@ -366,14 +369,14 @@ public class SimpleQueryClient {
public GetAlarm readAlarms(final AlarmQuery query) throws Exception {
final URL queryFileUrl = Resources.getResource("read-alarms.gql");
final String queryString = Resources.readLines(queryFileUrl, StandardCharsets.UTF_8)
.stream()
.filter(it -> !it.startsWith("#"))
.collect(Collectors.joining())
.replace("{step}", query.step())
.replace("{start}", query.start())
.replace("{end}", query.end())
.replace("{pageSize}", "20")
.replace("{needTotal}", "true");
.stream()
.filter(it -> !it.startsWith("#"))
.collect(Collectors.joining())
.replace("{step}", query.step())
.replace("{start}", query.start())
.replace("{end}", query.end())
.replace("{pageSize}", "20")
.replace("{needTotal}", "true");
LOGGER.info("Query: {}", queryString);
final ResponseEntity<GQLResponse<GetAlarmData>> responseEntity = restTemplate.exchange(
new RequestEntity<>(queryString, HttpMethod.POST, URI.create(endpointUrl)),
......@@ -387,4 +390,59 @@ public class SimpleQueryClient {
return Objects.requireNonNull(responseEntity.getBody()).getData().getGetAlarm();
}
/**
 * Runs the {@code logs.gql} GraphQL query against the OAP backend and returns
 * the matching log entries.
 *
 * @param query supplies every placeholder value substituted into the template
 * @return the log records contained in the GraphQL response
 * @throws Exception if the HTTP exchange fails or the status code is not 200
 */
public List<Log> logs(final LogsQuery query) throws Exception {
    final URL gqlFile = Resources.getResource("logs.gql");
    // Drop '#' comment lines and collapse the template into one query string.
    String gql = Resources.readLines(gqlFile, StandardCharsets.UTF_8)
                          .stream()
                          .filter(line -> !line.startsWith("#"))
                          .collect(Collectors.joining());
    // Placeholder -> value pairs; applied in the same order as the original
    // chained replace() calls so the resulting query string is identical.
    final String[][] substitutions = {
        {"{metricName}", query.metricName()},
        {"{state}", query.state()},
        {"{serviceId}", query.serviceId()},
        {"{serviceInstanceId}", query.serviceInstanceId()},
        {"{endpointId}", query.endpointId()},
        {"{endpointName}", query.endpointName()},
        {"{traceId}", query.traceId()},
        {"{segmentId}", query.segmentId()},
        {"{spanId}", query.spanId()},
        {"{start}", query.start()},
        {"{end}", query.end()},
        {"{step}", query.step()},
        {"{tagKey}", query.tagKey()},
        {"{tagValue}", query.tagValue()},
        {"{pageNum}", query.pageNum()},
        {"{pageSize}", query.pageSize()},
        {"{needTotal}", query.needTotal()},
        {"{keywordsOfContent}", query.keywordsOfContent()},
        {"{excludingKeywordsOfContent}", query.excludingKeywordsOfContent()},
    };
    for (final String[] pair : substitutions) {
        gql = gql.replace(pair[0], pair[1]);
    }
    LOGGER.info("Query: {}", gql);
    final ResponseEntity<GQLResponse<LogData>> response = restTemplate.exchange(
        new RequestEntity<>(gql, HttpMethod.POST, URI.create(endpointUrl)),
        new ParameterizedTypeReference<GQLResponse<LogData>>() {
        }
    );
    if (response.getStatusCode() != HttpStatus.OK) {
        throw new RuntimeException("Response status != 200, actual: " + response.getStatusCode());
    }
    return Objects.requireNonNull(response.getBody()).getData().getLogs().getData();
}
/**
 * Asks the OAP backend whether the active storage implementation supports
 * querying logs by keywords (e.g. ElasticSearch does, H2/MySQL may not).
 *
 * @return {@code true} if keyword-based log queries are supported
 * @throws Exception if the HTTP exchange fails or the status code is not 200
 */
public boolean supportQueryLogsByKeywords() throws Exception {
    final URL queryFileUrl = Resources.getResource("support-query-logs-by-keywords.gql");
    // Strip '#' comment lines from the .gql template; no placeholders here.
    final String queryString = Resources.readLines(queryFileUrl, StandardCharsets.UTF_8)
                                        .stream()
                                        .filter(it -> !it.startsWith("#"))
                                        .collect(Collectors.joining());
    LOGGER.info("Query: {}", queryString);
    final ResponseEntity<GQLResponse<SupportQueryLogsByKeywords>> responseEntity = restTemplate.exchange(
        new RequestEntity<>(queryString, HttpMethod.POST, URI.create(endpointUrl)),
        new ParameterizedTypeReference<GQLResponse<SupportQueryLogsByKeywords>>() {
        }
    );
    if (responseEntity.getStatusCode() != HttpStatus.OK) {
        throw new RuntimeException("Response status != 200, actual: " + responseEntity.getStatusCode());
    }
    // Null-check the response body itself, not the unboxed boolean result
    // (which autoboxing guarantees is never null, making the original check a
    // no-op). This fails fast on a missing body, consistent with the other
    // query methods in this client.
    return Objects.requireNonNull(responseEntity.getBody()).getData().isSupport();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.e2e.common;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * Simple string key/value pair used in e2e GraphQL response payloads and
 * verification expectations.
 *
 * <p>Lombok {@code @Data} generates getters, setters, {@code equals},
 * {@code hashCode} and {@code toString}; {@code @Accessors(chain = true)}
 * makes the generated setters return {@code this} for fluent chaining.
 */
@Data
@Accessors(chain = true)
public class KeyValue {
    // The tag/attribute name.
    private String key;
    // The value associated with the key.
    private String value;
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.skywalking.e2e.common;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.apache.skywalking.e2e.verification.AbstractMatcher;
import static java.util.Objects.nonNull;
@Setter
@Getter
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
/**
 * Matcher that verifies a {@link KeyValue} against expected key/value
 * patterns. A {@code null} expectation means "don't care" and is skipped.
 */
public class KeyValueMatcher extends AbstractMatcher<KeyValue> {
    private String key;
    private String value;

    /**
     * Verifies the actual pair, checking only the fields this matcher sets.
     *
     * @param keyValue the actual key/value pair returned by the backend
     */
    @Override
    public void verify(final KeyValue keyValue) {
        // Each expectation is optional; verify only when one was configured.
        if (getKey() != null) {
            doVerify(getKey(), keyValue.getKey());
        }
        if (getValue() != null) {
            doVerify(getValue(), keyValue.getValue());
        }
    }
}
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
此差异已折叠。
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册