/*
* Copyright 2017-2018 The OpenTracing Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.opentracing.contrib.kafka;
import io.opentracing.References;
import io.opentracing.Span;
import io.opentracing.SpanContext;
import io.opentracing.Tracer;
import io.opentracing.tag.Tags;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.clients.consumer.OffsetCommitCallback;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
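/**
 * A tracing decorator around a Kafka {@link Consumer}: every call is delegated to the wrapped
 * consumer, and each record returned by {@link #poll(long)} gets a "receive" span that follows
 * from the producer's span context carried in the record headers.
 *
 * <p>A minimal usage sketch (the consumer properties, topic name, and {@code Tracer} instance
 * below are illustrative assumptions, not part of this class):
 * <pre>{@code
 * KafkaConsumer<Integer, String> kafkaConsumer = new KafkaConsumer<>(consumerProps);
 * TracingKafkaConsumer<Integer, String> tracingConsumer =
 *     new TracingKafkaConsumer<>(kafkaConsumer, tracer);
 * tracingConsumer.subscribe(Collections.singletonList("my-topic"));
 * ConsumerRecords<Integer, String> records = tracingConsumer.poll(100);
 * }</pre>
 */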
public class TracingKafkaConsumer<K, V> implements Consumer<K, V> {
private final Tracer tracer;
private final Consumer<K, V> consumer;
public TracingKafkaConsumer(Consumer<K, V> consumer, Tracer tracer) {
this.consumer = consumer;
this.tracer = tracer;
}
@Override
public Set<TopicPartition> assignment() {
return consumer.assignment();
}
@Override
public Set<String> subscription() {
return consumer.subscription();
}
@Override
public void subscribe(Collection<String> topics, ConsumerRebalanceListener listener) {
consumer.subscribe(topics, listener);
}
@Override
public void subscribe(Collection<String> topics) {
consumer.subscribe(topics);
}
@Override
public void subscribe(Pattern pattern, ConsumerRebalanceListener listener) {
consumer.subscribe(pattern, listener);
}
@Override
public void subscribe(Pattern pattern) {
consumer.subscribe(pattern);
}
@Override
public void unsubscribe() {
consumer.unsubscribe();
}
@Override
public void assign(Collection<TopicPartition> partitions) {
consumer.assign(partitions);
}
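// Instrumented read path: delegate the poll to the wrapped consumer, then build and finish
// one child span per returned record (see buildAndFinishChildSpan below).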
@Override
public ConsumerRecords<K, V> poll(long timeout) {
ConsumerRecords<K, V> records = consumer.poll(timeout);
for (ConsumerRecord<K, V> record : records) {
buildAndFinishChildSpan(record);
}
return records;
}
@Override
public void commitSync() {
consumer.commitSync();
}
@Override
public void commitSync(Map<TopicPartition, OffsetAndMetadata> offsets) {
consumer.commitSync(offsets);
}
@Override
public void commitAsync() {
consumer.commitAsync();
}
@Override
public void commitAsync(OffsetCommitCallback callback) {
consumer.commitAsync(callback);
}
@Override
public void commitAsync(Map<TopicPartition, OffsetAndMetadata> offsets,
    OffsetCommitCallback callback) {
consumer.commitAsync(offsets, callback);
}
@Override
public void seek(TopicPartition partition, long offset) {
consumer.seek(partition, offset);
}
@Override
public void seekToBeginning(Collection<TopicPartition> partitions) {
consumer.seekToBeginning(partitions);
}
@Override
public void seekToEnd(Collection<TopicPartition> partitions) {
consumer.seekToEnd(partitions);
}
@Override
public long position(TopicPartition partition) {
return consumer.position(partition);
}
@Override
public OffsetAndMetadata committed(TopicPartition partition) {
return consumer.committed(partition);
}
@Override
public Map<MetricName, ? extends Metric> metrics() {
return consumer.metrics();
}
@Override
public List<PartitionInfo> partitionsFor(String topic) {
return consumer.partitionsFor(topic);
}
@Override
public Map<String, List<PartitionInfo>> listTopics() {
return consumer.listTopics();
}
@Override
public void pause(Collection<TopicPartition> partitions) {
consumer.pause(partitions);
}
@Override
public void resume(Collection<TopicPartition> partitions) {
consumer.resume(partitions);
}
@Override
public Set<TopicPartition> paused() {
return consumer.paused();
}
@Override
public Map<TopicPartition, OffsetAndTimestamp> offsetsForTimes(
    Map<TopicPartition, Long> timestampsToSearch) {
return consumer.offsetsForTimes(timestampsToSearch);
}
@Override
public Map<TopicPartition, Long> beginningOffsets(Collection<TopicPartition> partitions) {
return consumer.beginningOffsets(partitions);
}
@Override
public Map<TopicPartition, Long> endOffsets(Collection<TopicPartition> partitions) {
return consumer.endOffsets(partitions);
}
@Override
public void close() {
consumer.close();
}
@Override
public void close(long l, TimeUnit timeUnit) {
consumer.close(l, timeUnit);
}
@Override
public void wakeup() {
consumer.wakeup();
}
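// Extracts the upstream span context from the record headers; if present, creates and
// immediately finishes a "receive" span with a FOLLOWS_FROM reference to it, then re-injects
// the new span context so later processing stages can continue the trace.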
private void buildAndFinishChildSpan(ConsumerRecord<K, V> record) {
SpanContext parentContext = TracingKafkaUtils.extract(record.headers(), tracer);
if (parentContext != null) {
Tracer.SpanBuilder spanBuilder = tracer.buildSpan("receive")
.withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_CLIENT);
spanBuilder.addReference(References.FOLLOWS_FROM, parentContext);
Span span = spanBuilder.start();
SpanDecorator.onResponse(record, span);
span.finish();
// Inject created span context into record headers for extraction by client to continue span chain
TracingKafkaUtils.injectSecond(span.context(), record.headers(), tracer);
}
}
}