
package no.nav.common.kafka.consumer.util;

import no.nav.common.kafka.consumer.ConsumeStatus;
import no.nav.common.kafka.consumer.TopicConsumer;
import no.nav.common.kafka.consumer.feilhandtering.StoredConsumerRecord;
import no.nav.common.kafka.consumer.feilhandtering.StoredRecordConsumer;
import no.nav.common.kafka.util.KafkaUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;

import static java.lang.String.format;

public class ConsumerUtils {

    private static final Logger log = LoggerFactory.getLogger(ConsumerUtils.class);

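    /**
     * Serializes a consumed record's key and value to bytes and captures its metadata,
     * producing a {@link StoredConsumerRecord} that can be persisted (e.g. by the
     * feilhandtering support for retrying failed records). A minimal usage sketch;
     * the topic name, key/value content and {@code StringSerializer} choice below are
     * illustrative assumptions:
     * <pre>{@code
     * ConsumerRecord<String, String> record =
     *         new ConsumerRecord<>("my-topic", 0, 42L, "key", "{\"x\":1}");
     * StoredConsumerRecord stored =
     *         mapToStoredRecord(record, new StringSerializer(), new StringSerializer());
     * }</pre>
     */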
    public static <K, V> StoredConsumerRecord mapToStoredRecord(
            ConsumerRecord<K, V> record,
            Serializer<K> keySerializer,
            Serializer<V> valueSerializer
    ) {
        byte[] key = keySerializer.serialize(record.topic(), record.key());
        byte[] value = valueSerializer.serialize(record.topic(), record.value());
        String headersJson = KafkaUtils.headersToJson(record.headers());

        return new StoredConsumerRecord(
                record.topic(),
                record.partition(),
                record.offset(),
                key,
                value,
                headersJson,
                record.timestamp()
        );
    }

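    /**
     * Overload for records already consumed as raw bytes (e.g. with
     * {@code ByteArrayDeserializer}), where no further serialization is needed.
     * Sketch under that assumption, with hypothetical topic and payload:
     * <pre>{@code
     * ConsumerRecord<byte[], byte[]> record =
     *         new ConsumerRecord<>("my-topic", 0, 42L, "k".getBytes(), "v".getBytes());
     * StoredConsumerRecord stored = mapToStoredRecord(record);
     * }</pre>
     */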
    public static StoredConsumerRecord mapToStoredRecord(ConsumerRecord<byte[], byte[]> record) {
        String headersJson = KafkaUtils.headersToJson(record.headers());

        return new StoredConsumerRecord(
                record.topic(),
                record.partition(),
                record.offset(),
                record.key(),
                record.value(),
                headersJson,
                record.timestamp()
        );
    }

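    /**
     * Reverses {@link #mapToStoredRecord}: deserializes a persisted record back into a
     * {@code ConsumerRecord}, restoring topic, partition, offset, timestamp and headers,
     * so it can be replayed through a {@link TopicConsumer}. Sketch reusing {@code stored}
     * from the examples above; the {@code StringDeserializer} pairing is an assumption:
     * <pre>{@code
     * ConsumerRecord<String, String> replayed =
     *         mapFromStoredRecord(stored, new StringDeserializer(), new StringDeserializer());
     * }</pre>
     */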
    public static <K, V> ConsumerRecord<K, V> mapFromStoredRecord(
            StoredConsumerRecord record,
            Deserializer<K> keyDeserializer,
            Deserializer<V> valueDeserializer
    ) {
        K key = keyDeserializer.deserialize(record.getTopic(), record.getKey());
        V value = valueDeserializer.deserialize(record.getTopic(), record.getValue());
        Headers headers = KafkaUtils.jsonToHeaders(record.getHeadersJson());

        ConsumerRecord<K, V> consumerRecord = new ConsumerRecord<>(
                record.getTopic(),
                record.getPartition(),
                record.getOffset(),
                record.getTimestamp(),
                TimestampType.CREATE_TIME,
                ConsumerRecord.NULL_CHECKSUM,
                ConsumerRecord.NULL_SIZE,
                ConsumerRecord.NULL_SIZE,
                key,
                value
        );

        headers.forEach(header -> consumerRecord.headers().add(header));

        return consumerRecord;
    }

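    /**
     * Adapts a whole topic-to-consumer map so each {@link TopicConsumer} can process
     * persisted {@link StoredConsumerRecord}s. Sketch with a hypothetical topic name
     * and a trivial consumer:
     * <pre>{@code
     * Map<String, TopicConsumer<String, String>> consumers =
     *         Map.of("my-topic", record -> ConsumeStatus.OK);
     * Map<String, StoredRecordConsumer> storedConsumers =
     *         toStoredRecordConsumerMap(consumers, new StringDeserializer(), new StringDeserializer());
     * }</pre>
     */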
    public static <K, V> Map<String, StoredRecordConsumer> toStoredRecordConsumerMap(
            Map<String, TopicConsumer<K, V>> consumerMap,
            Deserializer<K> keyDeserializer,
            Deserializer<V> valueDeserializer
    ) {
        Map<String, StoredRecordConsumer> storedRecordConsumerMap = new HashMap<>();

        consumerMap.forEach((topic, topicConsumer) -> {
            storedRecordConsumerMap.put(topic, toStoredRecordConsumer(topicConsumer, keyDeserializer, valueDeserializer));
        });

        return storedRecordConsumerMap;
    }

    public static <K, V> StoredRecordConsumer toStoredRecordConsumer(
            TopicConsumer<K, V> topicConsumer,
            Deserializer<K> keyDeserializer,
            Deserializer<V> valueDeserializer
    ) {
        return storedRecord -> topicConsumer.consume(mapFromStoredRecord(storedRecord, keyDeserializer, valueDeserializer));
    }

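    /**
     * Fans a single record out to several consumers. The aggregated status is FAILED if
     * any consumer returns FAILED; note that every consumer is still invoked even after
     * an earlier failure. Sketch with two hypothetical consumers:
     * <pre>{@code
     * TopicConsumer<String, String> combined =
     *         aggregateConsumer(List.of(auditConsumer, businessConsumer));
     * }</pre>
     */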
    public static <K, V> TopicConsumer<K, V> aggregateConsumer(final List<TopicConsumer<K, V>> consumers) {
        return record -> {
            ConsumeStatus aggregatedStatus = ConsumeStatus.OK;

            for (TopicConsumer<K, V> consumer : consumers) {
                ConsumeStatus status = consumer.consume(record);

                if (status == ConsumeStatus.FAILED) {
                    aggregatedStatus = ConsumeStatus.FAILED;
                }
            }

            return aggregatedStatus;
        };
    }

    /**
     * Used to wrap consumers that don't return a {@link ConsumeStatus}.
     *
     * @param consumer the consumer which will consume the record
     * @param record the kafka record to consume
     * @param <K> topic key
     * @param <V> topic value
     * @return ConsumeStatus.OK
     */
    public static <K, V> ConsumeStatus consume(Consumer<ConsumerRecord<K, V>> consumer, ConsumerRecord<K, V> record) {
        consumer.accept(record);
        return ConsumeStatus.OK;
    }

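    /**
     * Invokes a consumer defensively: a thrown exception or a {@code null} status is
     * logged and mapped to {@link ConsumeStatus#FAILED} instead of propagating.
     * Minimal sketch; {@code processRecord} is a hypothetical handler that may throw:
     * <pre>{@code
     * TopicConsumer<String, String> guarded = record ->
     *         safeConsume(r -> {
     *             processRecord(r);
     *             return ConsumeStatus.OK;
     *         }, record);
     * }</pre>
     */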
    public static <K, V> ConsumeStatus safeConsume(TopicConsumer<K, V> topicConsumer, ConsumerRecord<K, V> consumerRecord) {
        try {
            ConsumeStatus status = topicConsumer.consume(consumerRecord);

            if (status == null) {
                log.warn(
                        "Consumer returned null instead of OK/FAILED, defaulting to FAILED. topic={} partition={} offset={}",
                        consumerRecord.topic(),
                        consumerRecord.partition(),
                        consumerRecord.offset()
                );
                return ConsumeStatus.FAILED;
            }

            return status;
        } catch (Exception e) {
            String msg = format(
                    "Consumer failed to process record from topic=%s partition=%d offset=%d",
                    consumerRecord.topic(),
                    consumerRecord.partition(),
                    consumerRecord.offset()
            );

            log.error(msg, e);
            return ConsumeStatus.FAILED;
        }
    }

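    /**
     * Builds a {@link JsonTopicConsumer} that deserializes a record's JSON value into
     * {@code dataClass} before invoking the handler. Sketch; {@code MyEvent} and
     * {@code handle} are hypothetical, and the {@code <String, MyEvent>} binding assumes
     * a {@code String} record key, matching the generic shape of the overloads below:
     * <pre>{@code
     * JsonTopicConsumer<String, MyEvent> consumer = jsonConsumer(MyEvent.class, event -> {
     *     handle(event);
     *     return ConsumeStatus.OK;
     * });
     * }</pre>
     */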
    public static <K, D> JsonTopicConsumer<K, D> jsonConsumer(Class<D> dataClass, Function<D, ConsumeStatus> consumer) {
        return new JsonTopicConsumer<>(dataClass, (k, t) -> consumer.apply(t));
    }

    public static <K, D> JsonTopicConsumer<K, D> jsonConsumer(Class<D> dataClass, Consumer<D> consumer) {
        return new JsonTopicConsumer<>(dataClass, (record, data) -> {
            consumer.accept(data);
            return ConsumeStatus.OK;
        });
    }

    public static <K, D> JsonTopicConsumer<K, D> jsonConsumer(Class<D> dataClass, BiConsumer<ConsumerRecord<K, String>, D> consumer) {
        return new JsonTopicConsumer<>(dataClass, (record, data) -> {
            consumer.accept(record, data);
            return ConsumeStatus.OK;
        });
    }
}