no.finn.retriableconsumer.RetryHandler (retriable-kafka-consumer)
Retries processing when consuming records from Kafka; a usage sketch follows the source listing.
package no.finn.retriableconsumer;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class RetryHandler<K, V> implements Consumer<ConsumerRecord<K, V>> {

    public static String HEADER_KEY_REPROCESS_COUNTER = "reprocess-counter";

    private static final Logger log = LoggerFactory.getLogger(RetryHandler.class);

    private final Supplier<Producer<K, V>> factory;
    private final long retryThrottleMillis;
    private final String groupId;

    RetryHandler(Supplier<Producer<K, V>> factory, long retryThrottleMillis, String groupId) {
        this.factory = factory;
        this.retryThrottleMillis = retryThrottleMillis;
        this.groupId = groupId;
    }
    @Override
    public void accept(ConsumerRecord<K, V> record) {
        String retryTopic = retryTopicName(record.topic(), groupId);
        log.info("Putting message with key [{}] on retry-topic [{}].", record.key(), retryTopic);
        factory.get().send(createRetryRecord(record, retryTopic, System.currentTimeMillis()));
        // throttle retries so a permanently failing record does not spin in a tight loop
        try {
            Thread.sleep(retryThrottleMillis);
        } catch (InterruptedException e) {
            log.error("Interrupted while sleeping");
        }
    }
    public static List<String> retryTopicNames(List<String> topics, String groupId) {
        return topics.stream().map(topic -> retryTopicName(topic, groupId)).collect(Collectors.toList());
    }
    ProducerRecord<K, V> createRetryRecord(ConsumerRecord<K, V> oldRecord, String retryTopic, long nowInMillis) {
        ProducerRecord<K, V> newRecord = new ProducerRecord<>(retryTopic, oldRecord.key(), oldRecord.value());

        // copy headers from the consumed record
        oldRecord.headers().forEach(h -> newRecord.headers().add(h));

        // replace the reprocess-counter header with an incremented value
        Header counterHeader = processCounterHeader(newRecord);
        newRecord.headers().remove(HEADER_KEY_REPROCESS_COUNTER);
        newRecord.headers().add(counterHeader);

        // add timestamp-header if not present
        if (newRecord.headers().lastHeader(RestartableKafkaConsumer.HEADER_TIMESTAMP_KEY) == null) {
            newRecord.headers().add(timestampHeader(nowInMillis));
        }

        return newRecord;
    }
    public static Header timestampHeader(long timestamp) {
        return new RecordHeader(RestartableKafkaConsumer.HEADER_TIMESTAMP_KEY, String.valueOf(timestamp).getBytes());
    }
    static Header processCounterHeader(ProducerRecord<?, ?> producerRecord) {
        Header processCounterHeader = producerRecord.headers().lastHeader(HEADER_KEY_REPROCESS_COUNTER);
        // missing or non-numeric counter starts at 1, otherwise it is incremented
        if (processCounterHeader == null || !NumberUtils.isDigits(new String(processCounterHeader.value()))) {
            return new RecordHeader(HEADER_KEY_REPROCESS_COUNTER, "1".getBytes());
        }
        int reprocessCount = Integer.parseInt(new String(processCounterHeader.value()));
        return new RecordHeader(HEADER_KEY_REPROCESS_COUNTER, String.valueOf(reprocessCount + 1).getBytes());
    }
    static String retryTopicName(String topic, String groupId) {
        // topics that are already retry-topics keep their name; otherwise use retry-<groupId>-<topic>
        if (StringUtils.startsWith(topic, "retry")) return topic;
        return String.format("%s-%s-%s", "retry", groupId, topic);
    }
}
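
A minimal usage sketch, under stated assumptions: it uses MockProducer from kafka-clients in place of a real producer, and it is placed in the no.finn.retriableconsumer package because the RetryHandler constructor is package-private (in normal use the library wires the handler up itself). The class name RetryHandlerSketch and the topic and group names are made up for illustration.

package no.finn.retriableconsumer;

import java.util.List;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class RetryHandlerSketch {

    public static void main(String[] args) {
        // MockProducer stands in for a real Kafka producer; autoComplete=true completes sends immediately.
        MockProducer<String, String> producer =
                new MockProducer<>(true, new StringSerializer(), new StringSerializer());

        // Hypothetical wiring: supply the producer, no throttle, and the consumer group id.
        RetryHandler<String, String> handler =
                new RetryHandler<>(() -> producer, 0L, "my-group");

        // Hand the handler a record that failed processing.
        handler.accept(new ConsumerRecord<>("orders", 0, 0L, "key-1", "value-1"));

        // The record is republished to "retry-my-group-orders" with a "reprocess-counter" header of "1".
        List<ProducerRecord<String, String>> sent = producer.history();
        System.out.println(sent.get(0).topic());
        System.out.println(new String(
                sent.get(0).headers().lastHeader(RetryHandler.HEADER_KEY_REPROCESS_COUNTER).value()));
    }
}

With retryThrottleMillis set to 0 the sketch skips the sleep; with a real producer the handler sleeps after each send so failing records are not retried in a tight loop.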