com.github.lontime.extkafka.service.ConsumerProcessor

package com.github.lontime.extkafka.service;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;

import com.github.lontime.base.commonj.components.ComponentInterfaceHelper;
import com.github.lontime.base.commonj.components.DelayedProcessor;
import com.github.lontime.base.commonj.utils.CollectionHelper;
import com.github.lontime.base.commonj.utils.LoggerHelper;
import com.github.lontime.extconfig.ConfigHelper;
import com.github.lontime.extkafka.common.CommitKind;
import com.github.lontime.extkafka.configuration.ConsumerOption;
import com.github.lontime.extkafka.listeners.KafkaConsumerListener;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;

/**
 * Delayed-loop processor that polls a Kafka consumer and dispatches the
 * records to registered {@link KafkaConsumerListener}s, handling manual
 * offset commits, failure backoff, and rebalances.
 * @author lontime
 */
public class ConsumerProcessor extends DelayedProcessor {

    private final ConsumerOption option;

    private Consumer<byte[], byte[]> consumer;

    private final Map<TopicPartition, OffsetAndMetadata> currentOffsets = new ConcurrentHashMap<>();

    private final LongAdder failAdder = new LongAdder();

    private boolean autoCommit;

    public ConsumerProcessor(ConsumerOption option) {
        this.option = option;
    }

    @Override
    protected void startUp() {
        this.consumer = createConsumer();
        this.autoCommit = resolveEnableAutoCommit();
    }

    @Override
    public boolean doLoop() {
        final List<KafkaConsumerListener> listeners =
                ComponentInterfaceHelper.get(KafkaConsumerListener.class, option.getName());
        if (CollectionHelper.isEmpty(listeners)) {
            LoggerHelper.debugv("no listeners registered");
            return false;
        }
        final ConsumerRecords<byte[], byte[]> records = consumer.poll(option.getPullTimeout());
        if (records.isEmpty()) {
            LoggerHelper.debugv("no records polled");
            failAdder.reset();
            return true;
        }
        return handle(records, listeners);
    }

    private boolean handle(ConsumerRecords<byte[], byte[]> records, List<KafkaConsumerListener> listeners) {
        try {
            for (TopicPartition partition : records.partitions()) {
                final List<ConsumerRecord<byte[], byte[]>> partitionRecords = records.records(partition);
                // Track the first offset of the batch so a failure can rewind to it.
                final long firstConsumedOffset = partitionRecords.get(0).offset();
                currentOffsets.put(partition, new OffsetAndMetadata(firstConsumedOffset));
                listeners.forEach(s -> s.accept(partitionRecords));
                // Listeners succeeded: advance to the offset after the last record.
                final long lastConsumedOffset = partitionRecords.get(partitionRecords.size() - 1).offset();
                currentOffsets.put(partition, new OffsetAndMetadata(lastConsumedOffset + 1));
                if (!autoCommit) {
                    manualCommit(Collections.singletonMap(partition, new OffsetAndMetadata(lastConsumedOffset + 1)));
                }
            }
            failAdder.reset();
        } catch (Exception ex) {
            LoggerHelper.warnv(ex, "consumer listener threw an exception");
            failAdder.increment();
            // Rewind each partition to its last tracked offset so the failed batch is re-polled.
            currentOffsets.forEach((k, v) -> consumer.seek(k, v.offset()));
        }
        return true;
    }

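    /**
     * Linear backoff: the base poll interval is multiplied by the number of
     * consecutive failures plus one, capped at intervalMax. For example, with
     * interval=500ms and intervalMax=5s, three straight failures yield a
     * sleep of 500ms * 4 = 2s before the next poll.
     */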
    @Override
    protected Duration sleepTimeout() {
        final long intervalValue = option.getInterval().toMillis() * (failAdder.longValue() + 1);
        final long interval = Math.min(intervalValue, option.getIntervalMax().toMillis());
        return Duration.ofMillis(interval);
    }

    @Override
    protected Duration initialDelay() {
        return option.getWarmup();
    }

    @Override
    protected void shutDown() throws Exception {
        if (consumer != null) {
            if (!autoCommit) {
                manualCommit(currentOffsets);
            }
            consumer.unsubscribe();
            consumer.close(option.getCloseTimeout());
        }
    }

    public void rebalance() {
        // Invoked via DefaultConsumerRebalanceListener (see createConsumer) so
        // in-flight offsets are committed before partitions are reassigned.
        if (CollectionHelper.isNotEmpty(currentOffsets)) {
            consumer.commitSync(currentOffsets);
        }
    }

    private void manualCommit(Map<TopicPartition, OffsetAndMetadata> offsets) {
        if (option.getCommitKind() == CommitKind.SYNC) {
            if (option.getSyncCommitTimeout() != null) {
                consumer.commitSync(offsets, option.getSyncCommitTimeout());
            } else {
                consumer.commitSync(offsets);
            }
        } else {
            // Commit the given offsets, not everything consumed so far.
            consumer.commitAsync(offsets, null);
        }
    }

    private boolean resolveEnableAutoCommit() {
        // An explicit option wins over anything configured in the raw specs.
        if (option.getEnableAutoCommit() != null) {
            return option.getEnableAutoCommit();
        }
        final Map<String, String> props = option.getSpecs();
        for (Map.Entry<String, String> entry : props.entrySet()) {
            final String key = ConfigHelper.unescapeName(entry.getKey()).toLowerCase();
            if (ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG.equals(key)) {
                // enable.auto.commit
                return Boolean.parseBoolean(entry.getValue());
            }
        }
        return false;
    }

    private Consumer<byte[], byte[]> createConsumer() {
        final Map<String, String> props = option.getSpecs();
        final Map<String, Object> propsNew = new HashMap<>();
        if (option.getGroupId() != null) {
            propsNew.put(ConsumerConfig.GROUP_ID_CONFIG, option.getGroupId());
        }
        for (Map.Entry<String, String> entry : props.entrySet()) {
            propsNew.put(entry.getKey().toLowerCase(), entry.getValue());
        }
        // Explicit options override the raw specs copied above.
        if (option.getEnableAutoCommit() != null) {
            propsNew.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, option.getEnableAutoCommit());
        }
        if (option.getAutoCommitInterval() != null) {
            propsNew.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, option.getAutoCommitInterval().toMillis());
        }
        propsNew.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        propsNew.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        final KafkaConsumer<byte[], byte[]> kafkaConsumer = new KafkaConsumer<>(propsNew);
        kafkaConsumer.subscribe(option.getTopics(), new DefaultConsumerRebalanceListener(this));
        return kafkaConsumer;
    }

}
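
For reference, a minimal listener sketch follows. It is not part of this file: it assumes KafkaConsumerListener exposes the single accept(List<ConsumerRecord<byte[], byte[]>>) callback implied by handle() above, and the ConsumerOption setter names are guesses; how the listener is registered with ComponentInterfaceHelper and how the processor is started depend on the surrounding component framework, so verify against the actual extkafka API before relying on this.

// A minimal sketch, not part of the library. Assumptions: KafkaConsumerListener
// has a single accept(List<ConsumerRecord<byte[], byte[]>>) callback (the shape
// implied by handle() above), and the ConsumerOption setters used in main()
// (setName, setGroupId, setTopics) are hypothetical.
import java.util.Arrays;
import java.util.List;

import com.github.lontime.extkafka.configuration.ConsumerOption;
import com.github.lontime.extkafka.listeners.KafkaConsumerListener;
import com.github.lontime.extkafka.service.ConsumerProcessor;
import org.apache.kafka.clients.consumer.ConsumerRecord;

public class LoggingListener implements KafkaConsumerListener {

    @Override
    public void accept(List<ConsumerRecord<byte[], byte[]>> records) {
        // Throwing here makes ConsumerProcessor rewind and re-poll the batch.
        records.forEach(r -> System.out.printf("%s-%d@%d: %d bytes%n",
                r.topic(), r.partition(), r.offset(), r.value().length));
    }

    public static void main(String[] args) {
        final ConsumerOption option = new ConsumerOption();
        option.setName("orders");                  // hypothetical setter
        option.setGroupId("orders-group");         // hypothetical setter
        option.setTopics(Arrays.asList("orders")); // hypothetical setter
        // The loop itself is driven by the DelayedProcessor lifecycle; starting
        // it is framework-specific and omitted here.
        final ConsumerProcessor processor = new ConsumerProcessor(option);
    }
}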