net.kut3.messaging.kafka.client.ConsumerImpl

/*
 * Copyright 2019 Kut3Net.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.kut3.messaging.kafka.client;

import ch.qos.logback.classic.Level;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import net.kut3.messaging.BatchMessageProcessor;
import net.kut3.messaging.Consumer;
import net.kut3.messaging.Message;
import net.kut3.messaging.MessageProcessor;
import net.kut3.messaging.ProcessResult;
import net.kut3.messaging.client.ClientFactory;
import net.kut3.messaging.kafka.Component;
import net.kut3.messaging.kafka.OffsetResetMode;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import static org.apache.kafka.clients.consumer.ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Kafka-backed implementation of {@link Consumer} that subscribes to a set of
 * topics and hands every polled record to a message processor.
 */
class ConsumerImpl implements Consumer, Component {

    private static final Logger LOGGER
            = LoggerFactory.getLogger(ConsumerImpl.class);

    private final String name;
    // Set by close() to ask the poll loop to stop.
    private final AtomicBoolean isStopped = new AtomicBoolean(false);
    // Released once the poll loop has exited and the Kafka consumer is closed.
    private final CountDownLatch stopWaiter = new CountDownLatch(1);
    private final boolean isAutoAck;
    private final KafkaConsumer consumer;
    private final Collection topics;

    /**
     *
     * @param name Name of this consumer
     * @param props Kafka consumer properties
     * @param topics Topics to consume
     */
    ConsumerImpl(String name, Map props,
            Collection topics) {

        this.name = name;
        // enable.auto.commit may be supplied as a Boolean or as a String
        // ("true"/"false"), so parse it defensively instead of casting.
        Object autoCommit = props.get(ENABLE_AUTO_COMMIT_CONFIG);
        if (null != autoCommit) {
            this.isAutoAck = Boolean.parseBoolean(autoCommit.toString());
        } else {
            this.isAutoAck = true;
        }

        LOGGER.info("Consumer '" + this.name + "' - "
                + ENABLE_AUTO_COMMIT_CONFIG + "=" + this.isAutoAck);

        this.consumer = new KafkaConsumer<>(props);
        this.topics = topics;

        LOGGER.info("Consumer '" + this.name + "' - "
                + "topics=" + Arrays.toString(topics.toArray()));
    }

    @Override
    public String name() {
        return this.name;
    }

    @Override
    public boolean isAutoAck() {
        return this.isAutoAck;
    }

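    /**
     * Subscribes to the configured topics and hands every polled record to the
     * given processor, one message at a time. Blocks the calling thread until
     * {@link #close()} is invoked from another thread.
     */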
    @Override
    public void start(MessageProcessor messageProcessor) {
        this.consumer.subscribe(this.topics);

        try {
            while (!this.isStopped.get()) {
                ConsumerRecords records
                        = this.consumer.poll(Duration.ofMillis(10000));

                if (records.isEmpty()) {
                    LOGGER.info("Consumer '" + this.name + "' - No records found");
                    continue;
                }

                for (ConsumerRecord record : records) {
                    messageProcessor.process(new KafkaMessage(record));
                }
            }
        } finally {
            // Mirror the batch variant: release resources and unblock close().
            this.consumer.close();
            this.stopWaiter.countDown();
        }
    }

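    /**
     * Subscribes to the configured topics, adds every record of a poll to the
     * processor's batch and triggers {@code processBatch()} once per poll.
     * Blocks the calling thread until {@link #close()} is invoked from another
     * thread.
     */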
    @Override
    public void start(BatchMessageProcessor messageProcessor) {
        this.consumer.subscribe(this.topics);

        try {
            while (!this.isStopped.get()) {
                ConsumerRecords records
                        = this.consumer.poll(Duration.ofMillis(10000));

                if (records.isEmpty()) {
                    LOGGER.info("Consumer '" + this.name + "' - No records found");
                    continue;
                }

                for (ConsumerRecord record : records) {
                    messageProcessor.addToBatch(new KafkaMessage(record));
                }

                messageProcessor.processBatch();
            }
        } finally {
            this.consumer.close();
            this.stopWaiter.countDown();
        }
    }

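    /**
     * Signals the poll loop to stop and waits until the underlying Kafka
     * consumer has been closed. Must be called from a thread other than the one
     * running {@code start(...)}.
     */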
    @Override
    public void close() {
        this.isStopped.set(true);
        try {
            this.stopWaiter.await();
        } catch (InterruptedException ex) {
            // Restore the interrupt flag rather than swallowing it.
            Thread.currentThread().interrupt();
        }
    }

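    /**
     * Manual smoke test against a dev cluster: consumes the configured topic
     * and prints every message it receives.
     */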
    public static void main(String[] args) throws InterruptedException {
        ClientFactory clientFactory = new KafkaClientFactory();

        String consumerName = "kafka-client-0.3.0-01";
        String servers = "10.1.1.99:9092,10.1.1.99:9093,10.1.1.98:9094";
        String groupId = "net.kut3.messaging.kafka.test-group";
        String topic = "dev.Merchant";

        ((ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.apache.kafka"))
                .setLevel(Level.ERROR);

        LoggerFactory.getLogger(topic).info("Begin");

        Consumer consumer = clientFactory.newConsumer(new SimpleConsumerProperties(consumerName,
                servers,
                groupId,
                OffsetResetMode.EARLIEST,
                Arrays.asList(topic)
        ));

        // start(...) blocks the calling thread until close() is invoked, so run
        // the consumer on its own thread and stop it from this one.
        Thread poller = new Thread(() -> consumer.start(new BatchMessageProcessor() {
            @Override
            public void addToBatch(Message message) {
                System.out.println(message.toString());
            }

            @Override
            public List processBatch() {
                System.out.println("Done batch");
                return new ArrayList<>();
            }
        }), "consumer-" + consumerName);
        poller.start();

        // Let the demo consume for a minute, then shut down cleanly.
        Thread.sleep(60000);
        consumer.close();
        poller.join();
    }
}