com.infusers.core.email.kafka.KafkaConsumerConfig
/*package com.infusers.core.email.kafka;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import com.infusers.core.email.EmailMessage;
import java.util.HashMap;
import java.util.Map;
@Configuration
@EnableKafka
public class KafkaConsumerConfig {

    @Value("${kafka.consumer.group-id}")
    private String groupId;

    // Broker address is hardcoded here; externalize it via configuration if needed.
    private final String bootstrapServers = "localhost:9092";

    private final long maxPollIntervalMs = 5000; // 5 seconds; used below as poll timeout and idle event interval

    @Bean
    public ConsumerFactory<String, EmailMessage> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // Wrap the JSON deserializer so a bad record does not crash the consumer thread.
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class);
        props.put(ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, JsonDeserializer.class);
        props.put(JsonDeserializer.TRUSTED_PACKAGES, "*"); // Adjust this based on your package structure
        props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, EmailMessage.class.getName());
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 1); // Ensure only one record is consumed per poll
        return new DefaultKafkaConsumerFactory<>(props);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, EmailMessage> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, EmailMessage> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.getContainerProperties().setPollTimeout(maxPollIntervalMs); // Set poll timeout
        factory.getContainerProperties().setIdleEventInterval(maxPollIntervalMs); // Set idle event interval
        // SeekToCurrentErrorHandler is deprecated in newer spring-kafka versions;
        // factory.setCommonErrorHandler(new DefaultErrorHandler()) is its replacement.
        //factory.setErrorHandler(new SeekToCurrentErrorHandler());
        return factory;
    }
}*/
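Usage sketch (not part of the artifact): if the configuration above were enabled, a listener wired to the kafkaListenerContainerFactory bean could consume EmailMessage payloads along the lines below. The topic name "email-topic" and the EmailListener class are illustrative assumptions, not names taken from this library.

package com.infusers.core.email.kafka;

import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import com.infusers.core.email.EmailMessage;

// Minimal sketch of a consumer using the container factory defined in KafkaConsumerConfig.
// Topic and class names are assumptions for illustration only.
@Component
public class EmailListener {

    // containerFactory refers to the bean name "kafkaListenerContainerFactory" above,
    // so records are deserialized into EmailMessage via the configured JsonDeserializer.
    @KafkaListener(topics = "email-topic", containerFactory = "kafkaListenerContainerFactory")
    public void onEmailMessage(EmailMessage message) {
        // Hand off to whatever sends the email; EmailMessage fields are not shown in this listing.
        System.out.println("Received email message: " + message);
    }
}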