package org.reactivecommons.async.kafka.config;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.reactivecommons.api.domain.DomainEventBus;
import org.reactivecommons.async.commons.DLQDiscardNotifier;
import org.reactivecommons.async.commons.DiscardNotifier;
import org.reactivecommons.async.commons.converters.MessageConverter;
import org.reactivecommons.async.commons.converters.json.DefaultObjectMapperSupplier;
import org.reactivecommons.async.commons.converters.json.ObjectMapperSupplier;
import org.reactivecommons.async.commons.ext.CustomReporter;
import org.reactivecommons.async.commons.ext.DefaultCustomReporter;
import org.reactivecommons.async.kafka.KafkaDomainEventBus;
import org.reactivecommons.async.kafka.communications.ReactiveMessageListener;
import org.reactivecommons.async.kafka.communications.ReactiveMessageSender;
import org.reactivecommons.async.kafka.communications.topology.KafkaCustomizations;
import org.reactivecommons.async.kafka.communications.topology.TopologyCreator;
import org.reactivecommons.async.kafka.config.props.RCAsyncPropsKafka;
import org.reactivecommons.async.kafka.config.props.RCKafkaProps;
import org.reactivecommons.async.kafka.converters.json.KafkaJacksonMessageConverter;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import reactor.kafka.receiver.ReceiverOptions;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import static org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;

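/**
 * Spring auto-configuration for the Reactive Commons Kafka async bridge.
 * Every bean below is declared with {@code @ConditionalOnMissingBean}, so an
 * application can override any piece (sender, receiver options, message
 * converter, object mapper, reporter, etc.) by declaring its own bean of the
 * same type.
 */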
@Configuration
@EnableConfigurationProperties({RCAsyncPropsKafka.class})
public class RCKafkaConfig {

    // Sender
    @Bean
    @ConditionalOnMissingBean(DomainEventBus.class)
    public DomainEventBus kafkaDomainEventBus(ReactiveMessageSender sender) {
        return new KafkaDomainEventBus(sender);
    }

    @Bean
    @ConditionalOnMissingBean(ReactiveMessageSender.class)
    public ReactiveMessageSender kafkaReactiveMessageSender(KafkaSender kafkaSender, MessageConverter converter,
                                                            TopologyCreator topologyCreator) {
        return new ReactiveMessageSender(kafkaSender, converter, topologyCreator);
    }

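    /**
     * Low-level reactor-kafka sender shared by the Reactive Commons sender bean.
     * The client id is taken from {@code spring.application.name}; keys are
     * serialized as String and payloads as byte[].
     */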
    @Bean
    @ConditionalOnMissingBean(KafkaSender.class)
    public KafkaSender kafkaSender(RCAsyncPropsKafka config, @Value("${spring.application.name}") String clientId) {
        RCKafkaProps props = config.getKafkaProps();
        props.put(ProducerConfig.CLIENT_ID_CONFIG, clientId);
        props.put(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
        SenderOptions senderOptions = SenderOptions.create(props);
        return KafkaSender.create(senderOptions);
    }

    // Receiver
    @Bean
    @ConditionalOnMissingBean(ReactiveMessageListener.class)
    public ReactiveMessageListener kafkaReactiveMessageListener(ReceiverOptions receiverOptions) {
        return new ReactiveMessageListener(receiverOptions);
    }

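    /**
     * Consumer options mirroring the sender configuration: String keys and
     * byte[] payloads, built from the same {@link RCKafkaProps}.
     */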
    @Bean
    @ConditionalOnMissingBean(ReceiverOptions.class)
    public ReceiverOptions kafkaReceiverOptions(RCAsyncPropsKafka config) {
        RCKafkaProps props = config.getKafkaProps();
        props.put(KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
        return ReceiverOptions.create(props);
    }

    // Shared
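    /**
     * TopologyCreator backed by a Kafka {@link AdminClient}, used to manage the
     * topics required by senders and listeners, applying any registered
     * {@link KafkaCustomizations}.
     */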
    @Bean
    @ConditionalOnMissingBean(TopologyCreator.class)
    public TopologyCreator kafkaTopologyCreator(RCAsyncPropsKafka config, KafkaCustomizations customizations) {
        AdminClient adminClient = AdminClient.create(config.getKafkaProps());
        return new TopologyCreator(adminClient, customizations);
    }

    @Bean
    @ConditionalOnMissingBean(KafkaCustomizations.class)
    public KafkaCustomizations defaultKafkaCustomizations() {
        return new KafkaCustomizations();
    }

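    /**
     * Jackson-based converter for message payloads. Provide your own
     * {@link ObjectMapperSupplier} bean to customize the underlying ObjectMapper.
     */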
    @Bean
    @ConditionalOnMissingBean(MessageConverter.class)
    public MessageConverter kafkaJacksonMessageConverter(ObjectMapperSupplier objectMapperSupplier) {
        return new KafkaJacksonMessageConverter(objectMapperSupplier.get());
    }

    @Bean
    @ConditionalOnMissingBean(DiscardNotifier.class)
    public DiscardNotifier kafkaDiscardNotifier(DomainEventBus domainEventBus, MessageConverter messageConverter) {
        return new DLQDiscardNotifier(domainEventBus, messageConverter);
    }

    @Bean
    @ConditionalOnMissingBean(ObjectMapperSupplier.class)
    public ObjectMapperSupplier defaultObjectMapperSupplier() {
        return new DefaultObjectMapperSupplier();
    }

    @Bean
    @ConditionalOnMissingBean(CustomReporter.class)
    public CustomReporter defaultKafkaCustomReporter() {
        return new DefaultCustomReporter();
    }

    // Utilities
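    /**
     * Reads a dot-env style file (one KEY=VALUE pair per line, lines starting
     * with '#' are ignored) into an {@link RCKafkaProps} map. A minimal usage
     * sketch, assuming a file named "kafka.env" exists in the working directory:
     * <pre>{@code
     * RCKafkaProps props = RCKafkaConfig.readPropsFromDotEnv(Path.of("kafka.env"));
     * }</pre>
     */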
    public static RCKafkaProps readPropsFromDotEnv(Path path) throws IOException {
        String env = Files.readString(path);
        String[] lines = env.split("\n");
        RCKafkaProps props = new RCKafkaProps();
        for (String line : lines) {
            // Skip comments and blank lines; everything else is treated as KEY=VALUE
            if (line.isBlank() || line.startsWith("#")) {
                continue;
            }
            String[] parts = line.split("=", 2);
            props.put(parts[0], parts[1]);
        }
        return props;
    }

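    /**
     * Builds a JAAS configuration entry for SASL/PLAIN authentication. A hedged
     * sketch of how it could be combined with the dot-env props (the property
     * names are the standard Kafka client keys, not constants defined in this
     * class):
     * <pre>{@code
     * RCKafkaProps props = RCKafkaConfig.readPropsFromDotEnv(Path.of("kafka.env"));
     * props.put("security.protocol", "SASL_SSL");
     * props.put("sasl.mechanism", "PLAIN");
     * props.put("sasl.jaas.config", RCKafkaConfig.jassConfig("user", "secret"));
     * }</pre>
     */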
    public static String jassConfig(String username, String password) {
        return String.format(
                "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";",
                username, password);
    }
}