// com.github.lontime.extkafka.provider.AbstractProvider (Maven / Gradle / Ivy)
package com.github.lontime.extkafka.provider;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Future;
import com.github.lontime.base.commonj.components.AbstractComponentLifecycle;
import com.github.lontime.base.serial.MsgpackSerial;
import com.github.lontime.extkafka.common.WrapperProducer;
import com.github.lontime.extkafka.configuration.OptionResolver;
import com.github.lontime.extkafka.configuration.ProducerOption;
import com.github.lontime.extkafka.service.ConsumerContainer;
import com.twitter.serial.serializer.Serializer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.ByteArraySerializer;
/**
* @author lontime
* @since 1.0
*/
/**
 * Base provider that owns the consumer container and a lazily-started set of
 * Kafka producers, one per configured {@link ProducerOption}.
 *
 * <p>Values are serialized to byte arrays via {@link MsgpackSerial} before
 * sending, which is why the underlying {@link KafkaProducer} is always created
 * with {@link ByteArraySerializer} for both key and value.
 *
 * <p>NOTE(review): {@code producerMap} is a plain {@link HashMap}; this is safe
 * only if {@code initialize()} completes before any concurrent {@code send}
 * calls — confirm the lifecycle guarantees this, otherwise use a
 * {@code ConcurrentHashMap}.
 *
 * @author lontime
 * @since 1.0
 */
public abstract class AbstractProvider extends AbstractComponentLifecycle implements Utf8Provider {

    /** Container driving the consumer side; its lifecycle mirrors this component's. */
    protected final ConsumerContainer container;

    /** Producers keyed by option name; populated once in {@link #initialize()}. */
    private final Map<String, WrapperProducer> producerMap = new HashMap<>();

    public AbstractProvider() {
        this.container = new ConsumerContainer();
    }

    @Override
    public void initialize() {
        this.container.initialize();
        // Eagerly create one producer per configured producer option.
        OptionResolver.getInstance().getProducers().forEach(this::createProducer);
    }

    @Override
    public void afterRunning() {
        this.container.start();
    }

    @Override
    public void beforeStopping() {
        this.container.stop();
    }

    @Override
    public void destroy() {
        this.container.destroy();
    }

    /**
     * Registers the producer for the given option, reusing an existing entry
     * with the same name if one was already created.
     *
     * @param option producer configuration to register
     * @return the cached or newly created wrapper
     */
    private WrapperProducer createProducer(ProducerOption option) {
        return producerMap.computeIfAbsent(option.getName(), k -> loadProducer(option));
    }

    /**
     * Builds a {@link KafkaProducer} from the option's raw specs.
     *
     * <p>Spec keys are lower-cased to match Kafka's config key convention,
     * the batch size is applied when configured, and byte-array serializers
     * are forced because serialization is performed in {@code send(...)}.
     *
     * @param producerOption source configuration
     * @return wrapper binding topic (and optional fixed partition) to the producer
     */
    private WrapperProducer loadProducer(ProducerOption producerOption) {
        final Map<String, ?> props = producerOption.getSpecs();
        final Map<String, Object> propsNew = new HashMap<>();
        for (Map.Entry<String, ?> entry : props.entrySet()) {
            // Kafka config keys are lower-case (e.g. "bootstrap.servers").
            propsNew.put(entry.getKey().toLowerCase(), entry.getValue());
        }
        if (producerOption.getBatchSize() != null) {
            propsNew.put(ProducerConfig.BATCH_SIZE_CONFIG, producerOption.getBatchSize());
        }
        // Keys/values are pre-serialized to byte[] by MsgpackSerial in send(...).
        propsNew.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        propsNew.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        final KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(propsNew);
        if (producerOption.getPartition() != null) {
            return new WrapperProducer(producerOption.getTopic(), producerOption.getPartition(), producer);
        }
        return new WrapperProducer(producerOption.getTopic(), producer);
    }

    /**
     * Sends a key/value pair to the topic (and optional fixed partition)
     * configured for the named producer.
     *
     * @param name producer name from configuration
     * @return the broker acknowledgement future, or {@code null} if no
     *         producer with that name exists
     */
    @Override
    public <K, V> Future<RecordMetadata> send(String name, K key, V value,
            Serializer<K> keySerializer, Serializer<V> valueSerializer) {
        lazyReentrantStart();
        if (!producerMap.containsKey(name)) {
            return null;
        }
        final WrapperProducer producer = producerMap.get(name);
        return send(name, new ProducerRecord<>(producer.getTopic(), producer.getPartition(), key, value),
                keySerializer, valueSerializer);
    }

    /**
     * Serializes the record's key and value with msgpack and forwards the
     * resulting byte record to {@link #sendBytes(String, ProducerRecord)}.
     *
     * @return the broker acknowledgement future, or {@code null} if no
     *         producer with that name exists
     */
    @Override
    public <K, V> Future<RecordMetadata> send(String name, ProducerRecord<K, V> record,
            Serializer<K> keySerializer, Serializer<V> valueSerializer) {
        lazyReentrantStart();
        if (!producerMap.containsKey(name)) {
            return null;
        }
        final byte[] key = MsgpackSerial.INSTANCE.toByteArrayUnchecked(record.key(), keySerializer);
        final byte[] value = MsgpackSerial.INSTANCE.toByteArrayUnchecked(record.value(), valueSerializer);
        // Bug fix: carry the original record's partition through instead of
        // silently dropping it (the old code rebuilt the record topic-only).
        final ProducerRecord<byte[], byte[]> recordNew =
                new ProducerRecord<>(record.topic(), record.partition(), key, value);
        return sendBytes(name, recordNew);
    }

    /**
     * Sends pre-serialized bytes to the topic (and optional fixed partition)
     * configured for the named producer.
     *
     * @return the broker acknowledgement future, or {@code null} if no
     *         producer with that name exists
     */
    @Override
    public Future<RecordMetadata> sendBytes(String name, byte[] key, byte[] value) {
        lazyReentrantStart();
        if (!producerMap.containsKey(name)) {
            return null;
        }
        final WrapperProducer producer = producerMap.get(name);
        return sendBytes(name, new ProducerRecord<>(producer.getTopic(), producer.getPartition(), key, value));
    }

    /**
     * Hands a fully-built byte record to the named producer.
     *
     * @return the broker acknowledgement future, or {@code null} if no
     *         producer with that name exists
     */
    @Override
    public Future<RecordMetadata> sendBytes(String name, ProducerRecord<byte[], byte[]> record) {
        lazyReentrantStart();
        if (!producerMap.containsKey(name)) {
            return null;
        }
        return producerMap.get(name).getProducer().send(record);
    }
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy