cn.geektool.kafka.admin.KafkaFramework Maven / Gradle / Ivy
The newest version!
package cn.geektool.kafka.admin;
import cn.geektool.kafka.admin.bean.KafkaInfo;
import cn.geektool.kafka.consumer.bean.ConsumerBean;
import cn.geektool.kafka.consumer.generator.KafkaConsumerGenerator;
import cn.geektool.kafka.global.exception.JkafkaException;
import cn.geektool.kafka.producer.bean.ProducerBean;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import cn.geektool.core.check.CheckAssert;
import cn.geektool.core.pool.BaseExecutor;
import cn.geektool.core.pool.DefaultThreadPool;
import cn.geektool.core.util.CollectionUtil;
import cn.geektool.core.util.StrUtil;
import cn.geektool.kafka.admin.external.IKafkaConsumer;
import cn.geektool.kafka.admin.external.IKafkaExternal;
import cn.geektool.kafka.admin.external.IKafkaProducer;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
/**
* kafka的信息
*
* @author jiangdi
* @since 0.0.1
*/
@Slf4j
@Slf4j
public class KafkaFramework {
    /**
     * Complete kafka configuration for this framework instance.
     */
    @Getter
    private KafkaInfo info;
    /**
     * Successfully started kafka producers, keyed by producer name
     * (can be used to look instances up for later changes / handling).
     */
    // NOTE(review): type args restored from the diamond operator and usage;
    // values are the IKafkaProducer instances put in generatorPro().
    @Getter
    private Map<String, IKafkaProducer> successProducerMap = new ConcurrentHashMap<>();
    /**
     * Successfully started kafka consumers, keyed by the consumer "group.id"
     * (can be used to look instances up for later changes / handling).
     */
    @Getter
    private Map<String, IKafkaConsumer> successConsumerMap = new ConcurrentHashMap<>();
    /**
     * Thread pool used to run the consumers.
     */
    @Getter
    private BaseExecutor executorService;
    /**
     * Extension hooks for creating producer/consumer beans
     * (allows custom produce/consume handling).
     */
    @Getter
    private IKafkaExternal kafkaExternal;

    /**
     * Creates the framework around the given configuration.
     *
     * @param info full kafka configuration
     */
    public KafkaFramework(KafkaInfo info) {
        this.info = info;
    }

    /**
     * Supplies a caller-provided thread pool instead of the default.
     *
     * @param executorService thread pool for consumer tasks
     * @return this framework, for fluent chaining
     */
    public KafkaFramework executorService(BaseExecutor executorService) {
        this.executorService = executorService;
        return this;
    }

    /**
     * Supplies a caller-provided extension interface used when creating
     * producer/consumer instances.
     *
     * @param kafkaExternal extension interface
     * @return this framework, for fluent chaining
     */
    public KafkaFramework kafkaExternal(IKafkaExternal kafkaExternal) {
        this.kafkaExternal = kafkaExternal;
        return this;
    }

    /**
     * Starts the framework: fills in defaults for the extension interface and
     * thread pool, then builds every enabled producer and consumer.
     *
     * @throws JkafkaException when producers/consumers are enabled globally
     *         but no corresponding configuration map entries exist
     */
    public void start() {
        log.info("loading kafka client....");
        if (kafkaExternal == null) {
            // Default no-op extension: interface default methods apply.
            kafkaExternal = new IKafkaExternal() {
            };
        }
        if (executorService == null) {
            executorService = new DefaultThreadPool();
        }
        // Start producers.
        if (info.isEnableProducer()) {
            if (CollectionUtil.isEmpty(info.getProducerMap())) {
                throw new JkafkaException("生产者启动但没有配置对应生产者,...");
            }
            info.getProducerMap().forEach((key, value) -> {
                if (value.isEnable()) {
                    generatorPro(key, value);
                }
            });
        }
        // Start consumers.
        if (info.isEnableConsumer()) {
            if (CollectionUtil.isEmpty(info.getConsumerMap())) {
                throw new JkafkaException("消费者启动但没有配置对应消费者,...");
            }
            info.getConsumerMap().forEach((key, value) -> {
                if (value.isEnable()) {
                    generatorCon(key, value);
                }
            });
        }
        log.info("start kafka client success..");
    }

    /**
     * Builds and starts one producer, registering it in
     * {@link #successProducerMap} under its configured name.
     *
     * @param key   producer name from the configuration map
     * @param value producer configuration bean
     */
    private void generatorPro(String key, ProducerBean value) {
        if (StrUtil.isEmpty(value.getServers())) {
            // Fall back to the globally configured servers; fail if absent.
            CheckAssert.check(info.getServers(), String.format("生产者%s:没有绑定对应的属性", key));
            value.setServers(info.getServers());
        }
        IKafkaProducer kafkaProducer = kafkaExternal.createProducer(key);
        try {
            kafkaProducer.execute(JSON.toJSON(value));
        } catch (ExecutionException e) {
            log.error("kafka线程池执行异常", e);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            log.error("kafka线程池执行异常", e);
        }
        successProducerMap.put(key, kafkaProducer);
    }

    /**
     * Builds and starts the consumers derived from one configuration bean.
     * Each partition-parameter set yields its own consumer, registered in
     * {@link #successConsumerMap} under its "group.id".
     *
     * @param key   consumer name from the configuration map
     * @param value consumer configuration bean
     */
    private void generatorCon(String key, ConsumerBean value) {
        if (StrUtil.isEmpty(value.getServers())) {
            // Fall back to the globally configured servers; fail if absent.
            CheckAssert.check(info.getServers(), String.format("消费者%s:没有绑定对应实例", key));
            value.setServers(info.getServers());
        }
        CheckAssert.check(value.getTopics(), String.format("消费者%s:没有指定对应主题", key));
        CheckAssert.check(value.getInvokeBeanName(), String.format("消费者%s:没有指定对应的调用bean", key));
        CheckAssert.check(value.getInvokeMethodName(), String.format("消费者%s:没有指定对应的方法名", key));
        // Element type restored from the getJSONObject(...) usage below.
        List<JSONObject> partionParamList = KafkaConsumerGenerator.generatorParam(value);
        if (CollectionUtil.isNotEmpty(partionParamList)) {
            partionParamList.forEach(partition -> {
                String createKey = partition.getJSONObject("consumerMap").getString("group.id");
                IKafkaConsumer kafkaConsumer = kafkaExternal.createConsumer(createKey);
                try {
                    kafkaConsumer.execute(partition);
                } catch (ExecutionException e) {
                    log.error("kafka消费线程初始化异常", e);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so callers can observe the interruption.
                    Thread.currentThread().interrupt();
                    log.error("kafka消费线程初始化异常", e);
                }
                executorService.submit(kafkaConsumer);
                successConsumerMap.put(createKey, kafkaConsumer);
            });
        }
    }
}
© 2015 - 2025 Weber Informatics LLC | Privacy Policy