// com.jchanghong.kafka.KafkaHelper.kt — Maven / Gradle / Ivy (the newest version!)
package com.jchanghong.kafka
import cn.hutool.core.date.DateUnit
import cn.hutool.core.date.DateUtil
import cn.hutool.core.thread.ThreadUtil
import cn.hutool.core.util.RandomUtil
import cn.hutool.cron.CronUtil
import com.jchanghong.log.kError
import com.jchanghong.log.kInfo
import org.apache.kafka.clients.admin.*
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.consumer.ConsumerRecords
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.clients.producer.RecordMetadata
import org.apache.kafka.common.KafkaFuture
import java.time.Duration
import java.util.*
import java.util.concurrent.ConcurrentLinkedDeque
import java.util.concurrent.ExecutionException
import java.util.concurrent.Executors
import java.util.concurrent.Future
import java.util.concurrent.atomic.AtomicLong
import java.util.function.Function
/**
 * Helper wrapping Kafka producer, consumer and admin operations for a single
 * Kafka cluster. One instance holds one configuration set; to talk to several
 * clusters, create one instance per cluster.
 *
 * @param bootstrap comma-separated `host:port` list used as `bootstrap.servers`
 * @param groupId   consumer group id; a null/blank value leaves `group.id` unset
 * @param topics    topics to subscribe to in [startConsumer] (required there)
 * @param action    callback invoked for each consumed record (required in [startConsumer])
 */
class KafkaHelper(
    val bootstrap: String,
    val groupId: String,
    val topics: List<String>? = null,
    val action: Function<ConsumerRecord<String, String>, Unit>? = null,
) {
    // Dedicated thread that runs the endless poll loop started by startConsumer().
    private val singleThreadExecutor = Executors.newSingleThreadExecutor()
    private val mProps: Properties by lazy { getAndSetProps(bootstrap, groupId) }
    // String keys/values match the String(De)Serializer classes configured below.
    private val mProducer: KafkaProducer<String, String> by lazy { KafkaProducer(mProps) }
    private val mConsumer: KafkaConsumer<String, String> by lazy { KafkaConsumer(mProps) }
    private val adminClient: AdminClient by lazy { KafkaAdminClient.create(mProps) }

    /**
     * Builds the configuration shared by producer, consumer and admin client.
     *
     * @param bootstrap broker list for `bootstrap.servers`
     * @param groupId   consumer group id; skipped when null or blank
     */
    private fun getAndSetProps(bootstrap: String, groupId: String? = null): Properties {
        val props = Properties()
        props["bootstrap.servers"] = bootstrap
        props["retries"] = 2 // producer retry count
        props["batch.size"] = 16384 // producer batch size in bytes
        props["buffer.memory"] = 33554432 // producer buffer size; tune to host memory
        // props["linger.ms"] = 1000 // send frequency: flush when either condition is met
        props["acks"] = "all"
        if (!groupId.isNullOrBlank()) {
            props.setProperty("group.id", groupId)
        }
        props.setProperty("enable.auto.commit", "true")
        props.setProperty("auto.commit.interval.ms", "1000")
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
        props["key.serializer"] = "org.apache.kafka.common.serialization.StringSerializer"
        props["value.serializer"] = "org.apache.kafka.common.serialization.StringSerializer"
        return props
    }

    /**
     * Creates topic [name] with [p] partitions and replication factor [r],
     * blocking until the broker acknowledges (or the request fails, which is
     * logged and swallowed).
     */
    @JvmOverloads
    fun createTopic(name: String, p: Int = 8, r: Short = 1) {
        val newTopicList: MutableCollection<NewTopic> = ArrayList()
        newTopicList.add(NewTopic(name, p, r))
        val createTopicsResult = adminClient.createTopics(newTopicList)
        for ((topic, future) in createTopicsResult.values()) {
            try {
                future.get() // block until the creation completes
                Thread.sleep(2000) // NOTE(review): original grace pause kept; purpose unconfirmed
            } catch (e: Exception) {
                kError(e.message, e)
            }
            kInfo("createTopic $topic")
        }
    }

    /**
     * Deletes topic [name], blocking until the broker acknowledges (failures
     * are logged and swallowed).
     */
    fun deleteTopic(name: String) {
        val deleteTopicsResult = adminClient.deleteTopics(listOf(name))
        for ((topic, future) in deleteTopicsResult.values()) {
            try {
                future.get() // block until the deletion completes
                Thread.sleep(2000) // NOTE(review): original grace pause kept; purpose unconfirmed
            } catch (e: Exception) {
                kError(e.message, e)
            }
            kInfo("deleteTopic $topic")
        }
    }

    /** Returns all topic names in the cluster, or an empty set on failure. */
    fun listAllTopic(): Set<String> {
        val result: ListTopicsResult = adminClient.listTopics()
        return try {
            result.names().get()
        } catch (e: InterruptedException) {
            Thread.currentThread().interrupt() // restore interrupt status after logging
            kError(e.message, e)
            emptySet()
        } catch (e: ExecutionException) {
            kError(e.message, e)
            emptySet()
        }
    }

    /**
     * Returns the broker's description of topic [name], or null when the admin
     * client yields no description future for it.
     */
    fun getTopic(name: String): TopicDescription? {
        val describeTopics: DescribeTopicsResult = adminClient.describeTopics(listOf(name))
        val values: Collection<KafkaFuture<TopicDescription>> = describeTopics.values().values
        if (values.isEmpty()) {
            kInfo("找不到描述信息") // log message kept verbatim: "description not found"
            return null
        }
        return values.first().get()
    }

    /**
     * Sends [value] to [topic]. When [key] is null a unique random key is
     * generated so records spread across partitions.
     */
    fun produce(topic: String, value: String, key: String? = null) {
        mProducer.send(ProducerRecord(topic, key ?: "${System.nanoTime()}${RandomUtil.randomString(20)}", value))
    }

    /**
     * Subscribes to [topics] and starts an endless poll loop on the internal
     * single-thread executor, handing every record to [action].
     *
     * @throws IllegalStateException when [topics] or [action] was not supplied
     */
    fun startConsumer() {
        // Bind to locals so the lambda below captures proven non-null references.
        val subscribedTopics = checkNotNull(topics) { "topics must be provided to start a consumer" }
        val recordAction = checkNotNull(action) { "action must be provided to start a consumer" }
        mConsumer.subscribe(subscribedTopics)
        singleThreadExecutor.execute {
            while (true) {
                val records: ConsumerRecords<String, String> =
                    mConsumer.poll(Duration.ofMillis(100)) ?: continue
                records.forEach { recordAction.apply(it) }
            }
        }
    }
}
/** Ad-hoc manual driver: creates two topics on the target cluster. */
fun main() {
    val helper = KafkaHelper("50.1.43.110:9092", "group3", listOf("testr2p8")) {
        "println".kInfo(it.value().toString() + " group1 consumer1 ${it.partition()} ${it.offset()} ${it.key()}")
    }
    // helper.deleteTopic("testr2p8")
    helper.createTopic("camera_status_r2p16", 16, 2)
    helper.createTopic("camera_tag_r2p16", 16, 2)
    // helper.startConsumer()
    // ThreadUtil.sleep(5000)
    // (11000..12000).toList().forEach {
    //     helper.produce("testr2p8", "1gentest${it}")
    // }
    // println("end1")
}