jchanghong.kafka.KafkaHelper.kt

package jchanghong.kafka

import cn.hutool.core.util.ClassUtil
import cn.hutool.core.util.RandomUtil
import jchanghong.log.kError
import jchanghong.log.kInfo
import org.apache.kafka.clients.admin.*
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.consumer.ConsumerRecords
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.KafkaFuture
import java.time.Duration
import java.util.*
import java.util.concurrent.ExecutionException
import java.util.concurrent.Executors
import java.util.function.Function

/** One instance wraps one Kafka configuration; to talk to multiple Kafka clusters, create multiple instances. */
class KafkaHelper(
    val bootstrap: String,
    val groupId: String,
    val topics: List<String>? = null,
    val action: Function<ConsumerRecord<String, String>, Unit>? = null
) {
    private val singleThreadExecutor = Executors.newSingleThreadExecutor()
    private val mProps: Properties by lazy { getAndSetProps(bootstrap, groupId) }
    private val mProducer: KafkaProducer<String, String> by lazy { KafkaProducer(mProps) }
    private val mConsumer: KafkaConsumer<String, String> by lazy { KafkaConsumer(mProps) }
    private val adminClient: AdminClient by lazy { KafkaAdminClient.create(mProps) }
    // Configure Kafka properties
    private fun getAndSetProps(bootstrap: String, groupId: String? = null): Properties {
        val props = Properties()
        props["bootstrap.servers"] = bootstrap
        props.put("retries", 2) // 重试次数
        props.put("batch.size", 16384) // 批量发送大小
        props.put("buffer.memory", 33554432) // 缓存大小,根据本机内存大小配置
//        props.put("linger.ms", 1000) // 发送频率,满足任务一个条件发送

        props.put("acks", "all")
        if (!groupId.isNullOrBlank()) {
            props.setProperty("group.id", groupId)
        }
        props.setProperty("enable.auto.commit", "true")
        props.setProperty("auto.commit.interval.ms", "1000")
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
        props["key.serializer"] = "org.apache.kafka.common.serialization.StringSerializer"
        props["value.serializer"] = "org.apache.kafka.common.serialization.StringSerializer"
        return props
    }

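    /** Creates topic [name] with [p] partitions and replication factor [r], waiting for the admin result and logging the outcome. */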
    @JvmOverloads
    fun createTopic(name: String, p: Int = 8, r: Short = 1) {
        val newTopic = NewTopic(name, p, r)
        val newTopicList: MutableCollection<NewTopic> = ArrayList()
        newTopicList.add(newTopic)
        val createTopicsResult = adminClient.createTopics(newTopicList)
        for (entry in createTopicsResult.values()) {
            try {
                entry.value.get()
                Thread.sleep(2000)
            } catch (e: Exception) {
                kError(e.message,e)
            }
            kInfo("createTopic ${entry.key}")
        }
    }

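    /** Deletes topic [name], waiting for the admin result and logging the outcome. */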
    fun deleteTopic(name: String) {
        val deleteTopicsResult = adminClient.deleteTopics(Arrays.asList(name))
        for ((k, v) in deleteTopicsResult.values()) {
            try {
                v.get()
                Thread.sleep(2000)
            } catch (e: Exception) {
                kError(e.message,e)
            }
            kInfo("deleteTopic $k")
        }
    }

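    /** Returns the names of all topics visible to the admin client, or an empty set if the lookup fails. */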
    fun listAllTopic(): Set<String> {
        val result: ListTopicsResult = adminClient.listTopics()
        val names = result.names()
        try {
            return names.get()
        } catch (e: InterruptedException) {
            kError(e.message, e)
        } catch (e: ExecutionException) {
            kError(e.message, e)
        }
        return emptySet()
    }

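    /** Returns the description of topic [name], or null if the broker returns no description. */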
    fun getTopic(name: String): TopicDescription? {
        val describeTopics: DescribeTopicsResult = adminClient.describeTopics(Arrays.asList(name))
        val values: Collection<KafkaFuture<TopicDescription>> = describeTopics.values().values
        if (values.isEmpty()) {
            kInfo("找不到描述信息")
        } else {
            for (value in values) {
                return value.get()
            }
        }
        return null
    }

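    /** Sends [value] to [topic]; if [key] is null, a random key is generated so records spread across partitions. */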
    fun produce(topic: String, value: String, key: String? = null) {
        mProducer.send(ProducerRecord(topic, key ?: "${System.nanoTime()}${RandomUtil.randomString(20)}", value))
    }

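    /** Subscribes to [topics] and polls on a single background thread, passing every record to [action]. Requires [topics] and [action] to be set. */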
    fun startConsumer() {
        checkNotNull(topics)
        checkNotNull(action)
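        // Resolve subscribe(List) and poll(Duration) reflectively so the helper can run against both old and new kafka-clients versions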
        val method = ClassUtil.getDeclaredMethod(KafkaConsumer::class.java, "subscribe", List::class.java)
        val pollMethod = ClassUtil.getDeclaredMethod(KafkaConsumer::class.java, "poll", Duration::class.java)
        if (method != null) {
//            old kafka-clients versions
            method.invoke(mConsumer, topics)
//            mConsumer.subscribe(topics)
            singleThreadExecutor.execute {
                while (true) {
                    val poll = if (pollMethod != null) mConsumer.poll(Duration.ofMillis(100)) else mConsumer.poll(0)
                    val records: ConsumerRecords<String, String> = poll ?: continue
                    records.forEach {
                        action.apply(it)
                    }
                }
            }
        }
    }
}


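// Demo: consumes topic "testr2p8" from the broker below and logs every record; the commented-out lines show topic management and producing.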
fun main() {
    val kafkaHelper = KafkaHelper("50.1.43.110:9092", "group3", listOf("testr2p8"), Function {
        "println".kInfo(it.value().toString() + " group1 consumer1 ${it.partition()} ${it.offset()}  ${it.key()}")
    })
//    kafkaHelper.deleteTopic("testr2p8")
//    kafkaHelper.createTopic("camera_status_r2p16",16,2)
//    kafkaHelper.createTopic("camera_tag_r2p16",16,2)
    kafkaHelper.startConsumer()
//    ThreadUtil.sleep(5000)
//    (11000..12000).toList().forEach {
//        kafkaHelper.produce("testr2p8","1gentest${it}")
//    }
//    println("end1")
}



