com.ibm.cp4waiops.connectors.sdk.KafkaConsumerThread
A developer SDK for creating connectors for CP4WAIOps.
package com.ibm.cp4waiops.connectors.sdk;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Collection;
import java.util.Collections;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.serialization.StringDeserializer;
import io.cloudevents.CloudEvent;
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.MeterRegistry;
public class KafkaConsumerThread implements Runnable {
    KafkaConsumer<String, CloudEvent> consumer;
private static final Logger logger = Logger.getLogger(KafkaConsumerThread.class.getName());
ConnectorBase connector;
ConnectorManagerConfig config;
protected Counter kafkaConsumerTotal;
protected Counter kafkaConsumerDropped;
    /**
     * Creates a consumer thread that subscribes to the given topics and forwards
     * each successfully deserialized CloudEvent to the connector.
     *
     * @param kafkaConfiguration
     *            base Kafka client properties; deserializer, offset-reset, auto-commit,
     *            and group ID settings are added by this constructor
     * @param topics
     *            the Kafka topics to subscribe to
     * @param connector
     *            the connector that receives consumed events
     * @param config
     *            the connector manager configuration, used to derive the consumer group ID
     * @param connectorName
     *            the connector name, used to look up its client ID
     * @param metricsRegistry
     *            registry for consumer metrics; may be null to disable metrics
     */
    public KafkaConsumerThread(Properties kafkaConfiguration, Collection<String> topics, ConnectorBase connector,
            ConnectorManagerConfig config, String connectorName, MeterRegistry metricsRegistry) {
logger.info("Creating Kafka consumer thread. Consuming topics " + topics);
// Add deserializer and consumer properties
kafkaConfiguration.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
kafkaConfiguration.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, CloudEventDeserializerCustom.class);
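        // CloudEventDeserializerCustom is expected to return null rather than throw
        // when a record cannot be deserialized; null values are dropped in run()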
kafkaConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
kafkaConfiguration.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
kafkaConfiguration.put(ConsumerConfig.GROUP_ID_CONFIG, getGroupId(connector.getConnectorID(),
connector.getComponentName(), config.getConnectorClientID(connectorName)));
consumer = new KafkaConsumer<>(kafkaConfiguration);
        // Subscribe the consumer to the requested topics
consumer.subscribe(topics);
this.connector = connector;
if (metricsRegistry != null) {
kafkaConsumerTotal = metricsRegistry.counter("connector.sdk.kafka.consumer.total");
kafkaConsumerDropped = metricsRegistry.counter("connector.sdk.kafka.consumer.dropped");
}
}
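    /**
     * Builds the Kafka consumer group ID. For example (hypothetical values),
     * getGroupId("conn-1", "events", "client-a") returns
     * "cp4waiops-cartridge.bridge.client-a.conn-1.events".
     */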
protected String getGroupId(String connectionId, String componentName, String clientId) {
return "cp4waiops-cartridge.bridge." + clientId + "." + connectionId + "." + componentName;
}
@Override
public void run() {
try {
while (true) {
try {
                ConsumerRecords<String, CloudEvent> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, CloudEvent> record : records) {
try {
if (kafkaConsumerTotal != null) {
kafkaConsumerTotal.increment();
}
// If deserialization fails, null will be returned
if (record.value() == null) {
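                            // Commit past the bad record right away so it is not
                            // re-consumed after a rebalance, even with auto-commit on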
consumer.commitSync(
Collections.singletonMap(new TopicPartition(record.topic(), record.partition()),
new OffsetAndMetadata(record.offset() + 1)));
try {
                                StringBuilder sb = new StringBuilder();
                                for (Header header : record.headers()) {
                                    sb.append(header.key()).append('=')
                                            .append(new String(header.value(), StandardCharsets.UTF_8)).append(' ');
                                }
logger.log(Level.WARNING, "Problematic log offset=" + record.offset() + " topic="
+ record.topic() + " headers=" + sb.toString());
} catch (Exception e) {
                                logger.log(Level.INFO, "Failed to log headers of the dropped record", e);
}
if (kafkaConsumerDropped != null) {
kafkaConsumerDropped.increment();
}
continue;
}
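                            // Hand the successfully deserialized CloudEvent to the connector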
connector.onAction("Generated Direct To Kafka Consumption", record.value());
} catch (SerializationException e) {
// The deserializer should catch all problems, but just in case, handle the exception
consumer.commitSync(
Collections.singletonMap(new TopicPartition(record.topic(), record.partition()),
new OffsetAndMetadata(record.offset() + 1)));
logger.log(Level.WARNING, "Unexpected deserialization exception", e);
} catch (ConnectorException ce) {
logger.log(Level.WARNING, "Creating Kafka consumer thread. Consuming topics ", ce);
}
}
} catch (Exception e) {
logger.log(Level.WARNING, "Error reading Kafka, consumer polling will be reset", e);
}
}
} catch (Exception e) {
logger.log(Level.SEVERE, "Error reading Kafka messages in polling loop", e);
} finally {
            // Clean up the consumer on exit so the group can rebalance promptly
            consumer.unsubscribe();
            consumer.close();
            logger.log(Level.INFO, "Kafka consumer unsubscribed and closed");
}
}
}
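A minimal sketch of how a connector might start this thread, assuming an existing ConnectorBase implementation (connector) and its ConnectorManagerConfig (config); the broker address, topic name, and connector name below are hypothetical placeholders:

// Hypothetical wiring; a real deployment supplies its full Kafka client configuration.
// SimpleMeterRegistry (io.micrometer.core.instrument.simple) is an in-memory
// Micrometer registry, convenient for local runs.
Properties kafkaProps = new Properties();
kafkaProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker
KafkaConsumerThread consumerThread = new KafkaConsumerThread(kafkaProps,
        Collections.singletonList("cp4waiops-cartridge.connector-events"), // hypothetical topic
        connector, config, "my-connector", new SimpleMeterRegistry());
new Thread(consumerThread, "connector-kafka-consumer").start();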