com.mageddo.tobby.producer.kafka.SimpleJdbcKafkaProducerAdapter

package com.mageddo.tobby.producer.kafka;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import com.mageddo.tobby.producer.ProducerJdbc;

import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.kafka.common.serialization.Serializer;

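/**
 * Adapts the Kafka Producer API to a JDBC-backed producer: records handed to send(...) are
 * serialized and persisted through {@link JdbcKafkaProducer} instead of being published to a
 * broker. Callbacks and the returned futures are completed on an internal daemon thread pool,
 * and the Kafka transaction API is deliberately unsupported.
 */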
public class SimpleJdbcKafkaProducerAdapter<K, V> implements Producer<K, V> {

  final ExecutorService executorService;
  private final JdbcKafkaProducer<K, V> jdbcKafkaProducer;

  public SimpleJdbcKafkaProducerAdapter(
      Serializer<K> keySerializer, Serializer<V> valueSerializer, ProducerJdbc producerJdbc
  ) {
    this(new JdbcKafkaProducer<>(
        producerJdbc, keySerializer, valueSerializer
    ));
  }

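  // Defaults to a small daemon thread pool so pending callbacks never prevent JVM shutdown.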
  public SimpleJdbcKafkaProducerAdapter(JdbcKafkaProducer<K, V> jdbcKafkaProducer) {
    this(Executors.newFixedThreadPool(5, r -> {
      Thread t = Executors.defaultThreadFactory().newThread(r);
      t.setDaemon(true);
      return t;
    }), jdbcKafkaProducer);
  }

  public SimpleJdbcKafkaProducerAdapter(
      ExecutorService executorService, JdbcKafkaProducer<K, V> jdbcKafkaProducer
  ) {
    this.executorService = executorService;
    this.jdbcKafkaProducer = jdbcKafkaProducer;
  }

  @Override
  public void initTransactions() {
    this.transactionUnsupportedError();
  }

  @Override
  public void beginTransaction() throws ProducerFencedException {
    this.transactionUnsupportedError();
  }

  @Override
  public void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets,
      String consumerGroupId)
      throws ProducerFencedException {
    this.transactionUnsupportedError();
  }

  @Override
  public void commitTransaction() throws ProducerFencedException {
    this.transactionUnsupportedError();
  }

  @Override
  public void abortTransaction() throws ProducerFencedException {
    this.transactionUnsupportedError();
  }

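  // Persists the record through JDBC synchronously, then wraps the resulting metadata in a
  // Future resolved by the internal executor.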
  @Override
  public Future<RecordMetadata> send(ProducerRecord<K, V> record) {
    final RecordMetadata produced = this.save(record);
    return this.buildPromise(produced);
  }

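  // Persists the record first, then invokes the callback asynchronously on the internal
  // executor with the resulting metadata and a null exception.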
  @Override
  public Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback) {
    final RecordMetadata metadata = this.save(record);
    return this.executorService.submit(() -> {
      callback.onCompletion(metadata, null);
      return metadata;
    });
  }

  @Override
  public void flush() {
    // No-op: each record is persisted to the database synchronously during send,
    // so there is nothing buffered to flush.
  }

  @Override
  public List<PartitionInfo> partitionsFor(String topic) {
    return Collections.emptyList();
  }

  @Override
  public Map<MetricName, ? extends Metric> metrics() {
    return Collections.emptyMap();
  }

  @Override
  public void close() {
    this.executorService.shutdown();
  }

  @Override
  public void close(long timeout, TimeUnit unit) {
    try {
      this.executorService.shutdown();
      this.executorService.awaitTermination(timeout, unit);
    } catch (InterruptedException e) {
      this.executorService.shutdownNow();
      Thread.currentThread().interrupt(); // restore the interrupt flag for callers
    }
  }

  private void transactionUnsupportedError() {
    throw new UnsupportedOperationException(
        "This is a jdbc producer, no kafka transactions are necessary"
    );
  }

  private Future<RecordMetadata> buildPromise(RecordMetadata metadata) {
    return this.executorService.submit(() -> metadata);
  }

  private RecordMetadata save(ProducerRecord<K, V> record) {
    return this.jdbcKafkaProducer.send(record);
  }

}
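
A minimal usage sketch, assuming String keys and values and a ProducerJdbc already wired to the application's database (its construction is omitted here and depends on the tobby version in use). Names outside com.mageddo.tobby and org.apache.kafka are hypothetical:

package com.example.outbox; // hypothetical package for this sketch

import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import com.mageddo.tobby.producer.ProducerJdbc;
import com.mageddo.tobby.producer.kafka.SimpleJdbcKafkaProducerAdapter;

public class OutboxSendExample {

  // producerJdbc is assumed to be already configured against the application's DataSource.
  static void sendGreeting(ProducerJdbc producerJdbc) throws Exception {
    final Producer<String, String> producer = new SimpleJdbcKafkaProducerAdapter<>(
        new StringSerializer(), new StringSerializer(), producerJdbc
    );
    // The record is serialized and stored via JDBC rather than sent to a broker;
    // the Future resolves to the metadata returned by JdbcKafkaProducer.
    producer.send(new ProducerRecord<>("greetings", "key-1", "Hello, outbox!")).get();
    producer.close();
  }
}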