org.apache.pekko.kafka.Metadata.scala

Apache Pekko Kafka Connector is a Reactive Enterprise Integration library for Java and Scala, based on Reactive Streams and Apache Pekko.

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2014 - 2016 Softwaremill <https://softwaremill.com/>
 * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.kafka

import java.util.Optional

import org.apache.pekko
import pekko.actor.NoSerializationVerificationNeeded
import pekko.util.ccompat._
import pekko.util.ccompat.JavaConverters._
import org.apache.kafka.clients.consumer.{ OffsetAndMetadata, OffsetAndTimestamp }
import org.apache.kafka.common.{ PartitionInfo, TopicPartition }

import scala.util.Try

/**
 * Messages for Kafka metadata fetching via [[KafkaConsumerActor]].
 *
 * NOTE: Processing of these requests blocks the actor loop. The KafkaConsumerActor is configured to run on its
 * own dispatcher, so, just as with other remote calls to Kafka, the blocking happens within a designated thread pool.
 * However, issuing these requests while consuming may affect performance and, in extreme cases, even cause timeouts.
 */
object Metadata {

  sealed trait Request
  sealed trait Response

  /**
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#listTopics()]]
   */
  case object ListTopics extends Request with NoSerializationVerificationNeeded
  final case class Topics(response: Try[Map[String, List[PartitionInfo]]])
      extends Response
      with NoSerializationVerificationNeeded {

    /**
     * Java API
     */
    def getResponse: Optional[java.util.Map[String, java.util.List[PartitionInfo]]] =
      response
        .map { m =>
          Optional.of(m.view.mapValues(_.asJava).toMap.asJava)
        }
        .getOrElse(Optional.empty())
  }

  /**
   * Java API:
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#listTopics()]]
   */
  def createListTopics: ListTopics.type = ListTopics

  /**
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#partitionsFor()]]
   */
  final case class GetPartitionsFor(topic: String) extends Request with NoSerializationVerificationNeeded
  final case class PartitionsFor(response: Try[List[PartitionInfo]])
      extends Response
      with NoSerializationVerificationNeeded {

    /**
     * Java API
     */
    def getResponse: Optional[java.util.List[PartitionInfo]] =
      response.map(i => Optional.of(i.asJava)).getOrElse(Optional.empty())
  }

  /**
   * Java API:
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#partitionsFor()]]
   */
  def createGetPartitionsFor(topic: String): GetPartitionsFor = GetPartitionsFor(topic)

  /**
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#beginningOffsets()]]
   *
   * Warning: KafkaConsumer documentation states that this method may block indefinitely if the partition does not exist.
   */
  final case class GetBeginningOffsets(partitions: Set[TopicPartition])
      extends Request
      with NoSerializationVerificationNeeded
  final case class BeginningOffsets(response: Try[Map[TopicPartition, Long]])
      extends Response
      with NoSerializationVerificationNeeded {

    /**
     * Java API
     */
    def getResponse: Optional[java.util.Map[TopicPartition, java.lang.Long]] =
      response
        .map { m =>
          Optional.of(m.view.mapValues(Long.box).toMap.asJava)
        }
        .getOrElse(Optional.empty())
  }

  /**
   * Java API:
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#beginningOffsets()]]
   *
   * Warning: KafkaConsumer documentation states that this method may block indefinitely if the partition does not exist.
   */
  def createGetBeginningOffsets(partitions: java.util.Set[TopicPartition]): GetBeginningOffsets =
    GetBeginningOffsets(partitions.asScala.toSet)

  /**
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#endOffsets()]]
   *
   * Warning: KafkaConsumer documentation states that this method may block indefinitely if the partition does not exist.
   */
  final case class GetEndOffsets(partitions: Set[TopicPartition]) extends Request with NoSerializationVerificationNeeded
  final case class EndOffsets(response: Try[Map[TopicPartition, Long]])
      extends Response
      with NoSerializationVerificationNeeded {

    /**
     * Java API
     */
    def getResponse: Optional[java.util.Map[TopicPartition, java.lang.Long]] =
      response
        .map { m =>
          Optional.of(m.view.mapValues(Long.box).toMap.asJava)
        }
        .getOrElse(Optional.empty())
  }

  /**
   * Java API:
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#endOffsets()]]
   *
   * Warning: KafkaConsumer documentation states that this method may block indefinitely if the partition does not exist.
   */
  def createGetEndOffsets(partitions: java.util.Set[TopicPartition]): GetEndOffsets =
    GetEndOffsets(partitions.asScala.toSet)

  /**
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#offsetsForTimes()]]
   *
   * Warning: KafkaConsumer documentation states that this method may block indefinitely if the partition does not exist.
   */
  final case class GetOffsetsForTimes(timestampsToSearch: Map[TopicPartition, Long])
      extends Request
      with NoSerializationVerificationNeeded
  final case class OffsetsForTimes(response: Try[Map[TopicPartition, OffsetAndTimestamp]])
      extends Response
      with NoSerializationVerificationNeeded {

    /**
     * Java API
     */
    def getResponse: Optional[java.util.Map[TopicPartition, OffsetAndTimestamp]] =
      response
        .map { m =>
          Optional.of(m.asJava)
        }
        .getOrElse(Optional.empty())
  }

  /**
   * Java API:
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#offsetsForTimes()]]
   *
   * Warning: KafkaConsumer documentation states that this method may block indefinitely if the partition does not exist.
   */
  def createGetOffsetForTimes(timestampsToSearch: java.util.Map[TopicPartition, java.lang.Long]): GetOffsetsForTimes =
    GetOffsetsForTimes(timestampsToSearch.asScala.view.mapValues(_.toLong).toMap)

  /**
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#committed()]]
   */
  @deprecated("use `GetCommittedOffsets`", "Alpakka Kafka 2.0.3")
  final case class GetCommittedOffset(partition: TopicPartition) extends Request with NoSerializationVerificationNeeded

  @deprecated("use `CommittedOffsets`", "Alpakka Kafka 2.0.3")
  final case class CommittedOffset(response: Try[OffsetAndMetadata], requestedPartition: TopicPartition)
      extends Response
      with NoSerializationVerificationNeeded {

    /**
     * Java API
     */
    def getResponse: Optional[OffsetAndMetadata] = Optional.ofNullable(response.toOption.orNull)
  }

  /**
   * Java API:
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#committed()]]
   */
  @deprecated("use `createGetCommittedOffsets`", "Alpakka Kafka 2.0.3")
  def createGetCommittedOffset(partition: TopicPartition): GetCommittedOffset = GetCommittedOffset(partition)

  /**
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#committed()]]
   */
  final case class GetCommittedOffsets(partitions: Set[TopicPartition])
      extends Request
      with NoSerializationVerificationNeeded
  final case class CommittedOffsets(response: Try[Map[TopicPartition, OffsetAndMetadata]])
      extends Response
      with NoSerializationVerificationNeeded {

    /**
     * Java API
     */
    def getResponse: Optional[java.util.Map[TopicPartition, OffsetAndMetadata]] =
      Optional.ofNullable(response.toOption.map(_.asJava).orNull)
  }

  /**
   * Java API:
   * [[org.apache.kafka.clients.consumer.KafkaConsumer#committed()]]
   */
  def createGetCommittedOffsets(partitions: java.util.Set[TopicPartition]): GetCommittedOffsets =
    GetCommittedOffsets(partitions.asScala.toSet)
}
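
A minimal usage sketch (not part of the source file above): the Request messages defined here are sent to a KafkaConsumerActor with the ask pattern and answered with the matching Response type. The object name, broker address "localhost:9092", group id, and String deserializers below are placeholder assumptions for illustration only.

import org.apache.pekko
import pekko.actor.{ ActorRef, ActorSystem }
import pekko.kafka.{ ConsumerSettings, KafkaConsumerActor, Metadata }
import pekko.pattern.ask
import pekko.util.Timeout
import org.apache.kafka.common.serialization.StringDeserializer

import scala.concurrent.Future
import scala.concurrent.duration._

object MetadataUsageSketch extends App {
  implicit val system: ActorSystem = ActorSystem("metadata-sketch")
  implicit val askTimeout: Timeout = Timeout(5.seconds)
  import system.dispatcher

  // Placeholder connection settings; adjust the broker address and group id for your environment.
  val consumerSettings =
    ConsumerSettings(system, new StringDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("metadata-sketch")

  // A KafkaConsumerActor answers Metadata.Request messages with the matching Metadata.Response.
  val consumer: ActorRef = system.actorOf(KafkaConsumerActor.props(consumerSettings))

  // Ask for all topics; the reply carries a Try, so broker failures surface as a Failure value.
  val topics: Future[Metadata.Topics] =
    (consumer ? Metadata.ListTopics).mapTo[Metadata.Topics]

  topics.foreach { reply =>
    reply.response.foreach(topicMap => topicMap.keys.foreach(println))
    system.terminate()
  }
}

The same queries are also available through the connector's MetadataClient wrapper, which hides the ask pattern behind Future-returning methods; the raw form above simply shows where the Request/Response pairs defined in Metadata.scala come into play.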



