io.streamthoughts.jikkou.kafka.reconciler.service.KafkaConsumerGroupService (jikkou-provider-kafka)
Integration between Apache Kafka and Jikkou
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright (c) The original authors
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.streamthoughts.jikkou.kafka.reconciler.service;
import static io.streamthoughts.jikkou.kafka.KafkaLabelAndAnnotations.JIKKOU_IO_KAFKA_IS_SIMPLE_CONSUMER;
import io.streamthoughts.jikkou.common.utils.AsyncUtils;
import io.streamthoughts.jikkou.common.utils.Strings;
import io.streamthoughts.jikkou.core.exceptions.JikkouRuntimeException;
import io.streamthoughts.jikkou.core.models.ObjectMeta;
import io.streamthoughts.jikkou.kafka.collections.V1KafkaConsumerGroupList;
import io.streamthoughts.jikkou.kafka.internals.Futures;
import io.streamthoughts.jikkou.kafka.models.V1KafkaConsumerGroup;
import io.streamthoughts.jikkou.kafka.models.V1KafkaConsumerGroupMember;
import io.streamthoughts.jikkou.kafka.models.V1KafkaConsumerGroupStatus;
import io.streamthoughts.jikkou.kafka.models.V1KafkaConsumerOffset;
import io.streamthoughts.jikkou.kafka.models.V1KafkaNode;
import io.streamthoughts.jikkou.kafka.reconciler.service.KafkaOffsetSpec.ToEarliest;
import io.streamthoughts.jikkou.kafka.reconciler.service.KafkaOffsetSpec.ToLatest;
import io.streamthoughts.jikkou.kafka.reconciler.service.KafkaOffsetSpec.ToOffset;
import io.streamthoughts.jikkou.kafka.reconciler.service.KafkaOffsetSpec.ToTimestamp;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.ConsumerGroupDescription;
import org.apache.kafka.clients.admin.ConsumerGroupListing;
import org.apache.kafka.clients.admin.ListConsumerGroupOffsetsResult;
import org.apache.kafka.clients.admin.ListConsumerGroupOffsetsSpec;
import org.apache.kafka.clients.admin.ListConsumerGroupsOptions;
import org.apache.kafka.clients.admin.ListConsumerGroupsResult;
import org.apache.kafka.clients.admin.ListOffsetsResult;
import org.apache.kafka.clients.admin.OffsetSpec;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.ConsumerGroupState;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.TopicPartition;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
/**
* Service to manage Kafka Consumer Groups.
*/
public final class KafkaConsumerGroupService {
private static final Logger LOG = LoggerFactory.getLogger(KafkaConsumerGroupService.class);
private final AdminClient client;
/**
* Creates a new {@link KafkaConsumerGroupService} instance.
*
* @param client The AdminClient.
*/
public KafkaConsumerGroupService(final @NotNull AdminClient client) {
this.client = Objects.requireNonNull(client, "client cannot be null");
}
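    // A minimal setup sketch (not part of the original source): the configuration and the
    // bootstrap address below are placeholder assumptions, shown only to illustrate how the
    // service is wired to a Kafka AdminClient.
    //
    //   AdminClient adminClient = AdminClient.create(
    //       Map.of(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"));
    //   KafkaConsumerGroupService service = new KafkaConsumerGroupService(adminClient);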
/**
* Resets the Consumer Group offsets for the specified groupID and topics.
*
* @param groupId The group ID - cannot be {@code null}.
* @param topics The list of topics - cannot be {@code null}.
* @param offsetSpec The offset specification.
* @param dryRun Specify whether to run this method in dry-run.
* @return The V1KafkaConsumerGroup.
*/
public V1KafkaConsumerGroup resetConsumerGroupOffsets(final @NotNull String groupId,
                                                          final @NotNull List<String> topics,
final @NotNull KafkaOffsetSpec offsetSpec,
boolean dryRun) {
return switch (offsetSpec) {
// TO_EARLIEST
case ToEarliest ignored -> resetConsumerGroupOffsets(groupId, topics, OffsetSpec.earliest(), dryRun);
// TO_LATEST
case ToLatest ignored -> resetConsumerGroupOffsets(groupId, topics, OffsetSpec.latest(), dryRun);
// TO_TIMESTAMP
case ToTimestamp spec ->
resetConsumerGroupOffsets(groupId, topics, OffsetSpec.forTimestamp(spec.timestamp()), dryRun);
// TO_OFFSETS
case ToOffset spec -> {
// Get the partitions for the given topics.
                // Get the partitions for the given topics.
                CompletableFuture<Set<TopicPartition>> future = listTopicPartitions(topics);
                Map<TopicPartition, OffsetAndMetadata> offsets = AsyncUtils.getValueOrThrowException(future, JikkouRuntimeException::new)
                    .stream()
                    .collect(Collectors.toMap(Function.identity(), unused -> new OffsetAndMetadata(spec.offset())));
// Alter the consumer group offsets.
yield alterConsumerGroupOffsets(groupId, offsets, dryRun);
}
case null -> throw new IllegalArgumentException("offsetSpec cannot be null");
};
}
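    // Hypothetical usage sketch: the group ID and topic name are placeholders, and ToEarliest
    // is assumed to be constructible without arguments, as the switch above suggests.
    //
    //   V1KafkaConsumerGroup group = service.resetConsumerGroupOffsets(
    //       "my-group",                        // consumer group to reset
    //       List.of("orders"),                 // topics whose partitions are targeted
    //       new KafkaOffsetSpec.ToEarliest(),  // seek all partitions to the earliest offset
    //       true                               // dry-run: compute but do not apply the reset
    //   );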
/**
* Resets the Consumer Group offsets for the specified groupID and topics.
*
* @param groupId The group ID - cannot be {@code null}.
* @param topics The list of topics - cannot be {@code null}.
* @param offsetSpec The offset to reset to.
* @param dryRun Specify whether to run this method in dry-run.
* @return The V1KafkaConsumerGroup.
*/
public V1KafkaConsumerGroup resetConsumerGroupOffsets(@NotNull String groupId,
                                                          @NotNull List<String> topics,
@NotNull OffsetSpec offsetSpec,
boolean dryRun) {
if (Strings.isBlank(groupId)) {
throw new IllegalArgumentException("groupId cannot be null");
}
if (topics == null) {
throw new IllegalArgumentException("topics cannot be null");
}
// List offsets and Map to OffsetAndMetadata
CompletableFuture