/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.util;

import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorOffset;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorOffsets;
import org.apache.kafka.connect.runtime.rest.errors.BadRequestException;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public final class SinkUtils {

    public static final String KAFKA_TOPIC_KEY = "kafka_topic";
    public static final String KAFKA_PARTITION_KEY = "kafka_partition";
    public static final String KAFKA_OFFSET_KEY = "kafka_offset";

    private SinkUtils() {}

    public static String consumerGroupId(String connector) {
        return "connect-" + connector;
    }
    // Converts a consumer group's committed offsets into the ConnectorOffsets REST entity,
    // using the kafka_topic / kafka_partition / kafka_offset keys defined above.
    public static ConnectorOffsets consumerGroupOffsetsToConnectorOffsets(Map<TopicPartition, OffsetAndMetadata> consumerGroupOffsets) {
        List<ConnectorOffset> connectorOffsets = new ArrayList<>();
        for (Map.Entry<TopicPartition, OffsetAndMetadata> topicPartitionOffset : consumerGroupOffsets.entrySet()) {
            Map<String, Object> partition = new HashMap<>();
            partition.put(KAFKA_TOPIC_KEY, topicPartitionOffset.getKey().topic());
            partition.put(KAFKA_PARTITION_KEY, topicPartitionOffset.getKey().partition());
            connectorOffsets.add(new ConnectorOffset(partition,
                    Collections.singletonMap(KAFKA_OFFSET_KEY, topicPartitionOffset.getValue().offset())));
        }

        return new ConnectorOffsets(connectorOffsets);
    }
    /**
     * Ensure that the provided partitions (keys in the {@code partitionOffsets} map) look like:
     * <pre>
     *     {
     *       "kafka_topic": "topic"
     *       "kafka_partition": 3
     *     }
     * </pre>
     *
     * and that the provided offsets (values in the {@code partitionOffsets} map) look like:
     * <pre>
     *     {
     *       "kafka_offset": 1000
     *     }
     * </pre>
     *
     * and then parse them into a mapping from {@link TopicPartition}s to their corresponding {@link Long}
     * valued offsets.
     *
     * @param partitionOffsets the partitions to offset map that needs to be validated and parsed; may not be null or empty
     * @return the parsed mapping from {@link TopicPartition}s to their corresponding {@link Long} valued offsets; may not be null or empty
     *
     * @throws BadRequestException if the provided offsets aren't in the expected format
     */
    public static Map<TopicPartition, Long> parseSinkConnectorOffsets(Map<Map<String, ?>, Map<String, ?>> partitionOffsets) {
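        // The original listing is truncated at this point. The body below is a sketch that
        // reconstructs the contract documented in the Javadoc above: validate that each partition
        // map carries kafka_topic/kafka_partition and each offset map carries kafka_offset, parse
        // them into TopicPartition -> Long entries, and throw BadRequestException on malformed
        // input. It is illustrative rather than a verbatim copy of the upstream implementation.
        Map<TopicPartition, Long> parsedOffsetMap = new HashMap<>();

        for (Map.Entry<Map<String, ?>, Map<String, ?>> partitionOffset : partitionOffsets.entrySet()) {
            Map<String, ?> partitionMap = partitionOffset.getKey();
            if (partitionMap == null) {
                throw new BadRequestException("The partition for a sink connector offset cannot be null or missing");
            }
            if (!partitionMap.containsKey(KAFKA_TOPIC_KEY) || !partitionMap.containsKey(KAFKA_PARTITION_KEY)) {
                throw new BadRequestException(String.format("The partition for a sink connector offset must contain the keys '%s' and '%s'",
                        KAFKA_TOPIC_KEY, KAFKA_PARTITION_KEY));
            }

            String topic = String.valueOf(partitionMap.get(KAFKA_TOPIC_KEY));
            int partition;
            try {
                // Accept both numeric and string partition values from the JSON request payload.
                partition = Integer.parseInt(String.valueOf(partitionMap.get(KAFKA_PARTITION_KEY)));
            } catch (Exception e) {
                throw new BadRequestException("Failed to parse the Kafka partition value in the provided offsets: '"
                        + partitionMap.get(KAFKA_PARTITION_KEY) + "'. Partition values for sink connectors need to be integers.", e);
            }
            TopicPartition tp = new TopicPartition(topic, partition);

            Map<String, ?> offsetMap = partitionOffset.getValue();
            if (offsetMap == null) {
                // A null offset value is treated as a request to reset the offset for this partition.
                parsedOffsetMap.put(tp, null);
                continue;
            }
            if (!offsetMap.containsKey(KAFKA_OFFSET_KEY)) {
                throw new BadRequestException(String.format("The offset for a sink connector should either be null or contain the key '%s'",
                        KAFKA_OFFSET_KEY));
            }
            long offset;
            try {
                // Accept both numeric and string offset values from the JSON request payload.
                offset = Long.parseLong(String.valueOf(offsetMap.get(KAFKA_OFFSET_KEY)));
            } catch (Exception e) {
                throw new BadRequestException("Failed to parse the Kafka offset value in the provided offsets: '"
                        + offsetMap.get(KAFKA_OFFSET_KEY) + "'. Offset values for sink connectors need to be integers.", e);
            }
            parsedOffsetMap.put(tp, offset);
        }

        return parsedOffsetMap;
    }
}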