
io.druid.firehose.kafka.KafkaEightSimpleConsumerFirehoseFactory (druid-kafka-eight-simple-consumer)
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.firehose.kafka;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.io.Closeables;
import com.metamx.emitter.EmittingLogger;
import io.druid.data.input.ByteBufferInputRowParser;
import io.druid.data.input.Committer;
import io.druid.data.input.FirehoseFactoryV2;
import io.druid.data.input.FirehoseV2;
import io.druid.data.input.InputRow;
import io.druid.firehose.kafka.KafkaSimpleConsumer.BytesMessageWithOffset;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
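/**
* A FirehoseFactoryV2 that ingests a Kafka 0.8 topic through Kafka's low-level
* "simple consumer" API: one consumer worker per configured partition, each
* buffering fetched messages in a bounded queue, with per-partition offsets
* restored from previously committed metadata so ingestion can resume where
* the last task stopped.
*/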
public class KafkaEightSimpleConsumerFirehoseFactory implements
FirehoseFactoryV2<ByteBufferInputRowParser>
{
private static final EmittingLogger log = new EmittingLogger(
KafkaEightSimpleConsumerFirehoseFactory.class
);
@JsonProperty
private final List<String> brokerList;
@JsonProperty
private final List<Integer> partitionIdList;
@JsonProperty
private final String clientId;
@JsonProperty
private final String feed;
@JsonProperty
private final int queueBufferLength;
@JsonProperty
private final boolean earliest;
private final List<PartitionConsumerWorker> consumerWorkers = new CopyOnWriteArrayList<>();
// Default capacity of each partition's in-memory message queue.
private static final int DEFAULT_QUEUE_BUFFER_LENGTH = 20000;
// Timeout, in milliseconds, for a single Kafka fetch request.
private static final int CONSUMER_FETCH_TIMEOUT = 10000;
@JsonCreator
public KafkaEightSimpleConsumerFirehoseFactory(
@JsonProperty("brokerList") List brokerList,
@JsonProperty("partitionIdList") List partitionIdList,
@JsonProperty("clientId") String clientId,
@JsonProperty("feed") String feed,
@JsonProperty("queueBufferLength") Integer queueBufferLength,
@JsonProperty("resetOffsetToEarliest") Boolean resetOffsetToEarliest
)
{
this.brokerList = brokerList;
Preconditions.checkArgument(
brokerList != null && brokerList.size() > 0,
"brokerList is null/empty"
);
this.partitionIdList = partitionIdList;
Preconditions.checkArgument(
partitionIdList != null && partitionIdList.size() > 0,
"partitionIdList is null/empty"
);
this.clientId = clientId;
Preconditions.checkArgument(
clientId != null && !clientId.isEmpty(),
"clientId is null/empty"
);
this.feed = feed;
Preconditions.checkArgument(
feed != null && !feed.isEmpty(),
"feed is null/empty"
);
this.queueBufferLength = queueBufferLength == null ? DEFAULT_QUEUE_BUFFER_LENGTH : queueBufferLength;
Preconditions.checkArgument(this.queueBufferLength > 0, "queueBufferLength must be positive number");
log.info("queueBufferLength loaded as [%s]", this.queueBufferLength);
this.earliest = resetOffsetToEarliest == null ? true : resetOffsetToEarliest.booleanValue();
log.info(
"if old offsets are not known, data from partition will be read from [%s] available offset.",
this.earliest ? "earliest" : "latest"
);
}
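/*
* Usage sketch: Jackson instantiates this factory from the "firehose" section
* of an ingestion spec through the @JsonCreator constructor above. A minimal
* example, assuming the extension is loaded and this class is registered under
* the type name "kafka-0.8-v2" (all values below are illustrative):
*
* "firehose": {
* "type": "kafka-0.8-v2",
* "brokerList": ["kafka01:9092", "kafka02:9092"],
* "partitionIdList": [0, 1, 2],
* "clientId": "druid-example-client",
* "feed": "example-topic",
* "queueBufferLength": 20000,
* "resetOffsetToEarliest": true
* }
*/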
private Map<Integer, Long> loadOffsetFromPreviousMetaData(Object lastCommit)
{
Map<Integer, Long> offsetMap = Maps.newHashMap();
if (lastCommit == null) {
return offsetMap;
}
if (lastCommit instanceof Map) {
// The committed metadata is a map of partitionId -> offset; keys and values
// round-trip through JSON, so parse them back from their string forms.
Map<?, ?> lastCommitMap = (Map<?, ?>) lastCommit;
for (Map.Entry<?, ?> entry : lastCommitMap.entrySet()) {
try {
int partitionId = Integer.parseInt(entry.getKey().toString());
long offset = Long.parseLong(entry.getValue().toString());
offsetMap.put(partitionId, offset);
}
catch (NumberFormatException e) {
log.error(e, "Failed to load offset from previous metadata [%s]", entry);
}
}
log.info("Loaded offset map [%s]", offsetMap);
}
return offsetMap;
}
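/*
* Sketch of the committed metadata this method consumes (illustrative values):
* after a task publishes a segment, the Committer persists a map from
* partitionId to the next offset to read, e.g. {"0": 1200, "1": 987}. On
* restart that object is handed back as lastCommit, and the method rebuilds
* a Map<Integer, Long> so each partition's consumer resumes where it stopped.
*/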