// io.prestosql.plugin.kafka.KafkaSimpleConsumerManager (Maven / Gradle / Ivy)
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.kafka;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import io.airlift.log.Logger;
import io.prestosql.spi.HostAddress;
import io.prestosql.spi.NodeManager;
import kafka.javaapi.consumer.SimpleConsumer;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
import java.util.Map;
import static java.lang.Math.toIntExact;
import static java.util.Objects.requireNonNull;
/**
 * Manages connections to the Kafka nodes. A worker may connect to multiple Kafka nodes depending on the segments and partitions
 * it needs to process. According to the Kafka source code, a Kafka {@link kafka.javaapi.consumer.SimpleConsumer} is thread-safe.
 * <p>
 * Consumers are created lazily, one per broker {@link HostAddress}, and cached for reuse; they are
 * all closed when the injector tears this manager down.
 */
public class KafkaSimpleConsumerManager
{
    private static final Logger log = Logger.get(KafkaSimpleConsumerManager.class);

    // Parameterized cache: broker address -> shared SimpleConsumer. The raw
    // LoadingCache type would make getUnchecked() return Object and break the
    // typed getConsumer()/tearDown() call sites below.
    private final LoadingCache<HostAddress, SimpleConsumer> consumerCache;

    private final NodeManager nodeManager;
    private final int connectTimeoutMillis;
    private final int bufferSizeBytes;

    /**
     * @param kafkaConnectorConfig supplies the connect timeout and buffer size for new consumers
     * @param nodeManager used to derive a client id unique to the current Presto node
     */
    @Inject
    public KafkaSimpleConsumerManager(
            KafkaConnectorConfig kafkaConnectorConfig,
            NodeManager nodeManager)
    {
        this.nodeManager = requireNonNull(nodeManager, "nodeManager is null");
        requireNonNull(kafkaConnectorConfig, "kafkaConnectorConfig is null");
        this.connectTimeoutMillis = toIntExact(kafkaConnectorConfig.getKafkaConnectTimeout().toMillis());
        this.bufferSizeBytes = toIntExact(kafkaConnectorConfig.getKafkaBufferSize().toBytes());
        // Unbounded cache; entries live until tearDown() closes them.
        this.consumerCache = CacheBuilder.newBuilder().build(CacheLoader.from(this::createConsumer));
    }

    /**
     * Closes every cached consumer. A failure to close one consumer is logged
     * and does not prevent the remaining consumers from being closed.
     */
    @PreDestroy
    public void tearDown()
    {
        for (Map.Entry<HostAddress, SimpleConsumer> entry : consumerCache.asMap().entrySet()) {
            try {
                entry.getValue().close();
            }
            catch (Exception e) {
                log.warn(e, "While closing consumer %s:", entry.getKey());
            }
        }
    }

    /**
     * Returns the cached consumer for the given broker, creating it on first use.
     *
     * @param host broker address; must not be null
     */
    public SimpleConsumer getConsumer(HostAddress host)
    {
        requireNonNull(host, "host is null");
        return consumerCache.getUnchecked(host);
    }

    /**
     * Cache loader: builds a new consumer for a broker, tagging it with a
     * client id that identifies this Presto node.
     */
    private SimpleConsumer createConsumer(HostAddress host)
    {
        log.info("Creating new Consumer for %s", host);
        return new SimpleConsumer(host.getHostText(),
                host.getPort(),
                connectTimeoutMillis,
                bufferSizeBytes,
                "presto-kafka-" + nodeManager.getCurrentNode().getNodeIdentifier());
    }
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy