// Source artifact: org.springframework.kafka.core.DefaultKafkaConsumerFactory (from a Maven / Gradle / Ivy artifact listing)
/*
* Copyright 2016-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.kafka.core;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.function.Supplier;
import org.aopalliance.aop.Advice;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.apache.commons.logging.LogFactory;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.serialization.Deserializer;
import org.springframework.aop.framework.ProxyFactory;
import org.springframework.aop.support.NameMatchMethodPointcutAdvisor;
import org.springframework.beans.factory.BeanNameAware;
import org.springframework.core.log.LogAccessor;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
/**
* The {@link ConsumerFactory} implementation to produce new {@link Consumer} instances
* for provided {@link Map} {@code configs} and optional {@link Deserializer}s on each {@link #createConsumer()}
* invocation.
*
* If you are using {@link Deserializer}s that have no-arg constructors and require no setup, then simplest to
* specify {@link Deserializer} classes against {@link ConsumerConfig#KEY_DESERIALIZER_CLASS_CONFIG} and
* {@link ConsumerConfig#VALUE_DESERIALIZER_CLASS_CONFIG} keys in the {@code configs} passed to the
* {@link DefaultKafkaConsumerFactory} constructor.
*
* If that is not possible, but you are using {@link Deserializer}s that may be shared between all {@link Consumer}
* instances (and specifically that their close() method is a no-op), then you can pass in {@link Deserializer}
* instances for one or both of the key and value deserializers.
*
* If neither of the above is true then you may provide a {@link Supplier} for one or both {@link Deserializer}s
* which will be used to obtain {@link Deserializer}(s) each time a {@link Consumer} is created by the factory.
*
* @param <K> the key type.
* @param <V> the value type.
*
* @author Gary Russell
* @author Murali Reddy
* @author Artem Bilan
* @author Chris Gilbert
*/
public class DefaultKafkaConsumerFactory extends KafkaResourceFactory
implements ConsumerFactory, BeanNameAware {
private static final LogAccessor LOGGER = new LogAccessor(LogFactory.getLog(DefaultKafkaConsumerFactory.class));
private final Map configs;
private final List> listeners = new ArrayList<>();
private final List> postProcessors = new ArrayList<>();
private Supplier> keyDeserializerSupplier;
private Supplier> valueDeserializerSupplier;
private String beanName = "not.managed.by.Spring";
/**
 * Construct a factory with the provided configuration.
 * @param configs the configuration.
 */
public DefaultKafkaConsumerFactory(Map<String, Object> configs) {
	// Null suppliers delegate deserializer selection to the Kafka config keys.
	this(configs, () -> null, () -> null);
}
/**
 * Construct a factory with the provided configuration and deserializers.
 * The deserializer instances are shared by all consumers created by this factory;
 * they must be thread-safe and their {@code close()} should be a no-op.
 * @param configs the configuration.
 * @param keyDeserializer the key {@link Deserializer}.
 * @param valueDeserializer the value {@link Deserializer}.
 */
public DefaultKafkaConsumerFactory(Map<String, Object> configs,
		@Nullable Deserializer<K> keyDeserializer,
		@Nullable Deserializer<V> valueDeserializer) {

	this(configs, () -> keyDeserializer, () -> valueDeserializer);
}
/**
 * Construct a factory with the provided configuration and deserializer suppliers.
 * The suppliers are invoked for every consumer created, so they can return a fresh
 * (non-shared) deserializer instance each time.
 * @param configs the configuration.
 * @param keyDeserializerSupplier the key {@link Deserializer} supplier function.
 * @param valueDeserializerSupplier the value {@link Deserializer} supplier function.
 * @since 2.3
 */
public DefaultKafkaConsumerFactory(Map<String, Object> configs,
		@Nullable Supplier<Deserializer<K>> keyDeserializerSupplier,
		@Nullable Supplier<Deserializer<V>> valueDeserializerSupplier) {

	this.configs = new HashMap<>(configs); // defensive copy; caller's map is never mutated
	// Normalize null suppliers to () -> null so downstream code never needs a null check.
	this.keyDeserializerSupplier = keyDeserializerSupplier == null ? () -> null : keyDeserializerSupplier;
	this.valueDeserializerSupplier = valueDeserializerSupplier == null ? () -> null : valueDeserializerSupplier;
}
@Override
public void setBeanName(String name) {
	// Invoked by the Spring container; replaces the "not.managed.by.Spring" default.
	this.beanName = name;
}
/**
 * Set the key deserializer. The instance is shared by all consumers created by
 * this factory.
 * @param keyDeserializer the deserializer.
 */
public void setKeyDeserializer(@Nullable Deserializer<K> keyDeserializer) {
	this.keyDeserializerSupplier = () -> keyDeserializer;
}
/**
 * Set the value deserializer. The instance is shared by all consumers created by
 * this factory.
 * @param valueDeserializer the value deserializer.
 */
public void setValueDeserializer(@Nullable Deserializer<V> valueDeserializer) {
	this.valueDeserializerSupplier = () -> valueDeserializer;
}
/**
 * Return an unmodifiable snapshot of the configuration, with the bootstrap
 * servers resolved (via {@code checkBootstrap}) at call time.
 * @return the configuration properties.
 */
@Override
public Map<String, Object> getConfigurationProperties() {
	Map<String, Object> configs2 = new HashMap<>(this.configs);
	checkBootstrap(configs2); // may rewrite bootstrap.servers via the configured supplier
	return Collections.unmodifiableMap(configs2);
}
/**
 * Return the key deserializer obtained from the configured supplier,
 * or {@code null} if none was provided.
 * @return the key deserializer.
 */
@Override
public Deserializer<K> getKeyDeserializer() {
	return this.keyDeserializerSupplier.get();
}
/**
 * Return the value deserializer obtained from the configured supplier,
 * or {@code null} if none was provided.
 * @return the value deserializer.
 */
@Override
public Deserializer<V> getValueDeserializer() {
	return this.valueDeserializerSupplier.get();
}
/**
* Get the current list of listeners.
* @return the listeners.
* @since 2.5
*/
@Override
public List> getListeners() {
return Collections.unmodifiableList(this.listeners);
}
@Override
public List> getPostProcessors() {
return Collections.unmodifiableList(this.postProcessors);
}
/**
 * Add a listener, appended to the end of the list.
 * @param listener the listener.
 * @since 2.5
 */
@Override
public void addListener(Listener<K, V> listener) {
	Assert.notNull(listener, "'listener' cannot be null");
	this.listeners.add(listener);
}
/**
 * Add a listener at a specific index.
 * @param index the index (list position); an out-of-range index appends instead
 * of throwing {@link IndexOutOfBoundsException}.
 * @param listener the listener.
 * @since 2.5
 */
@Override
public void addListener(int index, Listener<K, V> listener) {
	Assert.notNull(listener, "'listener' cannot be null");
	if (index >= this.listeners.size()) {
		this.listeners.add(listener);
	}
	else {
		this.listeners.add(index, listener);
	}
}
/**
 * Add a post-processor applied to each new consumer before it is returned.
 * @param postProcessor the post-processor.
 * @since 2.5.3
 */
@Override
public void addPostProcessor(ConsumerPostProcessor<K, V> postProcessor) {
	Assert.notNull(postProcessor, "'postProcessor' cannot be null");
	this.postProcessors.add(postProcessor);
}
/**
 * Remove a post-processor.
 * @param postProcessor the post-processor.
 * @return true if removed.
 * @since 2.5.3
 */
@Override
public boolean removePostProcessor(ConsumerPostProcessor<K, V> postProcessor) {
	return this.postProcessors.remove(postProcessor);
}
/**
 * Remove a listener.
 * @param listener the listener.
 * @return true if removed.
 * @since 2.5
 */
@Override
public boolean removeListener(Listener<K, V> listener) {
	return this.listeners.remove(listener);
}
/**
 * Create a consumer with an explicit group id, client id prefix and suffix,
 * and no property overrides.
 * @param groupId the group id, overriding the factory configuration; may be null.
 * @param clientIdPrefix overrides the configured {@code client.id}; may be null.
 * @param clientIdSuffix appended to the client id; may be null.
 * @return the consumer.
 */
@Override
public Consumer<K, V> createConsumer(@Nullable String groupId, @Nullable String clientIdPrefix,
		@Nullable String clientIdSuffix) {

	return createKafkaConsumer(groupId, clientIdPrefix, clientIdSuffix, null);
}
/**
 * Create a consumer with an explicit group id, client id prefix/suffix, and
 * per-consumer property overrides.
 * @param groupId the group id, overriding the factory configuration; may be null.
 * @param clientIdPrefix overrides the configured {@code client.id}; may be null.
 * @param clientIdSuffixArg appended to the client id; may be null.
 * @param properties properties overriding the factory configuration; may be null.
 * @return the consumer.
 */
@Override
public Consumer<K, V> createConsumer(@Nullable String groupId, @Nullable String clientIdPrefix,
		@Nullable final String clientIdSuffixArg, @Nullable Properties properties) {

	return createKafkaConsumer(groupId, clientIdPrefix, clientIdSuffixArg, properties);
}
/**
 * Create a consumer without property overrides.
 * @param groupId the group id; may be null.
 * @param clientIdPrefix the client id prefix; may be null.
 * @param clientIdSuffixArg the client id suffix; may be null.
 * @return the consumer.
 * @deprecated in favor of
 * {@link #createKafkaConsumer(String, String, String, Properties)}.
 */
@Deprecated
protected Consumer<K, V> createKafkaConsumer(@Nullable String groupId, @Nullable String clientIdPrefix,
		@Nullable String clientIdSuffixArg) {

	return createKafkaConsumer(groupId, clientIdPrefix, clientIdSuffixArg, null);
}
/**
 * Create a consumer, adjusting the factory configuration with the supplied
 * group id, client id prefix/suffix and property overrides when necessary;
 * otherwise the base configuration is used unmodified.
 * @param groupId the group id; may be null.
 * @param clientIdPrefix the client id prefix; may be null.
 * @param clientIdSuffixArg the client id suffix; may be null.
 * @param properties per-consumer override properties; may be null.
 * @return the consumer.
 */
protected Consumer<K, V> createKafkaConsumer(@Nullable String groupId, @Nullable String clientIdPrefix,
		@Nullable String clientIdSuffixArg, @Nullable Properties properties) {

	boolean overrideClientIdPrefix = StringUtils.hasText(clientIdPrefix);
	String clientIdSuffix = clientIdSuffixArg;
	if (clientIdSuffix == null) {
		clientIdSuffix = "";
	}
	// The client.id needs rewriting when a prefix is supplied, or when a suffix is to be
	// appended to a client.id already present in the base configuration.
	boolean shouldModifyClientId = (this.configs.containsKey(ConsumerConfig.CLIENT_ID_CONFIG)
			&& StringUtils.hasText(clientIdSuffix)) || overrideClientIdPrefix;
	if (groupId == null
			&& (properties == null || properties.stringPropertyNames().size() == 0)
			&& !shouldModifyClientId) {
		// Fast path: nothing to override, use the base configuration as-is.
		return createKafkaConsumer(new HashMap<>(this.configs));
	}
	else {
		return createConsumerWithAdjustedProperties(groupId, clientIdPrefix, properties, overrideClientIdPrefix,
				clientIdSuffix, shouldModifyClientId);
	}
}
/**
 * Build the effective configuration by merging the override {@code properties}
 * and the adjusted group/client ids into a copy of the base configuration, then
 * create the consumer from it.
 * @param groupId the group id to apply; may be null.
 * @param clientIdPrefix the client id prefix (used when {@code overrideClientIdPrefix}).
 * @param properties per-consumer override properties; may be null.
 * @param overrideClientIdPrefix whether the prefix replaces the configured client id.
 * @param clientIdSuffix the (never null) suffix appended to the client id.
 * @param shouldModifyClientId whether the client id must be rewritten at all.
 * @return the consumer.
 */
private Consumer<K, V> createConsumerWithAdjustedProperties(String groupId, String clientIdPrefix,
		Properties properties, boolean overrideClientIdPrefix, String clientIdSuffix,
		boolean shouldModifyClientId) {

	Map<String, Object> modifiedConfigs = new HashMap<>(this.configs);
	if (groupId != null) {
		modifiedConfigs.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
	}
	if (shouldModifyClientId) {
		modifiedConfigs.put(ConsumerConfig.CLIENT_ID_CONFIG,
				(overrideClientIdPrefix ? clientIdPrefix
						: modifiedConfigs.get(ConsumerConfig.CLIENT_ID_CONFIG)) + clientIdSuffix);
	}
	if (properties != null) {
		// stringPropertyNames() also surfaces entries from nested default Properties,
		// which entrySet() would miss.
		Set<String> stringPropertyNames = properties.stringPropertyNames();
		stringPropertyNames
				.stream()
				.filter(name -> !name.equals(ConsumerConfig.CLIENT_ID_CONFIG)
						&& !name.equals(ConsumerConfig.GROUP_ID_CONFIG))
				.forEach(name -> modifiedConfigs.put(name, properties.getProperty(name)));
		// Also copy String-keyed entries whose values are not Strings (skipped above
		// because getProperty() only returns String values).
		properties.entrySet().stream()
				.filter(entry -> !entry.getKey().equals(ConsumerConfig.CLIENT_ID_CONFIG)
						&& !entry.getKey().equals(ConsumerConfig.GROUP_ID_CONFIG)
						&& !stringPropertyNames.contains(entry.getKey())
						&& entry.getKey() instanceof String)
				.forEach(entry -> modifiedConfigs.put((String) entry.getKey(), entry.getValue()));
		checkInaccessible(properties, modifiedConfigs);
	}
	return createKafkaConsumer(modifiedConfigs);
}
private void checkInaccessible(Properties properties, Map modifiedConfigs) {
List