io.kroxylicious.testing.kafka.junit5ext.KafkaClusterExtension (testing-junit5-extension)
A JUnit 5 extension that provisions KafkaCluster implementations for tests and supports running the same test over multiple cluster configurations.
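In addition to plain field/parameter injection (see the class javadoc below), the extension can run a @TestTemplate method once per cluster configuration supplied via @DimensionMethodSource or @ConstraintsMethodSource. A minimal sketch (the class, method and test names are illustrative; as the extension enforces, the source method must be static, parameterless, and return a Stream, Collection, or array of annotations meta-annotated with @KafkaClusterConstraint):

import java.lang.annotation.Annotation;
import java.util.stream.Stream;

import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;

import io.kroxylicious.testing.kafka.api.KafkaCluster;
import io.kroxylicious.testing.kafka.junit5ext.DimensionMethodSource;
import io.kroxylicious.testing.kafka.junit5ext.KafkaClusterExtension;

@ExtendWith(KafkaClusterExtension.class)
class MultiConfigTest {

    // Must be static and zero-arg; each returned annotation describes one cluster configuration.
    static Stream<Annotation> clusterConfigs() {
        return Stream.of(/* @KafkaClusterConstraint-meta-annotated annotations, e.g. broker-count variants */);
    }

    @TestTemplate
    void runsOncePerConfig(@DimensionMethodSource("clusterConfigs") KafkaCluster cluster) {
        // invoked once for each annotation returned by clusterConfigs()
    }
}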
/*
* Copyright Kroxylicious Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.kroxylicious.testing.kafka.junit5ext;
import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Executable;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.nio.ByteBuffer;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.ServiceLoader;
import java.util.UUID;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.ByteBufferDeserializer;
import org.apache.kafka.common.serialization.ByteBufferSerializer;
import org.apache.kafka.common.serialization.BytesDeserializer;
import org.apache.kafka.common.serialization.BytesSerializer;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.DoubleDeserializer;
import org.apache.kafka.common.serialization.DoubleSerializer;
import org.apache.kafka.common.serialization.FloatDeserializer;
import org.apache.kafka.common.serialization.FloatSerializer;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.ListDeserializer;
import org.apache.kafka.common.serialization.ListSerializer;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.serialization.UUIDDeserializer;
import org.apache.kafka.common.serialization.UUIDSerializer;
import org.apache.kafka.common.serialization.VoidDeserializer;
import org.apache.kafka.common.serialization.VoidSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.awaitility.Awaitility;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.Extension;
import org.junit.jupiter.api.extension.ExtensionConfigurationException;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ParameterContext;
import org.junit.jupiter.api.extension.ParameterResolutionException;
import org.junit.jupiter.api.extension.ParameterResolver;
import org.junit.jupiter.api.extension.TestTemplateInvocationContext;
import org.junit.jupiter.api.extension.TestTemplateInvocationContextProvider;
import org.junit.platform.commons.support.AnnotationSupport;
import org.junit.platform.commons.support.HierarchyTraversalMode;
import org.junit.platform.commons.util.ExceptionUtils;
import org.junit.platform.commons.util.ReflectionUtils;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import info.schnatterer.mobynamesgenerator.MobyNamesGenerator;
import io.kroxylicious.testing.kafka.api.KafkaCluster;
import io.kroxylicious.testing.kafka.api.KafkaClusterConstraint;
import io.kroxylicious.testing.kafka.api.KafkaClusterProvisioningStrategy;
import io.kroxylicious.testing.kafka.api.KroxyliciousTestInfo;
import io.kroxylicious.testing.kafka.common.ClientConfig;
import io.kroxylicious.testing.kafka.internal.AdminSource;
import static java.lang.System.Logger.Level.TRACE;
import static org.junit.platform.commons.support.ReflectionSupport.findFields;
import static org.junit.platform.commons.util.ReflectionUtils.makeAccessible;
/**
 * A JUnit 5 extension that allows declarative injection of a {@link KafkaCluster} into a test
 * via static or instance field(s) and/or parameters.
 *
 * <p>A simple example looks like:</p>
 * <pre>{@code
 * import io.kroxylicious.testing.kafka.api.KafkaCluster;
 * import io.kroxylicious.testing.kafka.junit5ext.KafkaClusterExtension;
 * import org.apache.kafka.clients.producer.Producer;
 * import org.apache.kafka.clients.producer.ProducerRecord;
 * import org.junit.jupiter.api.Test;
 * import org.junit.jupiter.api.extension.ExtendWith;
 *
 * @ExtendWith(KafkaClusterExtension.class) // <1>
 * class MyTest {
 *
 *     KafkaCluster cluster; // <2>
 *
 *     @Test
 *     public void testProducer(
 *             Producer<String, String> producer // <3>
 *     ) throws Exception {
 *         producer.send(new ProducerRecord<>("hello", "world")).get();
 *     }
 * }
 * }</pre>
 *
 * <p>Notes:</p>
 * <ol>
 * <li>You have to tell JUnit that you're using the extension using {@code @ExtendWith}.</li>
 * <li>An instance field of type {@link KafkaCluster} will cause a new cluster to be provisioned for
 * each test in the class. Alternatively you can use a parameter on a
 * {@code @Test}-annotated method. If you use a {@code static} field then a single
 * cluster will be provisioned for all the tests in the class.</li>
 * <li>Your test methods can declare {@code Producer}, {@code Consumer} and {@code Admin}-typed parameters.
 * They will be configured to bootstrap against the {@code cluster}.</li>
 * </ol>
 *
 * <h2>Injection rules</h2>
 * <p>The extension supports injecting clusters and clients:</p>
 * <ul>
 * <li>into fields of the test class</li>
 * <li>as parameters to {@code @BeforeAll}</li>
 * <li>as parameters to {@code @BeforeEach}</li>
 * <li>as parameters to test methods</li>
 * </ul>
 *
 * <p>To avoid collisions with other extensions, such as Mockito, we will only inject into fields which:</p>
 * <ul>
 * <li>have no annotations, or</li>
 * <li>are annotated only with annotations from the following packages:
 * {@code java.lang}, {@code org.junit}, {@code io.kroxylicious}.</li>
 * </ul>
 */
public class KafkaClusterExtension implements
ParameterResolver, BeforeEachCallback,
BeforeAllCallback, TestTemplateInvocationContextProvider {
private static final System.Logger LOGGER = System.getLogger(KafkaClusterExtension.class.getName());
private static final ExtensionContext.Namespace CLUSTER_NAMESPACE = ExtensionContext.Namespace.create(KafkaClusterExtension.class, KafkaCluster.class);
private static final ExtensionContext.Namespace ADMIN_NAMESPACE = ExtensionContext.Namespace.create(KafkaClusterExtension.class, Admin.class);
private static final ExtensionContext.Namespace PRODUCER_NAMESPACE = ExtensionContext.Namespace.create(KafkaClusterExtension.class, Producer.class);
private static final ExtensionContext.Namespace CONSUMER_NAMESPACE = ExtensionContext.Namespace.create(KafkaClusterExtension.class, Consumer.class);
/**
* The constant STARTING_PREFIX.
*/
public static final String STARTING_PREFIX = "WY9Br5K1vAfov_8jjJ3KUA";
/**
* Instantiates a new Kafka cluster extension.
*/
public KafkaClusterExtension() {
}
@Override
public boolean supportsTestTemplate(ExtensionContext context) {
Parameter[] parameters = context.getRequiredTestMethod().getParameters();
for (var parameter : parameters) {
if (!supportsParameter(parameter)) {
return false;
}
}
return true;
}
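    // Computes the n-ary cartesian product of the given domains: for example, domains
    // [[a, b], [x, y]] yield [[a, x], [a, y], [b, x], [b, y]], i.e. one tuple per combination,
    // taking one element from each domain in order.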
    private static List<? extends List<? extends Object>> cartesianProduct(List<List<Object>> domains) {
        if (domains.isEmpty()) {
            throw new IllegalArgumentException();
        }
        return _cartesianProduct(0, domains);
    }

    private static List<? extends List<? extends Object>> _cartesianProduct(int index, List<List<Object>> domains) {
        List<List<Object>> ret = new ArrayList<>();
        if (index == domains.size()) {
            ret.add(new ArrayList<>(domains.size()));
        }
        else {
            for (Object obj : domains.get(index)) {
                for (List tuple : _cartesianProduct(index + 1, domains)) {
                    tuple.add(0, obj);
                    ret.add(tuple);
                }
            }
        }
        return ret;
    }
    @Override
    public Stream<TestTemplateInvocationContext> provideTestTemplateInvocationContexts(ExtensionContext context) {
        Method testTemplateMethod = context.getRequiredTestMethod();
        Parameter[] parameters = testTemplateMethod.getParameters();
        Parameter parameter = Arrays.stream(parameters).filter(p -> KafkaCluster.class.isAssignableFrom(p.getType())).findFirst().get();
        DimensionMethodSource[] freeConstraintsSource = parameter.getAnnotationsByType(DimensionMethodSource.class);
        var lists = Arrays.stream(freeConstraintsSource).map(methodSource -> invokeDimensionMethodSource(context, methodSource)).toList();
        List<? extends List<Annotation>> cartesianProduct = lists.isEmpty() ? List.of() : (List) cartesianProduct((List) lists);
        ConstraintsMethodSource annotation = parameter.getAnnotation(ConstraintsMethodSource.class);
        var constraints = annotation != null ? invokeConstraintsMethodSource(context, annotation) : List.<List<Annotation>> of();
        return Stream.concat(cartesianProduct.stream(), constraints.stream())
                .map((List<Annotation> additionalConstraints) -> {
                    return new TestTemplateInvocationContext() {
                        @Override
                        public String getDisplayName(int invocationIndex) {
                            List<?> list = invocationIndex > cartesianProduct.size() ? constraints.get(invocationIndex - cartesianProduct.size() - 1)
                                    : cartesianProduct.get(invocationIndex - 1);
                            return list.toString();
                        }

                        @Override
                        public List<Extension> getAdditionalExtensions() {
                            return List.of(new ParameterResolver() {
                                @Override
                                public boolean supportsParameter(ParameterContext parameterContext,
                                                                 ExtensionContext extensionContext) {
                                    return KafkaClusterExtension.supportsParameter(parameterContext.getParameter());
                                }

                                @Override
                                public Object resolveParameter(ParameterContext parameterContext,
                                                               ExtensionContext extensionContext) {
                                    return KafkaClusterExtension.resolveParameter(parameterContext, extensionContext, additionalConstraints);
                                }
                            });
                        }
                    };
                });
    }
@NonNull
    private static List<? extends List<Annotation>> invokeConstraintsMethodSource(ExtensionContext context,
                                                                                  ConstraintsMethodSource methodSource) {
        Method testTemplateMethod = context.getRequiredTestMethod();
        Class<?> requiredTestClass = context.getRequiredTestClass();
        Object source;
        try {
            Method sourceMethod = getTargetMethod(requiredTestClass, methodSource.clazz(), methodSource.value());
            if (ReflectionUtils.isNotStatic(sourceMethod)) {
                throw new ParameterResolutionException("Method " + methodSource.value() + " given in @" + ConstraintsMethodSource.class.getSimpleName() +
                        " on " + requiredTestClass + " must be static");
            }
            else if (sourceMethod.getParameters().length != 0) {
                throw new ParameterResolutionException("Method " + methodSource.value() + " given in @" + ConstraintsMethodSource.class.getSimpleName() +
                        " on " + requiredTestClass + " cannot have any parameters");
            }
            Class<?> returnType = sourceMethod.getReturnType();
            // check return type is Stream<? extends Annotation>
            if (Stream.class.isAssignableFrom(returnType)) {
                Type genericReturnType = sourceMethod.getGenericReturnType();
                if (genericReturnType instanceof ParameterizedType pt) {
                    if (Stream.class.equals(pt.getRawType())
                            && pt.getActualTypeArguments()[0] instanceof Class<?> clsTypeArg
                            && !clsTypeArg.isAnnotation()) {
                        throw returnTypeError(testTemplateMethod, methodSource.value(), ConstraintsMethodSource.class, requiredTestClass);
                    }
                }
            }
            else if (Collection.class.isAssignableFrom(returnType)) {
                Type genericReturnType = sourceMethod.getGenericReturnType();
                if (genericReturnType instanceof ParameterizedType pt) {
                    if (Collection.class.equals(pt.getRawType())
                            && pt.getActualTypeArguments()[0] instanceof Class<?> clsTypeArg
                            && !clsTypeArg.isAnnotation()) {
                        throw returnTypeError(testTemplateMethod, methodSource.value(), ConstraintsMethodSource.class, requiredTestClass);
                    }
                }
            }
            else if (returnType.isArray()) {
                var elementType = returnType.getComponentType();
                if (!elementType.isAnnotation()) {
                    throw returnTypeError(testTemplateMethod, methodSource.value(), ConstraintsMethodSource.class, requiredTestClass);
                }
            }
            else {
                throw new ParameterResolutionException("Method " + methodSource.value() + " given in @" + ConstraintsMethodSource.class.getSimpleName() +
                        " on " + requiredTestClass + " must return a Stream, a Collection, or an array with Annotation type");
            }
            // TODO check that annotation is meta-annotated
            source = ReflectionUtils.makeAccessible(sourceMethod).invoke(null);
        }
        catch (ReflectiveOperationException e) {
            throw new ParameterResolutionException("Error invoking method " + methodSource.value() + " given in @" + ConstraintsMethodSource.class.getSimpleName() +
                    " on " + requiredTestClass, e);
        }
        return KafkaClusterExtension.<List<Annotation>> coerceToList(
                methodSource.value(), ConstraintsMethodSource.class,
                testTemplateMethod, requiredTestClass, source).stream()
                .map(list -> filterAnnotations(list, KafkaClusterConstraint.class))
                .toList();
    }
@NonNull
    private static List<Annotation> invokeDimensionMethodSource(ExtensionContext context,
                                                                DimensionMethodSource methodSource) {
        Method testTemplateMethod = context.getRequiredTestMethod();
        Class<?> requiredTestClass = context.getRequiredTestClass();
        Object source;
        try {
            Method sourceMethod = getTargetMethod(requiredTestClass, methodSource.clazz(), methodSource.value());
            if (ReflectionUtils.isNotStatic(sourceMethod)) {
                throw new ParameterResolutionException("Method " + methodSource.value() + " given in @" + DimensionMethodSource.class.getSimpleName() +
                        " on " + requiredTestClass + " must be static");
            }
            else if (sourceMethod.getParameters().length != 0) {
                throw new ParameterResolutionException("Method " + methodSource.value() + " given in @" + DimensionMethodSource.class.getSimpleName() +
                        " on " + requiredTestClass + " cannot have any parameters");
            }
            Class<?> returnType = sourceMethod.getReturnType();
            // check return type is Stream<? extends Annotation>
            if (Stream.class.isAssignableFrom(returnType)) {
                Type genericReturnType = sourceMethod.getGenericReturnType();
                if (genericReturnType instanceof ParameterizedType pt) {
                    if (Stream.class.equals(pt.getRawType())
                            && pt.getActualTypeArguments()[0] instanceof Class<?> clsTypeArg
                            && !clsTypeArg.isAnnotation()) {
                        throw returnTypeError(testTemplateMethod, methodSource.value(), DimensionMethodSource.class, requiredTestClass);
                    }
                }
            }
            else if (Collection.class.isAssignableFrom(returnType)) {
                Type genericReturnType = sourceMethod.getGenericReturnType();
                if (genericReturnType instanceof ParameterizedType pt) {
                    if (Collection.class.equals(pt.getRawType())
                            && pt.getActualTypeArguments()[0] instanceof Class<?> clsTypeArg
                            && !clsTypeArg.isAnnotation()) {
                        throw returnTypeError(testTemplateMethod, methodSource.value(), DimensionMethodSource.class, requiredTestClass);
                    }
                }
            }
            else if (returnType.isArray()) {
                var elementType = returnType.getComponentType();
                if (!elementType.isAnnotation()) {
                    throw returnTypeError(testTemplateMethod, methodSource.value(), DimensionMethodSource.class, requiredTestClass);
                }
            }
            else {
                throw new ParameterResolutionException("Method " + methodSource.value() + " given in @" + DimensionMethodSource.class.getSimpleName() +
                        " on " + requiredTestClass + " must return a Stream, a Collection, or an array with Annotation type");
            }
            source = sourceMethod.invoke(null);
        }
        catch (ReflectiveOperationException e) {
            throw new ParameterResolutionException("Error invoking method " + methodSource.value() + " given in @" + DimensionMethodSource.class.getSimpleName() +
                    " on " + requiredTestClass, e);
        }
        return filterAnnotations(coerceToList(
                methodSource.value(), DimensionMethodSource.class,
                testTemplateMethod, requiredTestClass, source), KafkaClusterConstraint.class);
    }
@NonNull
    private static Method getTargetMethod(Class<?> clazz, Class<?> methodClazz, String methodName) throws NoSuchMethodException {
        Class<?> target = methodClazz == null || methodClazz == Void.class ? clazz : methodClazz;
        return ReflectionUtils.makeAccessible(target.getDeclaredMethod(methodName));
    }
@SuppressWarnings("unchecked")
@NonNull
    private static <T> List<T> coerceToList(String methodName,
                                            Class<? extends Annotation> annotationType,
                                            Method testTemplateMethod, Class<?> requiredTestClass, Object source) {
        List<T> list;
        if (source instanceof Stream) {
            list = ((Stream<T>) source).toList();
        }
        else if (source instanceof List) {
            list = (List<T>) source;
        }
        else if (source instanceof Collection) {
            list = new ArrayList<>((Collection<T>) source);
        }
        else if (source instanceof Object[]) {
            list = Arrays.asList((T[]) source);
        }
        else {
            throw returnTypeError(testTemplateMethod, methodName, annotationType, requiredTestClass);
        }
        return list;
    }
@NonNull
    private static ParameterResolutionException returnTypeError(Method testTemplateMethod,
                                                                String methodName,
                                                                Class<? extends Annotation> annotationType,
                                                                Class<?> requiredTestClass) {
        return new ParameterResolutionException("Method " + methodName + " given in @" + annotationType.getSimpleName() +
                " on " + testTemplateMethod.getName() + "() of " + requiredTestClass + " must return a Stream, a Collection, or an array with Annotation type");
    }
    /**
     * The type Closeable.
     *
     * @param <T> the type parameter
     */
    static class Closeable<T extends AutoCloseable> implements ExtensionContext.Store.CloseableResource {
private final String clusterName;
private final T resource;
private final AnnotatedElement sourceElement;
/**
* Instantiates a new Closeable.
*
* @param sourceElement the source element
* @param clusterName the cluster name
* @param resource the resource
*/
public Closeable(AnnotatedElement sourceElement, String clusterName, T resource) {
this.sourceElement = sourceElement;
this.clusterName = clusterName;
this.resource = resource;
}
    /**
     * Gets the resource.
     *
     * @return the resource
     */
public T get() {
return resource;
}
@Override
public void close() throws Throwable {
LOGGER.log(TRACE, "Stopping '{0}' with cluster name '{1}' for {2}",
resource, clusterName, sourceElement);
resource.close();
}
}
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) throws ParameterResolutionException {
return !hasTestTemplateConfiguration(parameterContext.getDeclaringExecutable()) && supportsParameter(parameterContext.getParameter());
}
private boolean hasTestTemplateConfiguration(Executable executable) {
return executable.isAnnotationPresent(TestTemplate.class)
&& Arrays.stream(executable.getParameters()).anyMatch(
p -> p.getAnnotationsByType(DimensionMethodSource.class).length > 0 || p.getAnnotationsByType(ConstraintsMethodSource.class).length > 0);
}
@Override
public Object resolveParameter(
ParameterContext parameterContext,
ExtensionContext extensionContext)
throws ParameterResolutionException {
return resolveParameter(parameterContext, extensionContext, List.of());
}
/**
* Resolve parameter object.
*
* @param parameterContext the parameter context
* @param extensionContext the extension context
* @param extraConstraints the extra constraints
* @return the object
* @throws ParameterResolutionException the parameter resolution exception
*/
public static Object resolveParameter(
ParameterContext parameterContext,
ExtensionContext extensionContext,
            List<Annotation> extraConstraints)
throws ParameterResolutionException {
Parameter parameter = parameterContext.getParameter();
        Class<?> type = parameter.getType();
LOGGER.log(TRACE,
"test {0}: Resolving parameter ({1} {2})",
extensionContext.getUniqueId(),
type.getSimpleName(),
parameter.getName());
if (KafkaCluster.class.isAssignableFrom(type)) {
var paramType = type.asSubclass(KafkaCluster.class);
var constraints = getConstraintAnnotations(parameter, KafkaClusterConstraint.class);
constraints.addAll(extraConstraints);
return getCluster(parameter, paramType, constraints, extensionContext);
}
else if (Admin.class.isAssignableFrom(type)) {
var paramType = type.asSubclass(Admin.class);
return createAdmin("parameter " + parameter.getName(), parameter, paramType, extensionContext);
}
else if (Producer.class.isAssignableFrom(type)) {
var paramType = type.asSubclass(Producer.class);
Type paramGenericType = parameterContext.getDeclaringExecutable().getGenericParameterTypes()[parameterContext.getIndex()];
return createProducer("parameter " + parameter.getName(), parameter, (Class) paramType, paramGenericType,
extensionContext);
}
else if (Consumer.class.isAssignableFrom(type)) {
var paramType = type.asSubclass(Consumer.class);
Type paramGenericType = parameterContext.getDeclaringExecutable().getGenericParameterTypes()[parameterContext.getIndex()];
return createConsumer("parameter " + parameter.getName(), parameter, (Class) paramType, paramGenericType,
extensionContext);
}
else if (Topic.class.isAssignableFrom(type)) {
var paramType = type.asSubclass(Topic.class);
return createTopic("parameter " + parameter.getName(), parameter, paramType, type, extensionContext);
}
else {
throw new ExtensionConfigurationException("Could not resolve " + parameterContext);
}
}
    /**
     * Perform field injection for non-private, static fields
     * of type {@link KafkaCluster}, a Kafka client type ({@link Admin}, {@link Producer},
     * {@link Consumer}) or {@link Topic}.
     */
@Override
public void beforeAll(ExtensionContext context) throws Exception {
injectStaticFields(context, context.getRequiredTestClass());
}
    /**
     * Perform field injection for non-private, instance fields
     * of type {@link KafkaCluster}, a Kafka client type ({@link Admin}, {@link Producer},
     * {@link Consumer}) or {@link Topic}.
     */
@Override
public void beforeEach(ExtensionContext context) throws Exception {
context.getRequiredTestInstances().getAllInstances().forEach(instance -> injectInstanceFields(context, instance));
}
private static boolean supportsParameter(Parameter parameter) {
        Class<?> type = parameter.getType();
return KafkaCluster.class.isAssignableFrom(type) ||
((isKafkaClient(type) || isKafkaTopic(parameter.getType())) && isCandidate(parameter));
}
private static boolean isCandidate(AnnotatedElement annotatedElement) {
return noAnnotations(annotatedElement) || hasOnlySupportedAnnotations(annotatedElement);
}
    private static boolean isKafkaClient(Class<?> type) {
        return Admin.class.isAssignableFrom(type) || Producer.class.isAssignableFrom(type) || Consumer.class.isAssignableFrom(type);
    }

    private static boolean isKafkaTopic(Class<?> type) {
        return Topic.class.isAssignableFrom(type);
    }
private static boolean noAnnotations(AnnotatedElement parameter) {
return parameter.getAnnotations().length == 0;
}
    /**
     * We want to avoid conflicts with annotations such as mockito's @Mock. However, maintaining a list of
     * conflicting annotations would be mad. So it seems simpler to maintain a set of known safe annotations with which
     * we can still inject.
     */
private static boolean hasOnlySupportedAnnotations(AnnotatedElement parameter) {
boolean supported = true;
for (Annotation annotation : parameter.getAnnotations()) {
final String canonicalName = annotation.annotationType().getCanonicalName();
if (!canonicalName.startsWith("io.kroxylicious")
&& !canonicalName.startsWith("org.junit")
&& !canonicalName.startsWith("java.lang")) {
supported = false;
break;
}
}
return supported;
}
private void injectInstanceFields(ExtensionContext context, Object instance) {
injectFields(context, instance, instance.getClass(), ReflectionUtils::isNotStatic);
}
private void injectStaticFields(ExtensionContext context, Class> testClass) {
injectFields(context, null, testClass, ReflectionUtils::isStatic);
}
    private void injectFields(ExtensionContext context, Object testInstance, Class<?> testClass, Predicate<Field> predicate) {
findFields(
testClass,
field -> predicate.test(field) && KafkaCluster.class.isAssignableFrom(field.getType()),
HierarchyTraversalMode.BOTTOM_UP)
.forEach(field -> {
assertSupportedType("field", field.getType());
try {
var accessibleField = makeAccessible(field);
                        List<Annotation> constraints = getConstraintAnnotations(accessibleField, KafkaClusterConstraint.class);
accessibleField.set(testInstance,
getCluster(accessibleField, accessibleField.getType().asSubclass(KafkaCluster.class), constraints, context));
}
catch (Throwable t) {
ExceptionUtils.throwAsUncheckedException(t);
}
});
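        // Clusters are injected before clients, so a client field resolves against the most
        // recently provisioned (closest enclosing) cluster unless disambiguated with @Name.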
        final Map<Class<?>, List<Field>> fieldsByType = findFields(testClass,
field -> predicate.test(field) && isCandidate(field),
HierarchyTraversalMode.BOTTOM_UP)
.stream()
.collect(Collectors.groupingBy(Field::getType));
injectField(Admin.class, KafkaClusterExtension::createAdmin, context, testInstance, fieldsByType);
injectField(Producer.class, KafkaClusterExtension::createProducer, context, testInstance, fieldsByType);
injectField(Consumer.class, KafkaClusterExtension::createConsumer, context, testInstance, fieldsByType);
injectField(Topic.class, KafkaClusterExtension::createTopic, context, testInstance, fieldsByType);
}
@SuppressWarnings("unchecked")
    private static <T> void injectField(Class<T> clientType, Injector<T> injector, ExtensionContext context, Object testInstance,
                                        Map<Class<?>, List<Field>> fieldsByType) {
fieldsByType.entrySet().stream()
.filter(entry -> clientType.isAssignableFrom(entry.getKey()))
.map(Map.Entry::getValue)
.flatMap(List::stream)
.filter(field -> {
try {
return makeAccessible(field).get(testInstance) == null;
}
catch (IllegalAccessException e) {
ExceptionUtils.throwAsUncheckedException(e);
}
return false;
})
.forEach(field -> {
try {
makeAccessible(field).set(testInstance,
injector.inject(
"field " + field.getName(),
field,
(Class) field.getType().asSubclass(clientType),
field.getGenericType(),
context));
}
catch (Exception e) {
ExceptionUtils.throwAsUncheckedException(e);
}
});
}
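    // For example, a field declared Producer<Long, String> yields a LongSerializer for type
    // argument index 0 (the key) and a StringSerializer for index 1 (the value).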
@Nullable
    private static Serializer<?> getSerializerFromGenericType(Type type, int typeArgumentIndex) {
        Serializer<?> keySerializer = null;
        if (type instanceof ParameterizedType pt
                && pt.getRawType() instanceof Class<?> cls
                && Producer.class.isAssignableFrom(cls)) {
            // Field declared like Producer<K, V>, KafkaProducer<K, V>
            Type key = pt.getActualTypeArguments()[typeArgumentIndex];
            keySerializer = getSerializerFromType(key);
        }
        return keySerializer;
    }
    private static Serializer<?> getSerializerFromType(Type keyOrValueType) {
        Serializer<?> serializer = null;
        if (keyOrValueType instanceof Class<?>) {
if (keyOrValueType == String.class) {
serializer = new StringSerializer();
}
else if (keyOrValueType == Integer.class) {
serializer = new IntegerSerializer();
}
else if (keyOrValueType == Long.class) {
serializer = new LongSerializer();
}
else if (keyOrValueType == UUID.class) {
serializer = new UUIDSerializer();
}
else if (keyOrValueType == Float.class) {
serializer = new FloatSerializer();
}
else if (keyOrValueType == Double.class) {
serializer = new DoubleSerializer();
}
else if (keyOrValueType == byte[].class) {
serializer = new ByteArraySerializer();
}
else if (keyOrValueType == ByteBuffer.class) {
serializer = new ByteBufferSerializer();
}
else if (keyOrValueType == Bytes.class) {
serializer = new BytesSerializer();
}
else if (keyOrValueType == Void.class) {
serializer = new VoidSerializer();
}
}
else if (keyOrValueType instanceof ParameterizedType pt) {
if (List.class == pt.getRawType()) {
                return new ListSerializer<>(getSerializerFromType(pt.getActualTypeArguments()[0]));
}
}
return serializer;
}
@Nullable
    private static Deserializer<?> getDeserializerFromGenericType(Type type, int typeArgumentIndex) {
        Deserializer<?> deserializer = null;
        if (type instanceof ParameterizedType pt
                && pt.getRawType() instanceof Class<?> cls
                && Consumer.class.isAssignableFrom(cls)) {
            // Field declared like Consumer<K, V>, KafkaConsumer<K, V>
            Type key = pt.getActualTypeArguments()[typeArgumentIndex];
            deserializer = getDeserializerFromType(key);
        }
        return deserializer;
    }
    private static Deserializer<?> getDeserializerFromType(Type keyOrValueType) {
        Deserializer<?> deserializer = null;
        if (keyOrValueType instanceof Class<?>) {
if (keyOrValueType == String.class) {
deserializer = new StringDeserializer();
}
else if (keyOrValueType == Integer.class) {
deserializer = new IntegerDeserializer();
}
else if (keyOrValueType == Long.class) {
deserializer = new LongDeserializer();
}
else if (keyOrValueType == UUID.class) {
deserializer = new UUIDDeserializer();
}
else if (keyOrValueType == Float.class) {
deserializer = new FloatDeserializer();
}
else if (keyOrValueType == Double.class) {
deserializer = new DoubleDeserializer();
}
else if (keyOrValueType == byte[].class) {
deserializer = new ByteArrayDeserializer();
}
else if (keyOrValueType == ByteBuffer.class) {
deserializer = new ByteBufferDeserializer();
}
else if (keyOrValueType == Bytes.class) {
deserializer = new BytesDeserializer();
}
else if (keyOrValueType == Void.class) {
deserializer = new VoidDeserializer();
}
}
else if (keyOrValueType instanceof ParameterizedType pt) {
if (List.class == pt.getRawType()) {
var ta = pt.getActualTypeArguments()[0];
if (ta instanceof Class cls) {
                    return new ListDeserializer<>(cls, getDeserializerFromType(ta));
}
}
}
return deserializer;
}
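    // Generates an endless sequence of cluster ids: startingPrefix is right-padded with '0' to
    // the 22 characters of a URL-safe Base64-encoded UUID (16 bytes), and the decoded 128 bits
    // are treated as a counter (two longs) that is incremented on each call to next().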
    private static Iterable<String> uuidsFrom(String startingPrefix) {
if (startingPrefix.length() > 22) {
throw new IllegalArgumentException("startingPrefix is too long to be a Base64-encoded UUID prefix");
}
int pad = 22 - startingPrefix.length();
StringBuilder stringBuilder = new StringBuilder(startingPrefix);
for (int i = 0; i < pad; i++) {
stringBuilder.append('0');
}
byte[] decode = Base64.getUrlDecoder().decode(stringBuilder.toString());
var bb = ByteBuffer.wrap(decode);
var msb = bb.getLong();
var lsb = bb.getLong();
return () -> {
return new Iterator<>() {
long most = msb;
long least = lsb;
@Override
public boolean hasNext() {
return true;
}
                @Override
                public String next() {
                    bb.putLong(0, most).putLong(8, least);
                    var oldLeast = least;
                    least++;
                    if (oldLeast > 0 && least < 0) {
                        // handle overflow: if least overflows we need to increment most
                        most++;
                    }
                    // TODO avoid allocating Uuid.ZERO and Uuid.
                    return Base64.getUrlEncoder().withoutPadding().encodeToString(bb.array());
                }
};
};
}
    private static KafkaCluster findClusterFromContext(
            AnnotatedElement element,
            ExtensionContext extensionContext,
            Class<?> type,
            String description) {
ExtensionContext.Store store = extensionContext.getStore(CLUSTER_NAMESPACE);
String clusterName;
if (element.isAnnotationPresent(Name.class)
&& !element.getAnnotation(Name.class).value().isEmpty()) {
clusterName = element.getAnnotation(Name.class).value();
}
else {
clusterName = findLastUsedClusterId(store, uuidsFrom(STARTING_PREFIX));
if (clusterName == null || !clusterName.equals(STARTING_PREFIX)) {
throw new AmbiguousKafkaClusterException(
"KafkaCluster to associate with " + description + " is ambiguous, " +
"use @Name on the intended cluster and this element to disambiguate");
}
}
LOGGER.log(TRACE, "test {0}: decl {1}: Looking up cluster {2}",
extensionContext.getUniqueId(),
element,
clusterName);
        Closeable<KafkaCluster> last = store.get(clusterName,
                (Class<Closeable<KafkaCluster>>) (Class) Closeable.class);
Objects.requireNonNull(last);
return last.get();
}
    private static KafkaCluster getCluster(AnnotatedElement sourceElement,
                                           Class<? extends KafkaCluster> type,
                                           List<Annotation> constraints,
                                           ExtensionContext extensionContext) {
        // Semantic we want for clients without specified clusterId is "closest enclosing scope"
        // If we used generated keys A, B, C we could get this by iterating lookup from A, B until we found
        // an unused key, and using the last found
        // But if a user-chosen key collided with a generated one then this doesn't work.
        // However users are highly unlikely to choose any given UUID
        // so we just don't start allocating from UUID A, but somewhere random (possibly per KCE instance)
        // and reject user-chosen UUIDs in a small range from there
        // This makes the lookup path simple
        // Can also choose where in the UUID space we start (i.e. don't use one of the UUID versions
        // which the user is likely to use when choosing their ID).
ExtensionContext.Store store = extensionContext.getStore(CLUSTER_NAMESPACE);
String clusterName;
if (sourceElement.isAnnotationPresent(Name.class)
&& !sourceElement.getAnnotation(Name.class).value().isEmpty()) {
clusterName = sourceElement.getAnnotation(Name.class).value();
if (store.get(clusterName) != null && !constraints.isEmpty()) {
throw new ExtensionConfigurationException(
"A " + KafkaCluster.class.getSimpleName() + "-typed declaration with @Name(\"" + clusterName
+ "\") already exists, we cannot apply new constraints");
}
}
else {
var clusterIdIter = uuidsFrom(STARTING_PREFIX);
clusterName = findFirstUnusedClusterId(store, clusterIdIter);
}
LOGGER.log(TRACE,
"test {0}: decl {1}: cluster ''{2}'': Looking up cluster",
extensionContext.getUniqueId(),
sourceElement,
clusterName);
        Closeable<KafkaCluster> closeableCluster = store.getOrComputeIfAbsent(clusterName,
                __ -> {
                    return createCluster(extensionContext, clusterName, type, sourceElement, constraints);
                },
                (Class<Closeable<KafkaCluster>>) (Class) Closeable.class);
Objects.requireNonNull(closeableCluster);
KafkaCluster cluster = closeableCluster.get();
LOGGER.log(TRACE,
"test {0}: decl {1}: cluster ''{2}'': Starting",
extensionContext.getUniqueId(),
sourceElement,
clusterName);
return cluster;
}
    private static String findFirstUnusedClusterId(ExtensionContext.Store store, Iterable<String> clusterIdIter) {
var it = clusterIdIter.iterator();
while (true) {
String clusterId = it.next();
var cluster = store.get(clusterId);
if (cluster == null) {
return clusterId;
}
}
}
    private static String findLastUsedClusterId(ExtensionContext.Store store, Iterable<String> clusterIdIter) {
var it = clusterIdIter.iterator();
String last = null;
while (true) {
String clusterId = it.next();
var cluster = store.get(clusterId);
if (cluster == null) {
return last;
}
last = clusterId;
}
}
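    // Because generated cluster ids form a contiguous run starting at STARTING_PREFIX (see
    // findFirstUnusedClusterId), the last id present in the store identifies the most recently
    // provisioned cluster.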
private static Admin createAdmin(String description,
AnnotatedElement sourceElement,
                                     Class<? extends Admin> type,
ExtensionContext extensionContext) {
return createAdmin(description, sourceElement, type, Void.class, extensionContext);
}
private static Admin createAdmin(String description,
AnnotatedElement sourceElement,
                                     Class<? extends Admin> type,
Type genericType,
ExtensionContext extensionContext) {
KafkaCluster cluster = findClusterFromContext(sourceElement, extensionContext, type, description);
return extensionContext.getStore(ADMIN_NAMESPACE)
.