co.decodable.sdk.pipeline.testing.PipelineTestContext (decodable-pipeline-sdk)
A software development kit for implementing Apache Flink jobs and running them on Decodable
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright Decodable, Inc.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package co.decodable.sdk.pipeline.testing;
import co.decodable.sdk.pipeline.EnvironmentAccess;
import co.decodable.sdk.pipeline.util.Incubating;
import java.lang.System.Logger.Level;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
/**
* Provides access to Decodable streams during testing as well as the ability to run custom Flink
* jobs.
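 *
 * <p>A minimal usage sketch (the {@code testEnvironment} variable, the job class, and the stream
 * names below are illustrative placeholders, not part of this SDK):
 *
 * <pre>{@code
 * try (PipelineTestContext ctx = new PipelineTestContext(testEnvironment)) {
 *   ctx.stream("purchase-orders").add(new StreamRecord<>("{\"order_id\": 1}"));
 *   ctx.runJobAsync(PurchaseOrderProcessingJob::main);
 *   StreamRecord<String> processed = ctx.stream("processed-orders").takeOne().get();
 * }
 * }</pre>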
*/
@Incubating
public class PipelineTestContext implements AutoCloseable {
private static final System.Logger LOGGER = System.getLogger(PipelineTestContext.class.getName());
private final TestEnvironment testEnvironment;
  private final KafkaProducer<String, String> producer;
  private final Map<String, DecodableStreamImpl> streams;
private final ExecutorService executorService;
/** Creates a new testing context, using the given {@link TestEnvironment}. */
public PipelineTestContext(TestEnvironment testEnvironment) {
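    // expose the test environment to the code under test, which reads it via EnvironmentAccess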
EnvironmentAccess.setEnvironment(testEnvironment);
this.testEnvironment = testEnvironment;
this.producer =
        new KafkaProducer<>(producerProperties(testEnvironment.bootstrapServers()));
this.streams = new HashMap<>();
this.executorService = Executors.newCachedThreadPool();
}
private static Properties producerProperties(String bootstrapServers) {
var props = new Properties();
props.put("bootstrap.servers", bootstrapServers);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
return props;
}
private static Properties consumerProperties(String bootstrapServers) {
var consumerProps = new Properties();
consumerProps.put("bootstrap.servers", bootstrapServers);
consumerProps.put(
"key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
consumerProps.put(
"value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
consumerProps.put("auto.offset.reset", "earliest");
consumerProps.put("group.id", "my-group");
return consumerProps;
}
  /** Returns a stream for the given name, creating it on first access. */
  public DecodableStream<String> stream(String name) {
    return streams.computeIfAbsent(
        name,
        n -> {
          // create and subscribe the consumer only when the stream is first requested,
          // so repeated calls for the same name don't leak unclosed consumers
          KafkaConsumer<String, String> consumer =
              new KafkaConsumer<>(consumerProperties(testEnvironment.bootstrapServers()));
          consumer.subscribe(Collections.singleton(testEnvironment.topicFor(n)));
          return new DecodableStreamImpl(n, consumer);
        });
  }
/** Asynchronously executes the given Flink job main method. */
  public void runJobAsync(ThrowingConsumer<String[]> jobMainMethod, String... args)
throws Exception {
executorService.submit(
() -> {
try {
jobMainMethod.accept(args);
} catch (InterruptedException e) {
LOGGER.log(Level.INFO, "Job aborted");
} catch (Exception e) {
LOGGER.log(Level.ERROR, "Job failed", e);
}
});
}
@Override
public void close() throws Exception {
try {
producer.close();
executorService.shutdownNow();
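      // give the interrupted job thread a brief grace period to wind down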
executorService.awaitTermination(100, TimeUnit.MILLISECONDS);
for (DecodableStreamImpl stream : streams.values()) {
stream.consumer.close();
}
} catch (Exception e) {
throw new RuntimeException("Couldn't close testing context", e);
} finally {
EnvironmentAccess.resetEnvironment();
}
}
/**
* A {@link Consumer} variant which allows for declared checked exception types.
*
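 * <p>This allows passing a Flink job's {@code main} method directly, e.g. as a
 * {@code ThrowingConsumer<String[]>} via a method reference such as {@code MyJob::main}
 * ({@code MyJob} being a hypothetical job class whose {@code main} declares checked exceptions).
 *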
 * @param <T> The consumed data type.
*/
@FunctionalInterface
  public interface ThrowingConsumer<T> {
void accept(T t) throws Exception;
}
  private class DecodableStreamImpl implements DecodableStream<String> {
    private final String streamName;
    private final KafkaConsumer<String, String> consumer;
    private final List<ConsumerRecord<String, String>> consumed;
    public DecodableStreamImpl(String streamName, KafkaConsumer<String, String> consumer) {
this.streamName = streamName;
this.consumer = consumer;
this.consumed = new ArrayList<>();
}
@Override
    public void add(StreamRecord<String> streamRecord) {
      Future<RecordMetadata> sent =
producer.send(
new ProducerRecord<>(testEnvironment.topicFor(streamName), streamRecord.value()));
// wait for record to be ack-ed
try {
sent.get();
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException("Couldn't send record", e);
}
}
@Override
    public Future<StreamRecord<String>> takeOne() {
      return ((CompletableFuture<List<StreamRecord<String>>>) take(1)).thenApply(l -> l.get(0));
}
@Override
    public Future<List<StreamRecord<String>>> take(int n) {
return CompletableFuture.supplyAsync(
() -> {
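            // poll until at least n records have been buffered for this stream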
while (consumed.size() < n) {
              ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(20));
              for (ConsumerRecord<String, String> record : records) {
consumed.add(record);
}
}
            List<StreamRecord<String>> result =
consumed.subList(0, n).stream()
.map(cr -> new StreamRecord<>(cr.value()))
.collect(Collectors.toList());
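            // drop the returned records so subsequent take() calls only see newer ones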
consumed.subList(0, n).clear();
return result;
},
executorService);
}
}
}