io.streamthoughts.jikkou.kafka.reporter.KafkaChangeReporter Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of jikkou-provider-kafka Show documentation
Integration between Apache Kafka and Jikkou
The newest version!
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright (c) The original authors
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.streamthoughts.jikkou.kafka.reporter;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.streamthoughts.jikkou.common.utils.AsyncUtils;
import io.streamthoughts.jikkou.core.JikkouInfo;
import io.streamthoughts.jikkou.core.exceptions.ConfigException;
import io.streamthoughts.jikkou.core.extension.ExtensionContext;
import io.streamthoughts.jikkou.core.io.Jackson;
import io.streamthoughts.jikkou.core.reconciler.ChangeResult;
import io.streamthoughts.jikkou.core.reporter.ChangeReporter;
import io.streamthoughts.jikkou.kafka.internals.KafkaRecord;
import io.streamthoughts.jikkou.kafka.internals.admin.AdminClientContext;
import io.streamthoughts.jikkou.kafka.internals.producer.DefaultProducerFactory;
import io.streamthoughts.jikkou.kafka.internals.producer.KafkaRecordSender;
import io.streamthoughts.jikkou.kafka.internals.producer.ProducerFactory;
import io.streamthoughts.jikkou.kafka.internals.producer.ProducerRequestResult;
import io.streamthoughts.jikkou.kafka.reporter.ce.CloudEventEntity;
import io.streamthoughts.jikkou.kafka.reporter.ce.CloudEventEntityBuilder;
import io.streamthoughts.jikkou.kafka.reporter.ce.CloudEventExtension;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.Stream;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This reporter can be used to send change results into a Kafka topic as Cloud Events.
 */
public class KafkaChangeReporter implements ChangeReporter {
// Class-scoped SLF4J logger for reporting progress (e.g. number of changes reported).
private static final Logger LOG = LoggerFactory.getLogger(KafkaChangeReporter.class);
// NOTE(review): presumably the partition count used when creating the reporting topic
// (see checkIfTopicNeedToBeCreated) — usage is not visible in this chunk, confirm.
public static final int NUM_PARTITIONS = 1;
// Reporter configuration (topic name, event source, producer config); set in init().
private KafkaChangeReporterConfig configuration;
// Mapper used to serialize events; defaults to the shared JSON mapper, may be injected.
private ObjectMapper objectMapper = Jackson.JSON_OBJECT_MAPPER;
// Factory for the Kafka producer; injected via constructor, otherwise built in init().
private ProducerFactory producerFactory;
/**
* Creates a new {@link KafkaChangeReporter} instance.
*/
public KafkaChangeReporter() {
super();
}
/**
 * Creates a new {@link KafkaChangeReporter} instance.
 *
 * @param producerFactory the producer-client to be used for sending events.
 * @param objectMapper    the mapper used to serialize change results; must not be {@code null}.
 * @throws NullPointerException if {@code producerFactory} or {@code objectMapper} is {@code null}.
 */
public KafkaChangeReporter(final @NotNull ProducerFactory producerFactory,
final @NotNull ObjectMapper objectMapper) {
this.producerFactory = Objects.requireNonNull(producerFactory, "producerFactory cannot be null");
this.objectMapper = Objects.requireNonNull(objectMapper, "objectMapper cannot be null");
}
/**
 * {@inheritDoc}
 */
@Override
public void init(@NotNull ExtensionContext context) throws ConfigException {
    this.configuration = new KafkaChangeReporterConfig(context.appConfiguration());
    // A factory injected through the constructor takes precedence; nothing more to do.
    if (this.producerFactory != null) {
        return;
    }
    // Otherwise build a default byte-array producer factory from the reporter configuration.
    this.producerFactory = new DefaultProducerFactory<>(
            this.configuration.producerConfig(),
            new ByteArraySerializer(),
            new ByteArraySerializer()
    );
}
/**
* {@inheritDoc}
**/
@Override
public void report(List results) {
LOG.info("Starting reporting for {} changes", results.size());
checkIfTopicNeedToBeCreated();
final String topic = configuration.topicName();
final String source = configuration.eventSource();
Stream stream = filterRelevantChangeResults(results);
List> records = stream.map(result -> {
CloudEventEntity
© 2015 - 2025 Weber Informatics LLC | Privacy Policy