io.deephaven.kafka.KeyOrValueSpecObjectProcessorImpl (deephaven-extensions-kafka)
Kafka: Integrating Engine tables with Kafka
//
// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
//
package io.deephaven.kafka;
import io.confluent.kafka.schemaregistry.SchemaProvider;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.deephaven.chunk.ObjectChunk;
import io.deephaven.chunk.WritableChunk;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.engine.table.ColumnDefinition;
import io.deephaven.engine.table.TableDefinition;
import io.deephaven.kafka.KafkaTools.Consume.KeyOrValueSpec;
import io.deephaven.kafka.KafkaTools.KeyOrValue;
import io.deephaven.kafka.KafkaTools.KeyOrValueIngestData;
import io.deephaven.kafka.ingest.KafkaStreamPublisher;
import io.deephaven.kafka.ingest.KeyOrValueProcessor;
import io.deephaven.kafka.ingest.MultiFieldChunkAdapter;
import io.deephaven.processor.NamedObjectProcessor;
import io.deephaven.processor.ObjectProcessor;
import io.deephaven.qst.type.Type;
import io.deephaven.util.mutable.MutableInt;
import org.apache.kafka.common.serialization.Deserializer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
/**
 * This implementation is useful for presenting an easier onboarding ramp and a better (and public) interface,
 * {@link KafkaTools.Consume#objectProcessorSpec(Deserializer, NamedObjectProcessor)}, for end-users. The
 * {@link ObjectProcessor} is a user-visible replacement for {@link KeyOrValueProcessor}. In the meantime, we adapt
 * into a {@link KeyOrValueProcessor} until {@link KafkaStreamPublisher} can be rewritten to take advantage of these
 * better interfaces.
 */
class KeyOrValueSpecObjectProcessorImpl<T> extends KeyOrValueSpec {
    private final Deserializer<? extends T> deserializer;
    private final NamedObjectProcessor<? super T> processor;

    KeyOrValueSpecObjectProcessorImpl(Deserializer<? extends T> deserializer,
            NamedObjectProcessor<? super T> processor) {
        this.deserializer = Objects.requireNonNull(deserializer);
        this.processor = Objects.requireNonNull(processor);
    }
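
    // Illustrative usage (a sketch, not part of this file): end-users would normally obtain this spec through the
    // public factory referenced in the class javadoc rather than constructing it directly. The deserializer and
    // processor variables below are hypothetical placeholders.
    //
    //     Deserializer<MyPojo> deserializer = ...;          // hypothetical user-supplied Kafka Deserializer
    //     NamedObjectProcessor<MyPojo> processor = ...;     // output column names + ObjectProcessor for MyPojo
    //     KeyOrValueSpec valueSpec = KafkaTools.Consume.objectProcessorSpec(deserializer, processor);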
    @Override
    public Optional<SchemaProvider> getSchemaProvider() {
        return Optional.empty();
    }

    @Override
    protected Deserializer<? extends T> getDeserializer(KeyOrValue keyOrValue,
            SchemaRegistryClient schemaRegistryClient,
            Map<String, ?> configs) {
        return deserializer;
    }
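
    // getIngestData (below) maps each processor output name one-to-one onto a column: the name is used as both the
    // field path and the column name, and one ColumnDefinition is emitted per output type, in order. For example (an
    // illustration, not from this file), names ["Sym", "Price"] with types [Type.stringType(), Type.doubleType()]
    // produce a String column "Sym" and a double column "Price".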
    @Override
    protected KeyOrValueIngestData getIngestData(KeyOrValue keyOrValue, SchemaRegistryClient schemaRegistryClient,
            Map<String, ?> configs, MutableInt nextColumnIndexMut, List<ColumnDefinition<?>> columnDefinitionsOut) {
        final KeyOrValueIngestData data = new KeyOrValueIngestData();
        data.fieldPathToColumnName = new LinkedHashMap<>();
        final List<String> names = processor.names();
        final List<Type<?>> types = processor.processor().outputTypes();
        final int L = names.size();
        for (int i = 0; i < L; ++i) {
            final String columnName = names.get(i);
            final Type<?> type = types.get(i);
            data.fieldPathToColumnName.put(columnName, columnName);
            columnDefinitionsOut.add(ColumnDefinition.of(columnName, type));
        }
        return data;
    }
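
    // getProcessor (below) bridges the ObjectProcessor-based spec to the KeyOrValueProcessor interface that
    // KafkaStreamPublisher still consumes: MultiFieldChunkAdapter.chunkOffsets resolves each mapped column to its
    // position in the publisher's chunk array, and offsetsFunction wraps those offsets as the chunk-selection
    // function used by the inner processor.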
    @Override
    protected KeyOrValueProcessor getProcessor(TableDefinition tableDef, KeyOrValueIngestData data) {
        return new KeyOrValueProcessorImpl(
                offsetsFunction(MultiFieldChunkAdapter.chunkOffsets(tableDef, data.fieldPathToColumnName)));
    }
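
    // KeyOrValueProcessorImpl is the adapter mentioned in the class javadoc: it implements the legacy
    // KeyOrValueProcessor contract on top of the user's ObjectProcessor. The offsetsAdapter (built from the chunk
    // offsets above) is expected to select, from the publisher's WritableChunk array, the destination chunks for
    // this spec's output columns in output order, so handleChunk can hand the processor the correct targets.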
    private class KeyOrValueProcessorImpl implements KeyOrValueProcessor {
        private final Function<WritableChunk<?>[], List<WritableChunk<?>>> offsetsAdapter;

        private KeyOrValueProcessorImpl(Function<WritableChunk<?>[], List<WritableChunk<?>>> offsetsAdapter) {
            this.offsetsAdapter = Objects.requireNonNull(offsetsAdapter);
        }
@Override
public void handleChunk(ObjectChunk