//
// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
//
package io.deephaven.kafka;

import io.confluent.kafka.schemaregistry.SchemaProvider;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.deephaven.chunk.ObjectChunk;
import io.deephaven.chunk.WritableChunk;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.engine.table.ColumnDefinition;
import io.deephaven.engine.table.TableDefinition;
import io.deephaven.kafka.KafkaTools.Consume.KeyOrValueSpec;
import io.deephaven.kafka.KafkaTools.KeyOrValue;
import io.deephaven.kafka.KafkaTools.KeyOrValueIngestData;
import io.deephaven.kafka.ingest.KafkaStreamPublisher;
import io.deephaven.kafka.ingest.KeyOrValueProcessor;
import io.deephaven.kafka.ingest.MultiFieldChunkAdapter;
import io.deephaven.processor.NamedObjectProcessor;
import io.deephaven.processor.ObjectProcessor;
import io.deephaven.qst.type.Type;
import io.deephaven.util.mutable.MutableInt;
import org.apache.kafka.common.serialization.Deserializer;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;

/**
 * This implementation provides an easier onboarding ramp and a better (and public) interface,
 * {@link KafkaTools.Consume#objectProcessorSpec(Deserializer, NamedObjectProcessor)}, for end-users. The
 * {@link ObjectProcessor} is the user-visible replacement for {@link KeyOrValueProcessor}; in the meantime, this
 * class adapts it into a {@link KeyOrValueProcessor} until {@link KafkaStreamPublisher} can be rewritten to take
 * advantage of these better interfaces.
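 *
 * <p>
 * A minimal usage sketch (hedged: {@code myProcessor}, the column names, and the exact shape of the
 * {@code NamedObjectProcessor.of} factory below are illustrative assumptions, not part of this file):
 *
 * <pre>{@code
 * ObjectProcessor<byte[]> myProcessor = ...; // maps each record's raw bytes to the output columns
 * KeyOrValueSpec valueSpec = KafkaTools.Consume.objectProcessorSpec(
 *         new ByteArrayDeserializer(),
 *         NamedObjectProcessor.of(myProcessor, "ColA", "ColB"));
 * }</pre>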
 */
class KeyOrValueSpecObjectProcessorImpl<T> extends KeyOrValueSpec {
    private final Deserializer<? extends T> deserializer;
    private final NamedObjectProcessor<? super T> processor;

    KeyOrValueSpecObjectProcessorImpl(Deserializer<? extends T> deserializer,
            NamedObjectProcessor<? super T> processor) {
        this.deserializer = Objects.requireNonNull(deserializer);
        this.processor = Objects.requireNonNull(processor);
    }

    @Override
    public Optional<SchemaProvider> getSchemaProvider() {
        return Optional.empty();
    }

    @Override
    protected Deserializer<? extends T> getDeserializer(KeyOrValue keyOrValue,
            SchemaRegistryClient schemaRegistryClient,
            Map<String, ?> configs) {
        return deserializer;
    }

    @Override
    protected KeyOrValueIngestData getIngestData(KeyOrValue keyOrValue, SchemaRegistryClient schemaRegistryClient,
            Map<String, ?> configs, MutableInt nextColumnIndexMut, List<ColumnDefinition<?>> columnDefinitionsOut) {
        final KeyOrValueIngestData data = new KeyOrValueIngestData();
        data.fieldPathToColumnName = new LinkedHashMap<>();
        final List<String> names = processor.names();
        final List<Type<?>> types = processor.processor().outputTypes();
        final int L = names.size();
        for (int i = 0; i < L; ++i) {
            final String columnName = names.get(i);
            final Type<?> type = types.get(i);
            data.fieldPathToColumnName.put(columnName, columnName);
            columnDefinitionsOut.add(ColumnDefinition.of(columnName, type));
        }
        return data;
    }
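
    // Illustrative example (hypothetical values): a processor whose names are ["Sym", "Px"] with output types
    // [Type.stringType(), Type.doubleType()] yields the identity field-path mapping {"Sym" -> "Sym",
    // "Px" -> "Px"} and appends two matching column definitions to columnDefinitionsOut.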

    @Override
    protected KeyOrValueProcessor getProcessor(TableDefinition tableDef, KeyOrValueIngestData data) {
        return new KeyOrValueProcessorImpl(
                offsetsFunction(MultiFieldChunkAdapter.chunkOffsets(tableDef, data.fieldPathToColumnName)));
    }

    private class KeyOrValueProcessorImpl implements KeyOrValueProcessor {
        private final Function<WritableChunk<?>[], List<WritableChunk<?>>> offsetsAdapter;

        private KeyOrValueProcessorImpl(Function<WritableChunk<?>[], List<WritableChunk<?>>> offsetsAdapter) {
            this.offsetsAdapter = Objects.requireNonNull(offsetsAdapter);
        }

        @Override
        public void handleChunk(ObjectChunk<Object, Values> inputChunk, WritableChunk<Values>[] publisherChunks) {
            // noinspection unchecked
            final ObjectChunk<T, ?> in = (ObjectChunk<T, ?>) inputChunk;
            // we expect isInOrder to be true, so apply should be an O(1) op no matter how many columns there are.
            processor.processor().processAll(in, offsetsAdapter.apply(publisherChunks));
        }
    }
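
    // Note: the offsetsAdapter selects, out of all of the publisher's chunks, the subset backing this spec's
    // columns (per MultiFieldChunkAdapter.chunkOffsets above) before delegating to ObjectProcessor.processAll.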

    private static <T> Function<T[], List<T>> offsetsFunction(int[] offsets) {
        return offsets.length == 0
                ? array -> Collections.emptyList()
                : isInOrder(offsets)
                        ? array -> Arrays.asList(array).subList(offsets[0], offsets[0] + offsets.length)
                        : array -> reorder(array, offsets);
    }
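
    // Illustrative example (hypothetical offsets): {3, 4, 5} is in order, so the returned function is an O(1)
    // subList view, array -> Arrays.asList(array).subList(3, 6); {5, 2, 7} is not, so each apply copies the
    // referenced elements into a fresh list via reorder.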

    private static boolean isInOrder(int[] offsets) {
        for (int i = 1; i < offsets.length; ++i) {
            if (offsets[i - 1] + 1 != offsets[i]) {
                return false;
            }
        }
        return true;
    }
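
    // For example: {3, 4, 5} is in order; {3, 5, 4} and {1, 3} are not, since each element must be exactly one
    // greater than its predecessor. An empty or single-element array is trivially in order.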

    private static <T> List<T> reorder(T[] array, int[] offsets) {
        final List<T> out = new ArrayList<>(offsets.length);
        for (int offset : offsets) {
            out.add(array[offset]);
        }
        return out;
    }
}