io.deephaven.kafka.publish.KeyOrValueSerializer Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of deephaven-extensions-kafka Show documentation
Kafka: Integrating Engine tables with Kafka
//
// Copyright (c) 2016-2024 Deephaven Data Labs and Patent Pending
//
package io.deephaven.kafka.publish;
import io.deephaven.chunk.ObjectChunk;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.engine.rowset.RowSequence;
import io.deephaven.util.SafeCloseable;
/**
 * Chunk-oriented serializer that supplies keys or values for stream publishing.
 *
 * @param <SERIALIZED_TYPE> The type of the serialized key or value objects produced by
 *        {@link #handleChunk(Context, RowSequence, boolean)}
 */
public interface KeyOrValueSerializer<SERIALIZED_TYPE> {
/**
 * Create a chunk of output keys or values that correspond to {@code rowSequence}. The output
 * {@link ObjectChunk chunks} should be cached in the {@code context} for re-use, but the data returned in them
 * should be functionally immutable and not rely on pooled or re-usable objects.
 *
 * @param context A {@link Context} created by {@link #makeContext(int)}
 * @param rowSequence The row keys to serialize
 * @param previous If previous row values should be used, as with row key removals
 *
 * @return A chunk of serialized data keys or values, with {@code ObjectChunk.size() == rowSequence.size()}
 */
ObjectChunk<SERIALIZED_TYPE, Values> handleChunk(Context context, RowSequence rowSequence,
        boolean previous);
/**
 * Create a context for calling {@link #handleChunk(Context, RowSequence, boolean)}.
 *
 * @param size The maximum number of rows that will be serialized for each chunk
 *
 * @return A Context for the KeyOrValueSerializer
 */
Context makeContext(int size);
/**
 * Per-thread serialization state; holds re-usable output chunks. Must be {@link SafeCloseable#close() closed}
 * when no longer needed.
 */
interface Context extends SafeCloseable {
}
}
© 2015 - 2024 Weber Informatics LLC | Privacy Policy