/*
* ---------------------------------------------------------------------------------------------------------------------
* AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - for any changes edit BaseCharUpdateByOperator and regenerate
* ---------------------------------------------------------------------------------------------------------------------
*/
package io.deephaven.engine.table.impl.updateby.internal;
import io.deephaven.util.QueryConstants;
import io.deephaven.engine.table.impl.sources.ByteArraySource;
import io.deephaven.engine.table.impl.sources.ByteSparseArraySource;
import io.deephaven.engine.table.WritableColumnSource;
import io.deephaven.chunk.Chunk;
import io.deephaven.chunk.IntChunk;
import io.deephaven.chunk.LongChunk;
import io.deephaven.chunk.WritableByteChunk;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.engine.rowset.RowSequence;
import io.deephaven.engine.rowset.RowSet;
import io.deephaven.engine.rowset.chunkattributes.OrderedRowKeys;
import io.deephaven.engine.table.*;
import io.deephaven.engine.table.impl.MatchPair;
import io.deephaven.engine.table.impl.sources.*;
import io.deephaven.engine.table.impl.updateby.UpdateByOperator;
import io.deephaven.engine.table.impl.util.RowRedirection;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.annotation.OverridingMethodsMustInvokeSuper;
import java.util.Collections;
import java.util.Map;
import static io.deephaven.engine.rowset.RowSequence.NULL_ROW_KEY;
import static io.deephaven.util.QueryConstants.*;
public abstract class BaseByteUpdateByOperator extends UpdateByOperator {
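// The writable output column for this operator. When a row redirection is in use, values are written
// to a dense inner source (maybeInnerSource) and exposed through a redirected view; otherwise
// maybeInnerSource remains null and outputSource is a sparse source written to directly.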
protected WritableColumnSource<Byte> outputSource;
protected WritableColumnSource<Byte> maybeInnerSource;
// region extra-fields
final byte nullValue;
// endregion extra-fields
protected abstract class Context extends UpdateByOperator.Context {
protected final ChunkSink.FillFromContext outputFillContext;
protected final WritableByteChunk<Values> outputValues;
public byte curVal = NULL_BYTE;
protected Context(final int chunkSize) {
outputFillContext = outputSource.makeFillFromContext(chunkSize);
outputValues = WritableByteChunk.makeWritableChunk(chunkSize);
}
@Override
public void accumulateCumulative(@NotNull final RowSequence inputKeys,
@NotNull final Chunk<? extends Values>[] valueChunkArr,
@Nullable final LongChunk<? extends Values> tsChunk,
final int len) {
setValueChunks(valueChunkArr);
// chunk processing
for (int ii = 0; ii < len; ii++) {
push(ii, 1);
writeToOutputChunk(ii);
}
// chunk output to column
writeToOutputColumn(inputKeys);
}
@Override
public void accumulateRolling(@NotNull final RowSequence inputKeys,
@NotNull final Chunk<? extends Values>[] influencerValueChunkArr,
@Nullable final LongChunk<OrderedRowKeys> affectedPosChunk,
@Nullable final LongChunk<OrderedRowKeys> influencerPosChunk,
@NotNull final IntChunk<? extends Values> pushChunk,
@NotNull final IntChunk<? extends Values> popChunk,
final int len) {
setValueChunks(influencerValueChunkArr);
setPosChunks(affectedPosChunk, influencerPosChunk);
int pushIndex = 0;
// chunk processing
for (int ii = 0; ii < len; ii++) {
final int pushCount = pushChunk.get(ii);
final int popCount = popChunk.get(ii);
if (pushCount == NULL_INT) {
writeNullToOutputChunk(ii);
continue;
}
// pop for this row
if (popCount > 0) {
pop(popCount);
}
// push for this row
if (pushCount > 0) {
push(pushIndex, pushCount);
pushIndex += pushCount;
}
// write the results to the output chunk
writeToOutputChunk(ii);
}
// chunk output to column
writeToOutputColumn(inputKeys);
}
@Override
public void setValueChunks(@NotNull final Chunk<? extends Values>[] valueChunks) {}
@Override
public void writeToOutputChunk(final int outIdx) {
outputValues.set(outIdx, curVal);
}
void writeNullToOutputChunk(final int outIdx) {
outputValues.set(outIdx, NULL_BYTE);
}
@Override
public void writeToOutputColumn(@NotNull final RowSequence inputKeys) {
outputSource.fillFromChunk(outputFillContext, outputValues, inputKeys);
}
@Override
public void reset() {
curVal = NULL_BYTE;
nullCount = 0;
}
@Override
public void close() {
outputValues.close();
outputFillContext.close();
}
}
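// Illustrative only (not part of the generated class): a concrete operator supplies a Context that
// captures its input chunk in setValueChunks() and folds values into curVal in push(). A minimal
// sketch for a hypothetical cumulative-max operator might look roughly like this:
//
//     protected class MaxContext extends Context {
//         private ByteChunk<? extends Values> byteValueChunk;
//
//         protected MaxContext(final int chunkSize) {
//             super(chunkSize);
//         }
//
//         @Override
//         public void setValueChunks(@NotNull final Chunk<? extends Values>[] valueChunks) {
//             byteValueChunk = valueChunks[0].asByteChunk();
//         }
//
//         @Override
//         public void push(int pos, int count) {
//             // cumulative operators push a single value per row
//             final byte val = byteValueChunk.get(pos);
//             if (val != NULL_BYTE && (curVal == NULL_BYTE || val > curVal)) {
//                 curVal = val;
//             }
//         }
//     }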
/**
* Construct a base operator for operations that produce byte outputs.
*
* @param pair the {@link MatchPair} that defines the input/output for this operation
* @param affectingColumns a list of all columns (including the input column from the pair) that affect the result
* of this operator.
*/
public BaseByteUpdateByOperator(
@NotNull final MatchPair pair,
@NotNull final String[] affectingColumns
// region extra-constructor-args
// endregion extra-constructor-args
) {
this(pair, affectingColumns, null, 0, 0, false);
}
/**
* Construct a base operator for operations that produce byte outputs.
*
* @param pair the {@link MatchPair} that defines the input/output for this operation
* @param affectingColumns a list of all columns (including the input column from the pair) that affect the result
* of this operator.
* @param timestampColumnName an optional timestamp column. If this is null, time is assumed to be measured in
* integer ticks.
* @param reverseWindowScaleUnits the reverse window for the operator. If no {@code timestampColumnName} is provided, this
* is measured in ticks, otherwise it is measured in nanoseconds.
* @param forwardWindowScaleUnits the forward window for the operator. If no {@code timestampColumnName} is provided, this
* is measured in ticks, otherwise it is measured in nanoseconds.
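* @param isWindowed true if this is a windowed (rolling) operator; false if it is cumulative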
*/
public BaseByteUpdateByOperator(
@NotNull final MatchPair pair,
@NotNull final String[] affectingColumns,
@Nullable final String timestampColumnName,
final long reverseWindowScaleUnits,
final long forwardWindowScaleUnits,
final boolean isWindowed
// region extra-constructor-args
// endregion extra-constructor-args
) {
super(pair, affectingColumns, timestampColumnName, reverseWindowScaleUnits, forwardWindowScaleUnits, isWindowed);
// region constructor
this.nullValue = getNullValue();
// endregion constructor
}
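// Illustrative only (hypothetical subclass, not part of this file): a cumulative operator typically
// chains to the two-argument constructor above, while a rolling operator forwards its window
// parameters and passes isWindowed = true, e.g.:
//
//     public ByteRollingExampleOperator(@NotNull final MatchPair pair,
//                                       @NotNull final String[] affectingColumns,
//                                       @Nullable final String timestampColumnName,
//                                       final long reverseWindowScaleUnits,
//                                       final long forwardWindowScaleUnits) {
//         super(pair, affectingColumns, timestampColumnName,
//                 reverseWindowScaleUnits, forwardWindowScaleUnits, true);
//     }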
@Override
@OverridingMethodsMustInvokeSuper
public void initializeSources(@NotNull final Table source, @Nullable final RowRedirection rowRedirection) {
this.rowRedirection = rowRedirection;
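// A non-null redirection means output values are stored densely in an inner source and exposed
// through the redirection; otherwise a sparse source indexed directly by row key is used.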
if(rowRedirection != null) {
// region create-dense
this.maybeInnerSource = makeDenseSource();
// endregion create-dense
outputSource = WritableRedirectedColumnSource.maybeRedirect(rowRedirection, maybeInnerSource, 0);
} else {
maybeInnerSource = null;
// region create-sparse
this.outputSource = makeSparseSource();
// endregion create-sparse
}
}
// region extra-methods
protected byte getNullValue() {
return QueryConstants.NULL_BYTE;
}
// region extra-methods
protected WritableColumnSource<Byte> makeSparseSource() {
return new ByteSparseArraySource();
}
protected WritableColumnSource<Byte> makeDenseSource() {
return new ByteArraySource();
}
// endregion extra-methods
@Override
public void initializeCumulative(@NotNull final UpdateByOperator.Context context,
final long firstUnmodifiedKey,
final long firstUnmodifiedTimestamp,
@NotNull final RowSet bucketRowSet) {
Context ctx = (Context) context;
ctx.reset();
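// resume from the previously computed output value at firstUnmodifiedKey, if there is one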
if (firstUnmodifiedKey != NULL_ROW_KEY) {
ctx.curVal = outputSource.getByte(firstUnmodifiedKey);
}
}
@Override
public void startTrackingPrev() {
outputSource.startTrackingPrevValues();
if (rowRedirection != null) {
assert maybeInnerSource != null;
maybeInnerSource.startTrackingPrevValues();
}
}
// region Shifts
@Override
public void applyOutputShift(@NotNull final RowSet subIndexToShift, final long delta) {
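// Boolean outputs are stored as bytes through a reinterpreted sparse source, so dispatch on the actual source type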
if (outputSource instanceof BooleanSparseArraySource.ReinterpretedAsByte) {
((BooleanSparseArraySource.ReinterpretedAsByte)outputSource).shift(subIndexToShift, delta);
} else {
((ByteSparseArraySource)outputSource).shift(subIndexToShift, delta);
}
}
// endregion Shifts
@Override
public void prepareForParallelPopulation(final RowSet changedRows) {
if (rowRedirection != null) {
assert maybeInnerSource != null;
((WritableSourceWithPrepareForParallelPopulation) maybeInnerSource).prepareForParallelPopulation(changedRows);
} else {
((WritableSourceWithPrepareForParallelPopulation) outputSource).prepareForParallelPopulation(changedRows);
}
}
@NotNull
@Override
public Map<String, ColumnSource<?>> getOutputColumns() {
return Collections.singletonMap(pair.leftColumn, outputSource);
}
// region clear-output
@Override
public void clearOutputRows(final RowSet toClear) {
// NOP for primitive types
}
// endregion clear-output
}