io.trino.orc.writer.MapColumnWriter Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of trino-orc Show documentation
Trino - ORC file format support
The newest version!
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.orc.writer;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;
import io.trino.orc.checkpoint.BooleanStreamCheckpoint;
import io.trino.orc.checkpoint.LongStreamCheckpoint;
import io.trino.orc.metadata.ColumnEncoding;
import io.trino.orc.metadata.CompressedMetadataWriter;
import io.trino.orc.metadata.CompressionKind;
import io.trino.orc.metadata.OrcColumnId;
import io.trino.orc.metadata.RowGroupIndex;
import io.trino.orc.metadata.Stream;
import io.trino.orc.metadata.Stream.StreamKind;
import io.trino.orc.metadata.statistics.ColumnStatistics;
import io.trino.orc.stream.LongOutputStream;
import io.trino.orc.stream.PresentOutputStream;
import io.trino.orc.stream.StreamDataOutput;
import io.trino.spi.block.Block;
import io.trino.spi.block.ColumnarMap;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static io.airlift.slice.SizeOf.instanceSize;
import static io.trino.orc.metadata.ColumnEncoding.ColumnEncodingKind.DIRECT_V2;
import static io.trino.orc.metadata.CompressionKind.NONE;
import static io.trino.orc.stream.LongOutputStream.createLengthOutputStream;
import static io.trino.spi.block.ColumnarMap.toColumnarMap;
import static java.util.Objects.requireNonNull;
/**
 * Writes an ORC MAP column as three parts: a PRESENT stream marking non-null rows,
 * a LENGTH stream holding the entry count of each non-null map, and the nested
 * key/value child columns, which are delegated to {@code keyWriter} and
 * {@code valueWriter}.
 *
 * <p>Lifecycle: {@link #beginRowGroup()} / {@link #writeBlock(Block)} /
 * {@link #finishRowGroup()} repeated per row group, then {@link #close()} before
 * reading out index and data streams; {@link #reset()} prepares the writer for the
 * next stripe. Not thread-safe.
 */
public class MapColumnWriter
        implements ColumnWriter
{
    private static final int INSTANCE_SIZE = instanceSize(MapColumnWriter.class);

    private final OrcColumnId columnId;
    private final boolean compressed;
    private final ColumnEncoding columnEncoding;
    private final LongOutputStream lengthStream;
    private final PresentOutputStream presentStream;
    private final ColumnWriter keyWriter;
    private final ColumnWriter valueWriter;

    // One entry per finished row group in the current stripe
    private final List<ColumnStatistics> rowGroupColumnStatistics = new ArrayList<>();
    // Non-null maps seen in the current (unfinished) row group
    private int nonNullValueCount;

    private boolean closed;

    public MapColumnWriter(OrcColumnId columnId, CompressionKind compression, int bufferSize, ColumnWriter keyWriter, ColumnWriter valueWriter)
    {
        this.columnId = requireNonNull(columnId, "columnId is null");
        this.compressed = requireNonNull(compression, "compression is null") != NONE;
        // Maps are always written with DIRECT_V2 encoding; dictionary size is unused (0)
        this.columnEncoding = new ColumnEncoding(DIRECT_V2, 0);
        this.keyWriter = requireNonNull(keyWriter, "keyWriter is null");
        this.valueWriter = requireNonNull(valueWriter, "valueWriter is null");
        this.lengthStream = createLengthOutputStream(compression, bufferSize);
        this.presentStream = new PresentOutputStream(compression, bufferSize);
    }

    @Override
    public List<ColumnWriter> getNestedColumnWriters()
    {
        // Depth-first: key subtree before value subtree, matching ORC column ordering
        return ImmutableList.<ColumnWriter>builder()
                .add(keyWriter)
                .addAll(keyWriter.getNestedColumnWriters())
                .add(valueWriter)
                .addAll(valueWriter.getNestedColumnWriters())
                .build();
    }

    @Override
    public Map<OrcColumnId, ColumnEncoding> getColumnEncodings()
    {
        ImmutableMap.Builder<OrcColumnId, ColumnEncoding> encodings = ImmutableMap.builder();
        encodings.put(columnId, columnEncoding);
        encodings.putAll(keyWriter.getColumnEncodings());
        encodings.putAll(valueWriter.getColumnEncodings());
        return encodings.buildOrThrow();
    }

    @Override
    public void beginRowGroup()
    {
        // Checkpoint stream positions so row-group indexes can point back here
        lengthStream.recordCheckpoint();
        presentStream.recordCheckpoint();

        keyWriter.beginRowGroup();
        valueWriter.beginRowGroup();
    }

    @Override
    public void writeBlock(Block block)
    {
        checkState(!closed);
        checkArgument(block.getPositionCount() > 0, "Block is empty");

        ColumnarMap columnarMap = toColumnarMap(block);
        writeColumnarMap(columnarMap);
    }

    private void writeColumnarMap(ColumnarMap columnarMap)
    {
        // write nulls and lengths
        for (int position = 0; position < columnarMap.getPositionCount(); position++) {
            boolean present = !columnarMap.isNull(position);
            presentStream.writeBoolean(present);
            if (present) {
                nonNullValueCount++;
                lengthStream.writeLong(columnarMap.getEntryCount(position));
            }
        }

        // write keys and values; child writers reject empty blocks, so skip when all maps are null/empty
        Block keysBlock = columnarMap.getKeysBlock();
        if (keysBlock.getPositionCount() > 0) {
            keyWriter.writeBlock(keysBlock);
            valueWriter.writeBlock(columnarMap.getValuesBlock());
        }
    }

    @Override
    public Map<OrcColumnId, ColumnStatistics> finishRowGroup()
    {
        checkState(!closed);

        // Map columns carry only the non-null count; all typed statistics are null
        ColumnStatistics statistics = new ColumnStatistics((long) nonNullValueCount, 0, null, null, null, null, null, null, null, null, null, null);
        rowGroupColumnStatistics.add(statistics);
        nonNullValueCount = 0;

        ImmutableMap.Builder<OrcColumnId, ColumnStatistics> columnStatistics = ImmutableMap.builder();
        columnStatistics.put(columnId, statistics);
        columnStatistics.putAll(keyWriter.finishRowGroup());
        columnStatistics.putAll(valueWriter.finishRowGroup());
        return columnStatistics.buildOrThrow();
    }

    @Override
    public void close()
    {
        closed = true;
        keyWriter.close();
        valueWriter.close();
        lengthStream.close();
        presentStream.close();
    }

    @Override
    public Map<OrcColumnId, ColumnStatistics> getColumnStripeStatistics()
    {
        checkState(closed);
        ImmutableMap.Builder<OrcColumnId, ColumnStatistics> columnStatistics = ImmutableMap.builder();
        columnStatistics.put(columnId, ColumnStatistics.mergeColumnStatistics(rowGroupColumnStatistics));
        columnStatistics.putAll(keyWriter.getColumnStripeStatistics());
        columnStatistics.putAll(valueWriter.getColumnStripeStatistics());
        return columnStatistics.buildOrThrow();
    }

    @Override
    public List<StreamDataOutput> getIndexStreams(CompressedMetadataWriter metadataWriter)
            throws IOException
    {
        checkState(closed);

        // Build one RowGroupIndex per finished row group: stream positions + statistics
        ImmutableList.Builder<RowGroupIndex> rowGroupIndexes = ImmutableList.builder();
        List<LongStreamCheckpoint> lengthCheckpoints = lengthStream.getCheckpoints();
        Optional<List<BooleanStreamCheckpoint>> presentCheckpoints = presentStream.getCheckpoints();
        for (int i = 0; i < rowGroupColumnStatistics.size(); i++) {
            int groupId = i;
            ColumnStatistics columnStatistics = rowGroupColumnStatistics.get(groupId);
            LongStreamCheckpoint lengthCheckpoint = lengthCheckpoints.get(groupId);
            Optional<BooleanStreamCheckpoint> presentCheckpoint = presentCheckpoints.map(checkpoints -> checkpoints.get(groupId));
            List<Integer> positions = createArrayColumnPositionList(compressed, lengthCheckpoint, presentCheckpoint);
            rowGroupIndexes.add(new RowGroupIndex(positions, columnStatistics));
        }

        Slice slice = metadataWriter.writeRowIndexes(rowGroupIndexes.build());
        Stream stream = new Stream(columnId, StreamKind.ROW_INDEX, slice.length(), false);

        ImmutableList.Builder<StreamDataOutput> indexStreams = ImmutableList.builder();
        indexStreams.add(new StreamDataOutput(slice, stream));
        indexStreams.addAll(keyWriter.getIndexStreams(metadataWriter));
        indexStreams.addAll(keyWriter.getBloomFilters(metadataWriter));
        indexStreams.addAll(valueWriter.getIndexStreams(metadataWriter));
        indexStreams.addAll(valueWriter.getBloomFilters(metadataWriter));
        return indexStreams.build();
    }

    /**
     * Builds the row-group position list: PRESENT stream positions (if any nulls
     * were recorded) followed by LENGTH stream positions, per the ORC index format.
     */
    private static List<Integer> createArrayColumnPositionList(
            boolean compressed,
            LongStreamCheckpoint lengthCheckpoint,
            Optional<BooleanStreamCheckpoint> presentCheckpoint)
    {
        ImmutableList.Builder<Integer> positionList = ImmutableList.builder();
        presentCheckpoint.ifPresent(booleanStreamCheckpoint -> positionList.addAll(booleanStreamCheckpoint.toPositionList(compressed)));
        positionList.addAll(lengthCheckpoint.toPositionList(compressed));
        return positionList.build();
    }

    @Override
    public List<StreamDataOutput> getBloomFilters(CompressedMetadataWriter metadataWriter)
            throws IOException
    {
        // Bloom filters are not produced for map columns themselves; children are
        // collected directly in getIndexStreams
        return ImmutableList.of();
    }

    @Override
    public List<StreamDataOutput> getDataStreams()
    {
        checkState(closed);

        ImmutableList.Builder<StreamDataOutput> outputDataStreams = ImmutableList.builder();
        // PRESENT stream is omitted entirely when no nulls were written
        presentStream.getStreamDataOutput(columnId).ifPresent(outputDataStreams::add);
        outputDataStreams.add(lengthStream.getStreamDataOutput(columnId));
        outputDataStreams.addAll(keyWriter.getDataStreams());
        outputDataStreams.addAll(valueWriter.getDataStreams());
        return outputDataStreams.build();
    }

    @Override
    public long getBufferedBytes()
    {
        return lengthStream.getBufferedBytes() + presentStream.getBufferedBytes() + keyWriter.getBufferedBytes() + valueWriter.getBufferedBytes();
    }

    @Override
    public long getRetainedBytes()
    {
        long retainedBytes = INSTANCE_SIZE + lengthStream.getRetainedBytes() + presentStream.getRetainedBytes() + keyWriter.getRetainedBytes() + valueWriter.getRetainedBytes();
        for (ColumnStatistics statistics : rowGroupColumnStatistics) {
            retainedBytes += statistics.getRetainedSizeInBytes();
        }
        return retainedBytes;
    }

    @Override
    public void reset()
    {
        closed = false;
        lengthStream.reset();
        presentStream.reset();
        keyWriter.reset();
        valueWriter.reset();
        rowGroupColumnStatistics.clear();
        nonNullValueCount = 0;
    }
}
© 2015 - 2025 Weber Informatics LLC | Privacy Policy