/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.orc.reader;
import com.facebook.presto.common.block.Block;
import com.facebook.presto.common.block.RowBlock;
import com.facebook.presto.common.block.RunLengthEncodedBlock;
import com.facebook.presto.common.type.RowType;
import com.facebook.presto.common.type.RowType.Field;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.orc.OrcAggregatedMemoryContext;
import com.facebook.presto.orc.OrcCorruptionException;
import com.facebook.presto.orc.OrcRecordReaderOptions;
import com.facebook.presto.orc.StreamDescriptor;
import com.facebook.presto.orc.Stripe;
import com.facebook.presto.orc.stream.BooleanInputStream;
import com.facebook.presto.orc.stream.InputStreamSource;
import com.facebook.presto.orc.stream.InputStreamSources;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.io.Closer;
import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.PRESENT;
import static com.facebook.presto.orc.reader.BatchStreamReaders.createStreamReader;
import static com.facebook.presto.orc.reader.ReaderUtils.verifyStreamType;
import static com.facebook.presto.orc.stream.MissingInputStreamSource.missingStreamSource;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Verify.verify;
import static java.util.Objects.requireNonNull;
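/**
 * Batch stream reader for ORC struct columns. Each declared field of the {@link RowType} is read by a
 * nested {@link BatchStreamReader}; fields declared in the type but absent from the file are filled with
 * run-length-encoded null blocks. Field names are matched case-insensitively against the nested stream
 * descriptors, and the resulting field blocks are assembled into a {@link RowBlock}.
 *
 * <p>Illustrative usage sketch (this reader is normally created and driven by the ORC record reader rather
 * than called directly; the surrounding variables below are assumptions for illustration, not part of this class):
 * <pre>{@code
 * BatchStreamReader reader = BatchStreamReaders.createStreamReader(
 *         rowType, streamDescriptor, hiveStorageTimeZone, options, systemMemoryContext);
 * reader.startStripe(stripe);
 * reader.startRowGroup(dataStreamSources);
 * reader.prepareNextRead(batchSize);
 * Block rowBlock = reader.readBlock();
 * }</pre>
 */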
public class StructBatchStreamReader
implements BatchStreamReader
{
private static final int INSTANCE_SIZE = ClassLayout.parseClass(StructBatchStreamReader.class).instanceSize();
private final StreamDescriptor streamDescriptor;
private final Map<String, BatchStreamReader> structFields;
private final RowType type;
private final List<String> fieldNames;
private int readOffset;
private int nextBatchSize;
private InputStreamSource<BooleanInputStream> presentStreamSource = missingStreamSource(BooleanInputStream.class);
@Nullable
private BooleanInputStream presentStream;
private boolean rowGroupOpen;
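/**
 * Creates one nested reader per struct field that is present in the file. Every field of the ROW type
 * must have a declared name; names are lower-cased (Locale.ENGLISH) so they match the nested stream
 * descriptors case-insensitively.
 */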
StructBatchStreamReader(Type type, StreamDescriptor streamDescriptor, DateTimeZone hiveStorageTimeZone, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
throws OrcCorruptionException
{
requireNonNull(type, "type is null");
verifyStreamType(streamDescriptor, type, RowType.class::isInstance);
this.type = (RowType) type;
this.streamDescriptor = requireNonNull(streamDescriptor, "stream is null");
Map<String, StreamDescriptor> nestedStreams = Maps.uniqueIndex(
streamDescriptor.getNestedStreams(), stream -> stream.getFieldName().toLowerCase(Locale.ENGLISH));
ImmutableList.Builder<String> fieldNames = ImmutableList.builder();
ImmutableMap.Builder<String, BatchStreamReader> structFields = ImmutableMap.builder();
for (Field field : this.type.getFields()) {
String fieldName = field.getName()
.orElseThrow(() -> new IllegalArgumentException("ROW type does not have field names declared: " + type))
.toLowerCase(Locale.ENGLISH);
fieldNames.add(fieldName);
StreamDescriptor fieldStream = nestedStreams.get(fieldName);
if (fieldStream != null) {
structFields.put(fieldName, createStreamReader(field.getType(), fieldStream, hiveStorageTimeZone, options, systemMemoryContext));
}
}
this.fieldNames = fieldNames.build();
this.structFields = structFields.build();
}
@Override
public void prepareNextRead(int batchSize)
{
readOffset += nextBatchSize;
nextBatchSize = batchSize;
}
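/**
 * Reads the next batch as a row block. Pending skips recorded by prepareNextRead are applied first:
 * the PRESENT stream (if any) is skipped and the number of set bits becomes the skip size for the
 * nested field readers, since field streams only hold values for non-null rows.
 */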
@Override
public Block readBlock()
throws IOException
{
if (!rowGroupOpen) {
openRowGroup();
}
if (readOffset > 0) {
if (presentStream != null) {
// skip ahead the present bit reader, but count the set bits
// and use this as the skip size for the field readers
readOffset = presentStream.countBitsSet(readOffset);
}
for (BatchStreamReader structField : structFields.values()) {
structField.prepareNextRead(readOffset);
}
}
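// Read the null flags for this batch (if a PRESENT stream exists) and materialize field blocks only
// for the non-null positions; if every row in the batch is null, build empty blocks for each field type.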
boolean[] nullVector = null;
Block[] blocks;
if (presentStream == null) {
blocks = getBlocksForType(nextBatchSize);
}
else {
nullVector = new boolean[nextBatchSize];
int nullValues = presentStream.getUnsetBits(nextBatchSize, nullVector);
if (nullValues != nextBatchSize) {
blocks = getBlocksForType(nextBatchSize - nullValues);
}
else {
List<Type> typeParameters = type.getTypeParameters();
blocks = new Block[typeParameters.size()];
for (int i = 0; i < typeParameters.size(); i++) {
blocks[i] = typeParameters.get(i).createBlockBuilder(null, 0).build();
}
}
}
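// All field blocks must contain the same number of positions (the non-null row count)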
verify(Arrays.stream(blocks)
.mapToInt(Block::getPositionCount)
.distinct()
.count() == 1);
// Struct is represented as a row block
Block rowBlock = RowBlock.fromFieldBlocks(nextBatchSize, Optional.ofNullable(nullVector), blocks);
readOffset = 0;
nextBatchSize = 0;
return rowBlock;
}
private void openRowGroup()
throws IOException
{
presentStream = presentStreamSource.openStream();
rowGroupOpen = true;
}
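// Reset per-stripe state and propagate the new stripe to the nested field readers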
@Override
public void startStripe(Stripe stripe)
throws IOException
{
presentStreamSource = missingStreamSource(BooleanInputStream.class);
readOffset = 0;
nextBatchSize = 0;
presentStream = null;
rowGroupOpen = false;
for (BatchStreamReader structField : structFields.values()) {
structField.startStripe(stripe);
}
}
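// Bind the PRESENT stream for the new row group and propagate the data stream sources to the nested field readers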
@Override
public void startRowGroup(InputStreamSources dataStreamSources)
throws IOException
{
presentStreamSource = dataStreamSources.getInputStreamSource(streamDescriptor, PRESENT, BooleanInputStream.class);
readOffset = 0;
nextBatchSize = 0;
presentStream = null;
rowGroupOpen = false;
for (BatchStreamReader structField : structFields.values()) {
structField.startRowGroup(dataStreamSources);
}
}
@Override
public String toString()
{
return toStringHelper(this)
.addValue(streamDescriptor)
.toString();
}
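// Read one block per declared field; fields with no corresponding stream in the file are represented
// as run-length-encoded null blocks of the requested position count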
private Block[] getBlocksForType(int positionCount)
throws IOException
{
Block[] blocks = new Block[fieldNames.size()];
for (int i = 0; i < fieldNames.size(); i++) {
String fieldName = fieldNames.get(i);
BatchStreamReader streamReader = structFields.get(fieldName);
if (streamReader != null) {
streamReader.prepareNextRead(positionCount);
blocks[i] = streamReader.readBlock();
}
else {
blocks[i] = RunLengthEncodedBlock.create(type.getFields().get(i).getType(), null, positionCount);
}
}
return blocks;
}
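// Close all nested field readers, collecting any failures through a single Closer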
@Override
public void close()
{
try (Closer closer = Closer.create()) {
for (BatchStreamReader batchStreamReader : structFields.values()) {
closer.register(batchStreamReader::close);
}
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
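// Retained size is this instance plus the retained size of every nested field reader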
@Override
public long getRetainedSizeInBytes()
{
long retainedSizeInBytes = INSTANCE_SIZE;
for (BatchStreamReader structField : structFields.values()) {
retainedSizeInBytes += structField.getRetainedSizeInBytes();
}
return retainedSizeInBytes;
}
}