/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.hive;
import com.google.common.collect.BiMap;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import io.trino.filesystem.Location;
import io.trino.metastore.HiveType;
import io.trino.metastore.HiveTypeName;
import io.trino.metastore.type.TypeInfo;
import io.trino.plugin.hive.HivePageSource.BucketValidator;
import io.trino.plugin.hive.HiveSplit.BucketConversion;
import io.trino.plugin.hive.HiveSplit.BucketValidation;
import io.trino.plugin.hive.acid.AcidTransaction;
import io.trino.plugin.hive.coercions.CoercionUtils.CoercionContext;
import io.trino.plugin.hive.util.HiveBucketing.BucketingVersion;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ConnectorPageSource;
import io.trino.spi.connector.ConnectorPageSourceProvider;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorSplit;
import io.trino.spi.connector.ConnectorTableHandle;
import io.trino.spi.connector.ConnectorTransactionHandle;
import io.trino.spi.connector.DynamicFilter;
import io.trino.spi.connector.EmptyPageSource;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.NullableValue;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.type.TypeManager;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.regex.Pattern;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.Maps.uniqueIndex;
import static io.trino.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY;
import static io.trino.plugin.hive.HiveColumnHandle.ColumnType.REGULAR;
import static io.trino.plugin.hive.HiveColumnHandle.ColumnType.SYNTHESIZED;
import static io.trino.plugin.hive.HiveColumnHandle.isRowIdColumnHandle;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_UNSUPPORTED_FORMAT;
import static io.trino.plugin.hive.HivePageSourceProvider.ColumnMapping.toColumnHandles;
import static io.trino.plugin.hive.HivePageSourceProvider.ColumnMappingKind.PREFILLED;
import static io.trino.plugin.hive.HiveSessionProperties.getTimestampPrecision;
import static io.trino.plugin.hive.coercions.CoercionUtils.createTypeFromCoercer;
import static io.trino.plugin.hive.coercions.CoercionUtils.extractHiveStorageFormat;
import static io.trino.plugin.hive.util.HiveBucketing.HiveBucketFilter;
import static io.trino.plugin.hive.util.HiveBucketing.getHiveBucketFilter;
import static io.trino.plugin.hive.util.HiveTypeUtil.getHiveTypeForDereferences;
import static io.trino.plugin.hive.util.HiveUtil.getDeserializerClassName;
import static io.trino.plugin.hive.util.HiveUtil.getInputFormatName;
import static io.trino.plugin.hive.util.HiveUtil.getPrefilledColumnValue;
import static java.util.Objects.requireNonNull;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toList;
public class HivePageSourceProvider
implements ConnectorPageSourceProvider
{
// The original file path looks like this: /root/dir/nnnnnnn_m(_copy_ccc)?
    private static final Pattern ORIGINAL_FILE_PATH_MATCHER = Pattern.compile("(?s)(?<rootDir>.*)/(?<filename>(?<bucketNumber>\\d+)_(?<rest>.*)?)$");
private final TypeManager typeManager;
private final int domainCompactionThreshold;
    private final Set<HivePageSourceFactory> pageSourceFactories;
@Inject
    public HivePageSourceProvider(TypeManager typeManager, HiveConfig hiveConfig, Set<HivePageSourceFactory> pageSourceFactories)
{
this.typeManager = requireNonNull(typeManager, "typeManager is null");
this.domainCompactionThreshold = hiveConfig.getDomainCompactionThreshold();
this.pageSourceFactories = ImmutableSet.copyOf(requireNonNull(pageSourceFactories, "pageSourceFactories is null"));
}
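    /**
     * Creates the page source for a single {@link HiveSplit}. Splits whose bucket is eliminated by the
     * dynamic filter, or whose prefilled (partition key) values violate it, short-circuit to an
     * {@link EmptyPageSource}; otherwise the read is delegated to the registered
     * {@link HivePageSourceFactory} implementations.
     */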
@Override
public ConnectorPageSource createPageSource(
ConnectorTransactionHandle transaction,
ConnectorSession session,
ConnectorSplit split,
ConnectorTableHandle tableHandle,
            List<ColumnHandle> columns,
DynamicFilter dynamicFilter)
{
HiveTableHandle hiveTable = (HiveTableHandle) tableHandle;
HiveSplit hiveSplit = (HiveSplit) split;
if (shouldSkipBucket(hiveTable, hiveSplit, dynamicFilter)) {
return new EmptyPageSource();
}
        List<HiveColumnHandle> hiveColumns = columns.stream()
.map(HiveColumnHandle.class::cast)
.collect(toList());
boolean originalFile = ORIGINAL_FILE_PATH_MATCHER.matcher(hiveSplit.getPath()).matches();
        List<ColumnMapping> columnMappings = ColumnMapping.buildColumnMappings(
hiveSplit.getPartitionName(),
hiveSplit.getPartitionKeys(),
hiveColumns,
hiveSplit.getBucketConversion().map(BucketConversion::bucketColumnHandles).orElse(ImmutableList.of()),
hiveSplit.getHiveColumnCoercions(),
hiveSplit.getPath(),
hiveSplit.getTableBucketNumber(),
hiveSplit.getEstimatedFileSize(),
hiveSplit.getFileModifiedTime());
// Perform dynamic partition pruning in case coordinator didn't prune split.
// This can happen when dynamic filters are collected after partition splits were listed.
if (shouldSkipSplit(columnMappings, dynamicFilter)) {
return new EmptyPageSource();
}
        Optional<ConnectorPageSource> pageSource = createHivePageSource(
pageSourceFactories,
session,
Location.of(hiveSplit.getPath()),
hiveSplit.getTableBucketNumber(),
hiveSplit.getStart(),
hiveSplit.getLength(),
hiveSplit.getEstimatedFileSize(),
hiveSplit.getFileModifiedTime(),
hiveSplit.getSchema(),
hiveTable.getCompactEffectivePredicate().intersect(
dynamicFilter.getCurrentPredicate().transformKeys(HiveColumnHandle.class::cast))
.simplify(domainCompactionThreshold),
typeManager,
hiveSplit.getBucketConversion(),
hiveSplit.getBucketValidation(),
hiveSplit.getAcidInfo(),
originalFile,
hiveTable.getTransaction(),
columnMappings);
if (pageSource.isPresent()) {
return pageSource.get();
}
throw new TrinoException(HIVE_UNSUPPORTED_FORMAT, "Unsupported input format: serde=%s, format=%s, partition=%s, path=%s".formatted(
getDeserializerClassName(hiveSplit.getSchema()),
getInputFormatName(hiveSplit.getSchema()).orElse(null),
hiveSplit.getPartitionName(),
hiveSplit.getPath()));
}
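    /**
     * Asks each {@link HivePageSourceFactory} in turn to create a reader for the file; the first factory
     * that supports the format wins. The raw reader is wrapped in a {@link HivePageSource} that applies
     * column mappings, type coercions, bucket adaptation and bucket validation. Returns
     * {@link Optional#empty()} if no factory can handle the format.
     */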
    public static Optional<ConnectorPageSource> createHivePageSource(
            Set<HivePageSourceFactory> pageSourceFactories,
            ConnectorSession session,
            Location path,
            OptionalInt tableBucketNumber,
            long start,
            long length,
            long estimatedFileSize,
            long fileModifiedTime,
            Map<String, String> schema,
            TupleDomain<HiveColumnHandle> effectivePredicate,
            TypeManager typeManager,
            Optional<BucketConversion> bucketConversion,
            Optional<BucketValidation> bucketValidation,
            Optional<AcidInfo> acidInfo,
            boolean originalFile,
            AcidTransaction transaction,
            List<ColumnMapping> columnMappings)
{
if (effectivePredicate.isNone()) {
return Optional.of(new EmptyPageSource());
}
        List<ColumnMapping> regularAndInterimColumnMappings = ColumnMapping.extractRegularAndInterimColumnMappings(columnMappings);
        Optional<BucketAdaptation> bucketAdaptation = createBucketAdaptation(bucketConversion, tableBucketNumber, regularAndInterimColumnMappings);
        Optional<BucketValidator> bucketValidator = createBucketValidator(path, bucketValidation, tableBucketNumber, regularAndInterimColumnMappings);
CoercionContext coercionContext = new CoercionContext(getTimestampPrecision(session), extractHiveStorageFormat(getDeserializerClassName(schema)));
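        // Try each registered factory; the first one that recognizes the file format produces the reader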
for (HivePageSourceFactory pageSourceFactory : pageSourceFactories) {
            List<HiveColumnHandle> desiredColumns = toColumnHandles(regularAndInterimColumnMappings, typeManager, coercionContext);
            Optional<ReaderPageSource> readerWithProjections = pageSourceFactory.createPageSource(
session,
path,
start,
length,
estimatedFileSize,
fileModifiedTime,
schema,
desiredColumns,
effectivePredicate,
acidInfo,
tableBucketNumber,
originalFile,
transaction);
if (readerWithProjections.isPresent()) {
ConnectorPageSource pageSource = readerWithProjections.get().get();
                Optional<ReaderColumns> readerProjections = readerWithProjections.get().getReaderColumns();
                Optional<ReaderProjectionsAdapter> adapter = Optional.empty();
if (readerProjections.isPresent()) {
adapter = Optional.of(hiveProjectionsAdapter(desiredColumns, readerProjections.get()));
}
return Optional.of(new HivePageSource(
columnMappings,
bucketAdaptation,
bucketValidator,
adapter,
typeManager,
coercionContext,
pageSource));
}
}
return Optional.empty();
}
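    /**
     * Returns true when the dynamic filter proves that the split's bucket cannot contain any matching rows.
     */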
private static boolean shouldSkipBucket(HiveTableHandle hiveTable, HiveSplit hiveSplit, DynamicFilter dynamicFilter)
{
if (hiveSplit.getTableBucketNumber().isEmpty()) {
return false;
}
        Optional<HiveBucketFilter> hiveBucketFilter = getHiveBucketFilter(hiveTable, dynamicFilter.getCurrentPredicate());
return hiveBucketFilter.map(filter -> !filter.getBucketsToKeep().contains(hiveSplit.getTableBucketNumber().getAsInt())).orElse(false);
}
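    /**
     * Returns true when a prefilled value (such as a partition key) falls outside the domain
     * allowed by the dynamic filter, so the whole split can be skipped.
     */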
    private static boolean shouldSkipSplit(List<ColumnMapping> columnMappings, DynamicFilter dynamicFilter)
{
        TupleDomain<ColumnHandle> predicate = dynamicFilter.getCurrentPredicate();
if (predicate.isNone()) {
return true;
}
        Map<ColumnHandle, Domain> domains = predicate.getDomains().get();
for (ColumnMapping columnMapping : columnMappings) {
if (columnMapping.getKind() != PREFILLED) {
continue;
}
Object value = columnMapping.getPrefilledValue().getValue();
Domain allowedDomain = domains.get(columnMapping.getHiveColumnHandle());
if (allowedDomain != null && !allowedDomain.includesNullableValue(value)) {
return true;
}
}
return false;
}
    private static ReaderProjectionsAdapter hiveProjectionsAdapter(List<HiveColumnHandle> expectedColumns, ReaderColumns readColumns)
{
return new ReaderProjectionsAdapter(
expectedColumns.stream().map(ColumnHandle.class::cast).collect(toImmutableList()),
readColumns,
column -> ((HiveColumnHandle) column).getType(),
HivePageSourceProvider::getProjection);
}
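    /**
     * Computes the dereference path from the column produced by the reader to the column expected by the
     * engine. The reader's dereference chain must be a prefix of the expected chain; the remaining
     * indices are returned.
     */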
    public static List<Integer> getProjection(ColumnHandle expected, ColumnHandle read)
{
HiveColumnHandle expectedColumn = (HiveColumnHandle) expected;
HiveColumnHandle readColumn = (HiveColumnHandle) read;
checkArgument(expectedColumn.getBaseColumn().equals(readColumn.getBaseColumn()), "reader column is not valid for expected column");
        List<Integer> expectedDereferences = expectedColumn.getHiveColumnProjectionInfo()
.map(HiveColumnProjectionInfo::getDereferenceIndices)
.orElse(ImmutableList.of());
        List<Integer> readerDereferences = readColumn.getHiveColumnProjectionInfo()
.map(HiveColumnProjectionInfo::getDereferenceIndices)
.orElse(ImmutableList.of());
checkArgument(readerDereferences.size() <= expectedDereferences.size(), "Field returned by the reader should include expected field");
checkArgument(expectedDereferences.subList(0, readerDereferences.size()).equals(readerDereferences), "Field returned by the reader should be a prefix of expected field");
return expectedDereferences.subList(readerDereferences.size(), expectedDereferences.size());
}
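    /**
     * Describes how one output column is produced: read from the file (REGULAR), read only to support
     * bucket conversion or validation (INTERIM), filled with a constant such as a partition key
     * (PREFILLED), synthesized by the connector, for example the ACID row id (SYNTHESIZED), or not
     * present in the partition's physical schema (EMPTY).
     */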
public static class ColumnMapping
{
private final ColumnMappingKind kind;
private final HiveColumnHandle hiveColumnHandle;
        private final Optional<NullableValue> prefilledValue;
/**
* ordinal of this column in the underlying page source or record cursor
*/
private final OptionalInt index;
        private final Optional<HiveType> baseTypeCoercionFrom;
        public static ColumnMapping regular(HiveColumnHandle hiveColumnHandle, int index, Optional<HiveType> baseTypeCoercionFrom)
{
checkArgument(hiveColumnHandle.getColumnType() == REGULAR);
return new ColumnMapping(ColumnMappingKind.REGULAR, hiveColumnHandle, Optional.empty(), OptionalInt.of(index), baseTypeCoercionFrom);
}
        public static ColumnMapping synthesized(HiveColumnHandle hiveColumnHandle, int index, Optional<HiveType> baseTypeCoercionFrom)
{
checkArgument(hiveColumnHandle.getColumnType() == SYNTHESIZED);
return new ColumnMapping(ColumnMappingKind.SYNTHESIZED, hiveColumnHandle, Optional.empty(), OptionalInt.of(index), baseTypeCoercionFrom);
}
        public static ColumnMapping prefilled(HiveColumnHandle hiveColumnHandle, NullableValue prefilledValue, Optional<HiveType> baseTypeCoercionFrom)
{
checkArgument(hiveColumnHandle.getColumnType() == PARTITION_KEY || hiveColumnHandle.getColumnType() == SYNTHESIZED);
checkArgument(hiveColumnHandle.isBaseColumn(), "prefilled values not supported for projected columns");
return new ColumnMapping(PREFILLED, hiveColumnHandle, Optional.of(prefilledValue), OptionalInt.empty(), baseTypeCoercionFrom);
}
        public static ColumnMapping interim(HiveColumnHandle hiveColumnHandle, int index, Optional<HiveType> baseTypeCoercionFrom)
{
checkArgument(hiveColumnHandle.getColumnType() == REGULAR);
return new ColumnMapping(ColumnMappingKind.INTERIM, hiveColumnHandle, Optional.empty(), OptionalInt.of(index), baseTypeCoercionFrom);
}
public static ColumnMapping empty(HiveColumnHandle hiveColumnHandle)
{
checkArgument(hiveColumnHandle.getColumnType() == REGULAR);
return new ColumnMapping(ColumnMappingKind.EMPTY, hiveColumnHandle, Optional.empty(), OptionalInt.empty(), Optional.empty());
}
private ColumnMapping(
ColumnMappingKind kind,
HiveColumnHandle hiveColumnHandle,
                Optional<NullableValue> prefilledValue,
                OptionalInt index,
                Optional<HiveType> baseTypeCoercionFrom)
{
this.kind = requireNonNull(kind, "kind is null");
this.hiveColumnHandle = requireNonNull(hiveColumnHandle, "hiveColumnHandle is null");
this.prefilledValue = requireNonNull(prefilledValue, "prefilledValue is null");
this.index = requireNonNull(index, "index is null");
this.baseTypeCoercionFrom = requireNonNull(baseTypeCoercionFrom, "baseTypeCoercionFrom is null");
}
public ColumnMappingKind getKind()
{
return kind;
}
public NullableValue getPrefilledValue()
{
checkState(kind == PREFILLED);
return prefilledValue.get();
}
public HiveColumnHandle getHiveColumnHandle()
{
return hiveColumnHandle;
}
public int getIndex()
{
checkState(kind == ColumnMappingKind.REGULAR || kind == ColumnMappingKind.INTERIM || isRowIdColumnHandle(hiveColumnHandle));
return index.getAsInt();
}
        public Optional<HiveType> getBaseTypeCoercionFrom()
{
return baseTypeCoercionFrom;
}
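        /**
         * Builds mappings for all requested columns plus any interim columns needed for bucket conversion.
         * Partition keys and synthetic columns become PREFILLED constants, and projections that are not
         * valid for the partition's physical type become EMPTY mappings.
         */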
        public static List<ColumnMapping> buildColumnMappings(
                String partitionName,
                List<HivePartitionKey> partitionKeys,
                List<HiveColumnHandle> columns,
                List<HiveColumnHandle> requiredInterimColumns,
                Map<Integer, HiveTypeName> hiveColumnCoercions,
String path,
OptionalInt bucketNumber,
long estimatedFileSize,
long fileModifiedTime)
{
            Map<String, HivePartitionKey> partitionKeysByName = uniqueIndex(partitionKeys, HivePartitionKey::name);
// Maintain state about hive columns added to the mapping as we iterate (for validation)
            Set<Integer> baseColumnHiveIndices = new HashSet<>();
            Map<Integer, Set<Optional<HiveColumnProjectionInfo>>> projectionsForColumn = new HashMap<>();
            ImmutableList.Builder<ColumnMapping> columnMappings = ImmutableList.builder();
int regularIndex = 0;
for (HiveColumnHandle column : columns) {
                Optional<HiveType> baseTypeCoercionFrom = Optional.ofNullable(hiveColumnCoercions.get(column.getBaseHiveColumnIndex())).map(HiveTypeName::toHiveType);
if (column.getColumnType() == REGULAR) {
if (column.isBaseColumn()) {
baseColumnHiveIndices.add(column.getBaseHiveColumnIndex());
}
checkArgument(
projectionsForColumn.computeIfAbsent(column.getBaseHiveColumnIndex(), columnIndex -> new HashSet<>()).add(column.getHiveColumnProjectionInfo()),
"duplicate column in columns list");
// Add regular mapping if projection is valid for partition schema, otherwise add an empty mapping
if (baseTypeCoercionFrom.isEmpty()
|| projectionValidForType(baseTypeCoercionFrom.get(), column.getHiveColumnProjectionInfo())) {
columnMappings.add(regular(column, regularIndex, baseTypeCoercionFrom));
regularIndex++;
}
else {
columnMappings.add(empty(column));
}
}
else if (isRowIdColumnHandle(column)) {
baseColumnHiveIndices.add(column.getBaseHiveColumnIndex());
checkArgument(
projectionsForColumn.computeIfAbsent(column.getBaseHiveColumnIndex(), index -> new HashSet<>()).add(column.getHiveColumnProjectionInfo()),
"duplicate column in columns list");
if (baseTypeCoercionFrom.isEmpty()
|| projectionValidForType(baseTypeCoercionFrom.get(), column.getHiveColumnProjectionInfo())) {
columnMappings.add(synthesized(column, regularIndex, baseTypeCoercionFrom));
}
else {
throw new RuntimeException("baseTypeCoercisionFrom was empty for the rowId column");
}
regularIndex++;
}
else {
columnMappings.add(prefilled(
column,
getPrefilledColumnValue(column, partitionKeysByName.get(column.getName()), path, bucketNumber, estimatedFileSize, fileModifiedTime, partitionName),
baseTypeCoercionFrom));
}
}
for (HiveColumnHandle column : requiredInterimColumns) {
checkArgument(column.getColumnType() == REGULAR);
checkArgument(column.isBaseColumn(), "bucketed columns should be base columns");
if (baseColumnHiveIndices.contains(column.getBaseHiveColumnIndex())) {
continue; // This column exists in columns. Do not add it again.
}
if (projectionsForColumn.containsKey(column.getBaseHiveColumnIndex())) {
                    Optional<HiveType> baseTypeCoercionFrom = Optional.ofNullable(hiveColumnCoercions.get(column.getBaseHiveColumnIndex())).map(HiveTypeName::toHiveType);
columnMappings.add(interim(column, regularIndex, baseTypeCoercionFrom));
}
else {
// If coercion does not affect bucket number calculation, coercion doesn't need to be applied here.
// Otherwise, read of this partition should not be allowed.
// (Alternatively, the partition could be read as an unbucketed partition. This is not implemented.)
columnMappings.add(interim(column, regularIndex, Optional.empty()));
}
regularIndex++;
}
return columnMappings.build();
}
        private static boolean projectionValidForType(HiveType baseType, Optional<HiveColumnProjectionInfo> projection)
{
            List<Integer> dereferences = projection.map(HiveColumnProjectionInfo::getDereferenceIndices).orElse(ImmutableList.of());
            Optional<HiveType> targetType = getHiveTypeForDereferences(baseType, dereferences);
return targetType.isPresent();
}
        public static List<ColumnMapping> extractRegularAndInterimColumnMappings(List<ColumnMapping> columnMappings)
{
return columnMappings.stream()
.filter(columnMapping -> columnMapping.getKind() == ColumnMappingKind.REGULAR || columnMapping.getKind() == ColumnMappingKind.INTERIM)
.collect(toImmutableList());
}
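        /**
         * Converts regular and interim mappings into the column handles handed to the file reader,
         * rewriting the requested type whenever the partition's physical type differs from the table
         * type so that the reader produces the pre-coercion type.
         */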
        public static List<HiveColumnHandle> toColumnHandles(List<ColumnMapping> regularColumnMappings, TypeManager typeManager, CoercionContext coercionContext)
{
return regularColumnMappings.stream()
.map(columnMapping -> {
HiveColumnHandle columnHandle = columnMapping.getHiveColumnHandle();
if (columnMapping.getBaseTypeCoercionFrom().isEmpty()) {
return columnHandle;
}
HiveType fromHiveTypeBase = columnMapping.getBaseTypeCoercionFrom().get();
                        Optional<HiveColumnProjectionInfo> newColumnProjectionInfo = columnHandle.getHiveColumnProjectionInfo().map(projectedColumn -> {
HiveType fromHiveType = getHiveTypeForDereferences(fromHiveTypeBase, projectedColumn.getDereferenceIndices()).get();
return new HiveColumnProjectionInfo(
projectedColumn.getDereferenceIndices(),
projectedColumn.getDereferenceNames(),
fromHiveType,
createTypeFromCoercer(typeManager, fromHiveType, columnHandle.getHiveType(), coercionContext));
});
return new HiveColumnHandle(
columnHandle.getBaseColumnName(),
columnHandle.getBaseHiveColumnIndex(),
fromHiveTypeBase,
createTypeFromCoercer(typeManager, fromHiveTypeBase, columnHandle.getBaseHiveType(), coercionContext),
newColumnProjectionInfo,
columnHandle.getColumnType(),
columnHandle.getComment());
})
.collect(toList());
}
}
public enum ColumnMappingKind
{
REGULAR,
PREFILLED,
INTERIM,
SYNTHESIZED,
EMPTY
}
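    /**
     * When the partition was written with a bucket count or bucketing version different from the table's,
     * builds the information needed to recompute bucket numbers on the fly and keep only the rows that
     * belong to this split's bucket.
     */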
    private static Optional<BucketAdaptation> createBucketAdaptation(Optional<BucketConversion> bucketConversion, OptionalInt bucketNumber, List<ColumnMapping> columnMappings)
{
return bucketConversion.map(conversion -> {
            List<ColumnMapping> baseColumnMapping = columnMappings.stream()
.filter(mapping -> mapping.getHiveColumnHandle().isBaseColumn())
.collect(toList());
            Map<Integer, ColumnMapping> baseHiveColumnToBlockIndex = uniqueIndex(baseColumnMapping, mapping -> mapping.getHiveColumnHandle().getBaseHiveColumnIndex());
int[] bucketColumnIndices = conversion.bucketColumnHandles().stream()
.mapToInt(columnHandle -> baseHiveColumnToBlockIndex.get(columnHandle.getBaseHiveColumnIndex()).getIndex())
.toArray();
            List<HiveType> bucketColumnHiveTypes = conversion.bucketColumnHandles().stream()
.map(columnHandle -> baseHiveColumnToBlockIndex.get(columnHandle.getBaseHiveColumnIndex()).getHiveColumnHandle().getHiveType())
.collect(toImmutableList());
return new BucketAdaptation(
bucketColumnIndices,
bucketColumnHiveTypes,
conversion.bucketingVersion(),
conversion.tableBucketCount(),
conversion.partitionBucketCount(),
bucketNumber.getAsInt());
});
}
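    /**
     * Value object describing how to re-bucket rows read from a partition whose bucketing does not match
     * the table's, including which bucket to keep for this split.
     */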
public static class BucketAdaptation
{
private final int[] bucketColumnIndices;
        private final List<HiveType> bucketColumnHiveTypes;
private final BucketingVersion bucketingVersion;
private final int tableBucketCount;
private final int partitionBucketCount;
private final int bucketToKeep;
public BucketAdaptation(
int[] bucketColumnIndices,
                List<HiveType> bucketColumnHiveTypes,
BucketingVersion bucketingVersion,
int tableBucketCount,
int partitionBucketCount,
int bucketToKeep)
{
this.bucketColumnIndices = bucketColumnIndices;
this.bucketColumnHiveTypes = bucketColumnHiveTypes;
this.bucketingVersion = bucketingVersion;
this.tableBucketCount = tableBucketCount;
this.partitionBucketCount = partitionBucketCount;
this.bucketToKeep = bucketToKeep;
}
public int[] getBucketColumnIndices()
{
return bucketColumnIndices;
}
        public List<HiveType> getBucketColumnHiveTypes()
{
return bucketColumnHiveTypes;
}
public BucketingVersion getBucketingVersion()
{
return bucketingVersion;
}
public int getTableBucketCount()
{
return tableBucketCount;
}
public int getPartitionBucketCount()
{
return partitionBucketCount;
}
public int getBucketToKeep()
{
return bucketToKeep;
}
}
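    /**
     * Creates a validator that recomputes each row's bucket number so it can be checked against the bucket
     * recorded in the file path. Validation is skipped when the bucketing columns are not read by the query.
     */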
    static Optional<BucketValidator> createBucketValidator(Location path, Optional<BucketValidation> bucketValidation, OptionalInt bucketNumber, List<ColumnMapping> columnMappings)
{
return bucketValidation.flatMap(validation -> {
            Map<Integer, ColumnMapping> baseHiveColumnToBlockIndex = columnMappings.stream()
.filter(mapping -> mapping.getHiveColumnHandle().isBaseColumn())
.collect(toImmutableMap(mapping -> mapping.getHiveColumnHandle().getBaseHiveColumnIndex(), identity()));
int[] bucketColumnIndices = new int[validation.bucketColumns().size()];
            List<TypeInfo> bucketColumnTypes = new ArrayList<>();
for (int i = 0; i < validation.bucketColumns().size(); i++) {
HiveColumnHandle column = validation.bucketColumns().get(i);
ColumnMapping mapping = baseHiveColumnToBlockIndex.get(column.getBaseHiveColumnIndex());
if (mapping == null) {
// The bucket column is not read by the query, and thus invalid bucketing cannot
// affect the results. Filtering on the hidden $bucket column still correctly
// partitions the table by bucket, even if the bucket has the wrong data.
return Optional.empty();
}
bucketColumnIndices[i] = mapping.getIndex();
bucketColumnTypes.add(mapping.getHiveColumnHandle().getHiveType().getTypeInfo());
}
return Optional.of(new BucketValidator(
path,
bucketColumnIndices,
bucketColumnTypes,
validation.bucketingVersion(),
validation.bucketCount(),
bucketNumber.orElseThrow()));
});
}
/**
* Creates a mapping between the input {@code columns} and base columns based on baseHiveColumnIndex if required.
*/
    public static Optional<ReaderColumns> projectBaseColumns(List<HiveColumnHandle> columns)
{
return projectBaseColumns(columns, false);
}
/**
* Creates a mapping between the input {@code columns} and base columns based on baseHiveColumnIndex or baseColumnName if required.
*/
    public static Optional<ReaderColumns> projectBaseColumns(List<HiveColumnHandle> columns, boolean useColumnNames)
{
requireNonNull(columns, "columns is null");
// No projection is required if all columns are base columns
if (columns.stream().allMatch(HiveColumnHandle::isBaseColumn)) {
return Optional.empty();
}
        ImmutableList.Builder<ColumnHandle> projectedColumns = ImmutableList.builder();
        ImmutableList.Builder<Integer> outputColumnMapping = ImmutableList.builder();
Map