org.apache.iceberg.spark.source.ChangelogRowReader

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.spark.source;

import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import org.apache.iceberg.AddedRowsScanTask;
import org.apache.iceberg.ChangelogScanTask;
import org.apache.iceberg.ChangelogUtil;
import org.apache.iceberg.ContentFile;
import org.apache.iceberg.ContentScanTask;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DeleteFile;
import org.apache.iceberg.DeletedDataFileScanTask;
import org.apache.iceberg.DeletedRowsScanTask;
import org.apache.iceberg.ScanTaskGroup;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.io.CloseableIterable;
import org.apache.iceberg.io.CloseableIterator;
import org.apache.iceberg.io.InputFile;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.util.SnapshotUtil;
import org.apache.spark.rdd.InputFileBlockHolder;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import org.apache.spark.sql.catalyst.expressions.JoinedRow;
import org.apache.spark.sql.connector.read.PartitionReader;
import org.apache.spark.unsafe.types.UTF8String;
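
/**
 * A {@link PartitionReader} that reads Iceberg changelog scan tasks and emits each data row
 * joined with three changelog metadata values: the change type, the change ordinal, and the
 * commit snapshot ID.
 */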
class ChangelogRowReader extends BaseRowReader<ChangelogScanTask>
    implements PartitionReader<InternalRow> {

  ChangelogRowReader(SparkInputPartition partition) {
    this(
        partition.table(),
        partition.taskGroup(),
        SnapshotUtil.schemaFor(partition.table(), partition.branch()),
        partition.expectedSchema(),
        partition.isCaseSensitive());
  }

  ChangelogRowReader(
      Table table,
      ScanTaskGroup<ChangelogScanTask> taskGroup,
      Schema tableSchema,
      Schema expectedSchema,
      boolean caseSensitive) {
    super(
        table,
        taskGroup,
        tableSchema,
        // strip the changelog metadata columns here; they are re-attached in open()
        ChangelogUtil.dropChangelogMetadata(expectedSchema),
        caseSensitive);
  }

  @Override
  protected CloseableIterator<InternalRow> open(ChangelogScanTask task) {
    JoinedRow cdcRow = new JoinedRow();

    // the right side is fixed per task: the changelog metadata row
    cdcRow.withRight(changelogMetadata(task));

    CloseableIterable<InternalRow> rows = openChangelogScanTask(task);
    CloseableIterable<InternalRow> cdcRows = CloseableIterable.transform(rows, cdcRow::withLeft);

    return cdcRows.iterator();
  }
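
  /** Builds the metadata row: change type name, change ordinal, and commit snapshot ID. */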
  private static InternalRow changelogMetadata(ChangelogScanTask task) {
    InternalRow metadataRow = new GenericInternalRow(3);

    metadataRow.update(0, UTF8String.fromString(task.operation().name()));
    metadataRow.update(1, task.changeOrdinal());
    metadataRow.update(2, task.commitSnapshotId());

    return metadataRow;
  }
  private CloseableIterable<InternalRow> openChangelogScanTask(ChangelogScanTask task) {
    if (task instanceof AddedRowsScanTask) {
      return openAddedRowsScanTask((AddedRowsScanTask) task);

    } else if (task instanceof DeletedRowsScanTask) {
      throw new UnsupportedOperationException("Deleted rows scan task is not supported yet");

    } else if (task instanceof DeletedDataFileScanTask) {
      return openDeletedDataFileScanTask((DeletedDataFileScanTask) task);

    } else {
      throw new IllegalArgumentException(
          "Unsupported changelog scan task type: " + task.getClass().getName());
    }
  }
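
  /** Emits rows added in a new data file, after applying the task's delete files to it. */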
  CloseableIterable<InternalRow> openAddedRowsScanTask(AddedRowsScanTask task) {
    String filePath = task.file().path().toString();
    SparkDeleteFilter deletes = new SparkDeleteFilter(filePath, task.deletes(), counter());
    return deletes.filter(rows(task, deletes.requiredSchema()));
  }
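
  /** Emits rows of a removed data file, filtered by the deletes that already applied to it. */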
  private CloseableIterable<InternalRow> openDeletedDataFileScanTask(DeletedDataFileScanTask task) {
    String filePath = task.file().path().toString();
    SparkDeleteFilter deletes = new SparkDeleteFilter(filePath, task.existingDeletes(), counter());
    return deletes.filter(rows(task, deletes.requiredSchema()));
  }
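
  /** Opens the task's data file and returns its rows projected to the given read schema. */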
  private CloseableIterable<InternalRow> rows(ContentScanTask<DataFile> task, Schema readSchema) {
    Map<Integer, ?> idToConstant = constantsMap(task, readSchema);

    String filePath = task.file().path().toString();

    // update the current file for Spark's filename() function
    InputFileBlockHolder.set(filePath, task.start(), task.length());

    InputFile location = getInputFile(filePath);
    Preconditions.checkNotNull(location, "Could not find InputFile");
    return newIterable(
        location,
        task.file().format(),
        task.start(),
        task.length(),
        task.residual(),
        readSchema,
        idToConstant);
  }
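
  /** Lists every content file a task references: its data file plus the relevant delete files. */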
  @Override
  protected Stream<ContentFile<?>> referencedFiles(ChangelogScanTask task) {
    if (task instanceof AddedRowsScanTask) {
      return addedRowsScanTaskFiles((AddedRowsScanTask) task);

    } else if (task instanceof DeletedRowsScanTask) {
      throw new UnsupportedOperationException("Deleted rows scan task is not supported yet");

    } else if (task instanceof DeletedDataFileScanTask) {
      return deletedDataFileScanTaskFiles((DeletedDataFileScanTask) task);

    } else {
      throw new IllegalArgumentException(
          "Unsupported changelog scan task type: " + task.getClass().getName());
    }
  }

  private static Stream<ContentFile<?>> deletedDataFileScanTaskFiles(DeletedDataFileScanTask task) {
    DataFile file = task.file();
    List<DeleteFile> existingDeletes = task.existingDeletes();
    return Stream.concat(Stream.of(file), existingDeletes.stream());
  }

  private static Stream<ContentFile<?>> addedRowsScanTaskFiles(AddedRowsScanTask task) {
    DataFile file = task.file();
    List<DeleteFile> deletes = task.deletes();
    return Stream.concat(Stream.of(file), deletes.stream());
  }
}
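
The heart of open() above is the JoinedRow pattern: one metadata row is built per scan task and installed once as the fixed right side, and each data row is then swapped in on the left, so no per-row allocation is needed. Below is a minimal, self-contained sketch of that pattern using Spark's catalyst row classes directly; the class name JoinedRowSketch and all literal values (operation name, ordinal, snapshot ID, data fields) are illustrative placeholders, not part of the Iceberg source.

import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import org.apache.spark.sql.catalyst.expressions.JoinedRow;
import org.apache.spark.unsafe.types.UTF8String;

public class JoinedRowSketch {
  public static void main(String[] args) {
    // Fixed right side: the changelog metadata row (change type, ordinal, snapshot ID).
    // The values here are placeholders; ChangelogRowReader derives them from the scan task.
    InternalRow metadata = new GenericInternalRow(3);
    metadata.update(0, UTF8String.fromString("INSERT"));
    metadata.update(1, 0); // change ordinal
    metadata.update(2, 123L); // commit snapshot ID

    JoinedRow cdcRow = new JoinedRow();
    cdcRow.withRight(metadata);

    // Each data row is attached on the left; withLeft mutates and returns the same
    // JoinedRow, which is why the reader can map it over a row iterable cheaply.
    InternalRow data = new GenericInternalRow(new Object[] {UTF8String.fromString("a"), 1});
    InternalRow joined = cdcRow.withLeft(data);

    // 2 data fields + 3 metadata fields
    System.out.println(joined.numFields()); // prints 5
  }
}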