/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.plugin.hive;

import com.google.common.collect.ImmutableList;
import com.google.common.io.CountingOutputStream;
import io.airlift.slice.OutputStreamSliceOutput;
import io.trino.rcfile.AircompressorCodecFactory;
import io.trino.rcfile.HadoopCodecFactory;
import io.trino.rcfile.RcFileDataSource;
import io.trino.rcfile.RcFileEncoding;
import io.trino.rcfile.RcFileWriter;
import io.trino.spi.Page;
import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.block.RunLengthEncodedBlock;
import io.trino.spi.type.Type;
import org.openjdk.jol.info.ClassLayout;

import java.io.IOException;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.function.Supplier;

import static com.google.common.base.MoreObjects.toStringHelper;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_WRITER_CLOSE_ERROR;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_WRITER_DATA_ERROR;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_WRITE_VALIDATION_FAILED;
import static java.util.Objects.requireNonNull;

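/**
 * A {@link FileWriter} that writes Trino {@link Page}s to an RCFile
 * (Record Columnar File). Column encoding and compression are delegated to
 * the underlying {@link RcFileWriter}; when a validation input is supplied,
 * the written file is re-read and verified on {@link #commit()}.
 *
 * <p>A minimal usage sketch. This is illustrative only: {@code out},
 * {@code encoding}, {@code types}, and {@code page} are assumptions of the
 * example, not part of this class, and a real rollback action would
 * typically delete the partially written file.
 * <pre>{@code
 * FileWriter writer = new RcFileFileWriter(
 *         out,                  // destination OutputStream
 *         () -> null,           // rollback action run if the write fails
 *         encoding,             // a text or binary RcFileEncoding
 *         types,                // one Trino Type per file column
 *         Optional.empty(),     // no compression codec
 *         new int[] {0, 1},     // file column i comes from input column i
 *         ImmutableMap.of(),    // RCFile metadata key/value pairs
 *         Optional.empty());    // no post-write validation
 * writer.appendRows(page);
 * writer.commit();
 * }</pre>
 */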
public class RcFileFileWriter
        implements FileWriter
{
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(RcFileFileWriter.class).instanceSize();
    private static final ThreadMXBean THREAD_MX_BEAN = ManagementFactory.getThreadMXBean();

    private final CountingOutputStream outputStream;
    private final RcFileWriter rcFileWriter;
    private final Callable<Void> rollbackAction;
    private final int[] fileInputColumnIndexes;
    private final List<Block> nullBlocks;
    private final Optional<Supplier<RcFileDataSource>> validationInputFactory;
    private long validationCpuNanos;

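    /**
     * @param fileInputColumnIndexes for each file column, the index of the
     *         corresponding block in the input pages, or a negative value if
     *         the input pages do not carry that column (it is then written
     *         as all nulls)
     */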
    public RcFileFileWriter(
            OutputStream outputStream,
            Callable<Void> rollbackAction,
            RcFileEncoding rcFileEncoding,
            List<Type> fileColumnTypes,
            Optional<String> codecName,
            int[] fileInputColumnIndexes,
            Map<String, String> metadata,
            Optional<Supplier<RcFileDataSource>> validationInputFactory)
            throws IOException
    {
        this.outputStream = new CountingOutputStream(outputStream);
        rcFileWriter = new RcFileWriter(
                new OutputStreamSliceOutput(this.outputStream),
                fileColumnTypes,
                rcFileEncoding,
                codecName,
                new AircompressorCodecFactory(new HadoopCodecFactory(getClass().getClassLoader())),
                metadata,
                validationInputFactory.isPresent());
        this.rollbackAction = requireNonNull(rollbackAction, "rollbackAction is null");
        this.fileInputColumnIndexes = requireNonNull(fileInputColumnIndexes, "fileInputColumnIndexes is null");

        ImmutableList.Builder<Block> nullBlocks = ImmutableList.builder();
        for (Type fileColumnType : fileColumnTypes) {
            BlockBuilder blockBuilder = fileColumnType.createBlockBuilder(null, 1, 0);
            blockBuilder.appendNull();
            nullBlocks.add(blockBuilder.build());
        }
        this.nullBlocks = nullBlocks.build();
        this.validationInputFactory = validationInputFactory;
    }

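    /**
     * Returns the number of bytes written to the output stream so far.
     */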
    @Override
    public long getWrittenBytes()
    {
        return outputStream.getCount();
    }

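    /**
     * Returns the retained heap size: this instance plus the buffers held by
     * the underlying {@link RcFileWriter}.
     */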
    @Override
    public long getMemoryUsage()
    {
        return INSTANCE_SIZE + rcFileWriter.getRetainedSizeInBytes();
    }

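    /**
     * Remaps the input page to the file column order and writes it. File
     * columns with a negative input index are filled with a run-length
     * encoded block of nulls spanning the whole page.
     */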
    @Override
    public void appendRows(Page dataPage)
    {
        Block[] blocks = new Block[fileInputColumnIndexes.length];
        for (int i = 0; i < fileInputColumnIndexes.length; i++) {
            int inputColumnIndex = fileInputColumnIndexes[i];
            if (inputColumnIndex < 0) {
                blocks[i] = new RunLengthEncodedBlock(nullBlocks.get(i), dataPage.getPositionCount());
            }
            else {
                blocks[i] = dataPage.getBlock(inputColumnIndex);
            }
        }
        Page page = new Page(dataPage.getPositionCount(), blocks);
        try {
            rcFileWriter.write(page);
        }
        catch (IOException | UncheckedIOException e) {
            throw new TrinoException(HIVE_WRITER_DATA_ERROR, e);
        }
    }

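    /**
     * Closes the writer, flushing all buffered data. If the close fails, the
     * rollback action is invoked before the failure is rethrown. If a
     * validation input was supplied, the written file is then re-read and
     * verified, and the CPU time spent is recorded for
     * {@link #getValidationCpuNanos()}.
     */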
    @Override
    public void commit()
    {
        try {
            rcFileWriter.close();
        }
        catch (IOException | UncheckedIOException e) {
            try {
                rollbackAction.call();
            }
            catch (Exception ignored) {
                // ignore
            }
            throw new TrinoException(HIVE_WRITER_CLOSE_ERROR, "Error committing write to Hive", e);
        }

        if (validationInputFactory.isPresent()) {
            try {
                try (RcFileDataSource input = validationInputFactory.get().get()) {
                    long startThreadCpuTime = THREAD_MX_BEAN.getCurrentThreadCpuTime();
                    rcFileWriter.validate(input);
                    validationCpuNanos += THREAD_MX_BEAN.getCurrentThreadCpuTime() - startThreadCpuTime;
                }
            }
            catch (IOException | UncheckedIOException e) {
                throw new TrinoException(HIVE_WRITE_VALIDATION_FAILED, e);
            }
        }
    }

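    /**
     * Closes the writer and then runs the rollback action (typically deleting
     * the partially written file), regardless of whether the close succeeds.
     */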
    @Override
    public void rollback()
    {
        try {
            try {
                rcFileWriter.close();
            }
            finally {
                rollbackAction.call();
            }
        }
        catch (Exception e) {
            throw new TrinoException(HIVE_WRITER_CLOSE_ERROR, "Error rolling back write to Hive", e);
        }
    }

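    /**
     * Returns the total CPU time spent validating the written file, or zero
     * if validation was not requested.
     */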
    @Override
    public long getValidationCpuNanos()
    {
        return validationCpuNanos;
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("writer", rcFileWriter)
                .toString();
    }
}