com.facebook.presto.iceberg.IcebergParquetFileWriter Maven / Gradle / Ivy
Presto - Iceberg Connector
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.iceberg;

import com.facebook.presto.common.type.Type;
import com.facebook.presto.hive.HdfsContext;
import com.facebook.presto.hive.HdfsEnvironment;
import com.facebook.presto.hive.parquet.ParquetFileWriter;
import com.facebook.presto.parquet.writer.ParquetWriterOptions;
import org.apache.hadoop.fs.Path;
import org.apache.iceberg.Metrics;
import org.apache.iceberg.MetricsConfig;
import org.apache.iceberg.parquet.ParquetUtil;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.schema.MessageType;

import java.io.OutputStream;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;

import static java.util.Objects.requireNonNull;

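/**
 * Parquet file writer for the Iceberg connector. Delegates the actual Parquet writing to
 * {@link ParquetFileWriter} and adds Iceberg-specific collection of column {@link Metrics}
 * for the file that was written.
 */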
public class IcebergParquetFileWriter
        extends ParquetFileWriter
        implements IcebergFileWriter
{
    private final Path outputPath;
    private final HdfsEnvironment hdfsEnvironment;
    private final HdfsContext hdfsContext;
    private final MetricsConfig metricsConfig;

    public IcebergParquetFileWriter(
            OutputStream outputStream,
            Callable<Void> rollbackAction,
            List<String> fileColumnNames,
            List<Type> fileColumnTypes,
            MessageType messageType,
            Map<List<String>, Type> primitiveTypes,
            ParquetWriterOptions parquetWriterOptions,
            int[] fileInputColumnIndexes,
            CompressionCodecName compressionCodecName,
            Path outputPath,
            HdfsEnvironment hdfsEnvironment,
            HdfsContext hdfsContext,
            MetricsConfig metricsConfig)
    {
        super(outputStream,
                rollbackAction,
                fileColumnNames,
                fileColumnTypes,
                messageType,
                primitiveTypes,
                parquetWriterOptions,
                fileInputColumnIndexes,
                compressionCodecName);
        this.outputPath = requireNonNull(outputPath, "outputPath is null");
        this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
        this.hdfsContext = requireNonNull(hdfsContext, "hdfsContext is null");
        this.metricsConfig = requireNonNull(metricsConfig, "metricsConfig is null");
    }

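    /**
     * Reads the footer of the Parquet file written to {@code outputPath} and converts its
     * statistics into Iceberg {@link Metrics}, running under the session user's identity
     * taken from the {@link HdfsContext}.
     */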
    @Override
    public Metrics getMetrics()
    {
        return hdfsEnvironment.doAs(hdfsContext.getIdentity().getUser(), () ->
                ParquetUtil.fileMetrics(new HdfsInputFile(outputPath, hdfsEnvironment, hdfsContext), metricsConfig));
    }
}
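
For context, the Metrics returned by getMetrics() are typically attached to the Iceberg DataFile that is committed for the written Parquet file. Below is a minimal sketch of that step, assuming the standard Iceberg DataFiles builder API and an unpartitioned table; the class DataFileExample, the method toDataFile, and its parameters (outputPath, fileSizeInBytes, spec) are hypothetical names introduced for illustration and are not part of this connector.

import com.facebook.presto.iceberg.IcebergParquetFileWriter;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DataFiles;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.Metrics;
import org.apache.iceberg.PartitionSpec;

// Hypothetical illustration: how the Metrics produced by IcebergParquetFileWriter are
// usually carried onto the Iceberg DataFile that is added to the table at commit time.
public final class DataFileExample
{
    private DataFileExample() {}

    public static DataFile toDataFile(IcebergParquetFileWriter writer, String outputPath, long fileSizeInBytes, PartitionSpec spec)
    {
        // Column-level statistics (value counts, null counts, lower/upper bounds) read
        // from the written Parquet footer by IcebergParquetFileWriter.getMetrics().
        Metrics metrics = writer.getMetrics();

        // Assumes an unpartitioned spec; a partitioned table would also need withPartition(...).
        return DataFiles.builder(spec)
                .withPath(outputPath)
                .withFormat(FileFormat.PARQUET)
                .withFileSizeInBytes(fileSizeInBytes)
                .withMetrics(metrics)
                .build();
    }
}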