com.sap.hana.datalake.files.directaccess.s3.S3FileSystem Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of sap-hdlfs Show documentation
An implementation of org.apache.hadoop.fs.FileSystem targeting SAP HANA Data Lake Files.
// © 2024 SAP SE or an SAP affiliate company. All rights reserved.
package com.sap.hana.datalake.files.directaccess.s3;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import com.sap.hana.datalake.files.shaded.org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.util.Progressable;
import org.apache.http.client.HttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URI;
import com.sap.hana.datalake.files.HdlfsBaseFileSystem;
import com.sap.hana.datalake.files.HdlfsConstants;
import com.sap.hana.datalake.files.HdlfsMultipartUpload;
import com.sap.hana.datalake.files.HdlfsOutputStream;
import com.sap.hana.datalake.files.HdlfsRetryPolicies;
import com.sap.hana.datalake.files.classification.InterfaceAudience;
/**
 * {@link HdlfsBaseFileSystem} variant that serves file content through S3 signed URLs
 * ("direct access"): {@link #open} and {@link #create} return streams that talk to S3
 * via the injected {@link HttpClient} instead of going through the WebHDFS data path.
 *
 * <p>Not thread-safety-audited here; configuration fields are written once in
 * {@link #initialize} and read afterwards.
 */
@InterfaceAudience.Private
public class S3FileSystem extends HdlfsBaseFileSystem {

  private static final Logger LOG = LoggerFactory.getLogger(S3FileSystem.class);

  /** Chunk size in bytes handed to {@link S3DirectAccessInputStream} for reads. */
  private int openChunkSize;

  /**
   * Safety margin in seconds passed to both direct-access streams alongside the signed-URL
   * configuration — presumably used to refresh URLs before they expire; confirm in the streams.
   */
  private int openSignedUrlExpirationSafetyMargin;

  /** Chunk size in bytes handed to {@link S3DirectAccessOutputStream} for writes. */
  private int createChunkSize;

  /** Executes the signed-URL HTTP requests; set once in the constructor, never reassigned. */
  private final HttpClient httpClient;

  /**
   * Creates an S3 direct-access file system wrapping the given WebHDFS delegate.
   *
   * @param webHdfsFileSystem delegate used for metadata operations and signed-URL retrieval
   * @param httpClient        client used to execute direct S3 requests
   */
  public S3FileSystem(final WebHdfsFileSystem webHdfsFileSystem, final HttpClient httpClient) {
    super(webHdfsFileSystem);
    this.httpClient = httpClient;
  }

  /**
   * Initializes the base file system, then reads the direct-access tuning knobs
   * (open/create chunk sizes, signed-URL expiration safety margin) from the configuration.
   *
   * @param fsUri file system URI
   * @param conf  configuration supplying the {@code fs.hdlfs.direct-access.*} keys
   * @throws IOException if the superclass initialization fails
   */
  @Override
  public void initialize(final URI fsUri, final Configuration conf) throws IOException {
    super.initialize(fsUri, conf);

    // Read from getConf() (post-super state) rather than the raw `conf` argument.
    final Configuration config = this.getConf();

    this.openChunkSize = config.getInt(HdlfsConstants.FS_HDLFS_DIRECT_ACCESS_OPEN_CHUNK_SIZE_BYTES_KEY,
        HdlfsConstants.FS_HDLFS_DIRECT_ACCESS_OPEN_CHUNK_SIZE_BYTES_DEFAULT);
    this.openSignedUrlExpirationSafetyMargin = config.getInt(HdlfsConstants.FS_HDLFS_DIRECT_ACCESS_OPEN_SIGNED_URL_EXPIRATION_SAFETY_MARGIN_SECONDS_KEY,
        HdlfsConstants.FS_HDLFS_DIRECT_ACCESS_OPEN_SIGNED_URL_EXPIRATION_SAFETY_MARGIN_SECONDS_DEFAULT);
    this.createChunkSize = config.getInt(HdlfsConstants.FS_HDLFS_DIRECT_ACCESS_S3_CREATE_CHUNK_SIZE_BYTES_KEY,
        HdlfsConstants.FS_HDLFS_DIRECT_ACCESS_S3_CREATE_CHUNK_SIZE_BYTES_DEFAULT);
  }

  /**
   * Opens the file at {@code path} for reading through an {@link S3DirectAccessInputStream}.
   *
   * <p>Note: {@code bufferSize} is not used by this implementation — reads are sized by the
   * configured {@link #openChunkSize} instead.
   *
   * @param path       path to open (rewritten to the delegate file system's namespace)
   * @param bufferSize ignored; chunking is driven by configuration
   * @return a positioned-read capable input stream backed by S3 signed URLs
   * @throws IOException if the existence check or stream setup fails
   */
  @Override
  public FSDataInputStream open(final Path path, final int bufferSize) throws IOException {
    LOG.debug("Calling open implementation from {}", S3FileSystem.class);

    final Path delegateFsPath = this.rewritePathToDelegateFs(path);

    // May throw FileNotFoundException before we hand out a stream; depends on configuration.
    this.assertObjectExistsIfNeeded(delegateFsPath);

    return new FSDataInputStream(new S3DirectAccessInputStream(delegateFsPath, this.openChunkSize,
        this.openSignedUrlExpirationSafetyMargin, this.getWebHdfsFileSystem(), this.httpClient));
  }

  /**
   * Creates the file at {@code path} for writing through an {@link S3DirectAccessOutputStream},
   * with a pre-built {@link HdlfsOutputStream.Builder} as the multipart-upload fallback.
   *
   * <p>Note: {@code fsPermission}, {@code bufferSize}, {@code replication}, {@code blockSize}
   * and {@code progress} are accepted for interface compatibility but not used here; write
   * chunking is driven by the configured {@link #createChunkSize}.
   *
   * @param path      target path (rewritten to the delegate file system's namespace)
   * @param overwrite whether an existing object may be replaced
   * @return an output stream writing directly to S3 via signed URLs
   * @throws IOException if the create precondition check or stream setup fails
   */
  @Override
  public FSDataOutputStream create(final Path path, final FsPermission fsPermission, final boolean overwrite,
                                   final int bufferSize, final short replication, final long blockSize,
                                   final Progressable progress) throws IOException {
    final Configuration config = this.getConf();

    // Fails fast (e.g. FileAlreadyExistsException) before any resources are allocated.
    this.checkCreateOperation(path, overwrite);

    final Path delegateFsPath = this.rewritePathToDelegateFs(path);

    final HdlfsOutputStream.Builder prebuiltOutputStream = new HdlfsOutputStream.Builder()
        .withFileSystem(this)
        .withOverwrite(overwrite)
        .withMultipartUploadConfig(new HdlfsMultipartUpload.Config(config))
        .withTargetPath(delegateFsPath)
        .withRetryPolicy(HdlfsRetryPolicies.createDefaultRetryPolicy(config));

    final S3DirectAccessOutputStream s3DirectAccessOutputStream = new S3DirectAccessOutputStream(
        delegateFsPath, overwrite, this.createChunkSize, this.openSignedUrlExpirationSafetyMargin,
        this.getWebHdfsFileSystem(), this.httpClient, prebuiltOutputStream);

    return new FSDataOutputStream(s3DirectAccessOutputStream, /* stats */ null);
  }

}
// © 2024 SAP SE or an SAP affiliate company. All rights reserved.
© 2015 - 2025 Weber Informatics LLC | Privacy Policy