com.sap.hana.datalake.files.utils.DataChunk Maven / Gradle / Ivy
An implementation of org.apache.hadoop.fs.FileSystem targeting SAP HANA Data Lake Files.
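Because the artifact plugs into the standard Hadoop FileSystem API, a client would normally reach it through org.apache.hadoop.fs.FileSystem rather than instantiating connector classes directly. The following is only a minimal sketch of that pattern: the hdlfs:// scheme, the com.sap.hana.datalake.files.HdlfsFileSystem implementation class, and the endpoint URI are assumptions for illustration, so take the actual configuration keys and credential setup from the SAP HANA Data Lake Files documentation.

import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdlfsClientSketch {
  public static void main(final String[] args) throws Exception {
    final Configuration conf = new Configuration();
    // Assumed scheme-to-implementation mapping; consult the SAP documentation for the real keys.
    conf.set("fs.hdlfs.impl", "com.sap.hana.datalake.files.HdlfsFileSystem");

    // Assumed endpoint URI; the file-container host and credentials are deployment-specific.
    try (final FileSystem fs = FileSystem.get(URI.create("hdlfs://my-container.example.com/"), conf);
         final OutputStream out = fs.create(new Path("/tmp/hello.txt"))) {
      out.write("hello".getBytes());
    }
  }
}

The DataChunk helper class from the artifact's utils package is shown below.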
// © 2024 SAP SE or an SAP affiliate company. All rights reserved.
package com.sap.hana.datalake.files.utils;
import org.apache.commons.io.IOUtils;
import com.sap.hana.datalake.files.shaded.org.apache.hadoop.util.Preconditions;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import com.sap.hana.datalake.files.classification.InterfaceAudience;
import com.sap.hana.datalake.files.operations.create.ComposableBufferOutputStream;
/**
 * A fixed-capacity, in-memory chunk of data backed by a {@link ComposableBufferOutputStream}.
 * Writes are truncated to the remaining capacity; once closed, the chunk releases its buffer
 * and rejects further reads and writes.
 */
@InterfaceAudience.Private
public class DataChunk implements Closeable {

  private final int capacity;
  private final String id;

  private ComposableBufferOutputStream dataBuffer;
  private String contentRange;
  private String md5Checksum;

  public static DataChunk newInstance(final int capacity, final String id) {
    return new DataChunk(capacity, id);
  }

  /** Creates a copy of {@code dataChunk} that shares its buffer but carries a new id. */
  public DataChunk(final DataChunk dataChunk, final String id) {
    this.capacity = dataChunk.capacity;
    this.dataBuffer = dataChunk.dataBuffer;
    this.contentRange = dataChunk.contentRange;
    this.md5Checksum = dataChunk.md5Checksum;
    this.id = id;
  }

  public DataChunk(final int capacity, final String id) {
    Preconditions.checkArgument(capacity > 0, "capacity must be > 0");
    Preconditions.checkArgument(!id.isEmpty(), "id must not be empty");

    this.capacity = capacity;
    this.id = id;
    this.dataBuffer = new ComposableBufferOutputStream();
  }

  /** Number of bytes written so far, or 0 if the chunk has been closed. */
  public int getDataSize() {
    return this.isOpen() ? (int) this.dataBuffer.size() : 0;
  }

  /** Number of bytes that can still be written before the chunk is full, or 0 if closed. */
  public int getRemainingCapacity() {
    return this.isOpen() ? this.capacity - this.getDataSize() : 0;
  }

  public int getFullCapacity() {
    return this.capacity;
  }

  /**
   * Writes up to {@code length} bytes from {@code data}, truncated to the remaining capacity.
   *
   * @return the number of bytes actually written
   */
  public synchronized int write(final byte[] data, final int offset, final int length) throws IOException {
    this.checkOpen();

    /* Write no more than the remaining capacity */
    final int written = Math.min(this.getRemainingCapacity(), length);
    this.dataBuffer.write(data, offset, written);

    return written;
  }

  /**
   * Copies bytes from {@code inputStream} until the chunk is full or the stream is exhausted.
   *
   * @return the number of bytes actually written
   */
  public synchronized int write(final InputStream inputStream) throws IOException {
    this.checkOpen();

    final long limit = this.getRemainingCapacity();
    return (int) IOUtils.copyLarge(inputStream, this.dataBuffer, /* inputOffset */ 0L, limit);
  }

  /** Returns a stream over the bytes buffered so far; the chunk must still be open. */
  public InputStream getInputStream() {
    this.checkOpen();
    return this.dataBuffer.toInputStream();
  }

  public String getId() {
    return this.id;
  }

  @Override
  public synchronized void close() throws IOException {
    if (this.dataBuffer != null) {
      this.dataBuffer.close();
      this.dataBuffer = null;
    }
  }

  public boolean isOpen() {
    return this.dataBuffer != null;
  }

  public String getContentRange() {
    return this.contentRange;
  }

  public void setContentRange(final String contentRange) {
    this.contentRange = contentRange;
  }

  public void setMd5Checksum(final String md5Checksum) {
    this.md5Checksum = md5Checksum;
  }

  public String getMd5Checksum() {
    return this.md5Checksum;
  }

  @Override
  public String toString() {
    return "Chunk{" + "capacity=" + this.capacity
        + ", dataSize=" + this.getDataSize()
        + ", contentRange='" + this.contentRange + '\''
        + ", md5Checksum='" + this.md5Checksum + '\''
        + '}';
  }

  private void checkOpen() {
    Preconditions.checkState(this.isOpen(), "Chunk is already closed");
  }

}
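DataChunk is annotated @InterfaceAudience.Private, so it is an internal helper of the connector rather than a public API. Conceptually it acts as a bounded staging buffer: callers fill it up to its capacity, stream the buffered bytes back out (for example as the body of an upload request), and close it to release the buffer. The snippet below only sketches those semantics using the methods shown above; the capacity, chunk id, and payload are made up for illustration.

import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import com.sap.hana.datalake.files.utils.DataChunk;

public class DataChunkUsageSketch {
  public static void main(final String[] args) throws Exception {
    final byte[] payload = "example payload".getBytes(StandardCharsets.UTF_8);

    // A chunk with a 1 KiB capacity; writes beyond the capacity are silently truncated.
    try (final DataChunk chunk = DataChunk.newInstance(1024, "chunk-0001")) {
      final int written = chunk.write(payload, 0, payload.length);
      System.out.println("written=" + written + ", remaining=" + chunk.getRemainingCapacity());

      // Stream the buffered bytes back, e.g. to send them as an upload request body.
      try (final InputStream in = chunk.getInputStream()) {
        System.out.println("first byte: " + in.read());
      }
    } // close() releases the underlying buffer; isOpen() returns false afterwards
  }
}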