
/*****************************************************************************
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of the HDF Java Products distribution. *
* The full copyright notice, including terms governing use, modification, *
* and redistribution, is contained in the files COPYING and Copyright.html. *
* COPYING can be found at the root of the source code distribution tree. *
* Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
****************************************************************************/
package ncsa.hdf.object.h4;
import java.util.List;
import java.util.Vector;
import ncsa.hdf.hdflib.HDFChunkInfo;
import ncsa.hdf.hdflib.HDFCompInfo;
import ncsa.hdf.hdflib.HDFConstants;
import ncsa.hdf.hdflib.HDFDeflateCompInfo;
import ncsa.hdf.hdflib.HDFException;
import ncsa.hdf.hdflib.HDFJPEGCompInfo;
import ncsa.hdf.hdflib.HDFLibrary;
import ncsa.hdf.hdflib.HDFNBITCompInfo;
import ncsa.hdf.hdflib.HDFSKPHUFFCompInfo;
import ncsa.hdf.hdflib.HDFSZIPCompInfo;
import ncsa.hdf.object.Attribute;
import ncsa.hdf.object.Dataset;
import ncsa.hdf.object.Datatype;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.Group;
import ncsa.hdf.object.HObject;
import ncsa.hdf.object.ScalarDS;
/**
* H4SDS describes HDF4 Scientific Data Sets (SDS) and operations performed on
* the SDS. An SDS is a group of data structures used to store and describe
* multidimensional arrays of scientific data.
*
* The data contained in an SDS array has a data type associated with it. The
* standard data types supported by the SD interface include 32- and 64-bit
* floating-point numbers, 8-, 16- and 32-bit signed integers, 8-, 16- and
* 32-bit unsigned integers, and 8-bit characters.
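*
* The following is a minimal sketch of reading the data of an existing SDS;
* the file name and dataset path below are hypothetical placeholders:
*
* H4File file = new H4File("hdf4_test.hdf", FileFormat.READ);
* file.open();
* H4SDS sds = (H4SDS) file.get("/sds_name");
* Object data = sds.read(); // a 1D Java array of the dataset's datatype
* file.close();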
*
* How to Select a Subset
*
* Dataset defines APIs for reading, writing and subsetting a dataset. No function
* is defined to select a subset of a data array explicitly; the selection is done
* in an implicit way.
* Function calls to dimension information such as getSelectedDims() return an array
* of dimension values, which is a reference to the array in the dataset object.
* Changes of the array outside the dataset object directly change the values of
* the array in the dataset object. It is like pointers in C.
*
*
* The following is an example of how to make a subset. In the example, the dataset
* is a 4-dimensional array with size [200][100][50][10], i.e.
* dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10;
* We want to select every other data point in dims[1] and dims[2].
*
int rank = dataset.getRank(); // number of dimensions of the dataset
long[] dims = dataset.getDims(); // the dimension sizes of the dataset
long[] selected = dataset.getSelectedDims(); // the selected sizes of the dataset
long[] start = dataset.getStartDims(); // the offset of the selection
long[] stride = dataset.getStride(); // the stride of the dataset
int[] selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
// select dim1 and dim2 as 2D data for display, and slice through dim0
selectedIndex[0] = 1;
selectedIndex[1] = 2;
selectedIndex[2] = 0;
// reset the selection arrays
for (int i=0; i&lt;rank; i++) {
    start[i] = 0;
    selected[i] = 1;
    stride[i] = 1;
}

// set stride to 2 on dim1 and dim2 so that every other data point is selected
stride[1] = 2;
stride[2] = 2;

// set the selection sizes of dim1 and dim2
selected[1] = dims[1]/stride[1];
selected[2] = dims[2]/stride[2];

// When dataset.read() is called, the selection above is used since the
// dimension arrays are passed by reference. Changes of these arrays
// outside the dataset object directly change the values of the arrays
// in the dataset object.
*
*
* @version 1.1 9/4/2007
* @author Peter X. Cao
*/
public class H4SDS extends ScalarDS
{
/**
 * Serialization version UID.
 */
private static final long serialVersionUID = 2557157923292438696L;
/** Tag for netCDF datasets.
 * The HDF4 library supports netCDF version 2.3.2, and only through the SDS APIs.
 */
// magic number for netCDF: "C(67) D(68) F(70) '\001'"
public static final int DFTAG_NDG_NETCDF = 67687001;
/**
* The list of attributes of this data object. Members of the list are
* instances of Attribute.
*/
private List attributeList;
/**
* The SDS interface identifier obtained from SDstart(filename, access)
*/
private int sdid;
/** the datatype identifier */
private int datatypeID = -1;
private int nAttributes = -1;
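/**
 * Creates an H4SDS object with the specified name and path. This is a
 * convenience constructor equivalent to calling the four-argument
 * constructor with a null object identifier.
 *
 * @param theFile the HDF file.
 * @param name the name of this H4SDS.
 * @param path the full path of this H4SDS.
 */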
public H4SDS(FileFormat theFile, String name, String path)
{
this(theFile, name, path, null);
}
/**
* Creates an H4SDS object with specific name and path.
*
* @param theFile the HDF file.
* @param name the name of this H4SDS.
* @param path the full path of this H4SDS.
* @param oid the unique identifier of this data object.
*/
public H4SDS(
FileFormat theFile,
String name,
String path,
long[] oid)
{
super (theFile, name, path, oid);
unsignedConverted = false;
sdid = ((H4File)getFileFormat()).getSDAccessID();
}
/*
* (non-Javadoc)
* @see ncsa.hdf.object.DataFormat#hasAttribute()
*/
public boolean hasAttribute ()
{
if (nAttributes < 0) {
sdid = ((H4File)getFileFormat()).getSDAccessID();
int id = open();
try { // retrieve attributes of the dataset
String[] objName = {""};
int[] sdInfo = {0, 0, 0};
int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
nAttributes = sdInfo[2];
} catch (Exception ex) {nAttributes=0;}
close(id);
}
return (nAttributes>0);
}
// ***** need to implement from ScalarDS *****
@Override
public byte[][] readPalette(int idx) { return null;}
// ***** need to implement from ScalarDS *****
@Override
public byte[] getPaletteRefs() { return null;}
// implementing Dataset
@Override
public Datatype getDatatype()
{
if (datatype == null)
{
datatype = new H4Datatype(datatypeID);
}
return datatype;
}
// To do: Implementing Dataset
@Override
public Dataset copy(Group pgroup, String dname, long[] dims, Object buff)
throws Exception
{
Dataset dataset = null;
int srcdid=-1, dstdid=-1, tid=-1, size=1, theRank=2;
String path=null;
int[] count=null, start=null;
if (pgroup == null) {
return null;
}
if (dname == null) {
dname = getName();
}
if (pgroup.isRoot()) {
path = HObject.separator;
} else {
path = pgroup.getPath()+pgroup.getName()+HObject.separator;
}
srcdid = open();
if (srcdid < 0) {
return null;
}
if (dims == null)
{
theRank = getRank();
if (theRank <=0) {
init();
}
theRank = getRank();
dims = getDims();
}
else
{
theRank = dims.length;
}
start = new int[theRank];
count = new int[theRank];
for (int i=0; i<theRank; i++) {
    start[i] = 0;
    count[i] = (int)dims[i];
    size *= count[i];
}

// create the new dataset and attach it to the parent group
tid = datatypeID;
dstdid = HDFLibrary.SDcreate(
    ((H4File)pgroup.getFileFormat()).getSDAccessID(),
    dname, tid, theRank, count);
if (dstdid < 0) {
    return null;
}

int ref = HDFLibrary.SDidtoref(dstdid);
if (!pgroup.isRoot()) {
    int vgid = pgroup.open();
    HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
    pgroup.close(vgid);
}

// copy attributes from the source dataset to the new dataset
copyAttribute(srcdid, dstdid);

// read data from the source dataset and write it to the new dataset
Object theData = null;
if (buff == null) {
    theData = read();
} else {
    theData = buff;
}

if (theData != null) {
    HDFLibrary.SDwritedata(dstdid, start, null, count, theData);
}

long[] oid = {HDFConstants.DFTAG_NDG, ref};
dataset = new H4SDS(pgroup.getFileFormat(), dname, path, oid);
pgroup.addToMemberList(dataset);

close(srcdid);
try {
    HDFLibrary.SDendaccess(dstdid);
} catch (HDFException ex) {}

return dataset;
}
// Implementing Dataset
@Override
public byte[] readBytes() throws HDFException
{
byte[] theData = null;
if (rank <= 0) {
    init();
}
int id = open();
if (id < 0) {
    return null;
}
int datasize = 1;
int[] select = new int[rank];
int[] start = new int[rank];
for (int i=0; i<rank; i++) {
    datasize *= (int)selectedDims[i];
    select[i] = (int)selectedDims[i];
    start[i] = (int)startDims[i];
}
int[] stride = null;
if (selectedStride != null) {
    stride = new int[rank];
    for (int i=0; i<rank; i++) {
        stride[i] = (int)selectedStride[i];
    }
}
try {
    int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
    theData = new byte[size];
    HDFLibrary.SDreaddata(id, start, stride, select, theData);
} finally {
    close(id);
}
return theData;
}
// Implementing DataFormat
@Override
public Object read() throws HDFException
{
Object theData = null;
if (rank <= 0) {
    init();
}
int id = open();
if (id < 0) {
    return null;
}
int datasize = 1;
int[] select = new int[rank];
int[] start = new int[rank];
for (int i=0; i<rank; i++) {
    datasize *= (int)selectedDims[i];
    select[i] = (int)selectedDims[i];
    start[i] = (int)startDims[i];
}
int[] stride = null;
if (selectedStride != null) {
    stride = new int[rank];
    for (int i=0; i<rank; i++) {
        stride[i] = (int)selectedStride[i];
    }
}
try {
    theData = H4Datatype.allocateArray(datatypeID, datasize);
    if (theData != null) {
        HDFLibrary.SDreaddata(id, start, stride, select, theData);
        if (isText) {
            theData = byteToString((byte[])theData, select[0]);
        }
    }
} finally {
    close(id);
}
if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
isDefaultImageOrder = false;
else
isDefaultImageOrder = true;
return theData;
}
// Implementing DataFormat
@Override
public void write(Object buf) throws HDFException
{
if (buf == null) {
return;
}
int id = open();
if (id < 0) {
return;
}
int[] select = new int[rank];
int[] start = new int[rank];
for (int i=0; i<rank; i++) {
    select[i] = (int)selectedDims[i];
    start[i] = (int)startDims[i];
}
int[] stride = null;
if (selectedStride != null) {
    stride = new int[rank];
    for (int i=0; i<rank; i++) {
        stride[i] = (int)selectedStride[i];
    }
}
Object tmpData = buf;
try {
    if (isUnsigned && unsignedConverted) {
        tmpData = convertToUnsignedC(buf);
    }
    HDFLibrary.SDwritedata(id, start, stride, select, tmpData);
} finally {
    tmpData = null;
    close(id);
}
}
// Implementing DataFormat
@Override
public List getMetadata() throws HDFException
{
if (attributeList != null) {
    return attributeList;
}
int id = open();
String[] objName = {""};
int[] sdInfo = {0, 0, 0};
try {
    // retrieve attributes of the dataset
    int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
    HDFLibrary.SDgetinfo(id, objName, tmpDim, sdInfo);
    int n = sdInfo[2];
    if ((attributeList == null) && (n>0)) {
        attributeList = new Vector(n, 5);
    }
    boolean b = false;
    String[] attrName = new String[1];
    int[] attrInfo = {0, 0};
    for (int i=0; i<n; i++) {
        attrName[0] = "";
        try {
            b = HDFLibrary.SDattrinfo(id, i, attrName, attrInfo);
            // mask off the litend bit
            attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
        } catch (HDFException ex) {
            b = false;
        }
        if (!b) {
            continue;
        }
        long[] attrDims = {attrInfo[1]};
        Attribute attr = new Attribute(attrName[0], new H4Datatype(attrInfo[0]), attrDims);
        attributeList.add(attr);
        Object buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
        try {
            HDFLibrary.SDreadattr(id, i, buf);
        } catch (HDFException ex) {
            buf = null;
        }
        if (buf != null) {
            if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
                (attrInfo[0] == HDFConstants.DFNT_UCHAR8)) {
                buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
            }
            attr.setValue(buf);
        }
    }
} finally {
    close(id);
}
return attributeList;
}
// Implementing DataFormat
@Override
public void writeMetadata(Object info) throws Exception
{
// only attribute metadata is supported.
if (!(info instanceof Attribute)) {
    return;
}
getFileFormat().writeAttribute(this, (Attribute)info, true);
if (attributeList == null) {
    attributeList = new Vector();
}
attributeList.add(info);
nAttributes = attributeList.size();
}
// Implementing DataFormat
@Override
public void removeMetadata(Object info) throws HDFException
{
// removing attributes is not supported by the HDF4 library
throw new UnsupportedOperationException("Unsupported operation: HDF4 does not support removing SDS attributes.");
}
// Implementing HObject
@Override
public int open()
{
int id = -1;
try {
    int index = 0;
    int tag = (int)oid[0];
    if (tag == H4SDS.DFTAG_NDG_NETCDF) {
        index = (int)oid[1]; // HDFLibrary.SDidtoref() fails for netCDF
    } else {
        index = HDFLibrary.SDreftoindex(sdid, (int)oid[1]);
    }
    id = HDFLibrary.SDselect(sdid, index);
} catch (HDFException ex) {
    id = -1;
}
return id;
}
// Implementing HObject
@Override
public void close(int id)
{
try { HDFLibrary.SDendaccess(id); }
catch (HDFException ex) {}
}
/**
 * Initializes the dataset: retrieves the rank, dimension sizes, datatype
 * and number of attributes from the file. Initialization is done only once.
 */
@Override
public void init()
{
if (rank>0) {
    return; // already called. Initialize only once
}
int id = open();
String[] objName = {""};
String[] dimName = {""};
int[] dimInfo = {0, 0, 0};
int[] sdInfo = {0, 0, 0};
boolean isUnlimited = false;
int[] idims = new int[HDFConstants.MAX_VAR_DIMS];
try {
HDFLibrary.SDgetinfo(id, objName, idims, sdInfo);
// mask off the litend bit
sdInfo[1] = sdInfo[1] & (~HDFConstants.DFNT_LITEND);
nAttributes = sdInfo[2];
rank = sdInfo[0];
if (rank <= 0) {
rank = 1;
idims[0] = 1;
}
isUnlimited = HDFLibrary.SDisrecord(id);
datatypeID = sdInfo[1];
isText = ((datatypeID == HDFConstants.DFNT_CHAR) || (datatypeID == HDFConstants.DFNT_UCHAR8));
//idims = new int[rank];
//HDFLibrary.SDgetinfo(id, objName, idims, sdInfo);
// get the dimension names
try {
dimNames = new String[rank];
for (int i=0; i<rank; i++) {
    int dimid = HDFLibrary.SDgetdimid(id, i);
    HDFLibrary.SDdiminfo(dimid, dimName, dimInfo);
    dimNames[i] = dimName[0];
}
} catch (Exception ex) {}
} catch (HDFException ex) {}
finally {
close(id);
}
dims = new long[rank];
maxDims = new long[rank];
startDims = new long[rank];
selectedDims = new long[rank];
for (int i=0; i<rank; i++) {
startDims[i] = 0;
selectedDims[i] = 1;
dims[i] = maxDims[i] = idims[i];
}
if (isUnlimited) {
maxDims[0] = -1; // the first dimension can grow without limit
}
selectedIndex[0] = 0;
selectedIndex[1] = 1;
selectedIndex[2] = 2;
// select only two dimensions at a time for display
if (rank == 1) {
selectedDims[0] = dims[0];
}
if (rank > 1)
{
selectedDims[0] = dims[0];
if (isText) {
selectedDims[1] = 1;
} else {
selectedDims[1] = dims[1];
}
}
}
// Implementing ScalarDS
@Override
public byte[][] getPalette()
{
return palette;
}
/**
* Creates a new dataset.
* @param name the name of the dataset to create.
* @param pgroup the parent group of the new dataset.
* @param type the datatype of the dataset.
* @param dims the dimension size of the dataset.
* @param maxdims the max dimension size of the dataset.
* @param chunks the chunk size of the dataset.
* @param gzip the level of the gzip compression.
* @param fillValue the fill value of the dataset, or null.
* @param data the array of data values.
* @return the new dataset if successful. Otherwise returns null.
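*
* For example, a 2D 32-bit integer SDS can be created as follows. This is a
* minimal sketch; the dataset name, parent group "pgroup", sizes and data
* values are hypothetical:
*
* long[] dims = {100, 50};
* Datatype dtype = new H4Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
* int[] data = new int[100 * 50];
* H4SDS sds = H4SDS.create("sds_int32", pgroup, dtype, dims, null, null, 0, null, data);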
*/
public static H4SDS create(
String name,
Group pgroup,
Datatype type,
long[] dims,
long[] maxdims,
long[] chunks,
int gzip,
Object fillValue,
Object data) throws Exception
{
H4SDS dataset = null;
if ((pgroup == null) ||
(name == null)||
(dims == null)) {
return null;
}
H4File file = (H4File)pgroup.getFileFormat();
if (file == null) {
return null;
}
String path = HObject.separator;
if (!pgroup.isRoot()) {
path = pgroup.getPath()+pgroup.getName()+HObject.separator;
}
// prepare the dataspace
int tsize = 1;
int rank = dims.length;
int idims[] = new int[rank];
int start[] = new int [rank];
for (int i=0; i<rank; i++) {
    idims[i] = (int)dims[i];
    start[i] = 0;
}
// only the first (slowest-changing) dimension can be set to
// SD_UNLIMITED (0) to make it unlimited.
if ((maxdims != null) && (maxdims[0] <= 0)) {
    idims[0] = 0; // unlimited dimension
}
int[] ichunks = null;
if (chunks != null) {
    ichunks = new int[rank];
    for (int i=0; i<rank; i++) {
        ichunks[i] = (int)chunks[i];
    }
}
// an unlimited dimension cannot be combined with chunking or compression
if ((idims[0] == 0) && ((chunks != null) || (gzip > 0))) {
    throw new HDFException("Unlimited cannot be used with chunking or compression");
}
int sdid, sdsid, vgid;
sdid = (file).getSDAccessID();
// datatype
int tid = type.toNative();
try {
sdsid = HDFLibrary.SDcreate(sdid, name, tid, rank, idims);
// set fill value to zero.
int vsize = HDFLibrary.DFKNTsize(tid);
byte[] fill = new byte[vsize];
for (int i=0; i<vsize; i++) {
    fill[i] = 0;
}
HDFLibrary.SDsetfillvalue(sdsid, fill);
} catch (Exception ex) {
throw (ex);
}
if (sdsid < 0) {
throw (new HDFException("Unable to create the new dataset."));
}
HDFDeflateCompInfo compInfo = null;
if (gzip > 0)
{
// set compression
compInfo = new HDFDeflateCompInfo();
compInfo.level = gzip;
if (chunks == null)
HDFLibrary.SDsetcompress(sdsid, HDFConstants.COMP_CODE_DEFLATE, compInfo);
}
if (chunks != null)
{
// set chunk
HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
int flag = HDFConstants.HDF_CHUNK;
if (gzip > 0) {
flag = HDFConstants.HDF_CHUNK | HDFConstants.HDF_COMP;
chunkInfo = new HDFChunkInfo(ichunks, HDFConstants.COMP_CODE_DEFLATE, compInfo);
}
try {
HDFLibrary.SDsetchunk (sdsid, chunkInfo, flag);
} catch (Throwable err) {
err.printStackTrace();
throw new HDFException("SDsetchunk failed.");
}
}
if ((sdsid > 0) && (data != null))
{
HDFLibrary.SDwritedata(sdsid, start, null, idims, data);
}
int ref = HDFLibrary.SDidtoref(sdsid);
if (!pgroup.isRoot())
{
// add the dataset to the parent group
vgid = pgroup.open();
if (vgid < 0)
{
if (sdsid > 0) {
HDFLibrary.SDendaccess(sdsid);
}
throw (new HDFException("Unable to open the parent group."));
}
HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_NDG, ref);
pgroup.close(vgid);
}
try {
if (sdsid > 0) {
HDFLibrary.SDendaccess(sdsid);
}
} catch (Exception ex) {}
long[] oid = {HDFConstants.DFTAG_NDG, ref};
dataset = new H4SDS(file, name, path, oid);
if (dataset != null) {
pgroup.addToMemberList(dataset);
}
return dataset;
}
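/**
 * Creates a new dataset without specifying a fill value. This overload
 * simply delegates to the nine-argument create() with fillValue set to null.
 *
 * @see #create(String, Group, Datatype, long[], long[], long[], int, Object, Object)
 */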
public static H4SDS create(
String name,
Group pgroup,
Datatype type,
long[] dims,
long[] maxdims,
long[] chunks,
int gzip,
Object data) throws Exception
{
return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
}
/**
 * Copies the attributes from one SDS to another SDS.
 *
 * @param srcdid the identifier of the source SDS
 * @param dstdid the identifier of the destination SDS
 */
private void copyAttribute(int srcdid, int dstdid)
{
try {
String[] objName = {""};
int[] sdInfo = {0, 0, 0};
int[] tmpDim = new int[HDFConstants.MAX_VAR_DIMS];
HDFLibrary.SDgetinfo(srcdid, objName, tmpDim, sdInfo);
int numberOfAttributes = sdInfo[2];
boolean b = false;
String[] attrName = new String[1];
int[] attrInfo = {0, 0};
for (int i=0; i<numberOfAttributes; i++) {
    attrName[0] = "";
    try {
        b = HDFLibrary.SDattrinfo(srcdid, i, attrName, attrInfo);
    } catch (HDFException ex) {
        b = false;
    }
    if (!b) {
        continue;
    }
    // read attribute data from the source dataset
    byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
    HDFLibrary.SDreadattr(srcdid, i, attrBuff);
    // attach the attribute to the destination dataset
    HDFLibrary.SDsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
}
} catch (Exception ex) {}
}
}