// awips2/nativeLib/rary.cots.hdf5/examples/datasets/H5Ex_D_Compact.java
/************************************************************
This example shows how to read from and write to a compact
dataset. The program first writes integers to a compact
dataset with dataspace dimensions of DIM_X x DIM_Y, then
closes the file. Next, it reopens the file, reads back
the data, and outputs it to the screen.
This file is intended for use with HDF5 Library version 1.6
************************************************************/
package datasets;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Compact {
private static String FILENAME = "h5ex_d_compact.h5";
private static String DATASETNAME = "DS1";
private static final int DIM_X = 4;
private static final int DIM_Y = 7;
private static final int RANK = 2;
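// The dataset is a two-dimensional (RANK = 2) array of DIM_X rows by DIM_Y columns.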
// Dataset storage layout values (mirrors the H5D_layout_t enumeration in the HDF5 library)
enum H5D_layout {
H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_NLAYOUTS(3);
private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
static {
for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
lookup.put(s.getCode(), s);
}
private int code;
H5D_layout(int layout_type) {
this.code = layout_type;
}
public int getCode() {
return this.code;
}
public static H5D_layout get(int code) {
return lookup.get(code);
}
}
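// The static lookup map lets the integer returned by H5Pget_layout be mapped back to an
// H5D_layout constant; readCompact uses this when printing the storage layout.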
private static void writeCompact() {
int file_id = -1;
int filespace_id = -1;
int dataset_id = -1;
int dcpl_id = -1;
long[] dims = { DIM_X, DIM_Y };
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data.
for (int indx = 0; indx < DIM_X; indx++)
for (int jndx = 0; jndx < DIM_Y; jndx++)
dset_data[indx][jndx] = indx * jndx - jndx;
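// Each element is indx * jndx - jndx = jndx * (indx - 1): the first row counts down from 0,
// the second row is all zeros, and the remaining rows count up in steps of 1 and 2.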
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC,
HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Create dataspace. Setting maximum size to NULL sets the maximum
// size to be the current size.
try {
filespace_id = H5.H5Screate_simple(RANK, dims, null);
}
catch (Exception e) {
e.printStackTrace();
}
// Create the dataset creation property list.
try {
dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
}
catch (Exception e) {
e.printStackTrace();
}
// Set the layout to compact.
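// With compact layout the raw data is stored in the dataset's object header, so the data
// must fit within HDF5's 64 KB compact-storage limit; the 4 x 7 array of 32-bit integers
// here (112 bytes) is well under that.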
try {
if (dcpl_id >= 0)
H5.H5Pset_layout(dcpl_id, H5D_layout.H5D_COMPACT.getCode());
}
catch (Exception e) {
e.printStackTrace();
}
// Create the dataset using the compact dataset creation property list.
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME,
HDF5Constants.H5T_STD_I32LE, filespace_id, dcpl_id);
}
catch (Exception e) {
e.printStackTrace();
}
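// Note: the five-argument H5Dcreate call above follows the HDF5 1.6-era Java bindings
// mentioned in the file header; later (1.8+) bindings take additional link-creation and
// dataset-access property list arguments.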
// Write the data to the dataset.
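// H5S_ALL for both the memory and file dataspace selects the entire dataset, and
// H5P_DEFAULT selects the default transfer properties.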
try {
if (dataset_id >= 0)
H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT,
HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
}
// End access to the property list, dataset, and dataspace, and release their resources.
try {
if (dcpl_id >= 0)
H5.H5Pclose(dcpl_id);
}
catch (Exception e) {
e.printStackTrace();
}
try {
if (dataset_id >= 0)
H5.H5Dclose(dataset_id);
}
catch (Exception e) {
e.printStackTrace();
}
try {
if (filespace_id >= 0)
H5.H5Sclose(filespace_id);
}
catch (Exception e) {
e.printStackTrace();
}
// Close the file.
try {
if (file_id >= 0)
H5.H5Fclose(file_id);
}
catch (Exception e) {
e.printStackTrace();
}
}
private static void readCompact() {
int file_id = -1;
int dataset_id = -1;
int dcpl_id = -1;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open file and dataset using the default properties.
try {
file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY,
HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Open an existing dataset.
try {
if (file_id >= 0)
dataset_id = H5.H5Dopen(file_id, DATASETNAME);
}
catch (Exception e) {
e.printStackTrace();
}
// Retrieve the dataset creation property list.
try {
if (dataset_id >= 0)
dcpl_id = H5.H5Dget_create_plist(dataset_id);
}
catch (Exception e) {
e.printStackTrace();
}
// Print the storage layout.
try {
if (dcpl_id >= 0) {
int layout_type = H5.H5Pget_layout(dcpl_id);
System.out.print("Storage layout for " + DATASETNAME + " is: ");
switch (H5D_layout.get(layout_type)) {
case H5D_COMPACT:
System.out.println("H5D_COMPACT");
break;
case H5D_CONTIGUOUS:
System.out.println("H5D_CONTIGUOUS");
break;
case H5D_CHUNKED:
System.out.println("H5D_CHUNKED");
break;
}
System.out.println();
}
}
catch (Exception e) {
e.printStackTrace();
}
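// Since the dataset was created with a compact layout in writeCompact, the switch above
// should print H5D_COMPACT.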
// Read the data using the default properties.
try {
if (dataset_id >= 0)
H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT,
HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
}
// Output the data to the screen.
System.out.println("Data for " + DATASETNAME + " is: ");
for (int indx = 0; indx < DIM_X; indx++) {
System.out.print(" [ ");
for (int jndx = 0; jndx < DIM_Y; jndx++)
System.out.print(dset_data[indx][jndx] + " ");
System.out.println("]");
}
System.out.println();
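// Given the initialization formula jndx * (indx - 1), the expected rows are
// [ 0 -1 -2 -3 -4 -5 -6 ], [ 0 0 0 0 0 0 0 ], [ 0 1 2 3 4 5 6 ] and [ 0 2 4 6 8 10 12 ].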
// End access to the property list and dataset, and release their resources.
try {
if (dcpl_id >= 0)
H5.H5Pclose(dcpl_id);
}
catch (Exception e) {
e.printStackTrace();
}
try {
if (dataset_id >= 0)
H5.H5Dclose(dataset_id);
}
catch (Exception e) {
e.printStackTrace();
}
// Close the file.
try {
if (file_id >= 0)
H5.H5Fclose(file_id);
}
catch (Exception e) {
e.printStackTrace();
}
}
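// Write the compact dataset to a new file, then reopen the file and read it back.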
public static void main(String[] args) {
H5Ex_D_Compact.writeCompact();
H5Ex_D_Compact.readCompact();
}
}