awips2/nativeLib/rary.cots.hdf5/ncsa/hdf/hdf5lib/test/TestInteger.java
/****************************************************************************
 * NCSA HDF
 * National Computational Science Alliance
 * University of Illinois at Urbana-Champaign
 * 605 E. Springfield, Champaign IL 61820
 *
 * For conditions of distribution and use, see the accompanying
 * java-hdf5/COPYING file.
 ****************************************************************************/
package ncsa.hdf.hdf5lib.test;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import ncsa.hdf.hdf5lib.*;
import ncsa.hdf.hdf5lib.exceptions.*;
public class TestInteger implements TestModule
{
private String FILE_ROOT = null;
String dbgInfo;
boolean testResult = false;
public TestInteger()
{
}
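/*
 * Usage sketch (assumes a simple external driver; the harness that actually
 * invokes TestModule implementations is not part of this file):
 *
 *     TestModule t = new TestInteger();
 *     t.setUp("/tmp/h5test/", true);   // directory the .h5 files are written to
 *     t.runTest();
 *     System.out.println(t.getVerboseResult());
 *     boolean ok = t.testPassed();
 *     t.cleanUp(false);
 */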
public boolean setUp( String workingDir, boolean cleanFilesAtStart) {
FILE_ROOT = workingDir;
return true;
}
public boolean cleanUp( boolean saveFiles )
{
try {
H5.H5close();
} catch (HDF5Exception ex) {
dbgInfo += "\nH5close(): FAILED "+ex;
return false;
}
return true;
}
public String getTestName() {
String desc = "Test Integer/int, int/short, int/byte write & read";
return desc;
}
public String getTestDescription() {
String desc = "Test Integer, int, and smaller integer types, writing and reading in both directions.";
return desc;
}
public void runTest() {
testResult = false;
boolean res;
int ntests = 6;
int passed = 0;
dbgInfo = "\n\n========== Test Write & read 2D Integer/int flattened ==========";
if(testIntegerIntFlat()) {
passed++;
}
dbgInfo += "\n\n========== Test Write & read 2D Integer/int Array ==========";
if(testIntegerIntArray()) {
passed++;
}
dbgInfo += "\n\n========== Test Write & read 2D Integer/byte Flat ==========";
if(testIntegerByteFlat()) {
passed++;
}
dbgInfo += "\n\n========== Test Write & read 2D Integer/byte Array ==========";
if(testIntegerByteArray()) {
passed++;
}
dbgInfo += "\n\n========== Test Write & read 2D Integer/short Flat ==========";
if(testIntegerShortFlat()) {
passed++;
}
dbgInfo += "\n\n========== Test Write & read 2D Integer/short Array ==========";
if(testIntegerShortArray()) {
passed++;
}
dbgInfo += "\n\n========== Integer/int/short/byte tests complete: "+passed+" of "+ntests+" passed ==========";
testResult = (passed == ntests);
}
public boolean testPassed()
{
return testResult;
}
public String getVerboseResult()
{
return dbgInfo;
}
/**
* Tests
*/
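/*
 * Minimal sketch of the create/write/read/close round trip that every test
 * method below repeats. Illustrative only; it is not called by the tests and
 * uses only the same ncsa.hdf.hdf5lib calls already used elsewhere in this
 * file.
 */
private void roundTripSketch(String fileName) throws Exception {
    long[] dims = {2, 3};                    /* 2x3 dataset */
    int[] data = {1, 2, 3, 4, 5, 6};         /* values to write */
    int[] back = new int[6];                 /* read-back buffer */
    // create file, dataspace, datatype, and dataset
    int file = H5.H5Fcreate(fileName, HDF5Constants.H5F_ACC_TRUNC,
            HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
    int space = H5.H5Screate_simple(2, dims, null);
    int type = H5.H5Tcopy(H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT));
    int dset = H5.H5Dcreate(file, "sketch", type, space,
            HDF5Constants.H5P_DEFAULT);
    // write and immediately read back through the native int memory type
    H5.H5Dwrite(dset, H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
            HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
            HDF5Constants.H5P_DEFAULT, data);
    H5.H5Dread(dset, H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
            HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
            HDF5Constants.H5P_DEFAULT, back);
    // release handles in reverse order of creation
    H5.H5Dclose(dset);
    H5.H5Tclose(type);
    H5.H5Sclose(space);
    H5.H5Fclose(file);
}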
private boolean testIntegerShortFlat()
{
String fileName = FILE_ROOT+"IntegerShortFlat.h5";
String dataset1Name = "shortArray";
String dataset2Name = "intArray";
String dataset3Name = "IntegerArray";
int nx = 100;
int ny = 200;
int rank = 2;
int status;
int i, j;
int file, dataset1;
int tfile, tdataset1;
int datatype1, dataspace1; /* handles */
long[] dimsf = {100, 200}; /* dataset dimensions */
short[] data1 = new short[nx*ny]; /* data to write */
Integer[] outData1 = new Integer[nx*ny];
int[] outData2 = new int[nx*ny];
// Data and output buffer initialization.
for (j = 0; j < nx; j++) {
for (i = 0; i < ny; i++) {
data1[j*ny+i] = (short)(j*ny+i);
}
}
// Create a new file using H5F_ACC_TRUNC access,
// default file creation properties, and default file
// access properties.
file = -1;
try {
file = H5.H5Fcreate(fileName,
HDF5Constants.H5F_ACC_TRUNC,
HDF5Constants.H5P_DEFAULT,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fcreate() Failed, exception: "+ex;
return false;
}
// Describe the size of the array and create the data space
// for fixed size dataset.
dataspace1 = -1;
try {
dataspace1 = H5.H5Screate_simple(rank, dimsf, null);
} catch (Exception ex) {
dbgInfo += "\nH5Screate_simple: dataspace1 failed. "+ex;
return false;
}
// Define datatype for the data in the file.
datatype1 = -1;
try {
datatype1 = H5.H5Tcopy(H5.J2C(HDF5CDataTypes.JH5T_NATIVE_SHORT));
} catch (Exception ex) {
dbgInfo += "\nH5Tcopy: datatype1 failed. "+ex;
return false;
}
// Create a new dataset within the file using defined dataspace and
// datatype and default dataset creation properties.
try {
dataset1 = H5.H5Dcreate(file, dataset1Name,
datatype1, dataspace1,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dcreate() dataset1 Failed, exception: "+ex;
return false;
}
// Write the data to the dataset using default transfer properties.
try {
status = H5.H5Dwrite(dataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_SHORT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
data1);
} catch (Exception ex) {
dbgInfo += "\nH5Dwrite(): failed dataset1 "+ex;
return false;
}
try {
H5.H5Sclose(dataspace1);
} catch (Exception ex) {
dbgInfo += "\nH5Sclose: dataspace 1 failed. "+ex;
return false;
}
try {
H5.H5Tclose(datatype1);
} catch (Exception ex) {
dbgInfo += "\nH5Tclose: datatype1 failed. "+ex;
return false;
}
try {
H5.H5Dclose(dataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 failed. "+ex;
return false;
}
try {
H5.H5Fclose(file);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
tfile = -1;
try {
tfile = H5.H5Fopen(fileName,
HDF5Constants.H5F_ACC_RDWR,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fopen() second open Failed, exception: "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead short data as Integer";
// // System.out.println("Read short data as Integer");
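// The dataset holds 16-bit shorts; requesting JH5T_NATIVE_INT as the memory
// type makes the library widen each value to a 32-bit int, and the Integer[]
// destination additionally exercises the wrapper's object-array conversion.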
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData1);
} catch (Exception ex) {
// // System.out.println("\nH5Dread: dataset1, outData1 failed. "+ex);System.out.flush();
// //ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData1[j*ny+i].shortValue() != data1[j*ny+i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData1[j*ny+i];
// // System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData1[j*ny+i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 after read 1 failed. "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 after read 1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead short data as int";
// // System.out.println("Read short data as int");
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData2);
} catch (Exception ex) {
// // System.out.println("\nH5Dread: dataset1 as int failed. "+ex);System.out.flush();
// //ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData2[j*ny+i] != data1[j*ny+i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData2[j*ny+i];
// // System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData2[j*ny+i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: tdataset1 read 2 failed. "+ex;
return false;
}
try {
H5.H5Fclose(tfile);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
dbgInfo += "\nIntegerShortFlat(): OK ";
return true;
}
private boolean testIntegerShortArray()
{
String fileName = FILE_ROOT+"IntegerShortArray.h5";
String dataset1Name = "shortArray";
String dataset2Name = "intArray";
String dataset3Name = "IntegerArray";
int nx = 100;
int ny = 200;
int rank = 2;
int status;
int i, j;
int file, dataset1, dataset2;
int tfile, tdataset1, tdataset2;
int datatype1, dataspace1; /* handles */
long[] dimsf = {100, 200}; /* dataset dimensions */
short[][] data1 = new short[nx][ny]; /* data to write */
Integer[][] outData1 = new Integer[nx][ny];
int[][] outData2 = new int[nx][ny];
// Data and output buffer initialization.
for (j = 0; j < nx; j++) {
for (i = 0; i < ny; i++) {
data1[j][i] = (short)(j*nx+i);
}
}
// Create a new file using H5F_ACC_TRUNC access,
// default file creation properties, and default file
// access properties.
file = -1;
try {
file = H5.H5Fcreate(fileName,
HDF5Constants.H5F_ACC_TRUNC,
HDF5Constants.H5P_DEFAULT,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fcreate() Failed, exception: "+ex;
return false;
}
// Describe the size of the array and create the data space
// for fixed size dataset.
dataspace1 = -1;
try {
dataspace1 = H5.H5Screate_simple(rank, dimsf, null);
} catch (Exception ex) {
dbgInfo += "\nH5Screate_simple: dataspace1 failed. "+ex;
return false;
}
// Define datatype for the data in the file.
datatype1 = -1;
try {
datatype1 = H5.H5Tcopy(H5.J2C(HDF5CDataTypes.JH5T_NATIVE_SHORT));
} catch (Exception ex) {
dbgInfo += "\nH5Tcopy: datatype1 failed. "+ex;
return false;
}
// Create a new dataset within the file using defined dataspace and
// datatype and default dataset creation properties.
try {
dataset1 = H5.H5Dcreate(file, dataset1Name,
datatype1, dataspace1,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dcreate() dataset1 Failed, exception: "+ex;
return false;
}
// Write the data to the dataset using default transfer properties.
try {
status = H5.H5Dwrite(dataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_SHORT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
data1);
} catch (Exception ex) {
dbgInfo += "\nH5Dwrite(): failed dataset1 "+ex;
return false;
}
try {
H5.H5Sclose(dataspace1);
} catch (Exception ex) {
dbgInfo += "\nH5Sclose: dataspace 1 failed. "+ex;
return false;
}
try {
H5.H5Tclose(datatype1);
} catch (Exception ex) {
dbgInfo += "\nH5Tclose: datatype1 failed. "+ex;
return false;
}
try {
H5.H5Dclose(dataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 failed. "+ex;
return false;
}
try {
H5.H5Fclose(file);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
tfile = -1;
try {
tfile = H5.H5Fopen(fileName,
HDF5Constants.H5F_ACC_RDWR,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fopen() second open Failed, exception: "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead short data as Integer";
// // System.out.println("Read short data as Integer");
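// Same round trip as the flat test, but through true 2D Java arrays
// (short[][] written, Integer[][]/int[][] read back), which exercises the
// wrapper's multi-dimensional array handling.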
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData1);
} catch (Exception ex) {
// // System.out.println("\nH5Dread: dataset1, outData1 failed. "+ex);System.out.flush();
// //ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData1[j][i].shortValue() != data1[j][i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData1[j][i];
// // System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData1[j][i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 after read 1 failed. "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 after read 1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead short data as int";
// // System.out.println("Read short data as int");
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData2);
} catch (Exception ex) {
// // System.out.println("\nH5Dread: dataset1 as int failed. "+ex);System.out.flush();
// //ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData2[j][i] != data1[j][i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData2[j][i];
// // System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData2[j][i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: tdataset1 read 2 failed. "+ex;
return false;
}
try {
H5.H5Fclose(tfile);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
dbgInfo += "\nIntegerShortArray(): OK ";
return true;
}
private boolean testIntegerIntFlat()
{
String fileName = FILE_ROOT+"IntegerIntFlat.h5";
String dataset1Name = "intArray";
String dataset2Name = "IntegerArray";
int nx = 100;
int ny = 200;
int rank = 2;
int status;
int i, j;
int file, dataset1, dataset2;
int tfile, tdataset1, tdataset2;
int datatype2, dataspace2; /* handles */
int datatype1, dataspace1; /* handles */
int tdatatype1, tdataspace1; /* handles */
int tdatatype2, tdataspace2; /* handles */
long[] dimsf = {100, 200}; /* dataset dimensions */
Integer[] data1 = new Integer[nx*ny]; /* data to write */
Integer[] outData1 = new Integer[nx*ny];
Integer[] outData3 = new Integer[nx*ny];
int[] data2 = new int[nx*ny]; /* data to write */
int[] outData2 = new int[nx*ny];
int[] outData4 = new int[nx*ny];
// Data and output buffer initialization.
for (j = 0; j < nx; j++) {
for (i = 0; i < ny; i++) {
data1[j*ny+i] = new Integer(j*nx+i);
data2[j*ny+i] = (int)(j*nx+i);
}
}
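// data1 (Integer[]) and data2 (int[]) are filled with the same values so the
// object and primitive read paths below can be checked against each other.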
// Create a new file using H5F_ACC_TRUNC access,
// default file creation properties, and default file
// access properties.
file = -1;
try {
file = H5.H5Fcreate(fileName,
HDF5Constants.H5F_ACC_TRUNC,
HDF5Constants.H5P_DEFAULT,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fcreate() Failed, exception: "+ex;
return false;
}
// Describe the size of the array and create the data space
// for fixed size dataset.
dataspace1 = -1;
try {
dataspace1 = H5.H5Screate_simple(rank, dimsf, null);
} catch (Exception ex) {
dbgInfo += "\nH5Screate_simple: dataspace1 failed. "+ex;
return false;
}
// Define datatype for the data in the file.
datatype1 = -1;
try {
datatype1 = H5.H5Tcopy(H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT));
} catch (Exception ex) {
dbgInfo += "\nH5Tcopy: datatype1 failed. "+ex;
return false;
}
// Create a new dataset within the file using defined dataspace and
// datatype and default dataset creation properties.
try {
dataset1 = H5.H5Dcreate(file, dataset1Name,
datatype1, dataspace1,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dcreate() dataset1 Failed, exception: "+ex;
return false;
}
// Write the data to the dataset using default transfer properties.
try {
status = H5.H5Dwrite(dataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
data1);
} catch (Exception ex) {
dbgInfo += "\nH5Dwrite(): failed dataset1 "+ex;
return false;
}
try {
H5.H5Sclose(dataspace1);
} catch (Exception ex) {
dbgInfo += "\nH5Sclose: dataspace 1 failed. "+ex;
return false;
}
try {
H5.H5Tclose(datatype1);
} catch (Exception ex) {
dbgInfo += "\nH5Tclose: datatype1 failed. "+ex;
return false;
}
try {
H5.H5Dclose(dataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 failed. "+ex;
return false;
}
// Describe the size of the array and create the data space
// for fixed size dataset.
dataspace2 = -1;
try {
dataspace2 = H5.H5Screate_simple(rank, dimsf, null);
} catch (Exception ex) {
dbgInfo += "\nH5Screate_simple: dataspace2 failed. "+ex;
return false;
}
// Define datatype for the data in the file.
datatype2 = -1;
try {
datatype2 = H5.H5Tcopy(H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT));
} catch (Exception ex) {
dbgInfo += "\nH5Tcopy: datatype2 failed. "+ex;
return false;
}
// Create a new dataset within the file using defined dataspace and
// datatype and default dataset creation properties.
try {
dataset2 = H5.H5Dcreate(file, dataset2Name,
datatype2, dataspace2,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dcreate() dataset2 Failed, exception: "+ex;
return false;
}
// Write the data to the dataset using default transfer properties.
try {
status = H5.H5Dwrite(dataset2,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
data2);
} catch (Exception ex) {
dbgInfo += "\nH5Dwrite(): dataset2 failed. "+ex;
return false;
}
try {
H5.H5Sclose(dataspace2);
} catch (Exception ex) {
dbgInfo += "\nH5Sclose: dataspace2 failed. "+ex;
return false;
}
try {
H5.H5Tclose(datatype2);
} catch (Exception ex) {
dbgInfo += "\nH5Tclose: datatype2 failed. "+ex;
return false;
}
try {
H5.H5Dclose(dataset2);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset2 failed. "+ex;
return false;
}
try {
H5.H5Fclose(file);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
tfile = -1;
try {
tfile = H5.H5Fopen(fileName,
HDF5Constants.H5F_ACC_RDWR,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fopen() second open Failed, exception: "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead Integer data as Integer";
// System.out.println("Read Integer data as Integer");
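// dataset1 was written from an Integer[] (object) buffer; reading it back
// into another Integer[] checks the boxed path before the int[] read below
// checks the primitive path.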
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData1);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset1, outData1 failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (!outData1[j*ny+i].equals(data1[j*ny+i])) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData1[j*ny+i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData1[j*ny+i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 after read 1 failed. "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 after read 1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead Integer data as int";
// System.out.println("Read Integer data as int");
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData2);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset1 as int failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData2[j*ny+i] != data1[j*ny+i].intValue()) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData2[j*ny+i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData2[j*ny+i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: tdataset1 read 2 failed. "+ex;
return false;
}
try {
tdataset2 = H5.H5Dopen(tfile, dataset2Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() Failed, dataset2 read 1 exception: "+ex;
return false;
}
dbgInfo += "\nRead int data as int";
// System.out.println("Read int data as int");
try {
status = H5.H5Dread(tdataset2,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData4);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset 2 read 1 failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData4[j*ny+i] != data2[j*ny+i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data2[j*ny+i]+" read back as "+outData4[j*ny+i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data2[j*ny+i]+" read back as "+outData4[j*ny+i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset2);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset 2 read 1 failed. "+ex;
return false;
}
try {
tdataset2 = H5.H5Dopen(tfile, dataset2Name);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Dopen() dataset2 read 2 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead int data as Integer";
// System.out.println("Read int data as Integer");
try {
status = H5.H5Dread(tdataset2,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData3);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset2 read 2 failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: dataset 2 read 2 failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData3[j*ny+i].intValue() != data2[j*ny+i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data2[j*ny+i]+" read back as "+outData3[j*ny+i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data2[j*ny+i]+" read back as "+outData3[j*ny+i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset2);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset 2 read 2 failed. "+ex;
return false;
}
try {
H5.H5Fclose(tfile);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
dbgInfo += "\nIntegerIntFlat(): OK ";
return true;
}
private boolean testIntegerIntArray()
{
String fileName = FILE_ROOT+"IntegerIntArray.h5";
String dataset1Name = "intArray";
String dataset2Name = "IntegerArray";
int nx = 100;
int ny = 200;
int rank = 2;
int status;
int i, j;
int file, dataset1, dataset2;
int tfile, tdataset1, tdataset2;
int datatype2, dataspace2; /* handles */
int datatype1, dataspace1; /* handles */
int tdatatype1, tdataspace1; /* handles */
int tdatatype2, tdataspace2; /* handles */
long[] dimsf = {100, 200}; /* dataset dimensions */
Integer[][] data1 = new Integer[nx][ny]; /* data to write */
Integer[][] outData1 = new Integer[nx][ny];
Integer[][] outData3 = new Integer[nx][ny];
int[][] data2 = new int[nx][ny]; /* data to write */
int[][] outData2 = new int[nx][ny];
int[][] outData4 = new int[nx][ny];
// Data and output buffer initialization.
for (j = 0; j < nx; j++) {
for (i = 0; i < ny; i++) {
data1[j][i] = new Integer(j*nx+i);
data2[j][i] = (int)(j*nx+i);
}
}
// Create a new file using H5F_ACC_TRUNC access,
// default file creation properties, and default file
// access properties.
file = -1;
try {
file = H5.H5Fcreate(fileName,
HDF5Constants.H5F_ACC_TRUNC,
HDF5Constants.H5P_DEFAULT,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fcreate() Failed, exception: "+ex;
return false;
}
// Describe the size of the array and create the data space
// for fixed size dataset.
dataspace1 = -1;
try {
dataspace1 = H5.H5Screate_simple(rank, dimsf, null);
} catch (Exception ex) {
dbgInfo += "\nH5Screate_simple: dataspace1 failed. "+ex;
return false;
}
// Define datatype for the data in the file.
datatype1 = -1;
try {
datatype1 = H5.H5Tcopy(H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT));
} catch (Exception ex) {
dbgInfo += "\nH5Tcopy: datatype1 failed. "+ex;
return false;
}
// Create a new dataset within the file using defined dataspace and
// datatype and default dataset creation properties.
try {
dataset1 = H5.H5Dcreate(file, dataset1Name,
datatype1, dataspace1,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dcreate() dataset1 Failed, exception: "+ex;
return false;
}
// Write the data to the dataset using default transfer properties.
try {
status = H5.H5Dwrite(dataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
data1);
} catch (Exception ex) {
dbgInfo += "\nH5Dwrite(): failed dataset1 "+ex;
return false;
}
try {
H5.H5Sclose(dataspace1);
} catch (Exception ex) {
dbgInfo += "\nH5Sclose: dataspace 1 failed. "+ex;
return false;
}
try {
H5.H5Tclose(datatype1);
} catch (Exception ex) {
dbgInfo += "\nH5Tclose: datatype1 failed. "+ex;
return false;
}
try {
H5.H5Dclose(dataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 failed. "+ex;
return false;
}
// Describe the size of the array and create the data space
// for fixed size dataset.
dataspace2 = -1;
try {
dataspace2 = H5.H5Screate_simple(rank, dimsf, null);
} catch (Exception ex) {
dbgInfo += "\nH5Screate_simple: dataspace2 failed. "+ex;
return false;
}
// Define datatype for the data in the file.
datatype2 = -1;
try {
datatype2 = H5.H5Tcopy(H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT));
} catch (Exception ex) {
dbgInfo += "\nH5Tcopy: datatype2 failed. "+ex;
return false;
}
// Create a new dataset within the file using defined dataspace and
// datatype and default dataset creation properties.
try {
dataset2 = H5.H5Dcreate(file, dataset2Name,
datatype2, dataspace2,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dcreate() dataset2 Failed, exception: "+ex;
return false;
}
// Write the data to the dataset using default transfer properties.
try {
status = H5.H5Dwrite(dataset2,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
data2);
} catch (Exception ex) {
dbgInfo += "\nH5Dwrite(): dataset2 failed. "+ex;
return false;
}
try {
H5.H5Sclose(dataspace2);
} catch (Exception ex) {
dbgInfo += "\nH5Sclose: dataspace2 failed. "+ex;
return false;
}
try {
H5.H5Tclose(datatype2);
} catch (Exception ex) {
dbgInfo += "\nH5Tclose: datatype2 failed. "+ex;
return false;
}
try {
H5.H5Dclose(dataset2);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset2 failed. "+ex;
return false;
}
try {
H5.H5Fclose(file);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
tfile = -1;
try {
tfile = H5.H5Fopen(fileName,
HDF5Constants.H5F_ACC_RDWR,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fopen() second open Failed, exception: "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead Integer data as Integer";
// System.out.println("Read Integer data as Integer");
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData1);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset1, outData1 failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (!outData1[j][i].equals(data1[j][i])) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData1[j][i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData1[j][i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 after read 1 failed. "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 after read 1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead Integer data as int";
// System.out.println("Read Integer data as int");
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData2);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset1 as int failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData2[j][i] != data1[j][i].intValue()) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData2[j][i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData2[j][i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: tdataset1 read 2 failed. "+ex;
return false;
}
try {
tdataset2 = H5.H5Dopen(tfile, dataset2Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() Failed, dataset2 read 1 exception: "+ex;
return false;
}
dbgInfo += "\nRead int data as int";
// System.out.println("Read int data as int");
try {
status = H5.H5Dread(tdataset2,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData4);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset 2 read 1 failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData4[j][i] != data2[j][i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data2[j][i]+" read back as "+outData4[j][i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data2[j][i]+" read back as "+outData4[j][i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset2);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset 2 read 1 failed. "+ex;
return false;
}
try {
tdataset2 = H5.H5Dopen(tfile, dataset2Name);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Dopen() dataset2 read 2 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead int data as Integer";
// System.out.println("Read int data as Integer");
try {
status = H5.H5Dread(tdataset2,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData3);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset2 read 2 failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: dataset 2 read 2 failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData3[j][i].intValue() != data2[j][i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data2[j][i]+" read back as "+outData3[j][i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data2[j][i]+" read back as "+outData3[j][i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset2);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset 2 read 2 failed. "+ex;
return false;
}
try {
H5.H5Fclose(tfile);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
dbgInfo += "\nIntegerIntArray(): OK ";
return true;
}
private boolean testIntegerByteFlat()
{
String fileName = FILE_ROOT+"IntegerByteFlat.h5";
String dataset1Name = "byteArray";
String dataset2Name = "intArray";
String dataset3Name = "IntegerArray";
int nx = 100;
int ny = 200;
int rank = 2;
int status;
int i, j;
int file, dataset1, dataset2;
int tfile, tdataset1, tdataset2;
int datatype1, dataspace1; /* handles */
long[] dimsf = {100, 200}; /* dataset dimensions */
byte[] data1 = new byte[nx*ny]; /* data to write */
Integer[] outData1 = new Integer[nx*ny];
int[] outData2 = new int[nx*ny];
// Data and output buffer initialization.
for (j = 0; j < nx; j++) {
for (i = 0; i < ny; i++) {
data1[j*ny+i] = (byte)(j+i);
}
}
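// (byte)(j+i) wraps once j+i exceeds 127; the wrapped values are still fine
// for a write/read equality check.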
// Create a new file using H5F_ACC_TRUNC access,
// default file creation properties, and default file
// access properties.
file = -1;
try {
file = H5.H5Fcreate(fileName,
HDF5Constants.H5F_ACC_TRUNC,
HDF5Constants.H5P_DEFAULT,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fcreate() Failed, exception: "+ex;
return false;
}
// Describe the size of the array and create the data space
// for fixed size dataset.
dataspace1 = -1;
try {
dataspace1 = H5.H5Screate_simple(rank, dimsf, null);
} catch (Exception ex) {
dbgInfo += "\nH5Screate_simple: dataspace1 failed. "+ex;
return false;
}
// Define datatype for the data in the file.
datatype1 = -1;
try {
datatype1 = H5.H5Tcopy(H5.J2C(HDF5CDataTypes.JH5T_STD_I8BE));
} catch (Exception ex) {
dbgInfo += "\nH5Tcopy: datatype1 failed. "+ex;
return false;
}
// Create a new dataset within the file using defined dataspace and
// datatype and default dataset creation properties.
try {
dataset1 = H5.H5Dcreate(file, dataset1Name,
datatype1, dataspace1,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dcreate() dataset1 Failed, exception: "+ex;
return false;
}
// Write the data to the dataset using default transfer properties.
try {
status = H5.H5Dwrite(dataset1,
H5.J2C(HDF5CDataTypes.JH5T_STD_I8BE),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
data1);
} catch (Exception ex) {
dbgInfo += "\nH5Dwrite(): failed dataset1 "+ex;
return false;
}
try {
H5.H5Sclose(dataspace1);
} catch (Exception ex) {
dbgInfo += "\nH5Sclose: dataspace 1 failed. "+ex;
return false;
}
try {
H5.H5Tclose(datatype1);
} catch (Exception ex) {
dbgInfo += "\nH5Tclose: datatype1 failed. "+ex;
return false;
}
try {
H5.H5Dclose(dataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 failed. "+ex;
return false;
}
try {
H5.H5Fclose(file);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
tfile = -1;
try {
tfile = H5.H5Fopen(fileName,
HDF5Constants.H5F_ACC_RDWR,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fopen() second open Failed, exception: "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead byte data as Integer";
// System.out.println("Read byte data as Integer");
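// The stored data is 8-bit signed (I8BE); reading through JH5T_NATIVE_INT
// sign-extends each byte to an int, so the check below compares byteValue()
// against the original byte rather than the widened int.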
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData1);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset1, outData1 failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData1[j*ny+i].byteValue() != data1[j*ny+i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData1[j*ny+i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData1[j*ny+i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 after read 1 failed. "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 after read 1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead byte data as int";
// System.out.println("Read byte data as int");
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData2);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset1 as int failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData2[j*ny+i] != data1[j*ny+i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData2[j*ny+i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j*ny+i]+" read back as "+outData2[j*ny+i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: tdataset1 read 2 failed. "+ex;
return false;
}
try {
H5.H5Fclose(tfile);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
dbgInfo += "\nIntegerByteFlat(): OK ";
return true;
}
private boolean testIntegerByteArray()
{
String fileName = FILE_ROOT+"IntegerByteArray.h5";
String dataset1Name = "byteArray";
String dataset2Name = "intArray";
String dataset3Name = "IntegerArray";
int nx = 100;
int ny = 200;
int rank = 2;
int status;
int i, j;
int file, dataset1, dataset2;
int tfile, tdataset1, tdataset2;
int datatype2, dataspace2; /* handles */
int datatype1, dataspace1; /* handles */
int datatype3, dataspace3; /* handles */
long[] dimsf = {100, 200}; /* dataset dimensions */
byte[][] data1 = new byte[nx][ny]; /* data to write */
Integer[][] outData1 = new Integer[nx][ny];
int[][] outData2 = new int[nx][ny];
// Data and output buffer initialization.
for (j = 0; j < nx; j++) {
for (i = 0; i < ny; i++) {
data1[j][i] = (byte)(j+i);
}
}
// Create a new file using H5F_ACC_TRUNC access,
// default file creation properties, and default file
// access properties.
file = -1;
try {
file = H5.H5Fcreate(fileName,
HDF5Constants.H5F_ACC_TRUNC,
HDF5Constants.H5P_DEFAULT,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fcreate() Failed, exception: "+ex;
return false;
}
// Describe the size of the array and create the data space
// for fixed size dataset.
dataspace1 = -1;
try {
dataspace1 = H5.H5Screate_simple(rank, dimsf, null);
} catch (Exception ex) {
dbgInfo += "\nH5Screate_simple: dataspace1 failed. "+ex;
return false;
}
// Define datatype for the data in the file.
datatype1 = -1;
try {
datatype1 = H5.H5Tcopy(H5.J2C(HDF5CDataTypes.JH5T_STD_I8BE));
} catch (Exception ex) {
dbgInfo += "\nH5Tcopy: datatype1 failed. "+ex;
return false;
}
// Create a new dataset within the file using defined dataspace and
// datatype and default dataset creation properties.
try {
dataset1 = H5.H5Dcreate(file, dataset1Name,
datatype1, dataspace1,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dcreate() dataset1 Failed, exception: "+ex;
return false;
}
// Write the data to the dataset using default transfer properties.
try {
status = H5.H5Dwrite(dataset1,
H5.J2C(HDF5CDataTypes.JH5T_STD_I8BE),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
data1);
} catch (Exception ex) {
dbgInfo += "\nH5Dwrite(): failed dataset1 "+ex;
return false;
}
try {
H5.H5Sclose(dataspace1);
} catch (Exception ex) {
dbgInfo += "\nH5Sclose: dataspace 1 failed. "+ex;
return false;
}
try {
H5.H5Tclose(datatype1);
} catch (Exception ex) {
dbgInfo += "\nH5Tclose: datatype1 failed. "+ex;
return false;
}
try {
H5.H5Dclose(dataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 failed. "+ex;
return false;
}
try {
H5.H5Fclose(file);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
tfile = -1;
try {
tfile = H5.H5Fopen(fileName,
HDF5Constants.H5F_ACC_RDWR,
HDF5Constants.H5P_DEFAULT);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Fopen() second open Failed, exception: "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead byte data as Integer";
// System.out.println("Read byte data as Integer");
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData1);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset1, outData1 failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData1[j][i].byteValue() != data1[j][i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData1[j][i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData1[j][i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: dataset1 after read 1 failed. "+ex;
return false;
}
try {
tdataset1 = H5.H5Dopen(tfile, dataset1Name);
} catch (HDF5Exception ex) {
dbgInfo += "\n\nH5Dopen() dataset1 after read 1 Failed, exception: "+ex;
return false;
}
dbgInfo += "\nRead byte data as int";
// System.out.println("Read byte data as int");
try {
status = H5.H5Dread(tdataset1,
H5.J2C(HDF5CDataTypes.JH5T_NATIVE_INT),
HDF5Constants.H5S_ALL,
HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT,
outData2);
} catch (Exception ex) {
// System.out.println("\nH5Dread: dataset1 as int failed. "+ex);System.out.flush();
//ex.printStackTrace();
dbgInfo += "\nH5Dread: failed. "+ex;
return false;
}
for (j = 0; j < nx; j++)
{
for (i = 0; i < ny; i++) {
if (outData2[j][i] != data1[j][i]) {
dbgInfo += "\n* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData2[j][i];
// System.out.println("* bad data at: ["+i+"]["+j+"] original: "+data1[j][i]+" read back as "+outData2[j][i]);System.out.flush();
}
}
}
try {
H5.H5Dclose(tdataset1);
} catch (Exception ex) {
dbgInfo += "\nH5Dclose: tdataset1 read 2 failed. "+ex;
return false;
}
try {
H5.H5Fclose(tfile);
} catch (HDF5Exception ex) {
dbgInfo += "\nH5Fclose(): FAILED "+ex;
return false;
}
dbgInfo += "\nIntegerByteArray(): OK ";
return true;
}
}