Primary change is HDFFV-11212 - new refs and JNI (#372)

* OESS-98 convert plugin option to FetchContent, add tests

* Fixes for pkcfg files because of plugin option

* OESS-98 fix tools test for plugins

* Keep doxygen comments under 100 chars long - format hint

* Whitespace

* HDFFV-11144 - Reclassify CMake messages

* HDFFV-11099/11100 added help text

* Reworked switch statement to compare string instead

* Fix typo

* Update CDash mode

* Correct name of threadsafe

* Correct option name

* Undo accidental commit

* Note LLVM 10 to 11 format default changes

* Update format plugin

* Undo clang-format version 11 changes

* One more correction

* Update supported platforms

* Revert whitespace changes

* Correct whitespace

* Changes from PR#3

* HDFFV-11213 added option to control gcc10 warnings diagnostics

* HDFFV-11212 Use the new references correctly in JNI utility and tests

* format source

* Fix typo

* Add new test file

* HDFFV-11212 - update test and remove unused arg

* Minor non-space formatting changes

* Use H5I_INVALID_ID instead of "-1"

* source formatting

* add missing testfile, update jni function

* Undo commit of debug code

* remove mislocated file

* Fix h5repack test for handling of fapls and id close

* Update h5diff test files usage text

* HDFFV-11212 add new ref tests for JNI export dataset

* src format update

* Remove blank line typo

* src format typo

* long double requires %Lg

* Another long double format specifier S.B. %Lg

* issue with t128bit test

* Windows issue with h5dump and type.

* Fix review issues

* refactor function nesting and fix error checks

* format fixes

* Remove untested functions and javadoc quiet comments

* Restore TRY block.

* Change string append errors to memory exception

* revert to H5_JNI_FATAL_ERROR - support functions need work

* Add assertion error for h5util functions

* remove duplicate function

* format fix

* Revert HD function error handling

* Update copyright comments
This commit is contained in:
Allen Byrne
2021-02-25 15:12:57 -06:00
committed by GitHub
parent c29e1b9fdf
commit c7ffe683e5
123 changed files with 2671 additions and 1300 deletions

View File

@@ -32,6 +32,7 @@ import java.io.StreamTokenizer;
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;
import org.junit.After;
@@ -47,22 +48,21 @@ import org.junit.rules.TestName;
*/
public class TestH5 {
@Rule public TestName testname = new TestName();
@Before
public void showTestName() {
System.out.print(testname.getMethodName());
}
@After
public void nextTestName() {
System.out.println();
}
private static final String H5_FILE = "testData.h5";
private static final String EXPORT_FILE = "testExport.txt";
private static final String H5_DREG_FILE = "trefer_reg.h5";
private static final String EXPORT_DREG_FILE = "testExportReg.txt";
private static final String H5_AREG_FILE = "trefer_attr.h5";
private static final String EXPORT_AREG_FILE = "testExportAReg.txt";
private static final int DIM_X = 4;
private static final int DIM_Y = 6;
private static final int DIM_BLKS = 36;
private static final int DIM_PNTS = 10;
private static final int DIM_ATTR = 12;
private static final int RANK = 2;
long H5fid = -1;
long H5dsid = -1;
long H5did = -1;
long H5fid = HDF5Constants.H5I_INVALID_HID;
long H5dsid = HDF5Constants.H5I_INVALID_HID;
long H5did = HDF5Constants.H5I_INVALID_HID;
long[] H5dims = { DIM_X, DIM_Y };
private final void _deleteFile(String filename) {
@@ -78,7 +78,7 @@ public class TestH5 {
}
private final long _createDataset(long fid, long dsid, String name, long dapl) {
long did = -1;
long did = HDF5Constants.H5I_INVALID_HID;
try {
did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_STD_I32LE, dsid,
HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
@@ -115,22 +115,64 @@ public class TestH5 {
}
}
public final void _closeH5File() throws HDF5LibraryException {
private final void _closeH5File() {
if (H5did >= 0)
try {H5.H5Dclose(H5did);} catch (Exception ex) {}
if (H5dsid > 0)
try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
if (H5fid > 0)
try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
H5fid = -1;
H5dsid = -1;
H5did = -1;
H5fid = HDF5Constants.H5I_INVALID_HID;
H5dsid = HDF5Constants.H5I_INVALID_HID;
H5did = HDF5Constants.H5I_INVALID_HID;
}
/**
 * Opens the named HDF5 file read-write and the named dataset within it,
 * storing the resulting ids in the H5fid, H5did and H5dsid fields.
 * Fails the current JUnit test if any open step throws or yields an
 * invalid id.
 *
 * @param filename path of the HDF5 file to open
 * @param dsetname name of the dataset to open inside the file
 */
public void _openH5File(String filename, String dsetname) {
    try {
        H5fid = H5.H5Fopen(filename,
                HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
    }
    catch (Throwable err) {
        err.printStackTrace();
        fail("TestH5._openH5file: " + err);
    }
    assertTrue("TestH5._openH5file: H5.H5Fopen: ", H5fid >= 0);
    try {
        H5did = H5.H5Dopen(H5fid, dsetname, HDF5Constants.H5P_DEFAULT);
    }
    catch (Throwable err) {
        err.printStackTrace();
        fail("TestH5._openH5file: " + err);
    }
    assertTrue("TestH5._openH5file: H5.H5Dopen: ", H5did >= 0);
    try {
        H5dsid = H5.H5Dget_space(H5did);
    }
    catch (Throwable err) {
        err.printStackTrace();
        fail("TestH5._openH5file: " + err);
    }
    // Fixed copy-paste error: the message named H5Screate_simple, but the
    // call being verified here is H5Dget_space.
    assertTrue("TestH5._openH5file: H5.H5Dget_space: ", H5dsid > 0);
}
/**
 * Closes any ids this test class still holds open, then removes the
 * default test data file (H5_FILE) from disk.
 */
public final void _deleteH5file() {
_closeH5File();
_deleteFile(H5_FILE);
}
/**
 * JUnit teardown: releases any HDF5 ids a test left open and terminates
 * the test-name line started by verifyCount().
 */
@After
public void closeH5File() throws HDF5LibraryException {
_closeH5File();
System.out.println();
}
/**
 * JUnit setup: asserts that no HDF5 ids leaked from the previous test
 * (the open-id count must be zero), then prints the current test name.
 */
@Before
public void verifyCount()
throws NullPointerException, HDF5Exception {
assertTrue("H5 open ids is 0", H5.getOpenIDCount()==0);
System.out.print(testname.getMethodName());
}
/**
* Test method for {@link hdf.hdf5lib.H5#J2C(int)}.
* NOTE:
@@ -411,4 +453,84 @@ public class TestH5 {
}
_deleteH5file();
}
/**
 * Exports /Dataset1 (region references) from trefer_reg.h5 to a text
 * file via H5export_dataset, reads the numbers back, and verifies every
 * dereferenced value against the expected data.
 */
@Test
public void testH5export_regdataset() {
    int[] dset_data_expect = {66, 69, 72, 75, 78, 81, 96, 99, 102, 105, 108,
        111, 126, 129, 132, 135, 138, 141, 156, 159, 162, 165, 168, 171,
        186, 189, 192, 195, 198, 201, 216, 219, 222, 225, 228, 231,
        207, 66, 252, 48, 84, 96, 12, 14, 213, 99};
    int[] dset_indata = new int[DIM_BLKS+DIM_PNTS];
    String objName = "/Dataset1";

    _openH5File(H5_DREG_FILE, objName);

    try {
        H5.H5export_dataset(EXPORT_DREG_FILE, H5_DREG_FILE, objName, 99);
    }
    catch (HDF5LibraryException err) {
        err.printStackTrace();
        fail("H5export_dataset failed: " + err);
    }

    // try-with-resources closes the reader even if tokenizing throws
    // (the old code only closed it on the success path).
    try (Reader reader = new FileReader(EXPORT_DREG_FILE)) {
        StreamTokenizer streamTokenizer = new StreamTokenizer(reader);
        int indx = 0;
        while (streamTokenizer.nextToken() != StreamTokenizer.TT_EOF) {
            if (streamTokenizer.ttype == StreamTokenizer.TT_NUMBER) {
                dset_indata[indx] = (int)streamTokenizer.nval;
                indx++;
            }
        }
    }
    catch (IOException err) {
        err.printStackTrace();
        fail("read file failed: " + err);
    }

    // Check every expected value; the old loop stopped at DIM_X (4) and
    // silently skipped the remaining 42 values.
    for (int row = 0; row < dset_data_expect.length; row++)
        assertTrue("H5export_dataset: <"+row+">"+dset_indata[row],
                dset_indata[row] == dset_data_expect[row]);
}
/**
 * Exports /Dataset3 (attribute references) from trefer_attr.h5 to a text
 * file via H5export_dataset, reads the numbers back, and verifies every
 * value against the expected data.
 */
@Test
public void testH5export_attrdataset() {
    int[] dset_data_expect = {0, 3, 6, 9, 1, 4, 7, 10, 2, 5, 8, 11};
    int[] dset_indata = new int[DIM_ATTR];
    String objName = "/Dataset3";

    _openH5File(H5_AREG_FILE, objName);

    try {
        H5.H5export_dataset(EXPORT_AREG_FILE, H5_AREG_FILE, objName, 99);
    }
    catch (HDF5LibraryException err) {
        err.printStackTrace();
        fail("H5export_dataset failed: " + err);
    }

    // try-with-resources closes the reader even if tokenizing throws
    // (the old code only closed it on the success path). The unused
    // File and jndx locals were dropped.
    try (Reader reader = new FileReader(EXPORT_AREG_FILE)) {
        StreamTokenizer streamTokenizer = new StreamTokenizer(reader);
        int indx = 0;
        while (streamTokenizer.nextToken() != StreamTokenizer.TT_EOF) {
            if (streamTokenizer.ttype == StreamTokenizer.TT_NUMBER) {
                dset_indata[indx] = (int)streamTokenizer.nval;
                indx++;
            }
        }
    }
    catch (IOException err) {
        err.printStackTrace();
        fail("read file failed: " + err);
    }

    // Check all DIM_ATTR (12) expected values; the old loop stopped at
    // DIM_X (4) and silently skipped the remaining 8.
    for (int row = 0; row < dset_data_expect.length; row++)
        assertTrue("H5export_dataset: <"+row+">"+dset_indata[row],
                dset_indata[row] == dset_data_expect[row]);
}
}