Files
hdf5/java/examples/datasets/H5Ex_D_Alloc.java
Allen Byrne c7ffe683e5 Primary change is HDFFV-11212 - new refs and JNI (#372)
* OESS-98 convert plugin option to FetchContent, add tests

* Fixes for pkcfg files because of plugin option

* OESS-98 fix tools test for plugins

* Keep doxygen comments under 100 chars long - format hint

* Whitespace

* HDFFV-11144 - Reclassify CMake messages

* HDFFV-11099/11100 added help text

* Reworked switch statement to compare string instead

* Fix typo

* Update CDash mode

* Correct name of threadsafe

* Correct option name

* Undo accidental commit

* Note LLVM 10 to 11 format default changes

* Update format plugin

* Undo clang-format version 11 changes

* One more correction

* Update supported platforms

* Revert whitespace changes

* Correct whitespace

* Changes from PR#3

* HDFFV-11213 added option to control gcc10 warnings diagnostics

* HDFFV-11212 Use the new references correctly in JNI utility and tests

* format source

* Fix typo

* Add new test file

* HDFFV-11212 - update test and remove unused arg

* Minor non-space formatting changes

* Use H5I_INVALID_ID instead of "-1"

* source formatting

* add missing testfile, update jni function

* Undo commit of debug code

* remove mislocated file

* Fix h5repack test for handling of fapls and id close

* Update h5diff test files usage text

* HDFFV-11212 add new ref tests for JNI export dataset

* src format update

* Remove blank line typo

* src format typo

* long double requires %Lg

* Another long double format specifier S.B. %Lg

* issue with t128bit test

* Windows issue with h5dump and type.

* Fix review issues

* refactor function nesting and fix error checks

* format fixes

* Remove untested functions and javadoc quiet comments

* Restore TRY block.

* Change string append errors to memory exception

* revert to H5_JNI_FATAL_ERROR - support functions need work

* Add assertion error for h5util functions

* remove duplicate function

* format fix

* Revert HD function error handling

* Update copyright comments
2021-02-25 15:12:57 -06:00

300 lines
11 KiB
Java

/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/************************************************************
This example shows how to set the space allocation time
for a dataset. The program first creates two datasets,
one with the default allocation time (late) and one with
early allocation time, and displays whether each has been
allocated and their allocation size. Next, it writes data
to the datasets, and again displays whether each has been
allocated and their allocation size.
************************************************************/
package examples.datasets;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
/**
 * HDF5 example: demonstrates the effect of the dataset space-allocation-time
 * property. Two datasets are created — one with the library default (late)
 * allocation time and one with H5D_ALLOC_TIME_EARLY — and their allocation
 * status and storage size are printed both before and after data is written.
 */
public class H5Ex_D_Alloc {
    // Constants: all private and never reassigned, so declared final.
    private static final String FILENAME = "H5Ex_D_Alloc.h5";
    private static final String DATASETNAME1 = "DS1";
    private static final String DATASETNAME2 = "DS2";
    private static final int DIM_X = 4;
    private static final int DIM_Y = 7;
    private static final int FILLVAL = 99;
    private static final int RANK = 2;

    // Values for the status of space allocation, mirroring the C enum
    // H5D_space_status_t returned by H5Dget_space_status.
    enum H5D_space_status {
        H5D_SPACE_STATUS_ERROR(-1),
        H5D_SPACE_STATUS_NOT_ALLOCATED(0),
        H5D_SPACE_STATUS_PART_ALLOCATED(1),
        H5D_SPACE_STATUS_ALLOCATED(2);

        // Reverse lookup from the native integer code to the enum constant.
        private static final Map<Integer, H5D_space_status> lookup = new HashMap<Integer, H5D_space_status>();
        static {
            for (H5D_space_status s : EnumSet.allOf(H5D_space_status.class))
                lookup.put(s.getCode(), s);
        }

        private final int code;

        H5D_space_status(int space_status) {
            this.code = space_status;
        }

        public int getCode() {
            return this.code;
        }

        public static H5D_space_status get(int code) {
            return lookup.get(code);
        }
    }

    /**
     * Queries and prints the space-allocation status and storage size of one
     * dataset. Extracted helper: the original example repeated this block
     * four times verbatim.
     *
     * @param dataset_id  open dataset identifier (skipped if negative)
     * @param datasetName name used in the printed messages
     */
    private static void reportSpace(long dataset_id, String datasetName) {
        int space_status = 0;
        long storage_size = 0;
        try {
            if (dataset_id >= 0)
                space_status = H5.H5Dget_space_status(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        try {
            if (dataset_id >= 0)
                storage_size = H5.H5Dget_storage_size(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        String the_space = " ";
        if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
            the_space += "not ";
        System.out.println("Space for " + datasetName + " has" + the_space + "been allocated.");
        System.out.println("Storage size for " + datasetName + " is: " + storage_size + " bytes.");
    }

    /**
     * Runs the allocation-time demonstration: creates the file, the two
     * datasets (default/late vs. early allocation), reports their allocation
     * state, writes data, reports again, and releases all HDF5 identifiers.
     */
    private static void allocation() {
        long file_id = HDF5Constants.H5I_INVALID_HID;
        long filespace_id = HDF5Constants.H5I_INVALID_HID;
        long dataset_id1 = HDF5Constants.H5I_INVALID_HID;
        long dataset_id2 = HDF5Constants.H5I_INVALID_HID;
        long dcpl_id = HDF5Constants.H5I_INVALID_HID;
        long[] dims = { DIM_X, DIM_Y };
        int[][] dset_data = new int[DIM_X][DIM_Y];

        // Initialize the dataset with the fill value.
        for (int indx = 0; indx < DIM_X; indx++)
            for (int jndx = 0; jndx < DIM_Y; jndx++)
                dset_data[indx][jndx] = FILLVAL;

        // Create a file using default properties.
        try {
            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create dataspace. Setting maximum size to NULL sets the maximum
        // size to be the current size.
        try {
            filespace_id = H5.H5Screate_simple(RANK, dims, null);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the dataset creation property list.
        try {
            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Set the allocation time to "early". This way we can be sure
        // that reading from the dataset immediately after creation will
        // return the fill value.
        try {
            if (dcpl_id >= 0)
                H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        System.out.println("Creating datasets...");
        System.out.println(DATASETNAME1 + " has allocation time H5D_ALLOC_TIME_LATE");
        System.out.println(DATASETNAME2 + " has allocation time H5D_ALLOC_TIME_EARLY");
        System.out.println();

        // Create the first dataset using the default creation property list
        // (late allocation).
        try {
            if ((file_id >= 0) && (filespace_id >= 0))
                dataset_id1 = H5.H5Dcreate(file_id, DATASETNAME1, HDF5Constants.H5T_NATIVE_INT, filespace_id,
                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the second dataset using the early-allocation property list.
        try {
            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
                dataset_id2 = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_NATIVE_INT, filespace_id,
                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Report space status and storage size before any data is written:
        // only the early-allocation dataset should be allocated here.
        reportSpace(dataset_id1, DATASETNAME1);
        reportSpace(dataset_id2, DATASETNAME2);
        System.out.println();

        System.out.println("Writing data...");
        System.out.println();

        // Write the data to the datasets.
        try {
            if (dataset_id1 >= 0)
                H5.H5Dwrite(dataset_id1, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5P_DEFAULT, dset_data[0]);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        try {
            if (dataset_id2 >= 0)
                H5.H5Dwrite(dataset_id2, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5P_DEFAULT, dset_data[0]);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Report again after writing: both datasets should now be allocated.
        reportSpace(dataset_id1, DATASETNAME1);
        reportSpace(dataset_id2, DATASETNAME2);
        System.out.println();

        // End access to the datasets and release resources used by them.
        try {
            if (dcpl_id >= 0)
                H5.H5Pclose(dcpl_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        try {
            if (dataset_id1 >= 0)
                H5.H5Dclose(dataset_id1);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        try {
            if (dataset_id2 >= 0)
                H5.H5Dclose(dataset_id2);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        try {
            if (filespace_id >= 0)
                H5.H5Sclose(filespace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        H5Ex_D_Alloc.allocation();
    }
}