develop Merge doxygen from 1.12 branch (#2095)

This commit is contained in:
Allen Byrne
2022-09-14 15:44:24 -05:00
committed by GitHub
parent dcf3b54b6e
commit 45178c87a3
322 changed files with 39301 additions and 15724 deletions

View File

@@ -12,6 +12,11 @@ if (POLICY CMP0083)
cmake_policy (SET CMP0083 NEW)
endif ()
# Avoid warning about DOWNLOAD_EXTRACT_TIMESTAMP in CMake 3.24:
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.24.0")
cmake_policy(SET CMP0135 NEW)
endif()
#-----------------------------------------------------------------------------
# Instructions for use : Normal Build
#

View File

@@ -12,7 +12,7 @@
## This file should be placed in the root directory of your project.
## Then modify the CMakeLists.txt file in the root directory of your
## project to incorporate the testing dashboard.
## # The following are required to use Dart and the CDash dashboard.
## # The following are required to use Dart and the CDash dashboard
## ENABLE_TESTING()
## INCLUDE(CTest)
set (CTEST_PROJECT_NAME "HDF5")

View File

@@ -1448,6 +1448,7 @@ function(create_javadoc _target)
add_custom_target(${_target}_javadoc ALL
COMMAND ${Java_JAVADOC_EXECUTABLE}
-Xdoclint:none
${_javadoc_options}
${_javadoc_files}
${_javadoc_packages}

View File

@@ -1213,6 +1213,7 @@ if test "X$HDF5_DOXYGEN" = "Xyes"; then
AC_SUBST([DOXYGEN_PACKAGE])
AC_SUBST([DOXYGEN_VERSION_STRING])
AC_SUBST([DOXYGEN_DIR])
AC_SUBST([DOXYGEN_INCLUDE_ALIASES])
AC_SUBST([DOXYGEN_PROJECT_LOGO])
AC_SUBST([DOXYGEN_PROJECT_BRIEF])
@@ -1237,6 +1238,7 @@ if test "X$HDF5_DOXYGEN" = "Xyes"; then
# SRCDIR Environment variables used inside doxygen macro for the source location:
DOXYGEN_PACKAGE=${PACKAGE_NAME}
DOXYGEN_VERSION_STRING=${PACKAGE_VERSION}
DOXYGEN_DIR='$(SRCDIR)/doxygen'
DOXYGEN_INCLUDE_ALIASES='$(SRCDIR)/doxygen/aliases'
DOXYGEN_PROJECT_LOGO='$(SRCDIR)/doxygen/img/HDFG-logo.png'
DOXYGEN_PROJECT_BRIEF=''
@@ -1249,14 +1251,14 @@ if test "X$HDF5_DOXYGEN" = "Xyes"; then
DOXYGEN_HTML_HEADER='$(SRCDIR)/doxygen/hdf5_header.html'
DOXYGEN_HTML_FOOTER='$(SRCDIR)/doxygen/hdf5_footer.html'
DOXYGEN_HTML_EXTRA_STYLESHEET='$(SRCDIR)/doxygen/hdf5doxy.css'
DOXYGEN_HTML_EXTRA_FILES='$(SRCDIR)/doxygen/hdf5_navtree_hacks.js $(SRCDIR)/doxygen/img/FF-IH_FileGroup.gif $(SRCDIR)/doxygen/img/FF-IH_FileObject.gif $(SRCDIR)/doxygen/img/FileFormatSpecChunkDiagram.jpg $(SRCDIR)/doxygen/img/ftv2node.png $(SRCDIR)/doxygen/img/ftv2pnode.png $(SRCDIR)/doxygen/img/HDFG-logo.png $(SRCDIR)/doxygen/img/IOFlow2.gif $(SRCDIR)/doxygen/img/IOFlow3.gif $(SRCDIR)/doxygen/img/IOFlow.gif $(SRCDIR)/doxygen/img/PaletteExample1.gif $(SRCDIR)/doxygen/img/Palettes.fm.anc.gif'
DOXYGEN_HTML_EXTRA_FILES='$(SRCDIR)/doxygen/hdf5_navtree_hacks.js'
DOXYGEN_TAG_FILE=hdf5.tag
DOXYGEN_SERVER_BASED_SEARCH=NO
DOXYGEN_EXTERNAL_SEARCH=NO
DOXYGEN_SEARCHENGINE_URL=
DOXYGEN_STRIP_FROM_PATH='$(SRCDIR)'
DOXYGEN_STRIP_FROM_INC_PATH='$(SRCDIR)'
DOXYGEN_PREDEFINED='H5_HAVE_DIRECT H5_HAVE_LIBHDFS H5_HAVE_MAP_API H5_HAVE_PARALLEL H5_HAVE_ROS3_VFD'
DOXYGEN_PREDEFINED='H5_HAVE_DIRECT H5_HAVE_LIBHDFS H5_HAVE_MAP_API H5_HAVE_PARALLEL H5_HAVE_ROS3_VFD H5_DOXYGEN_FORTRAN'
DX_INIT_DOXYGEN([HDF5], [./doxygen/Doxyfile], [hdf5lib_docs])
fi

View File

@@ -7,11 +7,12 @@ project (HDF5_DOXYGEN C)
if (DOXYGEN_FOUND)
set (DOXYGEN_PACKAGE ${HDF5_PACKAGE_NAME})
set (DOXYGEN_VERSION_STRING ${HDF5_PACKAGE_VERSION_STRING})
set (DOXYGEN_DIR ${HDF5_DOXYGEN_DIR})
set (DOXYGEN_INCLUDE_ALIASES_PATH ${HDF5_DOXYGEN_DIR})
set (DOXYGEN_INCLUDE_ALIASES aliases)
set (DOXYGEN_VERBATIM_VARS DOXYGEN_INCLUDE_ALIASES)
set (DOXYGEN_PROJECT_LOGO ${HDF5_DOXYGEN_DIR}/img/HDFG-logo.png)
set (DOXYGEN_PROJECT_BRIEF "C-API Reference")
set (DOXYGEN_PROJECT_BRIEF "API Reference")
set (DOXYGEN_INPUT_DIRECTORY "${HDF5_SOURCE_DIR} ${HDF5_DOXYGEN_DIR}/dox ${HDF5_GENERATED_SOURCE_DIR}")
set (DOXYGEN_OPTIMIZE_OUTPUT_FOR_C YES)
set (DOXYGEN_MACRO_EXPANSION YES)
@@ -28,7 +29,7 @@ if (DOXYGEN_FOUND)
set (DOXYGEN_SEARCHENGINE_URL)
set (DOXYGEN_STRIP_FROM_PATH ${HDF5_SOURCE_DIR})
set (DOXYGEN_STRIP_FROM_INC_PATH ${HDF5_SOURCE_DIR})
set (DOXYGEN_PREDEFINED "H5_HAVE_DIRECT H5_HAVE_LIBHDFS H5_HAVE_MAP_API H5_HAVE_PARALLEL H5_HAVE_ROS3_VFD H5_HAVE_SUBFILING_VFD H5_HAVE_IOC_VFD")
set (DOXYGEN_PREDEFINED "H5_HAVE_DIRECT H5_HAVE_LIBHDFS H5_HAVE_MAP_API H5_HAVE_PARALLEL H5_HAVE_ROS3_VFD H5_DOXYGEN_FORTRAN H5_HAVE_SUBFILING_VFD H5_HAVE_IOC_VFD")
# This configure and individual custom targets work together
# Replace variables inside @@ with the current values

View File

@@ -280,13 +280,13 @@ OPTIMIZE_OUTPUT_FOR_C = YES
# qualified scopes will look different, etc.
# The default value is: NO.
OPTIMIZE_OUTPUT_JAVA = NO
OPTIMIZE_OUTPUT_JAVA = YES
# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
# sources. Doxygen will then generate output that is tailored for Fortran.
# The default value is: NO.
OPTIMIZE_FOR_FORTRAN = NO
OPTIMIZE_FOR_FORTRAN = YES
# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
# sources. Doxygen will then generate output that is tailored for VHDL.
@@ -875,7 +875,11 @@ FILE_PATTERNS = H5*public.h \
H5VLconnector.h \
H5VLconnector_passthru.h \
H5VLnative.h \
H5Zdevelop.h \
H5version.h \
H5*.java \
HDF*.java \
*.F90 \
*.dox
# The RECURSIVE tag can be used to specify whether or not subdirectories should
@@ -944,7 +948,7 @@ EXAMPLE_RECURSIVE = NO
# that contain images that are to be included in the documentation (see the
# \image command).
IMAGE_PATH = @HDF5_DOXYGEN_DIR@/img
IMAGE_PATH = @DOXYGEN_DIR@/img
# The INPUT_FILTER tag can be used to specify a program that doxygen should
# invoke to filter for each input file. Doxygen will invoke the filter program

View File

@@ -126,7 +126,7 @@ This section contains a brief explanation of the symbols used in the DDL.
<reference> ::= H5T_REFERENCE { <ref_type> }
<ref_type> ::= H5T_STD_REF_OBJECT | H5T_STD_REF_DSETREG | H5T_STD_REF | UNDEFINED
<ref_type> ::= H5T_STD_REF_OBJECT | H5T_STD_REF_DSETREG
<compound_type> ::= H5T_COMPOUND {
<member_type_def>+

View File

@@ -0,0 +1,23 @@
/** \page FMT3 HDF5 File Format Specification Version 3.0
\htmlinclude H5.format.html
*/
/** \page FMT2 HDF5 File Format Specification Version 2.0
\htmlinclude H5.format.2.0.html
*/
/** \page FMT11 HDF5 File Format Specification Version 1.1
\htmlinclude H5.format.1.1.html
*/
/** \page FMT1 HDF5 File Format Specification Version 1.0
\htmlinclude H5.format.1.0.html
*/

View File

@@ -1,3 +1,100 @@
/** \page GettingStarted \Code{Hello, HDF5!}
/** @page GettingStarted Getting Started with HDF5
*/
Navigate back: \ref index "Main"
<hr>
\section sec_learn Learning HDF5
There are several resources for learning about HDF5. The HDF Group provides an on-line HDF5 tutorial,
documentation, examples, and videos. There are also tutorials provided by other organizations that are very useful for learning about HDF5.
\subsection subsec_learn_intro The HDF Group Resources
For a quick introduction to HDF5 see the following:
<table>
<tr>
<td style="background-color:#F5F5F5">
@ref IntroHDF5
</td>
<td>
A very brief introduction to HDF5 and the HDF5 programming model and APIs
</td>
</tr>
<tr>
<td style="background-color:#F5F5F5">
@ref LearnHDFView
</td>
<td>
A tutorial for learning how to use HDFView. NO programming involved!
</td>
</tr>
<tr>
<td style="background-color:#F5F5F5">
@ref LearnBasics
</td>
<td>
Step by step instructions for learning HDF5 that include programming examples
</td>
</tr>
</table>
\subsection subsec_learn_tutor The HDF Group Tutorials and Examples
These tutorials and examples are available for learning about the HDF5 High Level APIs, tools,
Parallel HDF5, and the HDF5-1.10 VDS and SWMR new features:
<table>
<tr>
<td style="background-color:#F5F5F5">
<a href="https://portal.hdfgroup.org/display/HDF5/Introduction+to+the+HDF5+High+Level+APIs">Using the High Level APIs</a>
</td>
<td>
\ref H5LT \ref H5IM \ref H5TB \ref H5PT \ref H5DS
</td>
</tr>
<tr>
<td style="background-color:#F5F5F5">
<a href="https://portal.hdfgroup.org/display/HDF5/Introduction+to+Parallel+HDF5">Introduction to Parallel HDF5</a>
</td>
<td>
A brief introduction to Parallel HDF5. If you are new to HDF5 please see the @ref LearnBasics topic first.
</td>
</tr>
<tr>
<td style="background-color:#F5F5F5">
\ref ViewTools
</td>
<td>
\li @ref LearnHDFView
\li @ref ViewToolsCommand
\li @ref ViewToolsJPSS
</td>
</tr>
<tr>
<td style="background-color:#F5F5F5">
HDF5-1.10 New Features
</td>
<td>
\li <a href="https://portal.hdfgroup.org/display/HDF5/Introduction+to+the+Virtual+Dataset++-+VDS">Introduction to the Virtual Dataset - VDS</a>
\li <a href="https://portal.hdfgroup.org/pages/viewpage.action?pageId=48812567">Introduction to Single-Writer/Multiple-Reader (SWMR)</a>
</td>
</tr>
<tr>
<td style="background-color:#F5F5F5">
Example Programs
</td>
<td>
\ref HDF5Examples
</td>
</tr>
<tr>
<td style="background-color:#F5F5F5">
Videos
</td>
<td>
\li <a href="https://www.youtube.com/watch?v=BAjsCldRMMc">Introduction to HDF5</a>
\li <a href="https://www.youtube.com/watch?v=qrI27pI0P1E">Parallel HDF5</a>
</td>
</tr>
</table>
<hr>
Navigate back: \ref index "Main"
*/

627
doxygen/dox/IntroHDF5.dox Normal file
View File

@@ -0,0 +1,627 @@
/** @page IntroHDF5 Introduction to HDF5
Navigate back: \ref index "Main" / \ref GettingStarted
<hr>
\section sec_intro_desc HDF5 Description
HDF5 consists of a file format for storing HDF5 data, a data model for logically organizing and accessing
HDF5 data from an application, and the software (libraries, language interfaces, and tools) for working with this format.
\subsection subsec_intro_desc_file File Format
HDF5 consists of a file format for storing HDF5 data, a data model for logically organizing and accessing HDF5 data from an application,
and the software (libraries, language interfaces, and tools) for working with this format.
\subsection subsec_intro_desc_dm Data Model
The HDF5 Data Model, also known as the HDF5 Abstract (or Logical) Data Model consists of
the building blocks for data organization and specification in HDF5.
An HDF5 file (an object in itself) can be thought of as a container (or group) that holds
a variety of heterogeneous data objects (or datasets). The datasets can be images, tables,
graphs, and even documents, such as PDF or Excel:
<table>
<tr>
<td>
\image html fileobj.png
</td>
</tr>
</table>
The two primary objects in the HDF5 Data Model are groups and datasets.
There are also a variety of other objects in the HDF5 Data Model that support groups and datasets,
including datatypes, dataspaces, properties and attributes.
\subsubsection subsec_intro_desc_dm_group Groups
HDF5 groups (and links) organize data objects. Every HDF5 file contains a root group that can
contain other groups or be linked to objects in other files.
<table>
<caption>There are two groups in the HDF5 file depicted above: Viz and SimOut.
Under the Viz group are a variety of images and a table that is shared with the SimOut group.
The SimOut group contains a 3-dimensional array, a 2-dimensional array and a link to a 2-dimensional
array in another HDF5 file.</caption>
<tr>
<td>
\image html group.png
</td>
</tr>
</table>
Working with groups and group members is similar in many ways to working with directories and files
in UNIX. As with UNIX directories and files, objects in an HDF5 file are often described by giving
their full (or absolute) path names.
\li / signifies the root group.
\li /foo signifies a member of the root group called foo.
\li /foo/zoo signifies a member of the group foo, which in turn is a member of the root group.
\subsubsection subsec_intro_desc_dm_dset Datasets
HDF5 datasets organize and contain the “raw” data values. A dataset consists of metadata
that describes the data, in addition to the data itself:
<table>
<caption>In this picture, the data is stored as a three dimensional dataset of size 4 x 5 x 6 with an integer datatype.
It contains attributes, Time and Pressure, and the dataset is chunked and compressed.</caption>
<tr>
<td>
\image html dataset.png
</td>
</tr>
</table>
Datatypes, dataspaces, properties and (optional) attributes are HDF5 objects that describe a dataset.
The datatype describes the individual data elements.
\subsection subsec_intro_desc_props Datatypes, Dataspaces, Properties and Attributes
\subsubsection subsec_intro_desc_prop_dtype Datatypes
The datatype describes the individual data elements in a dataset. It provides complete information for
data conversion to or from that datatype.
<table>
<caption>In the dataset depicted, each element of the dataset is a 32-bit integer.</caption>
<tr>
<td>
\image html datatype.png
</td>
</tr>
</table>
Datatypes in HDF5 can be grouped into:
<ul>
<li>
<b>Pre-Defined Datatypes</b>: These are datatypes that are created by HDF5. They are actually opened (and closed)
by HDF5 and can have different values from one HDF5 session to the next. There are two types of pre-defined datatypes:
<ul>
<li>
Standard datatypes are the same on all platforms and are what you see in an HDF5 file. Their names are of the form
H5T_ARCH_BASE where ARCH is an architecture name and BASE is a programming type name. For example, #H5T_IEEE_F32BE
indicates a standard Big Endian floating point type.
</li>
<li>
Native datatypes are used to simplify memory operations (reading, writing) and are NOT the same on different platforms.
For example, #H5T_NATIVE_INT indicates an int (C).
</li>
</ul>
</li>
<li>
<b>Derived Datatypes</b>: These are datatypes that are created or derived from the pre-defined datatypes.
An example of a commonly used derived datatype is a string of more than one character. Compound datatypes
are also derived types. A compound datatype can be used to create a simple table, and can also be nested,
in which it includes one or more other compound datatypes.
<table>
<caption>This is an example of a dataset with a compound datatype. Each element in the dataset consists
of a 16-bit integer, a character, a 32-bit integer, and a 2x3x2 array of 32-bit floats (the datatype).
It is a 2-dimensional 5 x 3 array (the dataspace). The datatype should not be confused with the dataspace.
</caption>
<tr>
<td>
\image html cmpnddtype.png
</td>
</tr>
</table>
</li>
</ul>
\subsubsection subsec_intro_desc_prop_dspace Dataspaces
A dataspace describes the layout of a dataset's data elements. It can consist of no elements (NULL),
a single element (scalar), or a simple array.
<table>
<caption>This image illustrates a dataspace that is an array with dimensions of 5 x 3 and a rank (number of dimensions) of 2.</caption>
<tr>
<td>
\image html dataspace1.png
</td>
</tr>
</table>
A dataspace can have dimensions that are fixed (unchanging) or unlimited, which means they can grow
in size (i.e. they are extendible).
There are two roles of a dataspace:
\li It contains the spatial information (logical layout) of a dataset stored in a file. This includes the rank and dimensions of a dataset, which are a permanent part of the dataset definition.
\li It describes an application's data buffers and data elements participating in I/O. In other words, it can be used to select a portion or subset of a dataset.
<table>
<caption>The dataspace is used to describe both the logical layout of a dataset and a subset of a dataset.</caption>
<tr>
<td>
\image html dataspace.png
</td>
</tr>
</table>
\subsubsection subsec_intro_desc_prop_property Properties
A property is a characteristic or feature of an HDF5 object. There are default properties which
handle the most common needs. These default properties can be modified using the HDF5 Property
List API to take advantage of more powerful or unusual features of HDF5 objects.
<table>
<tr>
<td>
\image html properties.png
</td>
</tr>
</table>
For example, the data storage layout property of a dataset is contiguous by default. For better
performance, the layout can be modified to be chunked or chunked and compressed:
\subsubsection subsec_intro_desc_prop_attr Attributes
Attributes can optionally be associated with HDF5 objects. They have two parts: a name and a value.
Attributes are accessed by opening the object that they are attached to so are not independent objects.
Typically an attribute is small in size and contains user metadata about the object that it is attached to.
Attributes look similar to HDF5 datasets in that they have a datatype and dataspace. However, they
do not support partial I/O operations, and they cannot be compressed or extended.
\subsection subsec_intro_desc_soft HDF5 Software
The HDF5 software is written in C and includes optional wrappers for C++, FORTRAN (90 and F2003),
and Java. The HDF5 binary distribution consists of the HDF5 libraries, include files, command-line
utilities, scripts for compiling applications, and example programs.
\subsubsection subsec_intro_desc_soft_apis HDF5 APIs and Libraries
There are APIs for each type of object in HDF5. For example, all C routines in the HDF5 library
begin with a prefix of the form H5*, where * is one or two uppercase letters indicating the type
of object on which the function operates:
\li @ref H5A <b>A</b>ttribute Interface
\li @ref H5D <b>D</b>ataset Interface
\li @ref H5F <b>F</b>ile Interface
The HDF5 High Level APIs simplify many of the steps required to create and access objects, as well
as providing templates for storing objects. Following is a list of the High Level APIs:
\li @ref H5LT simplifies steps in creating datasets and attributes
\li @ref H5IM defines a standard for storing images in HDF5
\li @ref H5TB condenses the steps required to create tables
\li @ref H5DS provides a standard for dimension scale storage
\li @ref H5PT provides a standard for storing packet data
\subsubsection subsec_intro_desc_soft_tools Tools
Useful tools for working with HDF5 files include:
\li h5dump: A utility to dump or display the contents of an HDF5 File
\li h5cc, h5c++, h5fc: Unix scripts for compiling applications
\li HDFView: A java browser to view HDF (HDF4 and HDF5) files
<h4>h5dump</h4>
The h5dump utility displays the contents of an HDF5 file in Data Description Language (\ref DDLBNF110).
Below is an example of h5dump output for an HDF5 file that contains no objects:
\code
$ h5dump file.h5
HDF5 "file.h5" {
GROUP "/" {
}
}
\endcode
With large files and datasets the output from h5dump can be overwhelming.
There are options that can be used to examine specific parts of an HDF5 file.
Some useful h5dump options are included below:
\code
-H, --header Display header information only (no data)
-d <name> Display a dataset with a specified path and name
-p Display properties
-n Display the contents of the file
\endcode
<h4>h5cc, h5fc, h5c++</h4>
The built HDF5 binaries include the h5cc, h5fc, h5c++ compile scripts for compiling applications.
When using these scripts there is no need to specify the HDF5 libraries and include files.
Compiler options can be passed to the scripts.
<h4>HDFView</h4>
The HDFView tool allows browsing of data in HDF (HDF4 and HDF5) files.
\section sec_intro_pm Introduction to the HDF5 Programming Model and APIs
The HDF5 Application Programming Interface is extensive, but a few functions do most of the work.
To introduce the programming model, examples in Python and C are included below. The Python examples
use the HDF5 Python APIs (h5py). See the Examples from "Learning the Basics" page for complete examples
that can be downloaded and run for C, FORTRAN, C++, Java and Python.
The general paradigm for working with objects in HDF5 is to:
\li Open the object.
\li Access the object.
\li Close the object.
The library imposes an order on the operations by argument dependencies. For example, a file must be
opened before a dataset because the dataset open call requires a file handle as an argument. Objects
can be closed in any order. However, once an object is closed it no longer can be accessed.
Keep the following in mind when looking at the example programs included in this section:
<ul>
<li>
<ul>
<li>
C routines begin with the prefix “H5*” where * is a single letter indicating the object on which the
operation is to be performed.
</li>
<li>
FORTRAN routines are similar; they begin with “h5*” and end with “_f”.
</li>
<li>
Java routines are similar; the routine names begin with “H5*” and are prefixed with “H5.” as the class. Constants are
in the HDF5Constants class and are prefixed with "HDF5Constants.". The function arguments
are usually similar, @see @ref HDF5LIB
</li>
</ul>
For example:
<ul>
<li>
File Interface:<ul><li>#H5Fopen (C)</li><li>h5fopen_f (FORTRAN)</li><li>H5.H5Fopen (Java)</li></ul>
</li>
<li>
Dataset Interface:<ul><li>#H5Dopen (C)</li><li>h5dopen_f (FORTRAN)</li><li>H5.H5Dopen (Java)</li></ul>
</li>
<li>
Dataspace interface:<ul><li>#H5Sclose (C)</li><li>h5sclose_f (FORTRAN)</li><li>H5.H5Sclose (Java)</li></ul>
</li>
</ul>
The HDF5 Python APIs use methods associated with specific objects.
</li>
<li>
For portability, the HDF5 library has its own defined types. Some common types that you will see
in the example code are:
<ul>
<li>
#hid_t is used for object handles
</li>
<li>
hsize_t is used for dimensions
</li>
<li>
#herr_t is used for many return values
</li>
</ul>
</li>
<li>
Language specific files must be included in applications:
<ul>
<li>
Python: Add <code>"import h5py / import numpy"</code>
</li>
<li>
C: Add <code>"#include hdf5.h"</code>
</li>
<li>
FORTRAN: Add <code>"USE HDF5"</code> and call h5open_f and h5close_f to initialize and close the HDF5 FORTRAN interface
</li>
<li>
Java: Add <code>"import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;"</code>
</li>
</ul>
</li>
</ul>
\subsection subsec_intro_pm_file Steps to create a file
To create an HDF5 file you must:
\li Specify property lists (or use the defaults).
\li Create the file.
\li Close the file (and property lists if needed).
Example:
<table>
<caption>The following Python and C examples create a file, file.h5, and then close it.
The resulting HDF5 file will only contain a root group:</caption>
<tr>
<td>
\image html crtf-pic.png
</td>
</tr>
</table>
Calling h5py.File with w for the file access flag will create a new HDF5 file and overwrite
an existing file with the same name. “file” is the file handle returned from opening the file.
When finished with the file, it must be closed. When not specifying property lists, the default
property lists are used:
<table>
<tr>
<td>
<em>Python</em>
\code
import h5py
file = h5py.File ("file.h5", "w")
file.close ()
\endcode
</td>
</tr>
</table>
The H5Fcreate function creates an HDF5 file. #H5F_ACC_TRUNC is the file access flag to create a new
file and overwrite an existing file with the same name, and #H5P_DEFAULT is the value specified to
use a default property list.
<table>
<tr>
<td>
<em>C</em>
\code
#include "hdf5.h"
int main() {
hid_t file_id;
herr_t status;
file_id = H5Fcreate ("file.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
status = H5Fclose (file_id);
}
\endcode
</td>
</tr>
</table>
\subsection subsec_intro_pm_dataset Steps to create a dataset
As described previously, an HDF5 dataset consists of the raw data, as well as the metadata that
describes the data (datatype, spatial information, and properties). To create a dataset you must:
\li Define the dataset characteristics (datatype, dataspace, properties).
\li Decide which group to attach the dataset to.
\li Create the dataset.
\li Close the dataset handle from step 3.
Example:
<table>
<caption>The code excerpts below show the calls that need to be made to create a 4 x 6 integer dataset dset
in a file dset.h5. The dataset will be located in the root group:</caption>
<tr>
<td>
\image html crtdset.png
</td>
</tr>
</table>
With Python, the creation of the dataspace is included as a parameter in the dataset creation method.
Just one call will create a 4 x 6 integer dataset dset. A pre-defined Big Endian 32-bit integer datatype
is specified. The create_dataset method creates the dataset in the root group (the file object).
The dataset is closed by the Python interface.
<table>
<tr>
<td>
<em>Python</em>
\code
dataset = file.create_dataset("dset",(4, 6), h5py.h5t.STD_I32BE)
\endcode
</td>
</tr>
</table>
To create the same dataset in C, you must specify the dataspace with the #H5Screate_simple function,
create the dataset by calling #H5Dcreate, and then close the dataspace and dataset with calls to #H5Dclose
and #H5Sclose. #H5P_DEFAULT is specified to use a default property list. Note that the file identifier
(file_id) is passed in as the first parameter to #H5Dcreate, which creates the dataset in the root group.
<table>
<tr>
<td>
<em>C</em>
\code
// Create the dataspace for the dataset.
dims[0] = 4;
dims[1] = 6;
dataspace_id = H5Screate_simple(2, dims, NULL);
// Create the dataset.
dataset_id = H5Dcreate (file_id, "/dset", H5T_STD_I32BE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
// Close the dataset and dataspace
status = H5Dclose(dataset_id);
status = H5Sclose(dataspace_id);
\endcode
</td>
</tr>
</table>
\subsection subsec_intro_pm_write Writing to or reading from a dataset
Once you have created or opened a dataset you can write to it:
<table>
<tr>
<td>
<em>Python</em>
\code
data = np.zeros((4,6))
for i in range(4):
for j in range(6):
data[i][j]= i*6+j+1
dataset[...] = data <-- Write data to dataset
data_read = dataset[...] <-- Read data from dataset
\endcode
</td>
</tr>
</table>
#H5S_ALL is passed in for the memory and file dataspace parameters to indicate that the entire dataspace
of the dataset is specified. These two parameters can be modified to allow subsetting of a dataset.
The native predefined datatype, #H5T_NATIVE_INT, is used for reading and writing so that HDF5 will do
any necessary integer conversions:
<table>
<tr>
<td>
<em>C</em>
\code
status = H5Dwrite (dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset_data);
status = H5Dread (dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset_data);
\endcode
</td>
</tr>
</table>
\subsection subsec_intro_pm_group Steps to create a group
An HDF5 group is a structure containing zero or more HDF5 objects. Before you can create a group you must
obtain the location identifier of where the group is to be created. Following are the steps that are required:
\li Decide where to put the group in the “root group” (or file identifier) or in another group. Open the group if it is not already open.
\li Define properties or use the default.
\li Create the group.
\li Close the group.
<table>
<caption>Creates attributes that are attached to the dataset dset</caption>
<tr>
<td>
\image html crtgrp.png
</td>
</tr>
</table>
The code below opens the dataset dset.h5 with read/write permission and creates a group MyGroup in the root group.
Properties are not specified so the defaults are used:
<table>
<tr>
<td>
<em>Python</em>
\code
import h5py
file = h5py.File('dset.h5', 'r+')
group = file.create_group ('MyGroup')
file.close()
\endcode
</td>
</tr>
</table>
To create the group MyGroup in the root group, you must call #H5Gcreate, passing in the file identifier returned
from opening or creating the file. The default property lists are specified with #H5P_DEFAULT. The group is then
closed:
<table>
<tr>
<td>
<em>C</em>
\code
group_id = H5Gcreate (file_id, "MyGroup", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
status = H5Gclose (group_id);
\endcode
</td>
</tr>
</table>
\subsection subsec_intro_pm_attr Steps to create and write to an attribute
To create an attribute you must open the object that you wish to attach the attribute to. Then you can create,
access, and close the attribute as needed:
\li Open the object that you wish to add an attribute to.
\li Create the attribute
\li Write to the attribute
\li Close the attribute and the object it is attached to.
<table>
<caption>Creates attributes that are attached to the dataset dset</caption>
<tr>
<td>
\image html crtatt.png
</td>
</tr>
</table>
The dataspace, datatype, and data are specified in the call to create an attribute in Python:
<table>
<tr>
<td>
<em>Python</em>
\code
dataset.attrs["Units"] = "Meters per second" <-- Create string
attr_data = np.zeros((2,))
attr_data[0] = 100
attr_data[1] = 200
dataset.attrs.create("Speed", attr_data, (2,), "i") <-- Create Integer
\endcode
</td>
</tr>
</table>
To create an integer attribute in C, you must create the dataspace, create the attribute, write
to it and then close it in separate steps:
<table>
<tr>
<td>
<em>C</em>
\code
hid_t attribute_id, dataspace_id; // identifiers
hsize_t dims;
int attr_data[2];
herr_t status;
...
// Initialize the attribute data.
attr_data[0] = 100;
attr_data[1] = 200;
// Create the data space for the attribute.
dims = 2;
dataspace_id = H5Screate_simple(1, &dims, NULL);
// Create a dataset attribute.
attribute_id = H5Acreate2 (dataset_id, "Units", H5T_STD_I32BE,
dataspace_id, H5P_DEFAULT, H5P_DEFAULT);
// Write the attribute data.
status = H5Awrite(attribute_id, H5T_NATIVE_INT, attr_data);
// Close the attribute.
status = H5Aclose(attribute_id);
// Close the dataspace.
status = H5Sclose(dataspace_id);
\endcode
</td>
</tr>
</table>
<hr>
Navigate back: \ref index "Main" / \ref GettingStarted
@page HDF5Examples HDF5 Examples
Example programs of how to use HDF5 are provided below.
For HDF-EOS specific examples, see the <a href="http://hdfeos.org/zoo/index.php">examples</a>
of how to access and visualize NASA HDF-EOS files using IDL, MATLAB, and NCL on the
<a href="http://hdfeos.org/">HDF-EOS Tools and Information Center</a> page.
\section secHDF5Examples Examples
\li \ref LBExamples
\li <a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a>
\li <a href="https://portal.hdfgroup.org/display/HDF5/Examples+in+the+Source+Code">Examples in the Source Code</a>
\li <a href="https://portal.hdfgroup.org/display/HDF5/Other+Examples">Other Examples</a>
\section secHDF5ExamplesCompile How To Compile
For information on compiling in C, C++ and Fortran, see: \ref LBCompiling
\section secHDF5ExamplesOther Other Examples
<a href="http://hdfeos.org/zoo/index.php">IDL, MATLAB, and NCL Examples for HDF-EOS</a>
Examples of how to access and visualize NASA HDF-EOS files using IDL, MATLAB, and NCL.
<a href="https://support.hdfgroup.org/ftp/HDF5/examples/misc-examples/">Miscellaneous Examples</a>
These (very old) examples resulted from working with users, and are not fully tested. Most of them are in C, with a few in Fortran and Java.
<a href="https://support.hdfgroup.org/ftp/HDF5/examples/special_values_HDF5_example.tar">Using Special Values</a>
These examples show how to create special values in an HDF5 application.
*/

183
doxygen/dox/LearnBasics.dox Normal file
View File

@@ -0,0 +1,183 @@
/** @page LearnBasics Learning the Basics
Navigate back: \ref index "Main" / \ref GettingStarted
<hr>
\section secIntro Introduction
The following topics cover the basic features in HDF5. The topics build on each other and are
intended to be completed in order. Some sections use files created in earlier sections. The
examples used can also be found on the \ref LBExamples
page and in the HDF5 source code (C, C++, Fortran).
\section Topics Topics
\li @subpage LBFileOrg
\li @subpage LBAPI
\li @subpage LBProg
\li @subpage LBFileCreate
\li @subpage LBDsetCreate
\li @subpage LBDsetRW
\li @subpage LBAttrCreate
\li @subpage LBGrpCreate
\li @subpage LBGrpCreateNames
\li @subpage LBGrpDset
\li @subpage LBDsetSubRW
\li @subpage LBDatatypes
\li @subpage LBPropsList
\li @subpage LBDsetLayout
\li @subpage LBExtDset
\li @subpage LBComDset
\li @subpage LBContents
\li @subpage LBQuiz
\li @subpage LBQuizAnswers
\li @subpage LBCompiling
\li @subpage LBTraining
<hr>
Navigate back: \ref index "Main" / \ref GettingStarted
@page LBExamples Examples from Learning the Basics
Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
<hr>
\section secLBExamples Examples
These examples are used in the \ref LearnBasics topic. See \ref LBCompiling for details on compiling them.
PLEASE NOTE that the example programs are listed in the order they are expected to be run. Some example
programs use files created in earlier examples.
\section secLBExamplesSrc HDF5 Source Code Examples
These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) binaries.
<table>
<tr>
<th>Feature
</th>
<th>Examples
</th>
<th>Comments
</th>
<tr>
<td>Create a file
</td>
<td>C Fortran C++ <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateFile.java">Java</a> Python
</td>
<td>
</td>
</tr>
<tr>
<td>Create a dataset
</td>
<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_crtdat.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_crtdat.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_crtdat.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateDataset.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_crtdat.py">Python</a>
</td>
<td>
</td>
</tr>
<tr>
<td>Read and write to a dataset
</td>
<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_rdwt.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_rdwt.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_rdwt.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_ReadWrite.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_rdwt.py">Python</a>
</td>
<td>
</td>
</tr>
<tr>
<td>Create an attribute
</td>
<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_crtatt.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_crtatt.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_crtatt.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateAttribute.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_crtatt.py">Python</a>
</td>
<td>
</td>
</tr>
<tr>
<td>Create a group
</td>
<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_crtgrp.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_crtgrp.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_crtgrp.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateGroup.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_crtgrp.py">Python</a>
</td>
<td>
</td>
</tr>
<tr>
<td>Create groups in a file using absolute and relative paths
</td>
<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_crtgrpar.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_crtgrpar.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_crtgrpar.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateGroupAbsoluteRelative.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_crtgrpar.py">Python</a>
</td>
<td>
</td>
</tr>
<tr>
<td>Create datasets in a group
</td>
<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_crtgrpd.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_crtgrpd.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_crtgrpd.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateGroupDataset.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_crtgrpd.py">Python</a>
</td>
<td>
</td>
</tr>
<tr>
<td>Create a file and dataset and select/read a subset from the dataset
</td>
<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_subset.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_subset.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_subset.cpp">C++</a> Java Python
</td>
<td>Also see examples to Write by row (and column) below.
</td>
</tr>
<tr>
<td>Create an extendible (unlimited dimension) dataset
</td>
<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_extend.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_extend.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_extend.cpp">C++</a> Java Python
</td>
<td>Also see examples to Extend by row (and column) below
</td>
</tr>
<tr>
<td>Create a chunked and compressed dataset
</td>
<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_cmprss.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_cmprss.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_cmprss.cpp">C++</a> Java <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_cmprss.py">Python</a>
</td>
<td>
</td>
</tr>
</table>
*See <a href="https://github.com/scotmartin1234/HDF5Mathematica">HDF5Mathematica</a> for user-contributed
HDF5 Mathematica Wrappers and Introductory Tutorial Examples. The examples use P/Invoke.
\section secLBExamplesAddl Additional Examples
These examples make minor changes to the tutorial examples.
<table>
<tr>
<th>Feature
</th>
<th>Examples
</th>
</tr>
<tr>
<td>Write by row
</td>
<td><a href="">C</a> <a href="">Fortran</a>
</td>
</tr>
<tr>
<td>Write by column
</td>
<td><a href="">C</a> <a href="">Fortran</a>
</td>
</tr>
<tr>
<td>Extend by row
</td>
<td><a href="">C</a> <a href="">Fortran</a>
</td>
</tr>
<tr>
<td>Extend by column
</td>
<td><a href="">C</a> <a href="">Fortran</a>
</td>
</tr>
</table>
<hr>
Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
*/

1023
doxygen/dox/LearnBasics1.dox Normal file

File diff suppressed because it is too large Load Diff

1159
doxygen/dox/LearnBasics2.dox Normal file

File diff suppressed because it is too large Load Diff

1015
doxygen/dox/LearnBasics3.dox Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,472 @@
/** @page LearnHDFView Learning HDF5 with HDFView
Navigate back: \ref index "Main" / \ref GettingStarted
<hr>
This tutorial enables you to get a feel for HDF5 by using the HDFView browser. It does NOT require
any programming experience.
\section sec_learn_hv_install HDFView Installation
\li Download and install HDFView. It can be downloaded from the <a href="https://portal.hdfgroup.org/display/support/Download+HDFView">Download HDFView</a> page.
\li Obtain the <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a> text file, used in the tutorial.
\section sec_learn_hv_begin Begin Tutorial
Once you have HDFView installed, bring it up and you are ready to begin the tutorial.
<table style="background-color:#FAFAD2">
<caption>
Unable to complete tutorial because fields are greyed out?
</caption>
<tr>
<td>
This tutorial requires that the default HDFView File Access Mode be Read / Write. If fields are greyed out so that you cannot select them, then the File Access Mode is Read Only.
To change the File Access Mode follow these steps:
<ul>
<li>Bring up HDFView</li>
<li>Left-mouse click on the Tools pull-down menu and select User Options.</li>
<li>A Preferences window pops up with the General Settings tab selected.
About half-way down you will see Default File Access Mode.
Select Read / Write.</li>
<li>Click on Apply and Close at the bottom of the window.</li>
<li>Close down HDFView.</li>
<li>Bring HDFView back up and try the tutorial again.</li>
<li>PLEASE BE AWARE that selecting a File Access Mode of Read / Write can result in changes to the timestamp of HDF files that are viewed with HDFView. In general, a File Access Mode
of Read Only should be used to ensure that this does not occur.</li>
</ul>
</td>
</tr>
</table>
\subsection subsec_learn_hv_begin_topics Topics Covered
Following are the topics covered in the tutorial. The first topic creates the file that is used in
the subsequent topics.
<ul>
<li>@ref subsec_learn_hv_topics_file</li>
<li>@ref subsec_learn_hv_topics_image</li>
<li>@ref subsec_learn_hv_topics_attr</li>
<li>@ref subsec_learn_hv_topics_compress</li>
<li>@ref subsec_learn_hv_topics_subset</li>
<li>@ref subsec_learn_hv_topics_table</li>
</ul>
\section sec_learn_hv_topics Topics
\subsection subsec_learn_hv_topics_file Creating a New HDF5 File with a Contiguous Dataset
The steps below describe how to create a file (storm.h5), group (/Data), and a contiguous dataset
(/Data/Storm) using HDFView. A group is an HDF5 object that allows objects to be collected together.
A dataset is an array of data values. A contiguous dataset is one that is stored as a single block
in the HDF5 file.
<ul>
<li>Select the <em>File</em> pull-down menu at the top left, and then select <em>New -> HDF5</em>.</li>
<li>Specify a location and type in <em>storm.h5</em> for the name of your file, and click on the <em>Save</em> button.
You will see the <em>storm.h5</em> file in the TableView:
<table>
<tr>
<td>
\image html storm.png
</td>
</tr>
</table>
</li>
<li>Right click on <em>storm.h5</em>, and select <em>New -> Group</em>.</li>
<li>Enter <em>Data</em> for the name of the group and then click the <em>Ok</em> button. You will see the group <em>Data</em> in the TableView.
<table>
<tr>
<td>
\image html DataGroup.png
</td>
</tr>
</table>
</li>
<li>Right click on the group <em>Data</em> and select <em>New -> Dataset</em>.</li>
<li>A window pops up on the right. Fill in the information as follows, and then click <em>Ok</em> (leave the
Datatype information as is):
<table>
<tr>
<th>Dataset Name
</th>
<td><em>Storm</em>
</td>
</tr>
<tr>
<th>Under Dataspace, Current size
</th>
<td>57x57
</td>
</tr>
<tr>
<th>Layout
</th>
<td><em>Contiguous</em> (default)
</td>
</tr>
</table>
</li>
<li>Click to expand the <em>Data</em> group in the tree view to see the <em>Storm</em> dataset:
<table>
<tr>
<td>
\image html StormDataset.png
</td>
</tr>
</table>
</li>
<li>Double left click on the <em>Storm</em> dataset in the tree view. A window with an empty spreadsheet pops open.</li>
<li>Copy the data from the <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a> file into the dataset.
If you downloaded <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a>,
then click on the <em>Import/Export Data</em> menu and select <em>Import Data from -> Text File</em>.
Specify a location, select <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a>
and click on the <em>Open</em> button. Answer <em>Yes</em> in the dialog box that
pops up (which asks if you wish to paste the selected data).
Alternately, you can copy/paste directly. Select and copy the data in a separate window. Position your
cursor at (0,0) in your table, and select <em>Paste</em> from the <em>Table</em> menu.
The values will be entered into the spreadsheet.
<table>
<tr>
<td>
\image html datasetwdata.png
</td>
</tr>
</table>
</li>
<li><em>Table -> Close</em> the dataset, and save the data.</li>
</ul>
\subsection subsec_learn_hv_topics_image Displaying a Dataset as an Image
Any dataset can be viewed as an image in HDFView. Below are the steps that demonstrate this.
<ul>
<li>Right click on <em>Storm</em> in the tree view, and select <em>Open As</em>.</li>
<li>Select the <em>Image</em> button under <em>Display As</em> (near the top) in the Dataset Selection window that pops
up. Then click <em>OK</em> at the bottom of the window to display the image.
<table>
<tr>
<td>
\image html showasimage.png
</td>
</tr>
</table>
</li>
<li>The rainbow icon brings you to the Image Palette window. Click on that to play with the palette
(GrayWave probably is the best choice). Close.</li>
</ul>
\subsection subsec_learn_hv_topics_attr Creating Attributes
Additional information to describe an object can be stored in attributes. An attribute can be
added to a group or dataset with HDFView.
The following illustrates how to add an attribute to the group <em>/Data</em>:
<ul>
<li>Click on the <em>/Data</em> folder in the tree view. You will see two tabs, <em>Object Attribute Info</em> and
<em>General Object Info</em>, in the pane on the right site of the HDFView window.
<table>
<tr>
<td>
\image html noattrs.png
</td>
</tr>
</table>
</li>
<li>With the left mouse button, select the <em>Add Attribute</em> button.</li>
<li>Select the <em>Add Attribute</em> button to add an attribute with these values:</li>
<table>
<tr>
<th>Name
</th>
<td><em>BatchID</em>
</td>
</tr>
<tr>
<th>Type
</th>
<td>INTEGER
</td>
</tr>
<tr>
<th>Size (bits)
</th>
<td>32
</td>
</tr>
</table>
<li>Select the <em>Ok</em> button. The attribute will show up under the <em>Object Attribute Info</em> tab.</li>
<li>Double-click the BatchID attribute line to open the data table for BatchID.</li>
<li>Click in the first cell and enter <em>3343</em> followed by the enter key.</li>
<li><em>Table -> Close</em>, answer <em>Yes</em> in the dialog box that
pops up (which asks if you wish to save the changes).</li>
</ul>
Adding an attribute to a dataset is very similar to adding an attribute to a group. For example,
the following adds an attribute to the <em>/Storm</em> dataset:
<ul>
<li>Left mouse click on the <em>/Storm</em> dataset in the tree view. You will see the <em>Object Attribute
Info</em> and <em>General Object Info</em> tabs on the right</li>
<li>In the <em>Object Attribute Info</em> pane select the <em>Add Attribute</em> button and enter an attribute with
these values. (Be sure to add a <em>String Length</em> or the string will be truncated to one character!):</li>
<table>
<tr>
<th>Name
</th>
<td><em>Units</em>
</td>
</tr>
<tr>
<th>Type
</th>
<td>STRING
</td>
</tr>
<tr>
<th>String Length
</th>
<td>3
</td>
</tr>
</table>
<li>Select the <em>Ok</em> button. The attribute will show up under the <em>Object Attribute Info</em> tab.</li>
<li>Double-click the Units attribute line to open the data table for Units.</li>
<li>Click in the first cell and enter <em>m/s</em> followed by the enter key.</li>
<li><em>Table -> Close</em>, answer <em>Yes</em> in the dialog box that
pops up (which asks if you wish to save the changes).
<table>
<tr>
<td>
\image html scarletletter.png
</td>
</tr>
</table>
</li>
</ul>
\subsection subsec_learn_hv_topics_compress Creating a Compressed and Chunked Dataset
A chunked and compressed dataset can be created using HDFView. A compressed dataset is a dataset
whose size has been compressed to take up less space. In order to compress an HDF5 dataset, the
dataset must be stored with a chunked dataset layout (as multiple <em>chunks</em> that are stored separately
in the file).
Please note that the chunk sizes used in this topic are for demonstration purposes only. For
information on chunking and specifying an appropriate chunk size, see the
<a href="https://confluence.hdfgroup.org/display/HDF5/Chunking+in+HDF5">Chunking in HDF5</a> documentation.
Also see the HDF5 Tutorial topic on \ref secLBComDsetCreate.
<ul>
<li>Right click on storm.h5. Select <em>New -> Group</em>.</li>
<li>Enter <em>Image</em> for the name of the group, and click the <em>OK</em> button to create the group.
<table>
<tr>
<td>
\image html newgroupimage.png
</td>
</tr>
</table>
</li>
<li>Right click on the <em>Image</em> group, and select <em>New -> Dataset</em>.</li>
<li>Enter the following information for the dataset. Leave the <em>Datatype</em> as is (INTEGER):
<table>
<tr>
<th>Dataset name
</th>
<td><em>Another Storm</em>
</td>
</tr>
<tr>
<th>Under Dataspace, Current size
</th>
<td>57x57
</td>
</tr>
<tr>
<th>Storage Layout
</th>
<td>Chunked
</td>
</tr>
<tr>
<th>Chunk Size
</th>
<td>20x20
</td>
</tr>
<tr>
<th>Compression
</th>
<td>gzip
</td>
</tr>
<tr>
<th>Compression Level
</th>
<td>9
</td>
</tr>
</table>
You will see the <em>Another Storm</em> dataset in the <em>Image</em> group:
<table>
<tr>
<td>
\image html hdfview-anthrstrm.png
</td>
</tr>
</table>
</li>
<li>Double left-mouse click on the <em>Another Storm</em> dataset to display the spreadsheet:
<table>
<tr>
<td>
\image html hdfview-anthrstrm-sprdsht.png
</td>
</tr>
</table>
</li>
<li>Copy the data from the <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a> file into the dataset. (See the previous topic for copying
<a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a> into a dataset.)</li>
<li><em>Table -> Close</em>, and save the data.</li>
<li>Right click on <em>Another Storm</em>, and select <em>Open As</em>.</li>
<li>Select the <em>Image</em> button in the Dataset Selection window that pops up. Click the <em>Ok</em> button at the
bottom of the window to view the dataset as an image.
<table>
<tr>
<td>
\image html hdfview-anthrstrm-img.png
</td>
</tr>
</table>
</li>
</ul>
\subsection subsec_learn_hv_topics_subset Creating an Image and a Subset
A previous topic demonstrated how to view any dataset as an image in HDFView. With HDFView you can also
create an image to begin with, as is shown below.
<ul>
<li>Right click on the <em>Data</em> group and select <em>New -> Image</em>.</li>
<li>A window pops up on the right. Enter the following and then click <em>Ok</em>:</li>
<table>
<tr>
<th>Image name
</th>
<td><em>Storm Image</em>
</td>
</tr>
<tr>
<th>Height
</th>
<td>57
</td>
</tr>
<tr>
<th>Width
</th>
<td>57
</td>
</tr>
</table>
<li>Close the dataset.</li>
<li>Expand the <em>Data</em> group to see its contents. You will see the <em>Storm Image</em> dataset.
<table>
<tr>
<td>
\image html hdfview-imgicon.png
</td>
</tr>
</table>
</li>
<li>
Add data to the <em>Storm Image</em> dataset as was shown previously:
<ul>
<li>Right click on <em>Storm Image</em>, and select <em>Open As</em> to open the Dataset Selection window.</li>
<li>Click on the <em>Spreadsheet</em> button at the top left of the Dataset Selection window to view the image
as a spreadsheet.</li>
<li>Copy the data from the <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a> file into the dataset.</li>
<li>Close the dataset and save the data.</li>
</ul>
</li>
<li>Left double click on <em>Storm Image</em> to see the image. Close the dataset.</li>
<li>Right click on <em>Storm Image</em> and select <em>Open As</em> to bring up the Data Selection window.</li>
<li>Select a subset by clicking the left mouse on the image in the window and dragging the mouse.
Notice that the Height and Width values change. Select to display it as an image. Click <em>Ok</em>.
<table>
<tr>
<td>
\image html hdfview-imgsubset.png
</td>
</tr>
</table>
</li>
<li>Position the cursor in the middle of the image. Press Shift+Left Mouse button and hold, and then
drag the mouse to select another subset.</li>
<li>Select <em>Image->Write Selection to Image</em>. Enter <em>Subset</em> for the new image name. Click <em>Ok</em>. The <em>Subset</em>
image will appear in the tree view on the left.</li>
<li>Left double click on the image <em>Subset</em> to bring it up on the right.
<table>
<tr>
<td>
\image html hdfview-newimgsubset.png
</td>
</tr>
</table>
</li>
<li>Close the <em>Subset</em> image.</li>
</ul>
\subsection subsec_learn_hv_topics_table Creating a Table (Compound Dataset)
A dataset with a compound datatype contains data elements that consist of multiple fields. If the
dataspace for the compound dataset is one-dimensional, then the dataset can be viewed as a table in
HDFView, as is shown below.
<ul>
<li>Right button click on the group <em>Data</em>. Select <em>New -> Compound DS</em>.</li>
<li>A window pops up. Only fill in the following fields:
<table>
<tr>
<th>Dataset name
</th>
<td>Table
</td>
</tr>
<tr>
<th>Dataspace (Current size only)
</th>
<td>4
</td>
</tr>
<tr>
<th>Compound Datatype Properties:
<br />Number of Members
</th>
<td>3
</td>
</tr>
<tr>
<th>Compound Datatype Properties:
<br /><em>Name</em> / Datatype / Size
</th>
<td><em>Description</em> / string / 4
<br /><em>Temperature</em> / float / 1
<br /><em>Pressure</em> / double / 1
</td>
</tr>
</table>
<table>
<tr>
<td>
\image html hdfview-newcmpd.png
</td>
</tr>
</table>
</li>
<li>Click Ok at the bottom.</li>
<li>Open the Data group (if it is not open) and double left click on the Table object.
<table>
<tr>
<td>
\image html hdfview-table.png
</td>
</tr>
</table>
</li>
<li>Close the dataset.</li>
</ul>
<hr>
Navigate back: \ref index "Main" / \ref GettingStarted
*/

View File

@@ -1,53 +1,32 @@
/** \page RM HDF5 Reference Manual
The functions provided by the HDF5 C-API are grouped into the following
The functions provided by the HDF5 API are grouped into the following
\Emph{modules}:
<table>
<tr><th>Modules</th></tr>
<tr valign="top">
<td>
<table>
<tr valign="top"><td style="border: none;">
\li \ref H5A "Attributes (H5A)"
\li \ref H5D "Datasets (H5D)"
\li \ref H5S "Dataspaces (H5S)"
\li \ref H5T "Datatypes (H5T)"
\li \ref H5E "Error Handling (H5E)"
\li \ref H5ES "Event Sets (H5ES)"
\li \ref H5F "Files (H5F)"
\li \ref H5Z "Filters (H5Z)"
\li \ref H5G "Groups (H5G)"
</td><td style="border: none;">
\li \ref H5I "Identifiers (H5I)"
\li \ref H5 "Library General (H5)"
\li \ref H5L "Links (H5L)"
\li \ref H5M "Maps (H5M)"
\li \ref H5O "Objects (H5O)"
\li \ref H5P "Property Lists (H5P)"
\li \ref H5PL "Dynamically-loaded Plugins (H5PL)"
\li \ref H5R "References (H5R)"
\li \ref H5VL "Virtual Object Layer (H5VL)"
</td><td style="border: none;">
\li \ref high_level
<ul>
<li>\ref H5LT "Lite (H5LT, H5LD)"
<li>\ref H5IM "Images (H5IM)"
<li>\ref H5TB "Table (H5TB)"
<li>\ref H5PT "Packet Table (H5PT)"
<li>\ref H5DS "Dimension Scale (H5DS)"
<li>\ref H5DO "Optimizations (H5DO)"
<li>\ref H5LR "Extensions (H5LR, H5LT)"
</ul>
</td></tr>
<tr><td colspan="3" style="border: none;">
\a Core \a library: \ref H5 \ref H5A \ref H5D \ref H5E \ref H5ES \ref H5F \ref H5G \ref H5I \ref H5L
\ref H5M \ref H5O \ref H5P \ref H5PL \ref H5R \ref H5S \ref H5T \ref H5VL \ref H5Z
</td></tr>
<tr><td colspan="3" style="border: none;">
\a High-level \a library: \ref H5LT \ref H5IM \ref H5TB \ref H5PT \ref H5DS \ref H5DO \ref H5LR
</td></tr>
\include{doc} core_menu.md
</td>
</tr>
<tr valign="top"><td style="border: none;">
<!-- High-level library -->
\include{doc} high_level_menu.md
</td>
</tr>
<tr valign="top"><td style="border: none;">
<!-- Fortran library -->
\include{doc} fortran_menu.md
</td>
</tr>
<tr valign="top"><td style="border: none;">
<!-- Java library -->
\include{doc} java_menu.md
</td>
</tr>
<tr>
<td><a href="./deprecated.html">Deprecated functions</a></td>
<td>Functions with \ref ASYNC</td>

View File

@@ -2,20 +2,20 @@
\section DDL
\li \ref DDLBNF110 "DDL in BNF through HDF5 1.10"
\li \ref DDLBNF112 "DDL in BNF for HDF5 1.12 and above"
\li \ref DDLBNF110
\li \ref DDLBNF112
\section File Format
\li \ref FMT1 "HDF5 File Format Specification Version 1.0"
\li \ref FMT11 "HDF5 File Format Specification Version 1.1"
\li \ref FMT2 "HDF5 File Format Specification Version 2.0"
\li \ref FMT3 "HDF5 File Format Specification Version 3.0"
\li \ref FMT1
\li \ref FMT11
\li \ref FMT2
\li \ref FMT3
\section Other
\li \ref IMG "HDF5 Image and Palette Specification Version 1.2"
\li \ref TBL "HDF5 Table Specification Version 1.0"
\li \ref IMG
\li \ref TBL
\li <a href="https://support.hdfgroup.org/HDF5/doc/HL/H5DS_Spec.pdf">
HDF5 Dimension Scale Specification</a>

View File

@@ -1,13 +1,13 @@
/** \page TN Technical Notes
\li \link api-compat-macros API Compatibility Macros \endlink
\li \ref APPDBG "Debugging HDF5 Applications"
\li \ref FMTDISC "File Format Walkthrough"
\li \ref FILTER "Filters"
\li \ref IOFLOW "HDF5 Raw I/O Flow Notes"
\li \ref TNMDC "Metadata Caching in HDF5"
\li \ref MT "Thread Safe library"
\li \ref VFL "Virtual File Layer"
\li \ref api-compat-macros
\li \ref APPDBG
\li \ref FMTDISC
\li \ref FILTER
\li \ref IOFLOW
\li \ref TNMDC
\li \ref MT
\li \ref VFL
*/

403
doxygen/dox/UsersGuide.dox Normal file
View File

@@ -0,0 +1,403 @@
/** \page UG HDF5 User Guide
<center>
HDF5 Release 1.14
\image html HDFG-logo.png "The HDF Group"
</center>
\ref sec_data_model
\li \ref subsec_data_model_intro
\li \ref subsec_data_model_abstract
<ul>
<li> \ref subsubsec_data_model_abstract_file
<li> \ref subsubsec_data_model_abstract_group
<li> \ref subsubsec_data_model_abstract_dataset
<li> \ref subsubsec_data_model_abstract_space
<li> \ref subsubsec_data_model_abstract_type
<li> \ref subsubsec_data_model_abstract_attr
<li> \ref subsubsec_data_model_abstract_plist
<li> \ref subsubsec_data_model_abstract_link
</ul>
\li \ref subsec_data_model_storage
<ul>
<li> \ref subsubsec_data_model_storage_spec
<li> \ref subsubsec_data_model_storage_imple
</ul>
\li \ref subsec_data_model_structure
<ul>
<li> \ref subsubsec_data_model_structure_file
<li> \ref subsubsec_data_model_structure_path
<li> \ref subsubsec_data_model_structure_example
</ul>
\ref sec_program
\li \ref subsec_program_intro
\li \ref subsec_program_model
<ul>
<li> \ref subsubsec_program_model_create
<li> \ref subsubsec_program_model_dset
<li> \ref subsubsec_program_model_close
<li> \ref subsubsec_program_model_data
<li> \ref subsubsec_program_model_partial
<li> \ref subsubsec_program_model_info
<li> \ref subsubsec_program_model_compound
<li> \ref subsubsec_program_model_extend
<li> \ref subsubsec_program_model_group
<li> \ref subsubsec_program_model_attr
</ul>
\li \ref subsec_program_transfer_pipeline
\ref sec_file
\li \ref subsec_file_intro
\li \ref subsec_file_access_modes
\li \ref subsec_file_creation_access
\li \ref subsec_file_drivers
\li \ref subsec_file_program_model
<ul>
<li> \ref subsubsec_file_program_model_create
<li> \ref subsubsec_file_program_model_open
<li> \ref subsubsec_file_program_model_close
</ul>
\li \ref subsec_file_h5dump
\li \ref subsec_file_summary
\li \ref subsec_file_create
\li \ref subsec_file_closes
\li \ref subsec_file_property_lists
<ul>
<li> \ref subsubsec_file_property_lists_create
<li> \ref subsubsec_file_property_lists_props
<li> \ref subsubsec_file_property_lists_access
</ul>
\li \ref subsec_file_alternate_drivers
<ul>
<li> \ref subsubsec_file_alternate_drivers_id
<li> \ref subsubsec_file_alternate_drivers_sec2
<li> \ref subsubsec_file_alternate_drivers_direct
<li> \ref subsubsec_file_alternate_drivers_log
<li> \ref subsubsec_file_alternate_drivers_win
<li> \ref subsubsec_file_alternate_drivers_stdio
<li> \ref subsubsec_file_alternate_drivers_mem
<li> \ref subsubsec_file_alternate_drivers_family
<li> \ref subsubsec_file_alternate_drivers_multi
<li> \ref subsubsec_file_alternate_drivers_split
<li> \ref subsubsec_file_alternate_drivers_par
</ul>
\li \ref subsec_file_examples
<ul>
<li> \ref subsubsec_file_examples_trunc
<li> \ref subsubsec_file_examples_props
<li> \ref subsubsec_file_examples_access
</ul>
\li \ref subsec_file_multiple
\ref sec_group
\li \ref subsec_group_intro
\li \ref subsec_group_descr
<ul>
<li> \ref subsubsec_group_descr_object
<li> \ref subsubsec_group_descr_model
<li> \ref subsubsec_group_descr_path
<li> \ref subsubsec_group_descr_impl
</ul>
\li \ref subsec_group_h5dump
\li \ref subsec_group_function
\li \ref subsec_group_program
<ul>
<li> \ref subsubsec_group_program_create
<li> \ref subsubsec_group_program_open
<li> \ref subsubsec_group_program_dataset
<li> \ref subsubsec_group_program_close
<li> \ref subsubsec_group_program_links
<li> \ref subsubsec_group_program_info
<li> \ref subsubsec_group_program_objs
<li> \ref subsubsec_group_program_all
</ul>
\li \ref subsec_group_examples
\ref sec_dataset
\li \ref subsec_dataset_intro
\li \ref subsec_dataset_function
\li \ref subsec_dataset_program
<ul>
<li> \ref subsubsec_dataset_program_general
<li> \ref subsubsec_dataset_program_create
<li> \ref subsubsec_dataset_program_transfer
<li> \ref subsubsec_dataset_program_read
</ul>
\li \ref subsec_dataset_transfer Data Transfer
<ul>
<li> \ref subsubsec_dataset_transfer_pipe
<li> \ref subsubsec_dataset_transfer_filter
<li> \ref subsubsec_dataset_transfer_drive
<li> \ref subsubsec_dataset_transfer_props
<li> \ref subsubsec_dataset_transfer_store
<li> \ref subsubsec_dataset_transfer_partial
</ul>
\li \ref subsec_dataset_allocation
<ul>
<li> \ref subsubsec_dataset_allocation_store
<li> \ref subsubsec_dataset_allocation_delete
<li> \ref subsubsec_dataset_allocation_release
<li> \ref subsubsec_dataset_allocation_ext
</ul>
\li \ref subsec_dataset_filters
<ul>
<li> \ref subsubsec_dataset_filters_nbit
<li> \ref subsubsec_dataset_filters_scale
<li> \ref subsubsec_dataset_filters_szip
</ul>
\ref sec_datatype
\li \ref subsec_datatype_intro
\li \ref subsec_datatype_model
<ul>
<li> \ref subsubsec_datatype_model_class
<li> \ref subsubsec_datatype_model_predefine
</ul>
\li \ref subsec_datatype_usage
<ul>
<li> \ref subsubsec_datatype_usage_object
<li> \ref subsubsec_datatype_usage_create
<li> \ref subsubsec_datatype_usage_transfer
<li> \ref subsubsec_datatype_usage_discover
<li> \ref subsubsec_datatype_usage_user
</ul>
\li \ref subsec_datatype_function
\li \ref subsec_datatype_program
<ul>
<li> \ref subsubsec_datatype_program_discover
<li> \ref subsubsec_datatype_program_define
</ul>
\li \ref subsec_datatype_other
<ul>
<li> \ref subsubsec_datatype_other_strings
<li> \ref subsubsec_datatype_other_refs
<li> \ref subsubsec_datatype_other_enum
<li> \ref subsubsec_datatype_other_opaque
<li> \ref subsubsec_datatype_other_bitfield
</ul>
\li \ref subsec_datatype_fill
\li \ref subsec_datatype_complex
<ul>
<li> \ref subsubsec_datatype_complex_create
<li> \ref subsubsec_datatype_complex_analyze
</ul>
\li \ref subsec_datatype_life
\li \ref subsec_datatype_transfer
\li \ref subsec_datatype_text
\ref sec_dataspace
\li \ref subsec_dataspace_intro
\li \ref subsec_dataspace_function
\li \ref subsec_dataspace_program
<ul>
<li> \ref subsubsec_dataspace_program_object
<li> \ref subsubsec_dataspace_program_model
</ul>
\li \ref subsec_dataspace_transfer
<ul>
<li> \ref subsubsec_dataspace_transfer_select
<li> \ref subsubsec_dataspace_transfer_model
</ul>
\li \ref subsec_dataspace_select
\li \ref subsec_dataspace_refer
<ul>
<li> \ref subsubsec_dataspace_refer_use
<li> \ref subsubsec_dataspace_refer_create
<li> \ref subsubsec_dataspace_refer_read
</ul>
\li \ref subsec_dataspace_sample
\ref sec_attribute
\li \ref subsec_attribute_intro
\li \ref subsec_attribute_program
<ul>
<li> <!-- \subsubsection subsubsec_attribute_program_exist --> To Open and Read or Write an Existing Attribute </li>
</ul>
\li \ref subsec_error_H5A
\li \ref subsec_attribute_work
<ul>
<li> \ref subsubsec_attribute_work_struct
<li> \ref subsubsec_attribute_work_create
<li> \ref subsubsec_attribute_work_access
<li> \ref subsubsec_attribute_work_info
<li> \ref subsubsec_attribute_work_iterate
<li> \ref subsubsec_attribute_work_delete
<li> \ref subsubsec_attribute_work_close
</ul>
\li \ref subsec_attribute_special
\ref sec_error
\li \ref subsec_error_intro
\li \ref subsec_error_program
\li \ref subsec_error_H5E
\li \ref subsec_error_ops
<ul>
<li> \ref subsubsec_error_ops_stack
<li> \ref subsubsec_error_ops_print
<li> \ref subsubsec_error_ops_mute
<li> \ref subsubsec_error_ops_custom_print
<li> \ref subsubsec_error_ops_walk
<li> \ref subsubsec_error_ops_travers
</ul>
\li \ref subsec_error_adv
<ul>
<li> \ref subsubsec_error_adv_more
<li> \ref subsubsec_error_adv_app
</ul>
\ref sec_plist
\li \ref subsec_plist_intro
\li \ref subsec_plist_class
<ul>
<li> \ref subsubsec_plist_class
<li> \ref subsubsec_plist_lists
<li> \ref subsubsec_plist_props
</ul>
\li \ref subsec_plist_program
<ul>
<li> \ref subsubsec_plist_default
<li> \ref subsubsec_plist_basic
<li> \ref subsubsec_plist_additional
</ul>
\li \ref subsec_plist_generic
\li \ref subsec_plist_H5P
\li \ref subsec_plist_resources
\li \ref subsec_plist_notes
\ref sec_vol
\li \ref subsec_vol_intro
\li \ref subsec_vol_abstract_layer
\li \ref subsec_vol_connect
\li \ref subsec_vol_use
\ref sec_async
\li \ref subsec_async_intro
\ref sec_map
\ref sec_addition
\page AR_UG Additional Resources
\section sec_addition Additional Resources
These documents provide additional information for the use and tuning of specific HDF5 features.
<table style=" border-spacing:0; padding-left:6.00pt; padding-top:6.00pt; padding-right:6.00pt; padding-bottom:6.00pt; float:aligncenter; width:100%; max-width:432.00pt;" cellspacing="0">
<caption x-list-start="1" style="font-size: 12.0pt;">Table of Additional resources</caption>
<tr style="height: 23.00pt;">
<th style="width: 234.000pt; border-top-style: solid; border-top-width: 1px; border-top-color: #228a22; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align : top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Document</p>
</th>
<th style="width: 198.000pt; border-top-style: solid; border-top-width: 1px; border-top-color: #228a22; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align : top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Comments</p>
</th>
</tr>
<tr style="height: 23.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span>@ref HDF5Examples</span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Code examples by API. </p>
</td>
</tr>
<tr style="height: 36.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/Chunking/index.html">Chunking in HDF5</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Structuring the use of chunking and tuning it for performance.</p>
</td>
</tr>
<tr style="height: 36.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span class="FM_LT_LinkText"><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/DirectChunkWrite/UsingDirectChunkWrite.pdf">Using the Direct Chunk Write Function</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Describes another way that chunks can be written to datasets.</p>
</td>
</tr>
<tr style="height: 88.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/CommittedDatatypeCopying/CopyingCommittedDatatypesWithH5Ocopy.pdf">Copying Committed Datatypes with H5Ocopy</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Describes how to copy to another file a dataset that uses a committed datatype or an object with an attribute that uses a committed datatype so that the committed datatype in the destination file can be used by multiple objects.</p>
</td>
</tr>
<tr style="height: 36.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/MetadataCache/index.html">Metadata Caching in HDF5</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Managing the HDF5 metadata cache and tuning it for performance.</p>
</td>
</tr>
<tr style="height: 49.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/DynamicallyLoadedFilters/HDF5DynamicallyLoadedFilters.pdf">HDF5 Dynamically Loaded Filters</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Describes how an HDF5 application can apply a filter that is not registered with the HDF5 Library.</p>
</td>
</tr>
<tr style="height: 62.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/FileImageOperations/HDF5FileImageOperations.pdf">HDF5 File Image Operations</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Describes how to work with HDF5 files in memory. Disk I/O is not required when file images are opened, created, read from, or written to.</p>
</td>
</tr>
<tr style="height: 62.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/ModifiedRegionWrites/ModifiedRegionWrites.pdf">Modified Region Writes</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Describes how to set write operations for in-memory files so that only modified regions are written to storage. Available when the Core (Memory) VFD is used.</p>
</td>
</tr>
<tr style="height: 36.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/UsingIdentifiers/index.html">Using Identifiers</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Describes how identifiers behave and how they should be treated.</p>
</td>
</tr>
<tr style="height: 36.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/UsingUnicode/index.html">Using UTF-8 Encoding in HDF5 Applications</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Describes the use of UTF-8 Unicode character encodings in HDF5 applications.</p>
</td>
</tr>
<tr style="height: 49.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/FreeingMemory/FreeingMemoryAllocatedByTheHdf5Library.pdf">Freeing Memory Allocated by the HDF5 Library</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>Describes how inconsistent memory management can cause heap corruption or resource leaks and possible solutions.</p>
</td>
</tr>
<tr style="height: 23.00pt;">
<td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Glossary.html">HDF5 Glossary</a></span></p>
</td>
<td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
<p>A glossary of terms.</p>
</td>
</tr>
</table>
Previous Chapter \ref sec_map
\par Don't like what you see? - You can help to improve this User Guide
Complete the survey linked near the top of this page!\n
We treat documentation like code: Fork the
<a href="https://github.com/HDFGroup/hdf5">HDF5 repo</a>, make changes, and create a
<a href="https://github.com/HDFGroup/hdf5/pulls">pull request</a> !\n
*/

1198
doxygen/dox/ViewTools.dox Normal file

File diff suppressed because it is too large Load Diff

786
doxygen/dox/ViewTools2.dox Normal file
View File

@@ -0,0 +1,786 @@
/** @page ViewToolsEdit Command-line Tools For Editing HDF5 Files
Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
<hr>
\section secViewToolsEditTOC Contents
<ul>
<li>\ref secViewToolsEditRemove</li>
<li>\ref secViewToolsEditChange</li>
<li>\ref secViewToolsEditApply</li>
<li>\ref secViewToolsEditCopy</li>
<li>\ref secViewToolsEditAdd</li>
</ul>
\section secViewToolsEditRemove Remove Inaccessible Objects and Unused Space in a File
HDF5 files may accumulate unused space when they are read and rewritten to or if objects are deleted within
them. With many edits and deletions this unused space can add up to a sizable amount.
The <code style="background-color:whitesmoke;">h5repack</code> tool can be used to remove unused space in an HDF5
file. If no options other than the input and output HDF5 files are specified on the
<code style="background-color:whitesmoke;">h5repack</code> command line, it will write the file to the new
file, getting rid of the unused space:
\code
h5repack <input file> <output file>
\endcode
\section secViewToolsEditChange Change a Dataset's Storage Layout
The <code style="background-color:whitesmoke;">h5repack</code> utility can be used to change a dataset's storage
layout. By default, the storage layout of a dataset is defined at creation time and it cannot be changed.
However, with h5repack you can write an HDF5 file to a new file and change the layout for objects in the new file.
The <code style="background-color:whitesmoke;">-l</code> option in <code style="background-color:whitesmoke;">h5repack</code>
is used to change the layout for an object. The string following the <code style="background-color:whitesmoke;">-l</code>
option defines the layout type and parameters for specified objects (or all objects):
\code
h5repack -l [list of objects:]<layout type>=<layout parameters> <input file> <output file>
\endcode
If no object is specified, then everything in the input file will be written to the output file with the specified
layout type and parameters. If objects are specified then everything in the input file will be written to the
output file as is, except for those specified objects. They will be written to the output file with the given
layout type and parameters.
Following is a description of the dataset layouts and the <code style="background-color:whitesmoke;">h5repack</code>
options to use to change a dataset:
<table>
<tr>
<th>Storage Layout</th><th>h5repack Option</th><th>Description</th>
</tr>
<tr>
<td>Contiguous
</td>
<td>CONTI
</td>
<td>Data is stored physically together
</td>
</tr>
<tr>
<td>Chunked
</td>
<td>CHUNK=DIM[xDIM...xDIM]
</td>
<td>Data is stored in DIM[xDIM...xDIM] chunks
</td>
</tr>
<tr>
<td>Compact
</td>
<td>COMPA
</td>
<td>Data is stored in the header of the object (less I/O)
</td>
</tr>
</table>
If you type <code style="background-color:whitesmoke;">h5repack -h</code> on the command line, you will see
a detailed usage statement with examples of modifying the layout.
In the following example, the dataset <code style="background-color:whitesmoke;">/dset</code> in the file
dset.h5 is contiguous, as shown by the <code style="background-color:whitesmoke;">h5dump -pH</code> command.
The <code style="background-color:whitesmoke;">h5repack</code> utility writes dset.h5 to a new file, dsetrpk.h5,
where the dataset <code style="background-color:whitesmoke;">dset</code> is chunked. This can be seen by examining
the resulting dsetrpk.h5 file with <code style="background-color:whitesmoke;">h5dump</code>, as shown:
\code
$ h5dump -pH dset.h5
HDF5 "dset.h5" {
GROUP "/" {
DATASET "dset" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
STORAGE_LAYOUT {
CONTIGUOUS
SIZE 96
OFFSET 1400
}
FILTERS {
NONE
}
FILLVALUE {
FILL_TIME H5D_FILL_TIME_IFSET
VALUE 0
}
ALLOCATION_TIME {
H5D_ALLOC_TIME_LATE
}
}
}
}
$ h5repack -l dset:CHUNK=4x6 dset.h5 dsetrpk.h5
$ h5dump -pH dsetrpk.h5
HDF5 "dsetrpk.h5" {
GROUP "/" {
DATASET "dset" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
STORAGE_LAYOUT {
CHUNKED ( 4, 6 )
SIZE 96
}
FILTERS {
NONE
}
FILLVALUE {
FILL_TIME H5D_FILL_TIME_IFSET
VALUE 0
}
ALLOCATION_TIME {
H5D_ALLOC_TIME_INCR
}
}
}
}
\endcode
There can be many reasons that the storage layout needs to be changed for a dataset. For example,
there may be a performance issue with a dataset due to a small chunk size.
\section secViewToolsEditApply Apply Compression Filter to a Dataset
The <code style="background-color:whitesmoke;">h5repack</code> utility can be used to compress or
remove compression from a dataset in a file. By default, compression cannot be added to or removed
from a dataset once it has been created. However, with <code style="background-color:whitesmoke;">h5repack</code>
you can write a file to a new file and specify a compression filter to apply to a dataset or datasets in the new file.
To apply a filter to an object in an HDF5 file, specify the <code style="background-color:whitesmoke;">-f</code> option,
where the string following the <code style="background-color:whitesmoke;">-f</code> option defines the filter and
its parameters (if there are any) to apply to a given object or objects:
\code
h5repack -f [list of objects:]<name of filter>=<filter parameters> <input file> <output file>
\endcode
If no objects are specified then everything in the input file will be written to the output file with
the filter and parameters specified. If objects are specified, then everything in the input file will
be written to the output file as is, except for the specified objects. They will be written to the
output file with the filter and parameters specified.
If you type <code style="background-color:whitesmoke;">h5repack --help</code> on the command line,
you will see a detailed usage statement with examples of modifying a filter. There are actually
numerous filters that you can apply to a dataset:
<table>
<tr>
<th>Filter</th><th>Options</th>
</tr>
<tr>
<td>GZIP compression (levels 1-9)
<td>GZIP=&lt;deflation level&gt;
</td>
</tr>
<tr>
<td>SZIP compression
<td>SZIP=&lt;pixels per block,coding&gt;
</td>
</tr>
<tr>
<td>Shuffle filter
<td>SHUF
</td>
</tr>
<tr>
<td>Checksum filter
<td>FLET
</td>
</tr>
<tr>
<td>NBIT compression
<td>NBIT
</td>
</tr>
<tr>
<td>HDF5 Scale/Offset filter
<td>SOFF=&lt;scale_factor,scale_type&gt;
</td>
</tr>
<tr>
<td>User defined filter
<td>UD=&lt;filter_number,cd_value_count,value_1[,value_2,...,value_N]&gt;
</td>
</tr>
<tr>
<td>Remove ALL filters
</td>
<td>NONE
</td>
</tr>
</table>
Be aware that a dataset must be chunked to apply compression to it. If the dataset is not already chunked,
then <code style="background-color:whitesmoke;">h5repack</code> will apply chunking to it. Both chunking
and compression cannot be applied to a dataset at the same time with <code style="background-color:whitesmoke;">h5repack</code>.
In the following example,
\li <em>h5dump</em> lists the properties for the objects in <em>dset.h5</em>. Note that the dataset <em>dset</em> is contiguous.
\li <em>h5repack</em> writes dset.h5 into a new file <em>dsetrpk.h5</em>, applying GZIP Level 5 compression to the dataset <em>/dset</em> in dsetrpk.h5.
\li <em>h5dump</em> lists the properties for the new <em>dsetrpk.h5</em> file. Note that <em>/dset</em> is both compressed and chunked.
<em>Example</em>
\code
$ h5dump -pH dset.h5
HDF5 "dset.h5" {
GROUP "/" {
DATASET "dset" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 12, 18 ) / ( 12, 18 ) }
STORAGE_LAYOUT {
CONTIGUOUS
SIZE 864
OFFSET 1400
}
FILTERS {
NONE
}
FILLVALUE {
FILL_TIME H5D_FILL_TIME_IFSET
VALUE 0
}
ALLOCATION_TIME {
H5D_ALLOC_TIME_LATE
}
}
}
}
$ h5repack -f dset:GZIP=5 dset.h5 dsetrpk.h5
$ h5dump -pH dsetrpk.h5
HDF5 "dsetrpk.h5" {
GROUP "/" {
DATASET "dset" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 12, 18 ) / ( 12, 18 ) }
STORAGE_LAYOUT {
CHUNKED ( 12, 18 )
SIZE 160 (5.400:1 COMPRESSION)
}
FILTERS {
COMPRESSION DEFLATE { LEVEL 5 }
}
FILLVALUE {
FILL_TIME H5D_FILL_TIME_IFSET
VALUE 0
}
ALLOCATION_TIME {
H5D_ALLOC_TIME_INCR
}
}
}
}
\endcode
\section secViewToolsEditCopy Copy Objects to Another File
The <code style="background-color:whitesmoke;">h5copy</code> utility can be used to copy an object or
objects from one HDF5 file to another or to a different location in the same file. It uses the
#H5Ocopy and #H5Lcopy APIs in HDF5.
Following are some of the options that can be used with <code style="background-color:whitesmoke;">h5copy</code>.
<table>
<tr>
<th>h5copy Options</th><th>Description</th>
</tr>
<tr>
<td>-i, --input
</td>
<td>Input file name
</td>
</tr>
<tr>
<td>-o, --output
</td>
<td>Output file name
</td>
</tr>
<tr>
<td>-s, --source
</td>
<td>Source object name
</td>
</tr>
<tr>
<td>-d, --destination
</td>
<td>Destination object name
</td>
</tr>
<tr>
<td>-p, --parents
</td>
<td>Make parent groups as needed
</td>
</tr>
<tr>
<td>-v, --verbose
</td>
<td>Verbose mode
</td>
</tr>
<tr>
<td>-f, --flag
</td>
<td>Flag type
</td>
</tr>
</table>
For a complete list of options and information on using <code style="background-color:whitesmoke;">h5copy</code>, type:
\code
h5copy --help
\endcode
In the example below, the dataset <code style="background-color:whitesmoke;">/MyGroup/Group_A/dset2</code>
in <code style="background-color:whitesmoke;">groups.h5</code> gets copied to the root
("<code style="background-color:whitesmoke;">/</code>") group of a new file,
<code style="background-color:whitesmoke;">newgroup.h5</code>, with the name
<code style="background-color:whitesmoke;">dset3</code>:
\code
$ h5dump -H groups.h5
HDF5 "groups.h5" {
GROUP "/" {
GROUP "MyGroup" {
GROUP "Group_A" {
DATASET "dset2" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 2, 10 ) / ( 2, 10 ) }
}
}
GROUP "Group_B" {
}
DATASET "dset1" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 3, 3 ) / ( 3, 3 ) }
}
}
}
}
$ h5copy -i groups.h5 -o newgroup.h5 -s /MyGroup/Group_A/dset2 -d /dset3
$ h5dump -H newgroup.h5
HDF5 "newgroup.h5" {
GROUP "/" {
DATASET "dset3" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 2, 10 ) / ( 2, 10 ) }
}
}
}
\endcode
There are also <code style="background-color:whitesmoke;">h5copy</code> flags that can be specified
with the <code style="background-color:whitesmoke;">-f</code> option. In the example below, the
<code style="background-color:whitesmoke;">-f shallow</code> option specifies to copy only the
immediate members of the group <code style="background-color:whitesmoke;">/MyGroup</code> from
the <code style="background-color:whitesmoke;">groups.h5</code> file mentioned above to a new
file <code style="background-color:whitesmoke;">mygrouponly.h5</code>:
\code
h5copy -v -i groups.h5 -o mygrouponly.h5 -s /MyGroup -d /MyGroup -f shallow
\endcode
The output of the above command is shown below. The verbose option <code style="background-color:whitesmoke;">-v</code>
describes the action that was taken, as shown in the highlighted text.
\code
Copying file <groups.h5> and object </MyGroup> to file <mygrouponly.h5> and object </MyGroup>
Using shallow flag
$ h5dump -H mygrouponly.h5
HDF5 "mygrouponly.h5" {
GROUP "/" {
GROUP "MyGroup" {
GROUP "Group_A" {
}
GROUP "Group_B" {
}
DATASET "dset1" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 3, 3 ) / ( 3, 3 ) }
}
}
}
}
\endcode
\section secViewToolsEditAdd Add or Remove User Block from File
The user block is a space in an HDF5 file that is not interpreted by the HDF5 library. It is a property
list that can be added when creating a file. See the #H5Pset_userblock API in the \ref RM for more
information regarding this property.
Once created in a file, the user block cannot be removed. However, you can use the
<code style="background-color:whitesmoke;">h5jam</code> and <code style="background-color:whitesmoke;">h5unjam</code>
utilities to add or remove a user block from a file into a new file.
These two utilities work similarly, except that <code style="background-color:whitesmoke;">h5jam</code>
adds a user block to a file and <code style="background-color:whitesmoke;">h5unjam</code> removes the user
block. You can also overwrite or delete a user block in a file.
Specify the <code style="background-color:whitesmoke;">-h</code> option to see a complete list of options
that can be used with <code style="background-color:whitesmoke;">h5jam</code> and
<code style="background-color:whitesmoke;">h5unjam</code>. For example:
\code
h5jam -h
h5unjam -h
\endcode
Below are the basic options for adding or removing a user block with <code style="background-color:whitesmoke;">h5jam</code>
and <code style="background-color:whitesmoke;">h5unjam</code>:
<table>
<tr>
<th>h5jam/h5unjam Options</th><th>Description</th>
</tr>
<tr>
<td>-i
</td>
<td>Input File
</td>
</tr>
<tr>
<td>-o
</td>
<td>Output File
</td>
</tr>
<tr>
<td>-u
</td>
<td>File to add or remove from user block
</td>
</tr>
</table>
Let's say you wanted to add the program that creates an HDF5 file to its user block. As an example, you
can take the <code style="background-color:whitesmoke;">h5_crtgrpar.c</code> program from the
\ref LBExamples
and add it to the file it creates, <code style="background-color:whitesmoke;">groups.h5</code>. This can
be done with <code style="background-color:whitesmoke;">h5jam</code>, as follows:
\code
h5jam -i groups.h5 -u h5_crtgrpar.c -o groupsub.h5
\endcode
You can view the resulting file with the <code style="background-color:whitesmoke;">more</code> command
(<code style="background-color:whitesmoke;">more groupsub.h5</code>)
to see that the <code style="background-color:whitesmoke;">h5_crtgrpar.c</code> file is indeed included.
To remove the user block that was just added, type:
\code
h5unjam -i groupsub.h5 -u h5_crtgrparNEW.c -o groups-noub.h5
\endcode
This writes the user block in the file <code style="background-color:whitesmoke;">groupsub.h5</code>
into <code style="background-color:whitesmoke;">h5_crtgrparNEW.c</code>. The new HDF5 file,
<code style="background-color:whitesmoke;">groups-noub.h5</code>, will not contain a user block.
<hr>
Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
*/
/** @page ViewToolsConvert Command-line Tools For Converting HDF5 Files
Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
<hr>
\section secViewToolsConvertTOC Contents
<ul>
<li>\ref secViewToolsConvertASCII</li>
<li>\ref secViewToolsConvertBinary</li>
<li>\ref secViewToolsConvertExport</li>
</ul>
\section secViewToolsConvertASCII Output HDF5 Dataset into an ASCII File (to Import into Excel and Other Applications)
The <code style="background-color:whitesmoke;">h5dump</code> utility can be used to convert an HDF5 dataset
into an ASCII file, which can then be imported into Excel and other applications. The following options are used:
<table>
<tr>
<th>Options</th><th>Description</th>
</tr>
<tr>
<td> -d D, --dataset=D
</td>
<td>Display dataset D
</td>
</tr>
<tr>
<td> -o F, --output=F
</td>
<td>Output raw data into file F
</td>
</tr>
<tr>
<td> -y, --noindex
</td>
<td>Suppress printing of array indices with the data
</td>
</tr>
<tr>
<td> -w N, --width=N
</td>
<td>Set N number of columns of output. A value of 0
sets the number to 65535 (the maximum)
</td>
</tr>
</table>
As an example, <code style="background-color:whitesmoke;">h5_crtdat.c</code> from the \ref LBDsetCreate
HDF5 Tutorial topic, creates the file <code style="background-color:whitesmoke;">dset.h5</code> with
a dataset <code style="background-color:whitesmoke;">/dset</code> that is a 4 x 6 integer array. The
following is displayed when viewing <code style="background-color:whitesmoke;">dset.h5</code> with
<code style="background-color:whitesmoke;">h5dump</code>:
\code
$ h5dump dset.h5
HDF5 "dset.h5" {
GROUP "/" {
DATASET "dset" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
DATA {
(0,0): 1, 2, 3, 4, 5, 6,
(1,0): 7, 8, 9, 10, 11, 12,
(2,0): 13, 14, 15, 16, 17, 18,
(3,0): 19, 20, 21, 22, 23, 24
}
}
}
}
\endcode
The following command will output the values of the <code style="background-color:whitesmoke;">/dset</code>
dataset to the ASCII file <code style="background-color:whitesmoke;">dset.asci</code>:
\code
h5dump -d /dset -o dset.asci -y -w 50 dset.h5
\endcode
In particular, note that:
\li The default behavior of <code style="background-color:whitesmoke;">h5dump</code> is to print indices,
and the <code style="background-color:whitesmoke;">-y</code> option suppresses this.
\li The <code style="background-color:whitesmoke;">-w 50</code> option tells
<code style="background-color:whitesmoke;">h5dump</code> to allow 50 columns for outputting the data. The
value specified must be large enough to accommodate the dimension size of the dataset multiplied by the
number of positions and spaces needed to print each value. If the value is not large enough, the output
will wrap to the next line, and the data will not display as expected in Excel or other applications. To
ensure that the output does not wrap to the next line, you can also specify 0 (zero) for the
<code style="background-color:whitesmoke;">-w</code> option.
In addition to creating the ASCII file <code style="background-color:whitesmoke;">dset.asci</code>, the
above command outputs the metadata of the specified dataset:
\code
HDF5 "dset.h5" {
DATASET "/dset" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
DATA {
}
}
}
\endcode
The <code style="background-color:whitesmoke;">dset.asci</code> file will contain the values for the dataset:
\code
1, 2, 3, 4, 5, 6,
7, 8, 9, 10, 11, 12,
13, 14, 15, 16, 17, 18,
19, 20, 21, 22, 23, 24
\endcode
\section secViewToolsConvertBinary Output HDF5 Dataset into Binary File
The <code style="background-color:whitesmoke;">h5dump</code> utility can be used to convert an
HDF5 dataset to a binary file with the following options:
<table>
<tr>
<th>Options</th><th>Description</th>
</tr>
<tr>
<td>-d D, --dataset=D
</td>
<td>Display dataset D
</td>
</tr>
<tr>
<td>-o F, --output=F
</td>
<td>Output raw data into file F
</td>
</tr>
<tr>
<td>-b B, --binary=B
</td>
<td>Binary file output of form B.
Valid values are: LE, BE, NATIVE, FILE
</td>
</tr>
</table>
As an example, <code style="background-color:whitesmoke;">h5_crtdat.c</code> from the
\ref LBDsetCreate HDF5 Tutorial topic, creates the file dset.h5 with a dataset
<code style="background-color:whitesmoke;">/dset</code> that is a 4 x 6 integer array. The
following is displayed when viewing <code style="background-color:whitesmoke;">dset.h5</code>
with <code style="background-color:whitesmoke;">h5dump</code>:
\code
$ h5dump -d /dset/ dset.h5
HDF5 "dset.h5" {
DATASET "/dset/" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
DATA {
(0,0): 1, 2, 3, 4, 5, 6,
(1,0): 7, 8, 9, 10, 11, 12,
(2,0): 13, 14, 15, 16, 17, 18,
(3,0): 19, 20, 21, 22, 23, 24
}
}
}
\endcode
As specified by the <code style="background-color:whitesmoke;">-d</code> and
<code style="background-color:whitesmoke;">-o</code> options, the following
<code style="background-color:whitesmoke;">h5dump</code> command will output the values of the dataset
<code style="background-color:whitesmoke;">/dset </code>to a file called
<code style="background-color:whitesmoke;">dset.bin</code>. The <code style="background-color:whitesmoke;">-b</code>
option specifies that the output will be binary in Little Endian format (LE).
\code
h5dump -d /dset -b LE -o dset.bin dset.h5
\endcode
This command outputs the metadata for the dataset, as well as creating the binary file
<code style="background-color:whitesmoke;">dset.bin</code>:
\code
HDF5 "dset.h5" {
DATASET "/dset" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
DATA {
}
}
}
\endcode
If you look at the resulting <code style="background-color:whitesmoke;">dset.bin</code> file with
a binary editor, you will see that it contains the dataset's values. For example (on Linux) you will see:
\code
$ od -t d dset.bin
0000000 1 2 3 4
0000020 5 6 7 8
0000040 9 10 11 12
0000060 13 14 15 16
0000100 17 18 19 20
0000120 21 22 23 24
0000140
\endcode
\section secViewToolsConvertExport Export from h5dump and Import into HDF5
The <code style="background-color:whitesmoke;">h5import</code> utility can use the output of
<code style="background-color:whitesmoke;">h5dump</code> as input to create a dataset or file.
The <code style="background-color:whitesmoke;">h5dump</code> utility must first create two files:
\li A DDL file, which will be used as an <code style="background-color:whitesmoke;">h5import</code> configuration file
\li A raw data file containing the data to be imported
The DDL file must be generated with the <code style="background-color:whitesmoke;">h5dump -p</code> option, to generate properties.
The raw data file that can be imported into HDF5 using this method may contain either numeric or string data with the following restrictions:
\li Numeric data requires the use of the <code style="background-color:whitesmoke;">h5dump -b</code> option to produce a binary data file.
\li String data must be written with the <code style="background-color:whitesmoke;">h5dump -y</code> and
<code style="background-color:whitesmoke;">--width=1</code> options, generating a single column of strings without indices.
Two examples follow: the first imports a dataset with a numeric datatype. Note that numeric data requires
the use of the <code style="background-color:whitesmoke;">h5dump -b</code> option to produce a binary data
file. The example program (<code style="background-color:whitesmoke;">h5_crtdat.c</code>) that creates this
file is included with the \ref IntroHDF5 tutorial and can be obtained from the \ref LBExamples page:
\code
h5dump -p -d "/dset" --ddl=dsetbin.dmp -o dset.bin -b dset.h5
h5import dset.bin -c dsetbin.dmp -o new-dset.h5
\endcode
The output before and after running these commands is shown below:
\code
$ h5dump dset.h5
HDF5 "dset.h5" {
GROUP "/" {
DATASET "dset" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
DATA {
(0,0): 1, 2, 3, 4, 5, 6,
(1,0): 7, 8, 9, 10, 11, 12,
(2,0): 13, 14, 15, 16, 17, 18,
(3,0): 19, 20, 21, 22, 23, 24
}
}
}
}
$ h5dump -p -d "/dset" --ddl=dsetbin.dmp -o dset.bin -b dset.h5
$ h5import dset.bin -c dsetbin.dmp -o new-dset.h5
$ h5dump new-dset.h5
HDF5 "new-dset.h5" {
GROUP "/" {
DATASET "dset" {
DATATYPE H5T_STD_I32BE
DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
DATA {
(0,0): 1, 2, 3, 4, 5, 6,
(1,0): 7, 8, 9, 10, 11, 12,
(2,0): 13, 14, 15, 16, 17, 18,
(3,0): 19, 20, 21, 22, 23, 24
}
}
}
}
\endcode
The second example imports string data. The example program that creates this file can be downloaded
from the <a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a> page.
Note that string data requires use of the <code style="background-color:whitesmoke;">h5dump -y</code>
option to exclude indexes and the <code style="background-color:whitesmoke;">h5dump --width=1</code>
option to generate a single column of strings. The <code style="background-color:whitesmoke;">-o</code>
option outputs the data into an ASCII file.
\code
h5dump -p -d "/DS1" -O vlstring.dmp -o vlstring.ascii -y --width=1 h5ex_t_vlstring.h5
h5import vlstring.ascii -c vlstring.dmp -o new-vlstring.h5
\endcode
The output before and after running these commands is shown below:
\code
$ h5dump h5ex_t_vlstring.h5
HDF5 "h5ex_t_vlstring.h5" {
GROUP "/" {
DATASET "DS1" {
DATATYPE H5T_STRING {
STRSIZE H5T_VARIABLE;
STRPAD H5T_STR_SPACEPAD;
CSET H5T_CSET_ASCII;
CTYPE H5T_C_S1;
}
DATASPACE SIMPLE { ( 4 ) / ( 4 ) }
DATA {
(0): "Parting", "is such", "sweet", "sorrow."
}
}
}
}
$ h5dump -p -d "/DS1" -O vlstring.dmp -o vlstring.ascii -y --width=1 h5ex_t_vlstring.h5
$ h5import vlstring.ascii -c vlstring.dmp -o new-vlstring.h5
$ h5dump new-vlstring.h5
HDF5 "new-vlstring.h5" {
GROUP "/" {
DATASET "DS1" {
DATATYPE H5T_STRING {
STRSIZE H5T_VARIABLE;
STRPAD H5T_STR_NULLTERM;
CSET H5T_CSET_ASCII;
CTYPE H5T_C_S1;
}
DATASPACE SIMPLE { ( 4 ) / ( 4 ) }
DATA {
(0): "Parting", "is such", "sweet", "sorrow."
}
}
 }
}
\endcode
<hr>
Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
*/

View File

@@ -0,0 +1,763 @@
/** @page ViewToolsJPSS Use Case: Examining a JPSS NPP File With HDF5 Tools
Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
<hr>
\section secViewToolsJPSSTOC Contents
<ul>
<li>\ref secViewToolsJPSSDeter</li>
<li>\ref secViewToolsJPSSView</li>
<li>\ref secViewToolsJPSSExam</li>
</ul>
This tutorial illustrates how to use the HDF5 tools to examine NPP files from the JPSS project. The following files are discussed:
\code
SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5 (<a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5.gz">gzipped file</a>)
SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5 (<a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5.gz">gzipped file</a>)
\endcode
\section secViewToolsJPSSDeter Determining File Contents
The first thing you may want to do is determine what is in your file. You can use the command-line tools or HDFView to do this:
\li @ref subsecViewToolsJPSSDeter_h5dump
\li @ref subsecViewToolsJPSSDeter_h5ls
\li @ref subsecViewToolsJPSSDeter_HDFView
JPSS NPP files all contain two root level groups:
<table>
<tr>
<th>Group</th><th>Description</th>
</tr>
<tr>
<td>/All_Data
</td>
<td>Contains the raw data and optional geo-location information.
</td>
</tr>
<tr>
<td>/Data_Products
</td>
<td>Contains a dataset ending in <code style="background-color:whitesmoke;">Aggr</code> with
references to objects in the <code style="background-color:whitesmoke;">/All_Data</code> group.
Contains granules (datasets with a name ending in <code style="background-color:whitesmoke;">Gran_#</code>)
with references to selected regions in datasets under <code style="background-color:whitesmoke;">/All_Data</code>.
</td>
</tr>
</table>
\subsection subsecViewToolsJPSSDeter_h5dump h5dump
With <code style="background-color:whitesmoke;">h5dump</code> you can see a list of the objects
in the file using the <code style="background-color:whitesmoke;">-n</code> option:
\code
h5dump -n <file>
\endcode
For example:
\code
$ h5dump -n SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5
HDF5 "SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5" {
FILE_CONTENTS {
group /
group /All_Data
group /All_Data/VIIRS-M9-SDR_All
dataset /All_Data/VIIRS-M9-SDR_All/ModeGran
dataset /All_Data/VIIRS-M9-SDR_All/ModeScan
dataset /All_Data/VIIRS-M9-SDR_All/NumberOfBadChecksums
dataset /All_Data/VIIRS-M9-SDR_All/NumberOfDiscardedPkts
dataset /All_Data/VIIRS-M9-SDR_All/NumberOfMissingPkts
dataset /All_Data/VIIRS-M9-SDR_All/NumberOfScans
dataset /All_Data/VIIRS-M9-SDR_All/PadByte1
dataset /All_Data/VIIRS-M9-SDR_All/QF1_VIIRSMBANDSDR
dataset /All_Data/VIIRS-M9-SDR_All/QF2_SCAN_SDR
dataset /All_Data/VIIRS-M9-SDR_All/QF3_SCAN_RDR
dataset /All_Data/VIIRS-M9-SDR_All/QF4_SCAN_SDR
dataset /All_Data/VIIRS-M9-SDR_All/QF5_GRAN_BADDETECTOR
dataset /All_Data/VIIRS-M9-SDR_All/Radiance
dataset /All_Data/VIIRS-M9-SDR_All/RadianceFactors
dataset /All_Data/VIIRS-M9-SDR_All/Reflectance
dataset /All_Data/VIIRS-M9-SDR_All/ReflectanceFactors
group /Data_Products
group /Data_Products/VIIRS-M9-SDR
dataset /Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Aggr
dataset /Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0
dataset /Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_1
dataset /Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_2
dataset /Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_3
}
}
\endcode
In the output above you can see that there are four granules (ending in
<code style="background-color:whitesmoke;">Gran_#</code>) in the
<code style="background-color:whitesmoke;">/Data_Products/VIIRS-M9-SDR/</code> group.
\subsection subsecViewToolsJPSSDeter_h5ls h5ls
With <code style="background-color:whitesmoke;">h5ls</code> you can see a list of the objects in the
file using the <code style="background-color:whitesmoke;">-lr</code>
options. The <code style="background-color:whitesmoke;">h5ls</code> utility also shows shape and size
(dataspace) information about datasets.
\code
h5ls -lr <file>
\endcode
For example:
\code
$ h5ls -lr SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5
/ Group
/All_Data Group
/All_Data/VIIRS-M9-SDR_All Group
/All_Data/VIIRS-M9-SDR_All/ModeGran Dataset {4/Inf}
/All_Data/VIIRS-M9-SDR_All/ModeScan Dataset {192/Inf}
/All_Data/VIIRS-M9-SDR_All/NumberOfBadChecksums Dataset {192/Inf}
/All_Data/VIIRS-M9-SDR_All/NumberOfDiscardedPkts Dataset {192/Inf}
/All_Data/VIIRS-M9-SDR_All/NumberOfMissingPkts Dataset {192/Inf}
/All_Data/VIIRS-M9-SDR_All/NumberOfScans Dataset {4/Inf}
/All_Data/VIIRS-M9-SDR_All/PadByte1 Dataset {12/Inf}
/All_Data/VIIRS-M9-SDR_All/QF1_VIIRSMBANDSDR Dataset {3072/Inf, 3200/Inf}
/All_Data/VIIRS-M9-SDR_All/QF2_SCAN_SDR Dataset {192/Inf}
/All_Data/VIIRS-M9-SDR_All/QF3_SCAN_RDR Dataset {192/Inf}
/All_Data/VIIRS-M9-SDR_All/QF4_SCAN_SDR Dataset {3072/Inf}
/All_Data/VIIRS-M9-SDR_All/QF5_GRAN_BADDETECTOR Dataset {64/Inf}
/All_Data/VIIRS-M9-SDR_All/Radiance Dataset {3072/Inf, 3200/Inf}
/All_Data/VIIRS-M9-SDR_All/RadianceFactors Dataset {8/Inf}
/All_Data/VIIRS-M9-SDR_All/Reflectance Dataset {3072/Inf, 3200/Inf}
/All_Data/VIIRS-M9-SDR_All/ReflectanceFactors Dataset {8/Inf}
/Data_Products Group
/Data_Products/VIIRS-M9-SDR Group
/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Aggr Dataset {16/Inf}
/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0 Dataset {16/Inf}
/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_1 Dataset {16/Inf}
/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_2 Dataset {16/Inf}
/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_3 Dataset {16/Inf}
\endcode
Note that the <code style="background-color:whitesmoke;">Inf</code> indicates that those datasets are appendable or unlimited in size.
\subsection subsecViewToolsJPSSDeter_HDFView HDFView
If you open the file in HDFView, it will display the file and the root level groups within
it in the TreeView on the left. An HDF5 file is a folder with a "5" in the middle, followed
by the file name. There are two folders (groups) within the JPSS file
(<code style="background-color:whitesmoke;">All_Data/</code> and <code style="background-color:whitesmoke;">Data_Products/</code>),
which you can select to see their contents:
<table>
<tr>
<td>
\image html hdfview-tree.png
</td>
</tr>
</table>
If you click twice with the left-mouse button on a folder or group in the TreeView, the contents
of the folder will be listed. If you click twice on an object such as a dataset, a window with
the object's values will be displayed.
Underneath the <code style="background-color:whitesmoke;">VIIRS-M1-SDR</code> folder are what HDF5
calls datasets. The scarlet letter <code style="background-color:whitesmoke;">"A"</code> attached
to the group and datasets under <code style="background-color:whitesmoke;">Data_Products/</code>
indicates that there are attributes associated with them.
\section secViewToolsJPSSView Viewing the User Block
All JPSS files contain a user block in XML with information about the file. The user block is an
optional space allocated at the beginning of an HDF5 file that is not interpreted by the HDF5
library. Its size is a multiple of 512.
Since the user block in JPSS files is stored in ASCII and it is stored at the beginning of an
HDF5 file, you could use a text editor or viewer to examine it. However, there are HDF5 utilities
that can help with this:
<table>
<tr>
<th>Utility</th><th>Description</th>
</tr>
<tr>
<td>h5unjam
</td>
<td>Extracts a user block from an HDF5 file
</td>
</tr>
<tr>
<td>h5dump
</td>
<td>The -B (--superblock) option displays the size of the user block in an HDF5 file
</td>
</tr>
</table>
\subsection subsecViewToolsJPSSView_h5unjam h5unjam
The \ref secViewToolsEditAdd tutorial topic discusses the use of the
<code style="background-color:whitesmoke;">h5jam</code> and <code style="background-color:whitesmoke;">h5unjam</code>
utilities for adding or removing a user block from a file. An input HDF5 file
(<code style="background-color:whitesmoke;">-i</code>), output HDF5 file
(<code style="background-color:whitesmoke;">-o</code>), and user block text file
(<code style="background-color:whitesmoke;">-u</code>) can be specified with these tools. You can use the
<code style="background-color:whitesmoke;">h5unjam</code> tool to extract and view the user block in a JPSS file:
\code
h5unjam -i <Input HDF5 File> -o <Output HDF5 File> -u <User Block File>
\endcode
For example this command will extract the user block into the file UB.xml:
\code
$ h5unjam -i SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5
-o svm09-noUB.h5 -u UB.xml
\endcode
The input HDF5 file remains unchanged. The output HDF5 file will not contain the user block.
The <code style="background-color:whitesmoke;">UB.xml</code> file contains the user block
which can be viewed with a browser.
\subsection subsecViewToolsJPSSView_h5dump h5dump
The h5dump utility has the <code style="background-color:whitesmoke;">-B (--superblock)</code> option for displaying the superblock in an HDF5 file.
The superblock contains information about the file such as the file signature, file consistency flags,
the number of bytes to store addresses and size of an object, as well as the size of the user block:
\code
h5dump -B (--superblock)
\endcode
Below is an example (Unix):
\code
$ h5dump -B -H SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5 | more
HDF5 "SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5" {
SUPER_BLOCK {
SUPERBLOCK_VERSION 0
FREELIST_VERSION 0
SYMBOLTABLE_VERSION 0
OBJECTHEADER_VERSION 0
OFFSET_SIZE 8
LENGTH_SIZE 8
BTREE_RANK 16
BTREE_LEAF 4
ISTORE_K 32
USER_BLOCK {
USERBLOCK_SIZE 1024
}
}
\endcode
Once you have the size of the user block, you can extract it from the file using system commands.
For example, on Unix platforms you can use the head command-line tool:
\code
head -c <USERBLOCK_SIZE> <JPSS File> >& USERBLOCK.xml
\endcode
There are Unix tools for Windows that may work, such as <a href="http://gnuwin32.sourceforge.net/packages/coreutils.htm">CoreUtils for Windows</a>.
\section secViewToolsJPSSExam Examining a Granule
<ul>
<li>@ref subsecViewToolsJPSSExam_h5dump<br />
<ul>
<li>@ref subsubsecViewToolsJPSSExam_h5dumpRegRef</li>
<li>@ref subsubsecViewToolsJPSSExam_h5dumpQuality</li>
<li>@ref subsubsecViewToolsJPSSExam_h5dumpProps</li>
</ul></li>
<li>@ref subsecViewToolsJPSSExamr_HDFView</li>
</ul>
\subsection subsecViewToolsJPSSExam_h5dump h5dump
There are several options that you may first want to use when examining a granule with h5dump:
<table>
<tr>
<th>Option</th><th>Description</th>
</tr>
<tr>
<td>-H, --header
</td>
<td>Prints header (metadata) information only
</td>
</tr>
<tr>
<td>-d D, --dataset=D
</td>
<td>Specifies the granule dataset
</td>
</tr>
<tr>
<td>-A 0, --onlyattr=0
</td>
<td>Suppresses attributes
</td>
</tr>
<tr>
<td>-p, --properties
</td>
<td>Show properties of datasets
(See Properties)
</td>
</tr>
</table>
You would specify the dataset (<code style="background-color:whitesmoke;">-d D</code>) and the
<code style="background-color:whitesmoke;">-H</code> options to view the metadata associated with
a specific granule. There are many attributes associated with a granule and
<code style="background-color:whitesmoke;">-A 0</code> can be used to suppress those.
For example:
\code
h5dump -H -A 0 -d "/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0"
SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5
\endcode
This command displays:
\code
HDF5 "SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5" {
DATASET "/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0" {
DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
}
}
\endcode
To see the actual contents of the granule remove the <code style="background-color:whitesmoke;">-H</code> option:
\code
h5dump -A 0 -d "/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0"
SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5
\endcode
The above command displays:
\code
HDF5 "SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5" {
DATASET "/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0" {
DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
DATA {
DATASET /All_Data/VIIRS-M9-SDR_All/Radiance {(0,0)-(767,3199)},
DATASET /All_Data/VIIRS-M9-SDR_All/Reflectance {(0,0)-(767,3199)},
DATASET /All_Data/VIIRS-M9-SDR_All/ModeScan {(0)-(47)},
DATASET /All_Data/VIIRS-M9-SDR_All/ModeGran {(0)-(0)},
DATASET /All_Data/VIIRS-M9-SDR_All/PadByte1 {(0)-(2)},
DATASET /All_Data/VIIRS-M9-SDR_All/NumberOfScans {(0)-(0)},
DATASET /All_Data/VIIRS-M9-SDR_All/NumberOfMissingPkts {(0)-(47)},
DATASET /All_Data/VIIRS-M9-SDR_All/NumberOfBadChecksums {(0)-(47)},
DATASET /All_Data/VIIRS-M9-SDR_All/NumberOfDiscardedPkts {(0)-(47)},
DATASET /All_Data/VIIRS-M9-SDR_All/QF1_VIIRSMBANDSDR {(0,0)-(767,3199)},
DATASET /All_Data/VIIRS-M9-SDR_All/QF2_SCAN_SDR {(0)-(47)},
DATASET /All_Data/VIIRS-M9-SDR_All/QF3_SCAN_RDR {(0)-(47)},
DATASET /All_Data/VIIRS-M9-SDR_All/QF4_SCAN_SDR {(0)-(767)},
DATASET /All_Data/VIIRS-M9-SDR_All/QF5_GRAN_BADDETECTOR {(0)-(15)},
DATASET /All_Data/VIIRS-M9-SDR_All/RadianceFactors {(0)-(1)},
DATASET /All_Data/VIIRS-M9-SDR_All/ReflectanceFactors {(0)-(1)}
}
}
}
\endcode
As you can see in the output above, the datatype for this dataset is:
\code
DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
\endcode
This indicates that it is a dataset specifically for storing references to regions (or subsets)
in other datasets. The dataset contains 16 such references, and more can be added to it, as
indicated by the dataspace (in other words it is unlimited):
\code
DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
\endcode
\subsubsection subsubsecViewToolsJPSSExam_h5dumpRegRef Viewing a Region Reference
What if we wanted to look at the <code style="background-color:whitesmoke;">NumberOfScans</code> data for a specific granule in a file?
First, we may be interested in determining whether the scans were done at night or in the day. If a scan was at night, there will be no data.
The attribute <code style="background-color:whitesmoke;">N_Day_Night_Flag</code> is used to determine when the scan was done. If you don't know where this attribute is located, you can use the <code style="background-color:whitesmoke;">-N</code> option to search for it in the file. If you were to run this command on the <code style="background-color:whitesmoke;">SVM09</code> file used above, you would see that the <code style="background-color:whitesmoke;">N_Day_Night_Flag</code> attribute has a value of <code style="background-color:whitesmoke;">Night</code> for the four granules in the file. Indeed, if you actually examine the <code style="background-color:whitesmoke;">NumberOfScans</code> data, you will see that only fill values are written.
For that reason we will examine the <code style="background-color:whitesmoke;">NumberOfScans</code> data for the <code style="background-color:whitesmoke;">SVM01</code> file below, as it was obtained during the day:
\code
h5dump -N N_Day_Night_Flag SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
\endcode
It displays:
\code
HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
ATTRIBUTE "N_Day_Night_Flag" {
DATATYPE H5T_STRING {
STRSIZE 4;
STRPAD H5T_STR_NULLTERM;
CSET H5T_CSET_ASCII;
CTYPE H5T_C_S1;
}
DATASPACE SIMPLE { ( 1, 1 ) / ( 1, 1 ) }
DATA {
(0,0): "Day"
}
}
}
\endcode
There is just one granule in this <code style="background-color:whitesmoke;">SVM01</code> file, as shown below:
\code
$ h5dump -n SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
FILE_CONTENTS {
group /
group /All_Data
group /All_Data/VIIRS-M1-SDR_All
dataset /All_Data/VIIRS-M1-SDR_All/ModeGran
dataset /All_Data/VIIRS-M1-SDR_All/ModeScan
dataset /All_Data/VIIRS-M1-SDR_All/NumberOfBadChecksums
dataset /All_Data/VIIRS-M1-SDR_All/NumberOfDiscardedPkts
dataset /All_Data/VIIRS-M1-SDR_All/NumberOfMissingPkts
dataset /All_Data/VIIRS-M1-SDR_All/NumberOfScans
dataset /All_Data/VIIRS-M1-SDR_All/PadByte1
dataset /All_Data/VIIRS-M1-SDR_All/QF1_VIIRSMBANDSDR
dataset /All_Data/VIIRS-M1-SDR_All/QF2_SCAN_SDR
dataset /All_Data/VIIRS-M1-SDR_All/QF3_SCAN_RDR
dataset /All_Data/VIIRS-M1-SDR_All/QF4_SCAN_SDR
dataset /All_Data/VIIRS-M1-SDR_All/QF5_GRAN_BADDETECTOR
dataset /All_Data/VIIRS-M1-SDR_All/Radiance
dataset /All_Data/VIIRS-M1-SDR_All/RadianceFactors
dataset /All_Data/VIIRS-M1-SDR_All/Reflectance
dataset /All_Data/VIIRS-M1-SDR_All/ReflectanceFactors
group /Data_Products
group /Data_Products/VIIRS-M1-SDR
dataset /Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Aggr
dataset /Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0
}
}
\endcode
Now examine the references in the <code style="background-color:whitesmoke;">VIIRS-M1-SDR_Gran_0</code> granule
\code
$ h5dump -A 0 -d "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0"
SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
DATASET "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0" {
DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
DATA {
DATASET /All_Data/VIIRS-M1-SDR_All/Radiance {(0,0)-(767,3199)},
DATASET /All_Data/VIIRS-M1-SDR_All/Reflectance {(0,0)-(767,3199)},
DATASET /All_Data/VIIRS-M1-SDR_All/ModeScan {(0)-(47)},
DATASET /All_Data/VIIRS-M1-SDR_All/ModeGran {(0)-(0)},
DATASET /All_Data/VIIRS-M1-SDR_All/PadByte1 {(0)-(2)},
DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfScans {(0)-(0)},
DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfMissingPkts {(0)-(47)},
DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfBadChecksums {(0)-(47)},
DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfDiscardedPkts {(0)-(47)},
DATASET /All_Data/VIIRS-M1-SDR_All/QF1_VIIRSMBANDSDR {(0,0)-(767,3199)},
DATASET /All_Data/VIIRS-M1-SDR_All/QF2_SCAN_SDR {(0)-(47)},
DATASET /All_Data/VIIRS-M1-SDR_All/QF3_SCAN_RDR {(0)-(47)},
DATASET /All_Data/VIIRS-M1-SDR_All/QF4_SCAN_SDR {(0)-(767)},
DATASET /All_Data/VIIRS-M1-SDR_All/QF5_GRAN_BADDETECTOR {(0)-(15)},
DATASET /All_Data/VIIRS-M1-SDR_All/RadianceFactors {(0)-(1)},
DATASET /All_Data/VIIRS-M1-SDR_All/ReflectanceFactors {(0)-(1)}
}
}
}
\endcode
In the output above, you can see that the <code style="background-color:whitesmoke;">NumberOfScans</code>
reference is the sixth reference in the granule counting from the top.
The list of references shown above is a 0-based index to the dataset. Therefore, to specify
<code style="background-color:whitesmoke;">NumberOfScans</code>, enter a start offset of
<code style="background-color:whitesmoke;">5</code> for the <code style="background-color:whitesmoke;">-s</code>
option (the sixth reference minus 1). To see the region reference data, use the <code style="background-color:whitesmoke;">-R</code> option.
This command will display the data in the <code style="background-color:whitesmoke;">NumberOfScans</code> region reference:
\code
h5dump -A 0 -d "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0" -s 5 -R
SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
\endcode
It displays the number of scans (48):
\code
HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
DATASET "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0" {
DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
SUBSET {
START ( 5 );
STRIDE ( 1 );
COUNT ( 1 );
BLOCK ( 1 );
DATA {
(5): DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfScans {
(5): REGION_TYPE BLOCK (0)-(0)
(5): DATATYPE H5T_STD_I32BE
(5): DATASPACE SIMPLE { ( 1 ) / ( H5S_UNLIMITED ) }
(5): DATA {
(0): 48
(5): }
(5): }
}
}
}
}
\endcode
The <code style="background-color:whitesmoke;">-s</code> option may be familiar as one of the options
that was described in the \ref secViewToolsViewSub tutorial topic. The other subsetting options are not included,
indicating that the default values are used.
If you leave off the <code style="background-color:whitesmoke;">-R</code> option, you will see the subset selection, but not the data:
\code
$ h5dump -A 0 -d "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0" -s 5
SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
DATASET "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0" {
DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
SUBSET {
START ( 5 );
STRIDE ( 1 );
COUNT ( 1 );
BLOCK ( 1 );
DATA {
DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfScans {(0)-(0)}
}
}
}
}
\endcode
\subsubsection subsubsecViewToolsJPSSExam_h5dumpQuality Viewing a Quality Flag
The quality flags in an NPP file can be viewed with h5dump using the <code style="background-color:whitesmoke;">-M</code>
option. Quality flags are packed into each integer value in a quality flag dataset. Quality flag datasets in NPP
files begin with the letters <code style="background-color:whitesmoke;">QF</code>.
In the following NPP file, there are five Quality Flag datasets
(<code style="background-color:whitesmoke;">/All_Data/VIIRS-M1-SDR_All/QF*</code>):
\code
$ h5dump -n SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
FILE_CONTENTS {
group /
group /All_Data
group /All_Data/VIIRS-M1-SDR_All
dataset /All_Data/VIIRS-M1-SDR_All/ModeGran
dataset /All_Data/VIIRS-M1-SDR_All/ModeScan
dataset /All_Data/VIIRS-M1-SDR_All/NumberOfBadChecksums
dataset /All_Data/VIIRS-M1-SDR_All/NumberOfDiscardedPkts
dataset /All_Data/VIIRS-M1-SDR_All/NumberOfMissingPkts
dataset /All_Data/VIIRS-M1-SDR_All/NumberOfScans
dataset /All_Data/VIIRS-M1-SDR_All/PadByte1
dataset /All_Data/VIIRS-M1-SDR_All/QF1_VIIRSMBANDSDR
dataset /All_Data/VIIRS-M1-SDR_All/QF2_SCAN_SDR
dataset /All_Data/VIIRS-M1-SDR_All/QF3_SCAN_RDR
dataset /All_Data/VIIRS-M1-SDR_All/QF4_SCAN_SDR
dataset /All_Data/VIIRS-M1-SDR_All/QF5_GRAN_BADDETECTOR
dataset /All_Data/VIIRS-M1-SDR_All/Radiance
dataset /All_Data/VIIRS-M1-SDR_All/RadianceFactors
dataset /All_Data/VIIRS-M1-SDR_All/Reflectance
dataset /All_Data/VIIRS-M1-SDR_All/ReflectanceFactors
group /Data_Products
group /Data_Products/VIIRS-M1-SDR
dataset /Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Aggr
dataset /Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0
}
}
\endcode
The flags in this particular dataset happen to be stored in every two bits of each quality flag dataset
element, and the values range from 0 to 2. In other words, to see the quality flag values for this
dataset, these bits would be examined: 0 and 1, 2 and 3, 4 and 5, or 6 and 7 (This information was
obtained from the Product Profile XML File.)
For example, bits 0 and 1 in the <code style="background-color:whitesmoke;">QF1_VIIRSMBANDSDR</code> dataset specify the flag that
"Indicates calibration quality due to bad space view offsets, OBC view offsets, etc or use of a
previous calibration view". It has 3 values: Good (0), Poor (1), or No Calibration (2).
The <code style="background-color:whitesmoke;">-M</code> option is used to specify the quality
flag bit offset (<code style="background-color:whitesmoke;">O</code>) and length (<code style="background-color:whitesmoke;">L</code>):
\code
h5dump -d DATASET -M O,L FILE
\endcode
To view the first quality flag (0-1) in a 5 x 6 subset of the <code style="background-color:whitesmoke;">QF1_VIIRSMBANDSDR</code> dataset, specify:
\code
h5dump -d "/All_Data/VIIRS-M1-SDR_All/QF1_VIIRSMBANDSDR[0,0;;5,6;]"
-M 0,2 SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
\endcode
This outputs:
\code
HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
DATASET "/All_Data/VIIRS-M1-SDR_All/QF1_VIIRSMBANDSDR" {
DATATYPE H5T_STD_U8BE
DATASPACE SIMPLE { ( 768, 3200 ) / ( H5S_UNLIMITED, H5S_UNLIMITED ) }
PACKED_BITS OFFSET=0 LENGTH=2
SUBSET {
START ( 0, 0 );
STRIDE ( 1, 1 );
COUNT ( 5, 6 );
BLOCK ( 1, 1 );
DATA {
(0,0): 2, 2, 2, 2, 2, 2,
(1,0): 2, 2, 2, 2, 2, 2,
(2,0): 0, 0, 0, 0, 0, 0,
(3,0): 0, 0, 0, 0, 0, 0,
(4,0): 0, 0, 0, 0, 0, 0
}
}
}
}
\endcode
To view more than one quality flag at a time simply add the bit offset and length values to
<code style="background-color:whitesmoke;">-M</code>, separated by commas. For example, this
<code style="background-color:whitesmoke;">-M</code> option specifies bits 0-1 and 2-3:
\code
h5dump -d DATASET -M 0,2,2,2 FILE
\endcode
\subsubsection subsubsecViewToolsJPSSExam_h5dumpProps Properties
To view properties of a specific dataset with <code style="background-color:whitesmoke;">h5dump</code>
use the <code style="background-color:whitesmoke;">-p</code> option along with the
<code style="background-color:whitesmoke;">-d</code> option. Depending on the number of attributes
and the amount of data, the <code style="background-color:whitesmoke;">-A 0</code> and
<code style="background-color:whitesmoke;">-H</code> options can also be specified to suppress
printing of attributes and data values:
\code
h5dump -p -H -A 0 -d DATASET
\endcode
The <code style="background-color:whitesmoke;">-p</code> option shows any compression filters
associated with a dataset, as well as layout and fill value information. This option can be helpful
in diagnosing performance and other issues.
As an example, examine the <code style="background-color:whitesmoke;">/All_Data/VIIRS-M1-SDR_All/Radiance</code>
dataset in the <code style="background-color:whitesmoke;">SVM01</code> file:
\code
$ h5dump -p -H -A 0 -d "/All_Data/VIIRS-M1-SDR_All/Radiance"
SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
DATASET "/All_Data/VIIRS-M1-SDR_All/Radiance" {
DATATYPE H5T_STD_U16BE
DATASPACE SIMPLE { ( 768, 3200 ) / ( H5S_UNLIMITED, H5S_UNLIMITED ) }
STORAGE_LAYOUT {
CHUNKED ( 768, 3200 )
SIZE 4915200
}
FILTERS {
NONE
}
FILLVALUE {
FILL_TIME H5D_FILL_TIME_IFSET
VALUE 65529
}
ALLOCATION_TIME {
H5D_ALLOC_TIME_INCR
}
}
}
\endcode
We can see that the chunk size for this dataset is 768 x 3200, and the storage size is 4915200.
What if the chunk size were smaller?
The dataset was modified to have a chunk size of 1 x 10, using the
<code style="background-color:whitesmoke;">h5repack</code> utility, as shown below.
\code
$ h5repack -l /All_Data/VIIRS-M1-SDR_All/Radiance:CHUNK=1x10
SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5 SVM01repack.h5
$ h5dump -p -H -A 0 -d "/All_Data/VIIRS-M1-SDR_All/Radiance" SVM01repack.h5
HDF5 "SVM01repack.h5" {
DATASET "/All_Data/VIIRS-M1-SDR_All/Radiance" {
DATATYPE H5T_STD_U16BE
DATASPACE SIMPLE { ( 768, 3200 ) / ( H5S_UNLIMITED, H5S_UNLIMITED ) }
STORAGE_LAYOUT {
CHUNKED ( 1, 10 )
SIZE 4915200
}
FILTERS {
NONE
}
FILLVALUE {
FILL_TIME H5D_FILL_TIME_IFSET
VALUE 65529
}
ALLOCATION_TIME {
H5D_ALLOC_TIME_INCR
}
}
}
\endcode
In this case, the storage size of the dataset is the same, but the size of the file almost doubled:
\code
$ ls -1sh
total 35M
12M SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
23M SVM01repack.h5
\endcode
In general, the smaller the chunk size, the more chunks that HDF5 has to keep track of, which increases
the size of the file and can affect performance.
\subsection subsecViewToolsJPSSExamr_HDFView HDFView
As mentioned previously, the structure of an HDF5 file is displayed in the TreeView on the left side of the HDFView screen,
and you can click on objects and have metadata information displayed on the right side.
To discover more about the granule <code style="background-color:whitesmoke;">/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0</code>
in the <code style="background-color:whitesmoke;">SVM01</code> file shown below in the TreeView, position
the mouse over the granule and click to select. Properties for the object are displayed on the right side of the HDFView screen.
You can see Datatype and Dataspace information on the <code style="background-color:whitesmoke;">General Object Info</code>
tab; any attributes associated with the granule will be on the
<code style="background-color:whitesmoke;">Object Attribute Info</code> tab. In the
<code style="background-color:whitesmoke;">General Object Info</code>, you can see that the dataset is a
Region Reference dataset, and that there are sixteen Region References in this dataset:
<table>
<tr>
<td>
\image html hdfview-prop.png
</td>
</tr>
</table>
To examine the data in the granule, click twice on it with the left mouse button in the TreeView,
and it will open in a new window:
<table>
<tr>
<td>
\image html hdfview-regref.png
</td>
</tr>
</table>
If you click twice with the left mouse button on the fifth Region Reference
<code style="background-color:whitesmoke;">/All_Data/VIIRS-M1-SDR_All/NumberOfScans</code> a window
will pop up with the value(s) of the reference:
<table>
<tr>
<td>
\image html hdfview-regref2.png
</td>
</tr>
</table>
You can also set a user option to automatically show the value(s) in a Region Reference. Under the
<code style="background-color:whitesmoke;">Tools</code> pull-down menu, select
<code style="background-color:whitesmoke;">User Options</code> and then select
<code style="background-color:whitesmoke;">HDF Settings</code> and then select
<code style="background-color:whitesmoke;">Show RegRef Values</code> in the
<code style="background-color:whitesmoke;">Data</code> section (see the middle of the image below):
<table>
<tr>
<td>
\image html hdfview-regrefval.png
</td>
</tr>
</table>
Then you will automatically see the values of the Region Reference when you open it and select an entry:
<table>
<tr>
<td>
\image html hdfview-regref1.png
</td>
</tr>
</table>
You can view and set quality flags by clicking the right mouse button over a quality flags dataset under
<code style="background-color:whitesmoke;">All_Data</code> and selecting
<code style="background-color:whitesmoke;">Open As</code> from the pop-up menu. In the middle of
the window that pops up, you will see where you can specify <code style="background-color:whitesmoke;">Bitmask</code> options.
<table>
<tr>
<td>
\image html hdfview-qf.png
</td>
</tr>
</table>
<hr>
Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
*/

View File

@@ -8,16 +8,14 @@
* These functions were created as part of a project supporting
* NPP/NPOESS Data Production and Exploitation (
* <a href="https://support.hdfgroup.org/projects/jpss/documentation">
* project </a>,
* <a href="https://gamma.hdfgroup.org/ftp/pub/outgoing/NPOESS/source">
* project</a>, <a href="https://gamma.hdfgroup.org/ftp/pub/outgoing/NPOESS/source">
* software </a>).
* While they were written to facilitate access to NPP, NPOESS, and JPSS
* data in the HDF5 format, these functions may be useful to anyone working
* with region references, hyperslab selections, or bit-fields.
*
* Note that these functions are not part of the standard HDF5 distribution;
* the
* <a href="https://gamma.hdfgroup.org/ftp/pub/outgoing/NPOESS/source">
* the <a href="https://gamma.hdfgroup.org/ftp/pub/outgoing/NPOESS/source">
* software </a>
* must be separately downloaded and installed.
*
@@ -28,15 +26,12 @@
* https://support.hdfgroup.org/projects/jpss/documentation/HL/UG/NPOESS_HL-UG.pdf.
*
* - \ref H5LRcopy_reference
* \n Copies data from the specified dataset to a new location and
* creates a reference to it.
* \n Copies data from the specified dataset to a new location and creates a reference to it.
* - \ref H5LRcopy_region
* \n Copies data from a referenced region to a region in a
* destination dataset.
* \n Copies data from a referenced region to a region in a destination dataset.
* - \ref H5LRcreate_ref_to_all
* \n Creates a dataset with the region references to the data in all
* datasets located under a specified group in a file or creates a
* dataset with object references to all objects (groups or datasets)
* \n Creates a dataset with the region references to the data in all datasets located under a
* specified group in a file or creates a dataset with object references to all objects (groups or datasets)
* located under a specified group in a file.
* - \ref H5LRcreate_region_references
* \n Creates an array of region references using an array of paths to
@@ -44,17 +39,13 @@
* - \ref H5LRget_region_info
* \n Retrieves information about the data a region reference points to.
* - \ref H5LRmake_dataset
* \n Creates and writes a dataset containing a list of
* region references.
* \n Creates and writes a dataset containing a list of region references.
* - \ref H5LRread_region
* \n Retrieves raw data pointed to by a region reference to
* an application buffer.
* \n Retrieves raw data pointed to by a region reference to an application buffer.
* - \ref H5LTcopy_region
* \n Copies data from a specified region in a source dataset
* to a specified region in a destination dataset.
* \n Copies data from a specified region in a source dataset to a specified region in a destination dataset.
* - \ref H5LTread_bitfield_value
* \n Retrieves the values of quality flags for each element
* to the application provided buffer.
* \n Retrieves the values of quality flags for each element to the application provided buffer.
* - \ref H5LTread_region
* \n Reads selected data to an application buffer.
*
@@ -125,14 +116,11 @@ H5_HLRDLL herr_t H5LRmake_dataset(hid_t loc_id,
* \param[in] obj_id File identifier for the HDF5 file containing
* the referenced regions or an object identifier
* for any object in that file
* \param[in] num_elem Number of elements in the \p path and
* \p buf arrays
* \param[in] num_elem Number of elements in the \p path and \p buf arrays
* \param[in] path Array of pointers to strings, which contain
* the paths to the target datasets for the
* region references
* the paths to the target datasets for the region references
* \param[in] block_coord Array of hyperslab coordinate
* \param[out] buf Buffer for returning an array of region
* references
* \param[out] buf Buffer for returning an array of region references
*
* \return \herr_t
*
@@ -179,18 +167,14 @@ H5_HLRDLL herr_t H5LRcreate_region_references(hid_t obj_id,
* --------------------------------------------------------------------------
* \ingroup H5LR
*
* \brief Copies data from the specified dataset to a new location and
* creates a reference to it.
* \brief Copies data from the specified dataset to a new location and creates a reference to it.
*
* \param[in] obj_id Identifier of any object in a file an
* HDF5 reference belongs to
* \param[in] obj_id Identifier of any object in a file an HDF5 reference belongs to
* \param[in] ref Reference to the datasets region
* \param[in] file Name of the destination file
* \param[in] path Full path to the destination dataset
* \param[in] block_coord Hyperslab coordinates in the destination
* dataset
* \param[out] ref_new Region reference to the new location of
* data
* \param[in] block_coord Hyperslab coordinates in the destination dataset
* \param[out] ref_new Region reference to the new location of data
*
* \return \herr_t
*
@@ -222,16 +206,14 @@ H5_HLRDLL herr_t H5LRcopy_reference(hid_t obj_id, hdset_reg_ref_t *ref, const ch
* --------------------------------------------------------------------------
* \ingroup H5LR
*
* \brief Copies data from a referenced region to a region in a
* destination dataset.
* \brief Copies data from a referenced region to a region in a destination dataset.
*
* \param[in] obj_id Identifier of any object in a file
* dataset region reference belongs to
* \param[in] ref Dataset region reference
* \param[in] file Name of the destination file
* \param[in] path Full path to the destination dataset
* \param[in] block_coord Hyperslab coordinates in the destination
* dataset
* \param[in] block_coord Hyperslab coordinates in the destination dataset
*
* \return \herr_t
*
@@ -273,16 +255,11 @@ H5_HLRDLL herr_t H5LRcopy_region(hid_t obj_id,
* (groups or datasets) located under a specified group in a file.
*
* \fg_loc_id
* \param[in] group_path Absolute or relative path to the group
* at which traversal starts
* \param[in] ds_path Absolute or relative path to the dataset
* with region references to be created
* \param[in] index_type Index_type;
* see valid values below in description
* \param[in] order Order in which index is traversed;
* see valid values below in description
* \param[in] ref_type Reference type;
* see valid values below in description
* \param[in] group_path Absolute or relative path to the group at which traversal starts
* \param[in] ds_path Absolute or relative path to the dataset with region references to be created
* \param[in] index_type Index_type; see valid values below in description
* \param[in] order Order in which index is traversed; see valid values below in description
* \param[in] ref_type Reference type; see valid values below in description
*
* \return \herr_t
*
@@ -320,7 +297,7 @@ H5_HLRDLL herr_t H5LRcopy_region(hid_t obj_id,
* - #H5_ITER_NATIVE Fastest available order
*
* For more detailed information on these two parameters,
* see H5Lvisit().
* @see H5Lvisit().
*
* \p ref_type specifies the type of the reference to be used.
* Valid values include the following:
@@ -352,14 +329,11 @@ H5_HLRDLL herr_t H5LRcreate_ref_to_all(hid_t loc_id, const char *group_path,
* \param[in] obj_id File identifier for the HDF5 file containing
* the dataset with the referenced region or an
* object identifier for any object in that file
* \param[in] ref Region reference specifying data to be read
* in
* \param[in] ref Region reference specifying data to be read in
* \param[in] mem_type Memory datatype of data read from referenced
* region into the application buffer
* \param[in,out] numelem Number of elements to be read into buffer
* \p buf
* \param[out] buf Buffer in which data is returned to the
* application
* \param[in,out] numelem Number of elements to be read into buffer \p buf
* \param[out] buf Buffer in which data is returned to the application
*
* \return \herr_t
*
@@ -400,26 +374,19 @@ H5_HLRDLL herr_t H5LRread_region(hid_t obj_id,
* --------------------------------------------------------------------------
* \ingroup H5LR
*
* \brief Retrieves information about the data a region reference
* points to.
* \brief Retrieves information about the data a region reference points to.
*
* \param[in] obj_id Identifier of any object in an HDF5 file
* the region reference belongs to.
* \param[in] obj_id Identifier of any object in an HDF5 file the region reference belongs to.
* \param[in] ref Region reference to query
* \param[in,out] len Size of the buffer to store \p path in.
* NOTE: if \p *path is not NULL then \p *len
* must be the appropriate length
* NOTE: if \p *path is not NULL then \p *len must be the appropriate length
* \param[out] path Full path that a region reference points to
* \param[out] rank The number of dimensions of the dataset
* dimensions of the dataset pointed by
* region reference.
* \param[out] dtype Datatype of the dataset pointed by the
* region reference.
* dimensions of the dataset pointed by region reference.
* \param[out] dtype Datatype of the dataset pointed by the region reference.
* \param[out] sel_type Type of the selection (point or hyperslab)
* \param[in,out] numelem Number of coordinate blocks or
* selected elements.
* \param[out] buf Buffer containing description of the region
* pointed by region reference
* \param[in,out] numelem Number of coordinate blocks or selected elements.
* \param[out] buf Buffer containing description of the region pointed by region reference
*
* \return \herr_t
*
@@ -508,12 +475,10 @@ H5_HLRDLL herr_t H5LRget_region_info(hid_t obj_id,
*
* \param[in] file_src Name of the source file
* \param[in] path_src Full path to the source dataset
* \param[in] block_coord_src Hyperslab coordinates in the
* source dataset
* \param[in] block_coord_src Hyperslab coordinates in the source dataset
* \param[in] file_dest Name of the destination file
* \param[in] path_dest Full path to the destination dataset
* \param[in] block_coord_dset Hyperslab coordinates in the
* destination dataset
* \param[in] block_coord_dset Hyperslab coordinates in the destination dataset
*
* \return \herr_t
*
@@ -562,10 +527,8 @@ H5_HLRDLL herr_t H5LTcopy_region(const char *file_src,
* \param[in] file Name of file
* \param[in] path Full path to a dataset
* \param[in] block_coord Hyperslab coordinates
* \param[in] mem_type Memory datatype, describing the buffer
* the referenced data will be read into
* \param[out] buf Buffer containing data from the
* referenced region
* \param[in] mem_type Memory datatype, describing the buffer the referenced data will be read into
* \param[out] buf Buffer containing data from the referenced region
*
* \return \herr_t
*
@@ -607,11 +570,9 @@ H5_HLRDLL herr_t H5LTread_region(const char *file,
 * \param[in]  num_values   Number of the values to be extracted
 * \param[in]  offset       Array of starting bits to be extracted from
 *                          the element; valid values: 0 (zero) through 7
* \param[in] lengths Array of the number of bits to be extracted
* for each value
* \param[in] lengths Array of the number of bits to be extracted for each value
* \param[in] space Dataspace identifier, describing the elements
* to be read from the dataset with bit-field
* values
* to be read from the dataset with bit-field values
* \param[out] buf Buffer to read the values in
*
* \return \herr_t

View File

@@ -1,29 +0,0 @@
/** \page high_level High-level library
* The high-level HDF5 library includes several sets of convenience and standard-use APIs to
* facilitate common HDF5 operations.
*
* <ul>
* <li>\ref H5LT "Lite (H5LT, H5LD)"
* \n
* Functions to simplify creating and manipulating datasets, attributes and other features
* <li>\ref H5IM "Image (H5IM)"
* \n
* Creating and manipulating HDF5 datasets intended to be interpreted as images
* <li>\ref H5TB "Table (H5TB)"
* \n
* Creating and manipulating HDF5 datasets intended to be interpreted as tables
* <li>\ref H5PT "Packet Table (H5PT)"
* \n
* Creating and manipulating HDF5 datasets to support append- and read-only operations on table data
* <li>\ref H5DS "Dimension Scale (H5DS)"
* \n
* Creating and manipulating HDF5 datasets that are associated with the dimension of another HDF5 dataset
* <li>\ref H5DO "Optimizations (H5DO)"
* \n
* Bypassing default HDF5 behavior in order to optimize for specific use cases
* <li>\ref H5LR "Extensions (H5LR, H5LT)"
* \n
* Working with region references, hyperslab selections, and bit-fields
* </ul>
*
*/

View File

@@ -139,7 +139,7 @@
<p>This document describes the lower-level data objects;
the higher-level objects and their properties are described
in the <a href="H5.user.html"><cite>HDF5 User's Guide</cite></a>.
in the <a href="H5.user.html"><cite>HDF5 User Guide</cite></a>.
<!--

View File

@@ -172,7 +172,7 @@ TABLE.list TD { border:none; }
<p>This document describes the lower-level data objects;
the higher-level objects and their properties are described
in the <a href="H5.user.html"><cite>HDF5 User's Guide</cite></a>.
in the <a href="H5.user.html"><cite>HDF5 User Guide</cite></a>.
<P>Three levels of information comprise the file format.
Level 0 contains basic information for identifying and

File diff suppressed because it is too large Load Diff

View File

@@ -418,7 +418,7 @@
<p>This document describes the lower-level data objects;
the higher-level objects and their properties are described
in the <a href="UG/HDF5_Users_Guide-Responsive HTML5/index.html"><cite>HDF5 User&rsquo;s Guide</cite></a>.</p>
in the <a href="UG/HDF5_User_Guide-Responsive HTML5/index.html"><cite>HDF5 User Guide</cite></a>.</p>
<p>Three levels of information comprise the file format.
Level 0 contains basic information for identifying and

View File

@@ -20,9 +20,9 @@ The following code is placed at the beginning of H5private.h:
</blockquote>
<p>
<code>H5_HAVE_THREADSAFE</code> is defined when the HDF-5 library is
<code>H5_HAVE_THREADSAFE</code> is defined when the HDF5 library is
compiled with the --enable-threadsafe configuration option. In general,
code for the non-threadsafe version of HDF-5 library are placed within
code for the non-threadsafe version of HDF5 library are placed within
the <code>#else</code> part of the conditional compilation. The exception
to this rule are the changes to the <code>FUNC_ENTER</code> (in
H5private.h), <code>HRETURN</code> and <code>HRETURN_ERROR</code> (in
@@ -438,7 +438,7 @@ described in Appendix D and may be found in <code>H5TS.c</code>.
<p>
Except where stated, all tests involve 16 simultaneous threads that make
use of HDF-5 API calls without any explicit synchronization typically
use of HDF5 API calls without any explicit synchronization typically
required in a non-threadsafe environment.
</p>
@@ -453,7 +453,7 @@ dataset's named value.
<p>
The main thread would join with all 16 threads and attempt to match the
resulting HDF-5 file with expected results - that each dataset contains
resulting HDF5 file with expected results - that each dataset contains
the correct value (0 for <i>zero</i>, 1 for <i>one</i> etc ...) and all
datasets were correctly created.
</p>
@@ -473,7 +473,7 @@ name.
<p>
The error stack implementation runs correctly if it reports 15 instances
of the dataset name conflict error and finally generates a correct HDF-5
of the dataset name conflict error and finally generates a correct HDF5
containing that single dataset. Each thread should report its own stack
of errors with a thread number associated with it.
</p>

View File

@@ -0,0 +1,69 @@
<b>Core Library</b>
- @ref H5A "Attributes (H5A)"
<br />
HDF5 attribute is a small metadata object describing the nature and/or intended usage of a primary data object.
- @ref H5D "Datasets (H5D)"
<br />
Manage HDF5 datasets, including the transfer of data between memory and disk and the description of dataset properties.
- @ref H5S "Dataspaces (H5S)"
<br />
HDF5 dataspaces describe the shape of datasets in memory or in HDF5 files.
- @ref H5T "Datatypes (H5T)"
<br />
HDF5 datatypes describe the element type of HDF5 datasets and attributes.
- @ref H5E "Error Handling (H5E)"
<br />
HDF5 library error reporting.
- @ref H5ES "Event Set (H5ES)"
<br />
HDF5 event set life cycle used with HDF5 VOL connectors that enable the asynchronous feature in HDF5.
- @ref H5F "Files (H5F)"
<br />
Manage HDF5 files.
- @ref H5Z "Filters (H5Z)"
<br />
Manage HDF5 user-defined filters
- @ref H5G "Groups (H5G)"
<br />
Manage HDF5 groups.
- @ref H5I "Identifiers (H5I)"
<br />
Manage identifiers defined by the HDF5 library.
- @ref H5 "Library General (H5)"
<br />
Manage the life cycle of HDF5 library instances.
- @ref H5L "Links (H5L)"
<br />
Manage HDF5 links and link types.
- @ref H5O "Objects (H5O)"
<br />
Manage HDF5 objects (groups, datasets, datatype objects).
- @ref H5P "Property Lists (H5P)"
<br />
HDF5 property lists are the main vehicle to configure the behavior of HDF5 API functions.
- @ref H5PL "Dynamically-loaded Plugins (H5PL)"
<br />
Manage the loading behavior of HDF5 plugins.
- @ref H5R "References (H5R)"
<br />
Manage HDF5 references (HDF5 objects, attributes, and selections on datasets a.k.a. dataset regions).
- @ref H5VL "VOL Connector (H5VL)"
<br />
Manage HDF5 VOL connector plugins.

View File

@@ -0,0 +1,73 @@
<b>Fortran Library</b>
- @ref FH5A "Attributes (H5A)"
<br />
HDF5 attribute is a small metadata object describing the nature and/or intended usage of a primary data object.
- @ref FH5D "Datasets (H5D)"
<br />
Manage HDF5 datasets, including the transfer of data between memory and disk and the description of dataset properties.
- @ref FH5S "Dataspaces (H5S)"
<br />
HDF5 dataspaces describe the shape of datasets in memory or in HDF5 files.
- @ref FH5T "Datatypes (H5T)"
<br />
HDF5 datatypes describe the element type of HDF5 datasets and attributes.
- @ref FH5E "Error Handling (H5E)"
<br />
HDF5 library error reporting.
- @ref FH5F "Files (H5F)"
<br />
Manage HDF5 files.
- @ref FH5Z "Filters (H5Z)"
<br />
Manage HDF5 user-defined filters
- @ref FH5G "Groups (H5G)"
<br />
Manage HDF5 groups.
- @ref FH5I "Identifiers (H5I)"
<br />
Manage identifiers defined by the HDF5 library.
- @ref FH5 "Library General (H5)"
<br />
Manage the life cycle of HDF5 library instances.
- @ref FH5L "Links (H5L)"
<br />
Manage HDF5 links and link types.
- @ref FH5O "Objects (H5O)"
<br />
Manage HDF5 objects (groups, datasets, datatype objects).
- @ref FH5P "Property Lists (H5P)"
<br />
HDF5 property lists are the main vehicle to configure the behavior of HDF5 API functions.
- @ref FH5R "References (H5R)"
<br />
Manage HDF5 references (HDF5 objects, attributes, and selections on datasets a.k.a. dataset regions).
- @ref FH5LT "High Level Lite (H5LT)"
<br />
Functions to simplify creating and manipulating datasets, attributes and other features
- @ref FH5IM "High Level Image (H5IM)"
<br />
Creating and manipulating HDF5 datasets intended to be interpreted as images
- @ref FH5TB "High Level Table (H5TB)"
<br />
Creating and manipulating HDF5 datasets intended to be interpreted as tables
- @ref FH5DS "High Level Dimension Scale (H5DS)"
<br />
Creating and manipulating HDF5 datasets that are associated with the dimension of another HDF5 dataset

View File

@@ -0,0 +1,30 @@
<b>High-level library</b>
<br />
The high-level HDF5 library includes several sets of convenience and standard-use APIs to
facilitate common HDF5 operations.
- @ref H5LT
<br />
Functions to simplify creating and manipulating datasets, attributes and other features
- @ref H5IM
<br />
Creating and manipulating HDF5 datasets intended to be interpreted as images
- @ref H5TB
<br />
Creating and manipulating HDF5 datasets intended to be interpreted as tables
- @ref H5PT
<br />
Creating and manipulating HDF5 datasets to support append- and read-only operations on table data
- @ref H5DS
<br />
Creating and manipulating HDF5 datasets that are associated with the dimension of another HDF5 dataset
- @ref H5DO
<br />
Bypassing default HDF5 behavior in order to optimize for specific use cases
- @ref H5LR "Extensions (H5LR, H5LT)"

View File

@@ -0,0 +1,84 @@
<b>Java Library</b>
@ref HDF5LIB
- @ref JH5
<br />
This package is the Java interface for the HDF5 library.
- @ref JH5A
<br />
This package is the Java interface for the HDF5 library attribute APIs.
- @ref JH5D
<br />
This package is the Java interface for the HDF5 library dataset APIs.
- @ref JH5S
<br />
This package is the Java interface for the HDF5 library dataspace APIs.
- @ref JH5T
<br />
This package is the Java interface for the HDF5 library datatype APIs.
- @ref JH5E
<br />
This package is the Java interface for the HDF5 library error APIs.
- @ref JH5F
<br />
This package is the Java interface for the HDF5 library file APIs.
- @ref JH5Z
<br />
This package is the Java interface for the HDF5 library filter APIs.
- @ref JH5G
<br />
This package is the Java interface for the HDF5 library group APIs.
- @ref JH5I
<br />
This package is the Java interface for the HDF5 library identifier APIs.
- @ref JH5L
<br />
This package is the Java interface for the HDF5 library links APIs.
- @ref JH5O
<br />
This package is the Java interface for the HDF5 library object APIs.
- @ref JH5P
<br />
This package is the Java interface for the HDF5 library property list APIs.
- @ref JH5PL
<br />
This package is the Java interface for the HDF5 library plugin APIs.
- @ref JH5R
<br />
This package is the Java interface for the HDF5 library reference APIs.
- @ref JH5VL
<br />
This package is the Java interface for the HDF5 library VOL connector APIs.
- @ref HDF5CONST
<br />
This class contains C constants and enumerated types of HDF5 library.
- @ref HDFNATIVE
<br />
This class encapsulates native methods to deal with arrays of numbers,
converting from numbers to bytes and bytes to numbers.
- @ref HDFARRAY
<br />
This is a class for handling multidimensional arrays for HDF.
- @ref ERRORS
<br />
The class HDF5Exception returns errors from the Java HDF5 Interface.

View File

@@ -3,8 +3,9 @@
<!-- Navigation index tabs for HTML output -->
<navindex>
<tab type="user" url="index.html" title="Overview" />
<tab type="user" url="https://portal.hdfgroup.org/display/HDF5/Learning+HDF5" title="Getting started" />
<tab type="user" url="@ref GettingStarted" title="Getting started" />
<tab type="user" url="@ref Cookbook" title="Cookbook" />
<tab type="user" url="@ref UG" title="User Guide" />
<tab type="user" url="https://portal.hdfgroup.org/display/HDF5/HDF5+User+Guides" title="User Guides" />
<tab type="user" url="https://portal.hdfgroup.org/display/HDF5/HDF5+Application+Developer%27s+Guide" title="Application Developer's Guide" />
<tab type="user" url="@ref GLS" title="Glossary" />

BIN
doxygen/img/DataGroup.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 41 KiB

BIN
doxygen/img/Dmodel_fig1.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.7 KiB

BIN
doxygen/img/Dmodel_fig2.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

BIN
doxygen/img/Dmodel_fig5.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.3 KiB

BIN
doxygen/img/Dmodel_fig6.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

BIN
doxygen/img/Dmodel_fig8.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

BIN
doxygen/img/Dmodel_fig9.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

BIN
doxygen/img/Dsets_fig1.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

BIN
doxygen/img/Dsets_fig10.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.4 KiB

BIN
doxygen/img/Dsets_fig11.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

BIN
doxygen/img/Dsets_fig12.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 36 KiB

BIN
doxygen/img/Dsets_fig2.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

BIN
doxygen/img/Dsets_fig3.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

BIN
doxygen/img/Dsets_fig4.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

BIN
doxygen/img/Dsets_fig5.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

BIN
doxygen/img/Dsets_fig6.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

BIN
doxygen/img/Dsets_fig7.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

BIN
doxygen/img/Dsets_fig8.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 KiB

BIN
doxygen/img/Dsets_fig9.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 88 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

BIN
doxygen/img/Dspace_move.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.5 KiB

BIN
doxygen/img/Dspace_read.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 29 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 38 KiB

BIN
doxygen/img/Dtypes_fig1.gif Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 41 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 78 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 49 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Some files were not shown because too many files have changed in this diff Show More