Compare commits

...

30 Commits

Author SHA1 Message Date
Elena Pourmal
1dd25c1ec3 [svn-r24460] Maintenance: Checking in modified files after creating released tar ball with the CMake installation fix. 2013-11-22 09:05:33 -05:00
Allen Byrne
c28d9d3177 [svn-r24458] Correct CMAKE_INSTALL_PREFIX for OS X packaging 2013-11-20 09:19:23 -05:00
Elena Pourmal
0e501b0559 [svn-r24404] Maintenance: Updated RELEASE.txt file, changed version number to 1.8.12 and ran
release script to create release tar ball. Full code freeze in 1.8.12 continues 
             until further notice.
2013-11-04 16:57:14 -05:00
Allen Byrne
facfaa074d [svn-r24397] Cleanup testfiles from scripts. Update lists of files to be cleaned. CMake update to clean testfiles.
Tested: local linux cmake
2013-11-01 16:44:58 -05:00
Allen Byrne
07e07f43bc [svn-r24387] Add missing H5_ prefix
Remove redundant function declaration.

Authorized commit
2013-10-31 10:01:37 -05:00
Elena Pourmal
7f83157df4 [svn-r24357] Maintenance: Created pre2 tar ball for testing and now committing the files for
creating ZIP file for Windows.
             Please no check-ins until further notice.
2013-10-24 21:25:50 -05:00
Elena Pourmal
acacbe7e16 [svn-r24355] Maintenance: Editing to remove some controversial phrasing. 2013-10-24 18:08:30 -05:00
Mark Evans
a06bf1d06b [svn-r24352] Edited for readability. 2013-10-23 17:03:36 -05:00
Mark Evans
6c1f6783c8 [svn-r24351] Fixed some typos. 2013-10-23 16:49:18 -05:00
Mark Evans
4e4fa2ed58 [svn-r24350] Edited for readability. 2013-10-23 16:44:11 -05:00
Mark Evans
5ca3c06e0a [svn-r24349] Edited for readability. 2013-10-23 16:39:14 -05:00
Mark Evans
dd9064b37f [svn-r24348] Edited for readability. 2013-10-23 16:25:10 -05:00
Mark Evans
95554f9936 [svn-r24347] Fixed some typos. 2013-10-23 16:08:15 -05:00
Mark Evans
4faf126b6c [svn-r24346] Edited for readability. 2013-10-23 15:28:44 -05:00
Larry Knox
a04c9b72a4 [svn-r24345] Removed suggested workaround using DESTDIR for make install with shared libs to a location different than the configure prefix. For shared libs, the configure prefix, either default or specified with --prefix, should match the final install destination. 2013-10-23 14:58:22 -05:00
Mark Evans
2d8f619424 [svn-r24344] Edited for readability. 2013-10-23 14:32:30 -05:00
Larry Knox
a69ad417b6 [svn-r24342] Check in documentation entries for HDFFV-7583, "make prefix=XXX install and check-install failed", and HDFFV-8152, "A space in source code directory name (mac) causes Make to fail". 2013-10-22 15:39:53 -05:00
Raymond Lu
b4d233ca83 [svn-r24319] Fix a typo. 2013-10-17 14:57:43 -05:00
Allen Byrne
abd31122e6 [svn-r24313] Move h5repack user-defined filter option to new features section. 2013-10-17 10:16:52 -05:00
Raymond Lu
e2ee6a02ab [svn-r24309] Issue 8147: CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv
catches some defined behavior in the alignment algorithm of the macro DETECT_I
in H5detect.c. I documented it as a known problem.

No test is needed.
2013-10-16 16:59:18 -05:00
Binh-Minh Ribler
24c105eb91 [svn-r24296] Description:
- Added a lot of documentation to classes for Reference Manual.
    - Fixed some format inconsistencies
Platforms tested:
    Linux/ppc64 (ostrich)
    Linux/32 2.6 (jam)
    Linux/64 2.6 (koala)/PGI compilers
2013-10-15 15:55:55 -05:00
Allen Byrne
cfdfaca0cc [svn-r24283] Add log file aliases and install step 2013-10-10 14:45:18 -05:00
Albert Cheng
8e2c45d0b3 [svn-r24281] Bug: HDFFV-8534
Added a known problem of Solaris make install failure.
Updated INSTALL to state Gnu make is the preferred version of make.

Tested: eyeballed.
2013-10-10 10:39:08 -05:00
Allen Byrne
3a462efd2c [svn-r24277] Add known problem for running just make check on tools. The tests for tools use other tools and therefore require the tools be built before testing. 2013-10-09 17:18:46 -05:00
Elena Pourmal
077acdd87f [svn-r24276] Maintenance for the release:
- Changed default build mode to "production"
       - Removed v110 from the help string for "--with-default-api-version" since 1.8 cannot use 1.10 symbols; 
         this fix should also go to 1.8 branch.
Tested on jam with --enable-cxx --enable-configure
2013-10-09 17:00:11 -05:00
Allen Byrne
2f0c79a191 [svn-r24274] Update URL 2013-10-09 12:07:03 -05:00
Allen Byrne
ef0318c4d8 [svn-r24271] Correct plugin option name 2013-10-09 12:03:56 -05:00
Raymond Lu
bde0510670 [svn-r24269] I added two new files H5Location.cpp and H5Location.h to the make file.
Not tested yet.
2013-10-09 11:34:42 -05:00
Elena Pourmal
0f765d9e01 [svn-r24266] Maintenance: Changed suffix to "pre2" to prepare repository for check-ins discovered during the pre1 testing. 2013-10-08 18:44:41 -05:00
Elena Pourmal
4d252a1252 [svn-r24260] Maintenance for the 1.8.12 release: Changed version number in the hdf5_1_8_12 release branch to 1.8.12-pre1 2013-10-06 20:35:21 -05:00
80 changed files with 871 additions and 402 deletions

View File

@@ -229,23 +229,27 @@ IF (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
SET (CPACK_PACKAGE_ICON "${HDF5_RESOURCES_DIR}/hdf.gif")
SET (CPACK_SET_DESTDIR TRUE) # Required when packaging, and set CMAKE_INSTALL_PREFIX to "/".
# LIST (APPEND CPACK_GENERATOR "Bundle")
SET (CPACK_BUNDLE_NAME "${HDF5_PACKAGE_STRING}")
SET (CPACK_BUNDLE_LOCATION "/") # make sure CMAKE_INSTALL_PREFIX ends in /
SET (CMAKE_INSTALL_PREFIX "/${CPACK_BUNDLE_NAME}.framework/Versions/${CPACK_PACKAGE_VERSION}/${CPACK_PACKAGE_NAME}/")
SET (CPACK_BUNDLE_ICON "${HDF5_RESOURCES_DIR}/hdf.gif")
SET (CPACK_BUNDLE_PLIST "${HDF5_BINARY_DIR}/CMakeFiles/Info.plist")
# SET (CPACK_BUNDLE_STARTUP_COMMAND "${HDF5_BINARY_DIR}/CMakeFiles/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-startup.sh")
SET (CPACK_APPLE_GUI_INFO_STRING "HDF5 (Hierarchical Data Format 5) Software Library and Utilities")
SET (CPACK_APPLE_GUI_COPYRIGHT "Copyright © 2006-2013 by The HDF Group. All rights reserved.")
SET (CPACK_SHORT_VERSION_STRING "${CPACK_PACKAGE_VERSION}")
#-----------------------------------------------------------------------------
# Configure the Info.plist file for the install bundle
#-----------------------------------------------------------------------------
CONFIGURE_FILE (
${HDF5_RESOURCES_DIR}/CPack.Info.plist.in
${HDF5_BINARY_DIR}/CMakeFiles/Info.plist @ONLY
)
IF (HDF5_PACK_MACOSX_BUNDLE)
LIST (APPEND CPACK_GENERATOR "Bundle")
SET (CPACK_BUNDLE_NAME "${HDF5_PACKAGE_STRING}")
SET (CPACK_BUNDLE_LOCATION "/") # make sure CMAKE_INSTALL_PREFIX ends in /
IF(NOT DEFINED CMAKE_INSTALL_PREFIX)
SET (CMAKE_INSTALL_PREFIX "/${CPACK_BUNDLE_NAME}.framework/Versions/${CPACK_PACKAGE_VERSION}/${CPACK_PACKAGE_NAME}/")
ENDIF(NOT DEFINED CMAKE_INSTALL_PREFIX)
SET (CPACK_BUNDLE_ICON "${HDF5_RESOURCES_DIR}/hdf.gif")
SET (CPACK_BUNDLE_PLIST "${HDF5_BINARY_DIR}/CMakeFiles/Info.plist")
# SET (CPACK_BUNDLE_STARTUP_COMMAND "${HDF5_BINARY_DIR}/CMakeFiles/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-startup.sh")
SET (CPACK_APPLE_GUI_INFO_STRING "HDF5 (Hierarchical Data Format 5) Software Library and Utilities")
SET (CPACK_APPLE_GUI_COPYRIGHT "Copyright © 2006-2013 by The HDF Group. All rights reserved.")
SET (CPACK_SHORT_VERSION_STRING "${CPACK_PACKAGE_VERSION}")
#-----------------------------------------------------------------------------
# Configure the Info.plist file for the install bundle
#-----------------------------------------------------------------------------
CONFIGURE_FILE (
${HDF5_RESOURCES_DIR}/CPack.Info.plist.in
${HDF5_BINARY_DIR}/CMakeFiles/Info.plist @ONLY
)
ENDIF(HDF5_PACK_MACOSX_BUNDLE)
ELSE (WIN32)
LIST (APPEND CPACK_GENERATOR "STGZ")
SET (CPACK_PACKAGING_INSTALL_PREFIX "/usr")

View File

@@ -1,4 +1,4 @@
HDF5 version 1.8.12-snap20 currently under development
HDF5 version 1.8.12 released on 2013-11-21
Please refer to the release_docs/INSTALL file for installation instructions.
------------------------------------------------------------------------------

View File

@@ -14,6 +14,8 @@ DPRINT=:
progname=`basename $0` # program name
cminfile="cmakemin.$$" # Cmake minimum file
cfgfile=$progname.$$ # configure file
ctest_log=ctest.log # output of ctest script
install_log=install.log # output of installation
$DPRINT $cfgfile
# Remove temporary generated files if exit 0
@@ -36,7 +38,8 @@ TIMESTAMP
# Explain what and where log files are.
cat <<EOF
ctest.log: output of ctest script.
$ctest_log: output of ctest script.
$install_log: output of installation
Log files will be stored in Testing/Temporary:
LastConfigure_<timestamp>.log: output of configure
LastBuild_<timestamp>.log: output of build
@@ -219,13 +222,36 @@ if (NOT LOCAL_SKIP_TEST)
endif()
message ("test DONE")
endif (NOT LOCAL_SKIP_TEST)
if(NOT LOCAL_MEMCHECK_TEST)
##-----------------------------------------------
## Package the product
##-----------------------------------------------
execute_process(COMMAND cpack -C ${CTEST_BUILD_CONFIGURATION} -V
WORKING_DIRECTORY ${CTEST_BINARY_DIRECTORY}
RESULT_VARIABLE cpackResult
OUTPUT_VARIABLE cpackLog
ERROR_VARIABLE cpackLog.err
)
file(WRITE ${CTEST_BINARY_DIRECTORY}/cpack.log "${cpackLog.err}" "${cpackLog}")
endif(NOT LOCAL_MEMCHECK_TEST)
#-----------------------------------------------------------------------------
message ("DONE")
EOF
# Run ctest
ctest -S $cfgfile -C Release -V -O ctest.log
ctest -S $cfgfile -C Release -V -O $ctest_log
exit_code=$?
if [ $exit_code = 0 ]; then
echo CTest script completed without error
else
echo Error encountered CTest script
fi
# Using HDF5-*.sh because actual name is unavailable
install_sh=HDF5-*.sh
echo installing with $install_sh ...
./$install_sh --skip-license > $install_log
exit_code=$?
if [ $exit_code = 0 ]; then
echo Complete without error

View File

@@ -14,11 +14,6 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// Class AbstractDs is an abstract base class, from which Attribute and
// DataSet inherit. It provides the services that are common to both
// Attribute and DataSet. It also inherits from H5Object and passes down
// the services that H5Object provides.
#ifndef __AbstractDs_H
#define __AbstractDs_H
@@ -33,6 +28,14 @@ class FloatType;
class IntType;
class StrType;
class VarLenType;
/*! \class AbstractDs
\brief AbstractDs is an abstract base class, inherited by Attribute
and DataSet.
It provides a collection of services that are common to both Attribute
and DataSet. AbstractDs inherits from H5Object.
*/
class H5_DLLCPP AbstractDs {
public:
// Gets a copy the datatype of that this abstract dataset uses.

View File

@@ -14,9 +14,6 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// Class ArrayType inherits from DataType and provides wrappers for the
// HDF5 C's Array Datatypes.
#ifndef __H5ArrayType_H
#define __H5ArrayType_H
@@ -24,6 +21,10 @@
namespace H5 {
#endif
/*! \class ArrayType
\brief Class ArrayType inherits from DataType and provides wrappers for
the HDF5's Array Datatypes.
*/
class H5_DLLCPP ArrayType : public DataType {
public:
// Constructor that creates a new array data type based on the

View File

@@ -14,11 +14,6 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// Class AtomType is a base class, from which IntType, FloatType, StrType,
// and PredType inherit. It provides the services that are common to these
// subclasses. It also inherits from DataType and passes down the
// services that are common to all the datatypes.
#ifndef __H5AtomType_H
#define __H5AtomType_H
@@ -26,6 +21,13 @@
namespace H5 {
#endif
/*! \class AtomType
\brief AtomType is a base class, inherited by IntType, FloatType,
StrType, and PredType.
AtomType provides operations on HDF5 atomic datatypes. It also inherits
from DataType.
*/
class H5_DLLCPP AtomType : public DataType {
public:
// Returns the byte order of an atomic datatype.

View File

@@ -21,6 +21,14 @@
namespace H5 {
#endif
/*! \class Attribute
\brief Class Attribute operates on HDF5 attributes.
An attribute has many characteristics similar to a dataset, thus both
Attribute and DataSet are derivatives of AbstractDs. Attribute also
inherits from IdComponent because an attribute is an HDF5 component that
is identified by an identifier.
*/
class H5_DLLCPP Attribute : public AbstractDs, public IdComponent {
public:
// Closes this attribute.

View File

@@ -14,10 +14,6 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// CommonFG is a protocol class. Its existence is simply to provide the
// common services that are provided by H5File and Group. The file or
// group in the context of this class is referred to as 'location'.
#ifndef __CommonFG_H
#define __CommonFG_H
@@ -25,10 +21,17 @@
namespace H5 {
#endif
// Class forwarding
class Group;
class H5File;
class ArrayType;
class VarLenType;
/*! \class CommonFG
\brief \i CommonFG is an abstract base class of H5File and H5Group.
It provides common operations of H5File and H5Group.
*/
class H5_DLLCPP CommonFG {
public:
// Creates a new group at this location which can be a file

View File

@@ -14,9 +14,6 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// Class CompType inherits from DataType and provides accesses to a compound
// datatype.
#ifndef __H5CompType_H
#define __H5CompType_H
@@ -24,6 +21,10 @@
namespace H5 {
#endif
/*! \class CompType
\brief CompType is a derivative of a DataType and operates on HDF5
compound datatypes.
*/
class H5_DLLCPP CompType : public DataType {
public:
// Default constructor

View File

@@ -37,17 +37,17 @@
* closely represent the interfaces of the HDF5 APIs, as followed:
*
* \verbatim
HDF5 C APIs C++ Classes
----------- -----------
Attribute Interface (H5A) Attribute
Datasets Interface (H5D) DataSet
Error Interface (H5E) Exception
File Interface (H5F) H5File
Group Interface (H5G) Group
Identifier Interface (H5I) IdComponent
Property List Interface (H5P) PropList and subclasses
Dataspace Interface (H5S) DataSpace
Datatype Interface (H5T) DataType and subclasses
HDF5 C APIs C++ Classes
----------- -----------
Attribute Interface (H5A) Attribute
Datasets Interface (H5D) DataSet
Error Interface (H5E) Exception
File Interface (H5F) H5File
Group Interface (H5G) Group
Identifier Interface (H5I) IdComponent
Property List Interface (H5P) PropList and subclasses
Dataspace Interface (H5S) DataSpace
Datatype Interface (H5T) DataType and subclasses
\endverbatim
* \section install_sec Installation
*

View File

@@ -14,8 +14,6 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// Class DataSet inherits from AbstractDs and provides accesses to a dataset.
#ifndef __H5DataSet_H
#define __H5DataSet_H
@@ -23,6 +21,13 @@
namespace H5 {
#endif
/*! \class DataSet
\brief Class DataSet operates on HDF5 datasets.
An datasets has many characteristics similar to an attribute, thus both
Attribute and DataSet are derivatives of AbstractDs. DataSet also
inherits from H5Object because a dataset is an HDF5 object.
*/
class H5_DLLCPP DataSet : public H5Object, public AbstractDs {
public:
// Close this dataset.

View File

@@ -21,6 +21,7 @@
namespace H5 {
#endif
//! Class DataSpace operates on HDF5 dataspaces.
class H5_DLLCPP DataSpace : public IdComponent {
public:
// Default DataSpace objects

View File

@@ -95,7 +95,7 @@ DataType::DataType( const H5T_class_t type_class, size_t size ) : H5Object()
// Function: DataType overload constructor - dereference
///\brief Given a reference, ref, to an hdf5 group, creates a
/// DataType object
///\param h5file - IN: Location referenced object is in
///\param loc - IN: Location referenced object is in
///\param ref - IN: Reference pointer
///\param ref_type - IN: Reference type - default to H5R_OBJECT
///\exception H5::ReferenceException
@@ -259,38 +259,12 @@ void DataType::p_commit(hid_t loc_id, const char* name)
// Function: DataType::commit
///\brief Commits a transient datatype to a file, creating a new
/// named datatype
///\param loc - IN: A file
///\param loc - IN: A location (file, dataset, datatype, or group)
///\param name - IN: Name of the datatype
///\exception H5::DataTypeIException
// Programmer Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void DataType::commit(H5File& loc, const char* name)
{
p_commit(loc.getLocId(), name);
}
//--------------------------------------------------------------------------
// Function: DataType::commit
///\brief This is an overloaded member function, provided for convenience.
/// It differs from the above function only in the type of the
/// argument \a name.
// Programmer Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void DataType::commit(H5File& loc, const H5std_string& name)
{
p_commit(loc.getLocId(), name.c_str());
}
//--------------------------------------------------------------------------
// Function: DataType::commit
///\brief Commits a transient datatype to a file, creating a new
/// named datatype
///\param loc - IN: Either a group, dataset, named datatype, or attribute.
///\param name - IN: Name of the datatype
///\exception H5::DataTypeIException
// Programmer Binh-Minh Ribler - Jan, 2007
//--------------------------------------------------------------------------
void DataType::commit(H5Object& loc, const char* name)
void DataType::commit(H5Location& loc, const char* name)
{
p_commit(loc.getId(), name);
}
@@ -302,7 +276,7 @@ void DataType::commit(H5Object& loc, const char* name)
/// argument \a name.
// Programmer Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void DataType::commit(H5Object& loc, const H5std_string& name)
void DataType::commit(H5Location& loc, const H5std_string& name)
{
p_commit(loc.getId(), name.c_str());
}

View File

@@ -14,6 +14,9 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// Class DataType inherits from H5Object and has several subclasses for
// specific HDF5 data types.
#ifndef __H5DataType_H
#define __H5DataType_H
@@ -21,6 +24,13 @@
namespace H5 {
#endif
/*! \class DataType
\brief Class DataType provides generic operations on HDF5 datatypes.
DataType inherits from H5Object because a named datatype is an HDF5
object and is a base class of ArrayType, AtomType, CompType, EnumType,
and VarLenType.
*/
class H5_DLLCPP DataType : public H5Object {
public:
// Creates a datatype given its class and size
@@ -47,10 +57,8 @@ class H5_DLLCPP DataType : public H5Object {
// Commits a transient datatype to a file; this datatype becomes
// a named datatype which can be accessed from the location.
void commit( H5File& loc, const char* name);
void commit( H5File& loc, const H5std_string& name);
void commit( H5Object& loc, const char* name);
void commit( H5Object& loc, const H5std_string& name);
void commit( H5Location& loc, const char* name);
void commit( H5Location& loc, const H5std_string& name);
// Determines whether this datatype is a named datatype or
// a transient datatype.

View File

@@ -14,6 +14,9 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// Class DSetCreatPropList represents the HDF5 dataset creation property list
// and inherits from PropList.
#ifndef __H5DSCreatPropList_H
#define __H5DSCreatPropList_H
@@ -21,6 +24,10 @@
namespace H5 {
#endif
/*! \class DSetCreatPropList
\brief Class DSetCreatPropList represents the dataset creation property
list.
*/
class H5_DLLCPP DSetCreatPropList : public PropList {
public:
// Default dataset creation property list.

View File

@@ -14,6 +14,9 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// Class DSetMemXferPropList represents the HDF5 dataset transfer property list
// and inherits from PropList.
#ifndef __H5DSetMemXferPropList_H
#define __H5DSetMemXferPropList_H
@@ -21,6 +24,10 @@
namespace H5 {
#endif
/*! \class DSetMemXferPropList
\brief Class DSetMemXferPropList represents the dataset memory and
transfer property list.
*/
class H5_DLLCPP DSetMemXferPropList : public PropList {
public:
static const DSetMemXferPropList DEFAULT;

View File

@@ -21,6 +21,7 @@
namespace H5 {
#endif
//! Class EnumType operates on HDF5 enum datatypes.
class H5_DLLCPP EnumType : public DataType {
public:

View File

@@ -28,6 +28,11 @@ namespace H5 {
#endif
#endif
/*! \class Exception
\brief Exception provides wrappers of HDF5 error handling functions.
Many classes are derived from Exception for specific HDF5 C interfaces.
*/
class H5_DLLCPP Exception {
public:
// Creates an exception with a function name where the failure occurs

View File

@@ -14,6 +14,9 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// Class FileAccPropList represents the HDF5 file access property list and
// inherits from DataType.
#ifndef __H5FileAccPropList_H
#define __H5FileAccPropList_H
@@ -21,7 +24,7 @@
namespace H5 {
#endif
// class for file access properties
//! Class FileAccPropList represents the HDF5 file access property list.
class H5_DLLCPP FileAccPropList : public PropList {
public:
static const FileAccPropList DEFAULT;

View File

@@ -21,7 +21,7 @@
namespace H5 {
#endif
// class for file access properties
//! Class FileCreatPropList represents the HDF5 file create property list.
class H5_DLLCPP FileCreatPropList : public PropList {
public:
// Default file creation property list.

View File

@@ -273,19 +273,6 @@ void H5File::reOpen()
throw FileIException("H5File::reOpen", "H5Freopen failed");
}
//--------------------------------------------------------------------------
// Function: H5File::reopen
// Purpose: Reopens this file.
// Exception H5::FileIException
// Description
// This function is replaced by the above function reOpen.
// Programmer Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void H5File::reopen()
{
H5File::reOpen();
}
//--------------------------------------------------------------------------
// Function: H5File::getCreatePlist
///\brief Returns the creation property list of this file
@@ -505,6 +492,20 @@ hsize_t H5File::getFileSize() const
return (file_size);
}
#ifndef DOXYGEN_SHOULD_SKIP_THIS
//--------------------------------------------------------------------------
// Function: H5File::reopen
// Purpose: Reopens this file.
// Exception H5::FileIException
// Description
// This function is replaced by the above function reOpen.
// Programmer Binh-Minh Ribler - 2000
//--------------------------------------------------------------------------
void H5File::reopen()
{
H5File::reOpen();
}
//--------------------------------------------------------------------------
// Function: H5File::getLocId
// Purpose: Get the id of this file
@@ -517,6 +518,7 @@ hid_t H5File::getLocId() const
{
return( getId() );
}
#endif // DOXYGEN_SHOULD_SKIP_THIS
//--------------------------------------------------------------------------
// Function: H5File::getId

View File

@@ -21,6 +21,11 @@
namespace H5 {
#endif
/*! \class H5File
\brief Class H5File represents an HDF5 file.
It inherits from H5Location and CommonFG.
*/
class H5_DLLCPP H5File : public H5Location, public CommonFG {
public:
// Creates or opens an HDF5 file.
@@ -71,7 +76,13 @@ class H5_DLLCPP H5File : public H5Location, public CommonFG {
// Reopens this file.
void reOpen(); // added for better name
void reopen();
#ifndef DOXYGEN_SHOULD_SKIP_THIS
void reopen(); // obsolete in favor of reOpen()
// Gets the file id
virtual hid_t getLocId() const;
#endif // DOXYGEN_SHOULD_SKIP_THIS
///\brief Returns this class name.
virtual H5std_string fromClass () const { return("H5File"); }
@@ -79,9 +90,6 @@ class H5_DLLCPP H5File : public H5Location, public CommonFG {
// Throw file exception.
virtual void throwException(const H5std_string& func_name, const H5std_string& msg) const;
// Gets the file id
virtual hid_t getLocId() const;
// Default constructor
H5File();

View File

@@ -21,6 +21,7 @@
namespace H5 {
#endif
//! Class FloatType operates on HDF5 floating point datatype.
class H5_DLLCPP FloatType : public AtomType {
public:
// Creates a floating-point type using a predefined type.

View File

@@ -90,7 +90,7 @@ Group::Group(const hid_t existing_id) : H5Object()
//--------------------------------------------------------------------------
// Function: Group overload constructor - dereference
///\brief Given a reference, ref, to an hdf5 group, creates a Group object
///\param obj - IN: Specifying location referenced object is in
///\param loc - IN: Specifying location referenced object is in
///\param ref - IN: Reference pointer
///\param ref_type - IN: Reference type - default to H5R_OBJECT
///\exception H5::ReferenceException

View File

@@ -21,6 +21,11 @@
namespace H5 {
#endif
/*! \class Group
\brief Class Group represents an HDF5 group.
It inherits many operations from H5Location and CommonFG.
*/
class H5_DLLCPP Group : public H5Object, public CommonFG {
public:
// Close this group.

View File

@@ -24,6 +24,13 @@ namespace H5 {
#endif
class DataSpace;
/*! \class IdComponent
\brief Class IdComponent provides wrappers of the C functions that
operate on an HDF5 identifier.
In most cases, the C library handles these operations and an application
rarely needs them.
*/
class H5_DLLCPP IdComponent {
public:
// Increment reference counter.

View File

@@ -21,6 +21,7 @@
namespace H5 {
#endif
//! Class IntType operates on HDF5 integer datatype.
class H5_DLLCPP IntType : public AtomType {
public:
// Creates a integer type using a predefined type

View File

@@ -29,6 +29,12 @@ namespace H5 {
// calling H5close
#endif // DOXYGEN_SHOULD_SKIP_THIS
/*! \class H5Library
\brief Class H5Library operates the HDF5 library globably.
It is not neccessary to construct an instance of H5Library to use the
methods.
*/
class H5_DLLCPP H5Library {
public:
#ifndef DOXYGEN_SHOULD_SKIP_THIS

View File

@@ -567,7 +567,7 @@ void H5Location::p_reference(void* ref, const char* name, hid_t space_id, H5R_ty
/// \li \c H5R_DATASET_REGION - Reference is a dataset region
/// reference. (default)
///\exception H5::ReferenceException
///\notes This method is more suitable for a dataset region reference.
///\note This method is more suitable for a dataset region reference.
// Programmer Binh-Minh Ribler - May, 2004
//--------------------------------------------------------------------------
void H5Location::reference(void* ref, const char* name, const DataSpace& dataspace, H5R_type_t ref_type) const
@@ -593,7 +593,7 @@ void H5Location::reference(void* ref, const char* name, const DataSpace& dataspa
/// \li \c H5R_DATASET_REGION - Reference is a dataset region
/// reference. (default)
///\exception H5::ReferenceException
///\notes This method is more suitable for a dataset region reference.
///\note This method is more suitable for a dataset region reference.
// Programmer Binh-Minh Ribler - May, 2004
//--------------------------------------------------------------------------
void H5Location::reference(void* ref, const H5std_string& name, const DataSpace& dataspace, H5R_type_t ref_type) const
@@ -617,7 +617,7 @@ void H5Location::reference(void* ref, const H5std_string& name, const DataSpace&
/// \li \c H5R_OBJECT - Reference is an object reference (default)
/// \li \c H5R_DATASET_REGION - Reference is a dataset region
///\exception H5::ReferenceException
///\notes This method is more suitable for an object reference.
///\note This method is more suitable for an object reference.
// Programmer Binh-Minh Ribler - May, 2004
//--------------------------------------------------------------------------
void H5Location::reference(void* ref, const char* name, H5R_type_t ref_type) const
@@ -640,7 +640,7 @@ void H5Location::reference(void* ref, const char* name, H5R_type_t ref_type) con
///\param ref_type - IN: Type of reference to query, valid values are:
/// \li \c H5R_OBJECT - Reference is an object reference (default)
/// \li \c H5R_DATASET_REGION - Reference is a dataset region
///\notes This method is more suitable for an object reference.
///\note This method is more suitable for an object reference.
// Programmer Binh-Minh Ribler - May, 2004
//--------------------------------------------------------------------------
void H5Location::reference(void* ref, const H5std_string& name, H5R_type_t ref_type) const
@@ -678,7 +678,7 @@ hid_t H5Location::p_dereference(hid_t loc_id, const void* ref, H5R_type_t ref_ty
//--------------------------------------------------------------------------
// Function: H5Location::dereference
///\brief Dereferences a reference into an HDF5 object, given an HDF5 object.
///\param obj - IN: Object specifying the location of the referenced object
///\param loc - IN: Location of the referenced object
///\param ref - IN: Reference pointer
///\param ref_type - IN: Reference type
///\exception H5::ReferenceException
@@ -823,6 +823,7 @@ H5O_type_t H5Location::p_get_ref_obj_type(void *ref, H5R_type_t ref_type) const
return(obj_type);
}
#endif // DOXYGEN_SHOULD_SKIP_THIS
//--------------------------------------------------------------------------
// Function: H5Location::getRegion
@@ -858,8 +859,6 @@ DataSpace H5Location::getRegion(void *ref, H5R_type_t ref_type) const
//--------------------------------------------------------------------------
H5Location::~H5Location() {}
#endif // DOXYGEN_SHOULD_SKIP_THIS
#ifndef H5_NO_NAMESPACE
} // end namespace
#endif

View File

@@ -19,10 +19,6 @@
#include "H5Classes.h" // constains forward class declarations
// H5Location is an abstract class. It provides a collection of wrappers
// of C functions which take location IDs. Most of these were in H5Object
// but are now moved here for H5File's access.
#ifndef H5_NO_NAMESPACE
namespace H5 {
#endif
@@ -41,8 +37,15 @@ class UserData4Aiterate { // user data for attribute iteration
H5Location* location;
};
// An H5Location can be a file, group, dataset, or committed datatype.
/*! \class H5Location
\brief H5Location is an abstract base class, added in version 1.8.12.
It provides a collection of wrappers for the C functions that take a
location identifier to specify the HDF5 object. The location identifier
can be either file, group, dataset, or named datatype.
*/
// Most of these methods were in H5Object but are now moved here because
// a location can be a file, group, dataset, or named datatype. -BMR, 2013-10-1
class H5_DLLCPP H5Location : public IdComponent {
public:
// Creates an attribute for the specified object at this location
@@ -154,11 +157,11 @@ class H5_DLLCPP H5Location : public IdComponent {
// Retrieves the type of object that an object reference points to.
H5O_type_t p_get_ref_obj_type(void *ref, H5R_type_t ref_type) const;
#endif // DOXYGEN_SHOULD_SKIP_THIS
// Noop destructor.
virtual ~H5Location();
#endif // DOXYGEN_SHOULD_SKIP_THIS
}; /* end class H5Location */
#ifndef H5_NO_NAMESPACE

View File

@@ -38,6 +38,12 @@
namespace H5 {
#endif
/*! \class H5Object
\brief Class H5Object is a bridge between H5Location and DataSet, DataType,
and Group.
All the wrappers in H5Object were moved to H5Location.
*/
class H5_DLLCPP H5Object : public H5Location {
public:
#ifndef DOXYGEN_SHOULD_SKIP_THIS

View File

@@ -272,22 +272,12 @@ PredType& PredType::operator=( const PredType& rhs )
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// These dummy functions do not inherit from DataType - they'll
// throw an DataTypeIException if invoked.
void PredType::commit( H5File& loc, const char* name )
void PredType::commit(H5Location& loc, const char* name )
{
throw DataTypeIException("PredType::commit", "Error: Attempted to commit a predefined datatype. Invalid operation!" );
}
void PredType::commit( H5File& loc, const H5std_string& name )
{
commit( loc, name.c_str());
}
void PredType::commit( H5Object& loc, const char* name )
{
throw DataTypeIException("PredType::commit", "Error: Attempted to commit a predefined datatype. Invalid operation!" );
}
void PredType::commit( H5Object& loc, const H5std_string& name )
void PredType::commit(H5Location& loc, const H5std_string& name )
{
commit( loc, name.c_str());
}

View File

@@ -14,11 +14,6 @@
* access to either file, you may request a copy from help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
// PredType holds the definition of all the HDF5 predefined datatypes.
// These types can only be made copy of, not created by H5Tcreate or
// closed by H5Tclose. They are treated as constants.
/////////////////////////////////////////////////////////////////////
#ifndef __H5PredType_H
#define __H5PredType_H
@@ -36,6 +31,13 @@ namespace H5 {
#define H5CPP_EXITED -3 // -3 is less likely to be used elsewhere
#endif // DOXYGEN_SHOULD_SKIP_THIS
/*! \class PredType
\brief Class PredType holds the definition of all the HDF5 predefined
datatypes.
These types can only be made copy of, not created by H5Tcreate or
closed by H5Tclose. They are treated as constants.
*/
class H5_DLLCPP PredType : public AtomType {
public:
///\brief Returns this class name.
@@ -228,16 +230,20 @@ class H5_DLLCPP PredType : public AtomType {
static const PredType NATIVE_UINT_FAST64;
#endif /* H5_SIZEOF_UINT_FAST64_T */
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// These dummy functions do not inherit from DataType - they'll
// throw a DataTypeIException if invoked.
void commit( H5File& loc, const H5std_string& name );
void commit( H5File& loc, const char* name );
void commit( H5Object& loc, const H5std_string& name );
void commit( H5Object& loc, const char* name );
/*! \brief This dummy function do not inherit from DataType - it will
throw a DataTypeIException if invoked.
*/
void commit(H5Location& loc, const H5std_string& name );
/*! \brief This dummy function do not inherit from DataType - it will
throw a DataTypeIException if invoked.
*/
void commit(H5Location& loc, const char* name );
/*! \brief This dummy function do not inherit from DataType - it will
throw a DataTypeIException if invoked.
*/
bool committed();
#endif // DOXYGEN_SHOULD_SKIP_THIS
#ifndef DOXYGEN_SHOULD_SKIP_THIS
private:
// Added this to work around the atexit/global destructor problem.
// It'll help to terminate the library after other PredType instances
@@ -245,7 +251,6 @@ class H5_DLLCPP PredType : public AtomType {
static const PredType AtExit;
protected:
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Default constructor
PredType();

View File

@@ -21,6 +21,7 @@
namespace H5 {
#endif
//! Class PropList provides operations for generic property lists.
class H5_DLLCPP PropList : public IdComponent {
public:
// Default property list

View File

@@ -21,6 +21,7 @@
namespace H5 {
#endif
//! Class StrType operates on HDF5 string datatypes.
class H5_DLLCPP StrType : public AtomType {
public:
// Creates a string type using a predefined type

View File

@@ -24,6 +24,7 @@
namespace H5 {
#endif
//! VarLenType operates on the HDF5 C's Variable-length Datatypes.
class H5_DLLCPP VarLenType : public DataType {
public:
// Constructor that creates a variable-length datatype based

View File

@@ -4,8 +4,21 @@
### T E S T I N G ###
##############################################################################
##############################################################################
# Remove any output file left over from previous test run
ADD_TEST (
NAME cpp_testhdf5-clear-objects
COMMAND ${CMAKE_COMMAND}
-E remove
tattr_basic.h5
tattr_compound.h5
tattr_dtype.h5
tattr_multi.h5
tattr_scalar.h5
tfattrs.h5
)
ADD_TEST (NAME cpp_testhdf5 COMMAND $<TARGET_FILE:cpp_testhdf5>)
SET_TESTS_PROPERTIES (cpp_testhdf5 PROPERTIES DEPENDS cpp_testhdf5-clear-objects)
IF (HDF5_TEST_VFD)
@@ -24,6 +37,17 @@ IF (HDF5_TEST_VFD)
MACRO (ADD_VFD_TEST vfdname resultcode)
IF (NOT HDF5_ENABLE_USING_MEMCHECKER)
ADD_TEST (
NAME VFD-${vfdname}-cpp_testhdf5-clear-objects
COMMAND ${CMAKE_COMMAND}
-E remove
tattr_basic.h5
tattr_compound.h5
tattr_dtype.h5
tattr_multi.h5
tattr_scalar.h5
tfattrs.h5
)
ADD_TEST (
NAME VFD-${vfdname}-cpp_testhdf5
COMMAND "${CMAKE_COMMAND}"
@@ -35,6 +59,7 @@ IF (HDF5_TEST_VFD)
-D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
-P "${HDF5_RESOURCES_DIR}/vfdTest.cmake"
)
SET_TESTS_PROPERTIES (VFD-${vfdname}-cpp_testhdf5 PROPERTIES DEPENDS VFD-${vfdname}-cpp_testhdf5-clear-objects)
ENDIF (NOT HDF5_ENABLE_USING_MEMCHECKER)
ENDMACRO (ADD_VFD_TEST)

View File

@@ -747,7 +747,7 @@
#cmakedefine H5_USING_MEMCHECKER @H5_USING_MEMCHECKER@
/* Version number of package */
#define VERSION "@HDF5_PACKAGE_VERSION@"
#define H5_VERSION "@HDF5_PACKAGE_VERSION@"
/* Define if vsnprintf() returns the correct value for formatted strings that
don't fit into size allowed */
@@ -774,10 +774,10 @@
#cmakedefine WORDS_BIGENDIAN @H5_WORDS_BIGENDIANR@
/* Number of bits in a file offset, on hosts where this is settable. */
#cmakedefine _FILE_OFFSET_BITS
#cmakedefine H5__FILE_OFFSET_BITS
/* Define for large files, on AIX-style hosts. */
#cmakedefine _LARGE_FILES
#cmakedefine H5__LARGE_FILES
/* Define to empty if `const' does not conform to ANSI C. */
#cmakedefine H5_const

30
configure vendored
View File

@@ -1,7 +1,7 @@
#! /bin/sh
# From configure.ac Id: configure.ac 24231 2013-09-30 21:51:26Z songyulu .
# From configure.ac Id: configure.ac 24276 2013-10-09 22:00:11Z epourmal .
# Guess values for system-dependent variables and create Makefiles.
# Generated by GNU Autoconf 2.69 for HDF5 1.8.12-snap20.
# Generated by GNU Autoconf 2.69 for HDF5 1.8.12.
#
# Report bugs to <help@hdfgroup.org>.
#
@@ -591,8 +591,8 @@ MAKEFLAGS=
# Identity of this package.
PACKAGE_NAME='HDF5'
PACKAGE_TARNAME='hdf5'
PACKAGE_VERSION='1.8.12-snap20'
PACKAGE_STRING='HDF5 1.8.12-snap20'
PACKAGE_VERSION='1.8.12'
PACKAGE_STRING='HDF5 1.8.12'
PACKAGE_BUGREPORT='help@hdfgroup.org'
PACKAGE_URL=''
@@ -1490,7 +1490,7 @@ if test "$ac_init_help" = "long"; then
# Omit some internal or obsolete options to make the list less imposing.
# This message is too long to be a string in the A/UX 3.1 sh.
cat <<_ACEOF
\`configure' configures HDF5 1.8.12-snap20 to adapt to many kinds of systems.
\`configure' configures HDF5 1.8.12 to adapt to many kinds of systems.
Usage: $0 [OPTION]... [VAR=VALUE]...
@@ -1560,7 +1560,7 @@ fi
if test -n "$ac_init_help"; then
case $ac_init_help in
short | recursive ) echo "Configuration of HDF5 1.8.12-snap20:";;
short | recursive ) echo "Configuration of HDF5 1.8.12:";;
esac
cat <<\_ACEOF
@@ -1677,7 +1677,7 @@ Optional Packages:
--with-default-plugindir=location
Specify default location for plugins
[default="/usr/local/hdf5/lib/plugin"]
--with-default-api-version=(v16|v18|v110)
--with-default-api-version=(v16|v18)
Specify default release version of public symbols
[default=v18]
@@ -1762,7 +1762,7 @@ fi
test -n "$ac_init_help" && exit $ac_status
if $ac_init_version; then
cat <<\_ACEOF
HDF5 configure 1.8.12-snap20
HDF5 configure 1.8.12
generated by GNU Autoconf 2.69
Copyright (C) 2012 Free Software Foundation, Inc.
@@ -2856,7 +2856,7 @@ cat >config.log <<_ACEOF
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.
It was created by HDF5 $as_me 1.8.12-snap20, which was
It was created by HDF5 $as_me 1.8.12, which was
generated by GNU Autoconf 2.69. Invocation command line was
$ $0 $@
@@ -3688,7 +3688,7 @@ fi
# Define the identity of the package.
PACKAGE='hdf5'
VERSION='1.8.12-snap20'
VERSION='1.8.12'
cat >>confdefs.h <<_ACEOF
@@ -22224,7 +22224,7 @@ fi
case "X-$enable_production" in
X-yes)
X-|X-yes)
enable_production="yes"
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: production" >&5
$as_echo "production" >&6; }
@@ -22234,7 +22234,7 @@ $as_echo "production" >&6; }
H5_CXXFLAGS="$H5_CXXFLAGS $PROD_CXXFLAGS"
H5_FCFLAGS="$H5_FCFLAGS $PROD_FCFLAGS"
;;
X-|X-no)
X-no)
enable_production="no"
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: development" >&5
$as_echo "development" >&6; }
@@ -31678,7 +31678,7 @@ Usage: $0 [OPTIONS]
Report bugs to <bug-libtool@gnu.org>."
lt_cl_version="\
HDF5 config.lt 1.8.12-snap20
HDF5 config.lt 1.8.12
configured by $0, generated by GNU Autoconf 2.69.
Copyright (C) 2011 Free Software Foundation, Inc.
@@ -33812,7 +33812,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# report actual input values of CONFIG_FILES etc. instead of their
# values after options handling.
ac_log="
This file was extended by HDF5 $as_me 1.8.12-snap20, which was
This file was extended by HDF5 $as_me 1.8.12, which was
generated by GNU Autoconf 2.69. Invocation command line was
CONFIG_FILES = $CONFIG_FILES
@@ -33878,7 +33878,7 @@ _ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
ac_cs_version="\\
HDF5 config.status 1.8.12-snap20
HDF5 config.status 1.8.12
configured by $0, generated by GNU Autoconf 2.69,
with options \\"\$ac_cs_config\\"

View File

@@ -26,7 +26,7 @@ AC_PREREQ([2.69])
## NOTE: Do not forget to change the version number here when we do a
## release!!!
##
AC_INIT([HDF5], [1.8.12-snap20], [help@hdfgroup.org])
AC_INIT([HDF5], [1.8.12], [help@hdfgroup.org])
AC_CONFIG_SRCDIR([src/H5.c])
AC_CONFIG_HEADER([src/H5config.h])
@@ -1204,7 +1204,7 @@ AC_ARG_ENABLE([production],
[Determines how to run the compiler.])])
case "X-$enable_production" in
X-yes)
X-|X-yes)
enable_production="yes"
AC_MSG_RESULT([production])
CONFIG_MODE=production
@@ -1213,7 +1213,7 @@ case "X-$enable_production" in
H5_CXXFLAGS="$H5_CXXFLAGS $PROD_CXXFLAGS"
H5_FCFLAGS="$H5_FCFLAGS $PROD_FCFLAGS"
;;
X-|X-no)
X-no)
enable_production="no"
AC_MSG_RESULT([development])
CONFIG_MODE=development
@@ -4279,7 +4279,7 @@ esac
AC_SUBST([DEFAULT_API_VERSION])
AC_MSG_CHECKING([which version of public symbols to use by default])
AC_ARG_WITH([default-api-version],
[AS_HELP_STRING([--with-default-api-version=(v16|v18|v110)],
[AS_HELP_STRING([--with-default-api-version=(v16|v18)],
[Specify default release version of public symbols
[default=v18]])],,
withval=v18)

View File

@@ -39,6 +39,9 @@
soft_link.h5
subset.h5
unix2win.h5
blue/prefix_target.h5
red/prefix_target.h5
u2w/u2w_target.h5
)
IF (NOT "${last_test}" STREQUAL "")
SET_TESTS_PROPERTIES (EXAMPLES-clear-objects PROPERTIES DEPENDS ${last_test})

View File

@@ -5,6 +5,30 @@
##############################################################################
##############################################################################
# Remove any output file left over from previous test run
ADD_TEST (
NAME f90_ex-clear-objects
COMMAND ${CMAKE_COMMAND}
-E remove
compound.h5
copy1.h5
copy2.h5
dsetf.h5
extend.h5
FORTRAN.h5
groupf.h5
groupsf.h5
h5_cmprss.h5
mount1.h5
mount2.h5
sdsf.h5
subset.h5
)
IF (NOT "${last_test}" STREQUAL "")
SET_TESTS_PROPERTIES (f90_ex-clear-objects PROPERTIES DEPENDS ${last_test})
ENDIF (NOT "${last_test}" STREQUAL "")
SET (last_test "f90_ex-clear-objects")
FOREACH (example ${examples})
ADD_TEST (NAME f90_ex_${example} COMMAND $<TARGET_FILE:f90_ex_${example}>)
IF (NOT "${last_test}" STREQUAL "")

View File

@@ -4,5 +4,13 @@
### T E S T I N G ###
##############################################################################
##############################################################################
# Remove any output file left over from previous test run
ADD_TEST (
NAME cpp_hl_ex_ptExampleFL-clear-objects
COMMAND ${CMAKE_COMMAND}
-E remove
PTcppexampleFL.h5
)
ADD_TEST (NAME cpp_hl_ex_ptExampleFL COMMAND $<TARGET_FILE:ptExampleFL>)
SET_TESTS_PROPERTIES (cpp_hl_ex_ptExampleFL PROPERTIES DEPENDS cpp_hl_ex_ptExampleFL-clear-objects)

View File

@@ -6,7 +6,7 @@ PROJECT (HDF5_HL_EXAMPLES )
#-----------------------------------------------------------------------------
SET (examples
ex_lite1
ex_lite2
ex_lite2 #ex_lite2 PROPERTIES DEPENDS ex_lite1)
ex_lite3
ptExampleFL
ex_image1

View File

@@ -21,8 +21,40 @@ FOREACH (h5_file ${HDF5_TEST_FILES})
)
ENDFOREACH (h5_file ${HDF5_TEST_FILES})
# Remove any output file left over from previous test run
ADD_TEST (
NAME hl_ex-clear-objects
COMMAND ${CMAKE_COMMAND}
-E remove
ex_lite1.h5
ex_lite2.h5
ex_lite3.h5
packet_table_FLexample.h5
ex_image1.h5
ex_image2.h5
ex_table_01.h5
ex_table_02.h5
ex_table_03.h5
ex_table_04.h5
ex_table_05.h5
ex_table_06.h5
ex_table_07.h5
ex_table_08.h5
ex_table_09.h5
ex_table_10.h5
ex_table_11.h5
ex_table_12.h5
ex_ds1.h5
)
IF (NOT "${last_test}" STREQUAL "")
SET_TESTS_PROPERTIES (hl_ex-clear-objects PROPERTIES DEPENDS ${last_test})
ENDIF (NOT "${last_test}" STREQUAL "")
SET (last_test "hl_ex-clear-objects")
FOREACH (example ${examples})
ADD_TEST (NAME hl_ex_${example} COMMAND $<TARGET_FILE:hl_ex_${example}>)
IF (NOT "${last_test}" STREQUAL "")
SET_TESTS_PROPERTIES (hl_ex_${example} PROPERTIES DEPENDS ${last_test})
ENDIF (NOT "${last_test}" STREQUAL "")
SET (last_test "hl_ex_${example}")
ENDFOREACH (example ${examples})
SET_TESTS_PROPERTIES (hl_ex_ex_lite2 PROPERTIES DEPENDS hl_ex_ex_lite1)

View File

@@ -15,8 +15,9 @@ ADD_TEST (
dsetf3.h5
dsetf4.h5
dsetf5.h5
f1image.h5
f1img.h5
f1tab.h5
tstds.h5
)
ADD_TEST (NAME hl_f90_tstds COMMAND $<TARGET_FILE:hl_f90_tstds>)

View File

@@ -47,12 +47,15 @@ ADD_TEST (
test_ds7.h5
test_ds8.h5
test_ds9.h5
test_ds10.h5
test_dectris.h5
test_image1.h5
test_image2.h5
test_image3.h5
test_lite1.h5
test_lite2.h5
test_lite3.h5
test_lite4.h5
test_packet_compress.h5
test_packet_table.h5
test_table.h5

View File

@@ -301,13 +301,13 @@ CONTENTS
Intel or PGI compilers.
4.3.6. Specifying other programs
The build system has been tuned for use with GNU make but also
works with other versions of make. If the `make' command runs a
non-GNU version but a GNU version is available under a different
name (perhaps `gmake'), then HDF5 can be configured to use it by
setting the MAKE variable. Note that whatever value is used for
MAKE must also be used as the make command when building the
library:
The build system has been tuned for use with GNU make which is
the preferred version. Other versions of make may or may not work
completely. If the `make' command runs a non-GNU version but a GNU
version is available under a different name (perhaps `gmake'), then
HDF5 can be configured to use it by setting the MAKE variable. Note
that whatever value is used for MAKE must also be used as the make
command when building the library:
$ MAKE=gmake ./configure
$ gmake
@@ -515,8 +515,8 @@ CONTENTS
$ bin/deploy NEW_DIR
This will install HDF5 in NEW_DIR. Alternately, you can do this
manually by issuing the command:
This will install HDF5 in NEW_DIR. Alternately, if shared libraries
were disabled, you can do this manually by issuing the command:
$ make install prefix=NEW_DIR
@@ -525,6 +525,11 @@ CONTENTS
NEW_DIR/bin directory. This utility will fix the h5cc, h5fc and
h5c++ scripts to reflect the new NEW_DIR location.
If shared libraries were enabled, configure must be run with
--prefix=<final install directory>. HDF5 can then be installed
directly with "make install" or using a stage directory with
"make install DESTDIR=STAGE_DIR".
The library can be used without installing it by pointing the
compiler at the `src' and 'src/.libs' directory for include files and
libraries. However, the minimum which must be installed to make

View File

@@ -35,10 +35,10 @@ Notes: This short set of instructions is written for users who want to
3. Open a command prompt at:
c:\MyHDFstuff\hdf5\build
4. Configure the C library, tools and tests with this command:
4. Configure the C library, tools, and tests with this command:
cmake -G "Visual Studio 10" -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..
3. Build the C library, tools and tests with this command:
3. Build the C library, tools, and tests with this command:
cmake --build . --config Release
4. Test the C library and tools with this command:
@@ -63,10 +63,10 @@ Notes: This short set of instructions is written for users who want to
3. Open a command prompt at:
~\MyHDFstuff\hdf5\build
4. Configure the C library, tools and tests with this command:
4. Configure the C library, tools, and tests with this command:
cmake -G "Unix Makefiles" -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..
3. Build the C library, tools and tests with this command:
3. Build the C library, tools, and tests with this command:
cmake --build . --config Release
4. Test the C library and tools with this command:
@@ -301,8 +301,8 @@ These five steps are described in detail below.
3. Build HDF5
On Windows, you can build HDF5 using either the Visual Studio Environment
or the command line. The command line can be used on all platforms;
Windows, linux, Unix, and Mac.
or the command line. The command line can be used on all platforms:
Windows, Linux, Unix, and Mac.
To build from the command line, navigate to your build directory and
execute the following:
@@ -468,13 +468,13 @@ V. User Defined Options for HDF5 Libraries with CMake
========================================================================
Support for User Defined macros and options has been added. The file
UserMacros.cmake has an example of the technique. In the folder,
config/cmake/UserMacros, is an implementation for Windows Visual Studio
UserMacros.cmake has an example of the technique. In the folder
config/cmake/UserMacros is an implementation for Windows Visual Studio
users for linking libraries to the static CRT - Windows_MT.cmake.
Copy the contents of the file, both macro and option, into the
UserMacros.cmake file. Then enable the option to the CMake configuration,
build and test process.
build, and test process.
========================================================================
VI. APPENDIX
@@ -488,7 +488,7 @@ a Windows machine by adjusting the CTEST_CMAKE_GENERATOR option in the
product specific script.
NOTE: these files are available at the HDF web site:
http://www.hdfgroup.org/xxx/cmake-help.html
http://www.hdfgroup.org/HDF5/release/cmakebuild.html
CTestScript.cmake
SZip.tar.gz

View File

@@ -5,7 +5,7 @@
Preconditions:
--------------
1. Installed Cygwin 1.7.15 or higher
1. Installed Cygwin 1.7.15 or Higher
To install the Cygwin net release, go to http://www.cygwin.com and
click on "Install or update now!" icon. This will download a GUI
@@ -32,9 +32,9 @@ Preconditions:
2.2 Using Compilers Not Supported
The compilers in 2.1 are supported and tested by HDF
group. Any other compilers may still work but they are not
guaranteed by HDF group.
The compilers in 2.1 are supported and tested by The HDF
Group. Other compilers may still work, but they are not
guaranteed by The HDF Group.
If users want to use other compilers except those in 2.1,
try to set the following variables to override the default
@@ -43,7 +43,7 @@ Preconditions:
CXX : C++ compiler command
FC : Fortran compiler command
For example, if users want to use pgf90 as fortran compiler, then
For example, if users want to use pgf90 as the Fortran compiler, then
setenv FC pgf90
@@ -54,7 +54,7 @@ Preconditions:
zlib-1.2.5 or later is supported and tested on Cygwin.
3.2 Szip
The HDF5 library has a predefined compression filter that uses
The HDF5 Library has a predefined compression filter that uses
the extended-Rice lossless compression algorithm for chunked
datasets. For more information about Szip compression and
license terms see
@@ -62,15 +62,15 @@ Preconditions:
The latest supported public release of SZIP is available from
ftp://ftp.hdfgroup.org/lib-external/szip/2.1. Binary distribution can be
found under ftp://ftp.hdfgroup.org/lib-external/szip/2.1/bin/cygwin
found under ftp://ftp.hdfgroup.org/lib-external/szip/2.1/bin/cygwin.
Install HDF5 on Cygwin
----------------------
1. Get HDF5 source code package
Users can download HDF5 source code package from HDF website
(http://hdfgroup.org).
Users can download the HDF5 source code package from The HDF Group
website (http://hdfgroup.org).
2. Unpacking the distribution
@@ -92,7 +92,7 @@ Install HDF5 on Cygwin
2. Setup Environment
In Cygwin, most compilers and setting are automatically detected during
In Cygwin, most compilers and settings are automatically detected during
the configure script. However, if you are building Fortran we recommend
that you explicitly set the "FC" variable in your environment to use the
gfortran compiler. For example, issue the command:
@@ -102,10 +102,10 @@ Install HDF5 on Cygwin
4. Configuring
Notes: See detailed information in hdf5/release_docs/INSTALL,
part 5. Full installation instructions for source
distributions
section "5. Full Installation Instructions for Source
Distributions."
The host configuration file for cygwin i686-pc-cygwin is located
The host configuration file for Cygwin i686-pc-cygwin is located
in the `config' directory and are based on architecture name,
vendor name, and operating system which are displayed near the
beginning of the `configure' output. The host config file influences
@@ -129,9 +129,9 @@ Install HDF5 on Cygwin
$ ./configure --with-szlib="path to szlib"
For example, if szip library was installed in the directory
/cygdrive/c/szip, which is parent directory of "include" and
"lib", then the following command will configure HDF5 C library
For example, if the szip library was installed in the directory
/cygdrive/c/szip, which is the parent directory of "include" and
"lib", then the following command will configure the HDF5 C Library
with szip enabled:
$ ./configure --with-szlib=/cygdrive/c/szip
@@ -157,7 +157,7 @@ Install HDF5 on Cygwin
Through the CPPFLAGS and LDFLAGS Variables
For example, if zlib was installed in the directory
/cygdrive/c/usr then using the following command to configure
/cygdrive/c/usr, then using the following command to configure
HDF5 with zlib
$ CPPFLAGS=-I/cygdrive/c/usr/include \
@@ -179,10 +179,10 @@ Install HDF5 on Cygwin
All of the above switches can be combined together. For
example, if users want to configure HDF5 C/C++/Fortran
library with szip library enabled, with zlib library at
/cygdrive/c/usr/, and install HDF5 into directory
/cygdrive/c/hdf5 using gcc/g++ as C/C++ compiler and gfortran
as fortran compiler
library with the szip library enabled, with the zlib library at
/cygdrive/c/usr/, and install HDF5 into the directory
/cygdrive/c/hdf5 using gcc/g++ as the C/C++ compiler and gfortran
as the Fortran compiler
$ ./configure
--with-szlib=/cygdrive/c/szip
@@ -192,12 +192,11 @@ Install HDF5 on Cygwin
--enable-fortran
<"If no more switches, then hit Enter">
Notes: The command format above is for readilibity. In practice,
please type in the command above with at least one
space between each line, No "Enter" until users finish
the switches and want to run the configure.
Notes: The command format above is for readability. In practice,
please type in the command above with at least one
space between each line, and do not hit "Enter" until you
finish the switches and want to run the configure.
or do it through CPPFLAGS and LDFLAGS variables:
$ CPPFLAGS=-I/cygdrive/c/usr/include \
@@ -212,30 +211,30 @@ Install HDF5 on Cygwin
5. Make and Make Check
After configuration is done successfully, run the following series of
commands to build, test and install HDF5
After configuration has completed successfully, run the following series of
commands to build, test, and install HDF5:
$ make > "output file name"
$ make check > "output file name"
Before run "make install", check output file for "make check", there
should be no failures at all.
Before running "make install" below, check the output file for
"make check". There should be no failures at all.
6. Make Install
$ make install > "output file name"
7. Check installed HDF5 library
7. Check Installed HDF5 Library
After step 6, go to your installation directory, there should be
three subdirectories: "bin" "include" and "lib".
After step 6, go to your installation directory. There should be
three subdirectories: "bin", "include", and "lib".
8. Known Problems
dt_arith tests may fail due to the use of fork. This is a known issue
with cygwin on windows.
with Cygwin on Windows.
-----------------------------------------------------------------------
Need Further assistance, email help@hdfgroup.org
For further assistance, email help@hdfgroup.org

View File

@@ -1,61 +1,68 @@
Building and installation instructions for Alpha Open VMS
Building and Installation Instructions for Alpha Open VMS
HDF5 1.8.12 release
18 September 2013
CONTENTS
--------
CONTENTS
--------
1. Obtaining HDF5
2. Building and testing HDF5 C, Fortran and C++ libraries,
and utilities
3. Installing HDF5 libraries
4. Known problems
1. Obtaining HDF5
2. Building and Testing HDF5 C, Fortran, and C++ Libraries
and Utilities
3. Installing HDF5 Libraries
4. Known Problems
1.Obtaining HDF5
1. Obtaining HDF5
Please see INSTALL file in this directory for downloading instructions.
Please see the INSTALL file in this directory for downloading
instructions.
2. Building and testing HDF5 C, Fortran and C++ libraries and utilities.
2. Building and Testing HDF5 C, Fortran, and C++ Libraries and Utilities
IMPORTANT: This version REQUIRES GNU ZLIB library to be installed
on a system. You may download the source code from
http://www.zlib.net/
The newest ZLIB 1.2.8 doesn't work properly on OpenVMS.
But ZLIB 1.2.5 works fine.
The newest ZLIB 1.2.8 does not work properly on OpenVMS,
but ZLIB 1.2.5 works fine.
- Use tar command to untar HDF5 source ball
- Use the tar command to untar HDF5 source ball
tar -xvf <source>.tar
tar -xvf <source>.tar
- Change default directory to [.<source>.vms]
- Specify top HDF5 source directory in the build.com file where the
- Change the default directory to [.<source>.vms]
- Specify the top HDF5 source directory in the build.com file where
"hdf5top" is defined.
- Edit make.com:
specify path to ZLIB library
if necessary, modify compilation flags
Do NOT REMOVE /define=H5_VMS and /standard=strict_ansi qualifiers.
- Run build.com command file
@build
We recommend using batch queue to build and test this distribution.
The command file runs make.com and builds
C library, tests, and utilities
Fortran library and tests
C++ library and tests
It also runs test scripts to test C, Fortran and C++ libraries, and
h5dump, h5diff, h5ls, and h5repack utilities.
- Specify the path to the ZLIB library
- If necessary, modify the compilation flags
Do NOT REMOVE the /define=H5_VMS and /standard=strict_ansi qualifiers.
- Run the build.com command file
@build
We recommend using a batch queue to build and test this
distribution.
The command file runs make.com and builds the C library, tests,
and utilities, the Fortran library and tests, and the C++
library and tests. It also runs test scripts to test C, Fortran,
and C++ libraries, and the h5dump, h5diff, h5ls, and h5repack
utilities.
3. Installing HDF5 libraries
3. Installing HDF5 Libraries
Modify install.com file to specify installation directory (hdf5top)
and run
@install
to install HDF5 C, Fortran, and C++ libraries, and utilities.
The script will create the directories shown below in the
installation directory:
Modify the install.com file to specify the installation directory
(hdf5top) and run
@install
to install HDF5 C, Fortran, and C++ libraries, and utilities.
The script will create the directories shown below in the
installation directory:
[.HDF5]...
[.HDF5.INCLUDE]
@@ -70,13 +77,13 @@
to build examples against the installed libraries and to verify
the installation.
4. Known problems
4. Known Problems
- There is no support for szip compression filter.
- There is no support for the szip compression filter.
- Two soft conversion functions (H5T__conv_i_f and H5T__conv_f_i)
have bugs. They convert data between floating-point numbers and
integers. But the library's default is hard conversion. The user
should avoid explicitly enabling soft conversion between floating-point
numbers and integers.
integers, but the library's default is hard conversion. The user
should avoid explicitly enabling soft conversion between
floating-point numbers and integers.
For help contact help@hdfgroup.org
For help, contact help@hdfgroup.org.

View File

@@ -4,13 +4,13 @@
* (Full Version) *
***********************************************************************
We now recommend that users build, test and install HDF5 using CMake.
We now recommend that users build, test, and install HDF5 using CMake.
Instructions for building and testing HDF5 using CMake can be found in the
INSTALL_CMake.txt file found in this folder.
For instructions of building and testing an application with HDF5, see
USING_HDF5_CMake.txt file found in this folder.
Instructions for building and testing an application with HDF5, can be
found in the USING_HDF5_CMake.txt file found in this folder.
Users who want to build and run an application with HDF5 in Visual Studio
without using CMake should consult the USING_HDF5_VS.txt file.

View File

@@ -4,13 +4,13 @@
1. Overview
-----------
This file contains instructions for the installation of parallel HDF5 (PHDF5).
This file contains instructions for the installation of Parallel HDF5 (PHDF5).
It is assumed that you are familiar with the general installation steps as
described in the INSTALL file. Get familiar with that file before trying
the parallel HDF5 installation.
the Parallel HDF5 installation.
The remaining of this section explains the requirements to run PHDF5.
Section 2 shows quick instructions for some well know systems. Section 3
The rest of this section explains the requirements to run PHDF5.
Section 2 shows quick instructions for some well known systems. Section 3
explains the details of the installation steps. Section 4 shows some details
of running the parallel test suites.
@@ -19,12 +19,12 @@ of running the parallel test suites.
-----------------
PHDF5 requires an MPI compiler with MPI-IO support and a POSIX compliant
(Ref. 1) parallel file system. If you don't know yet, you should first consult
with your system support staff of information how to compile an MPI program,
how to run an MPI application, and how to access the parallel file system.
There are sample MPI-IO C and Fortran programs in the appendix section of
"Sample programs". You can use them to run simple tests of your MPI compilers
and the parallel file system. Also, the t_posix_compliant test in testpar
verifies if the file system is POSIX compliant.
with your system support staff for information on how to compile an MPI
program, how to run an MPI application, and how to access the parallel file
system. There are sample MPI-IO C and Fortran programs in the appendix section
of "Sample Programs". You can use them to run simple tests of your MPI
compilers and parallel file system. Also, the t_posix_compliant test in
testpar verifies if the file system is POSIX compliant.
1.2. Further Help
@@ -38,18 +38,18 @@ In your mail, please include the output of "uname -a". If you have run the
the file "config.log".
2. Quick Instruction for known systems
2. Quick Instruction for Known Systems
--------------------------------------
The following shows particular steps to run the parallel HDF5 configure for
The following shows particular steps to run the Parallel HDF5 configure for
a few machines we've tested. If your particular platform is not shown or
somehow the steps do not work for yours, please go to the next section for
more detailed explanations.
2.1. Know parallel compilers
2.1. Known Parallel Compilers
----------------------------
HDF5 knows several parallel compilers: mpicc, hcc, mpcc, mpcc_r. To build
parallel HDF5 with one of the above, just set CC as it and configure.
Parallel HDF5 with one of the above, just set CC to it and configure.
The "--enable-parallel" is optional in this case.
$ CC=/usr/local/mpi/bin/mpicc ./configure --prefix=<install-directory>
@@ -61,13 +61,13 @@ The "--enable-parallel" is optional in this case.
2.2. IBM SP
-----------
During the build stage, the H5detect is compiled and executed to generate
During the build stage, H5detect is compiled and executed to generate
the source file H5Tinit.c which is compiled as part of the HDF5 library. In
parallel mode, make sure your environment variables are set correctly to
execute a single process mpi application. Otherwise, multiple processes
attempt to write to the same H5Tinit.c file, resulting in a scrambled
source file. Unfortunately, the setting varies from machine to machine.
E.g., the following works for the IBM SP machine at LLNL.
For example, the following works for the IBM SP machine at LLNL.
setenv MP_PROCS 1
setenv MP_NODES 1
@@ -75,7 +75,7 @@ E.g., the following works for the IBM SP machine at LLNL.
setenv MP_RMPOOL 0
setenv LLNL_COMPILE_SINGLE_THREADED TRUE # for LLNL site only
The shared library configuration is problematic. So, only static library
The shared library configuration is problematic. So, only the static library
is supported.
Then do the following steps:
@@ -87,7 +87,7 @@ Then do the following steps:
$ make install
We also suggest that you add "-qxlf90=autodealloc" to FFLAGS when building
parallel with fortran enabled. This can be done by invoking:
parallel with Fortran enabled. This can be done by invoking:
setenv FFLAGS -qxlf90=autodealloc # 32 bit build
or
@@ -97,7 +97,7 @@ prior to running configure. Recall that the "-q64" is necessary for 64
bit builds.
2.3. Linux 2.4 and greater
2.3. Linux 2.4 and Greater
--------------------------
Be sure that your installation of MPICH was configured with the following
configuration command-line option:
@@ -112,9 +112,9 @@ Linux kernels 2.4 and greater.
-------------------------
Both serial and parallel HDF5 are supported in Red Storm.
2.4.1 Building serial HDF5 for Red Storm
2.4.1 Building Serial HDF5 for Red Storm
------------------------------------------
The following steps are for building the serial HDF5 for the Red Storm
The following steps are for building the Serial HDF5 for the Red Storm
compute nodes. They would probably work for other Cray XT3 systems but have
not been verified.
@@ -132,7 +132,7 @@ $ make check
# if all is well, install the binary.
$ make install
2.4.2 Building parallel HDF5 for Red Storm
2.4.2 Building Parallel HDF5 for Red Storm
------------------------------------------
The following steps are for building the Parallel HDF5 for the Red Storm
compute nodes. They would probably work for other Cray XT3 systems but have
@@ -153,11 +153,11 @@ $ make check
# if all is well, install the binary.
$ make install
2.4.3 Red Storm known problems
2.4.3 Red Storm Known Problems
------------------------------
For Red Storm, a Cray XT3 system, the yod command sometimes gives the
message, "yod allocation delayed for node recovery". This interferes with
test suites that do not expect seeing this message. To bypass this problem,
test suites that do not expect to see this message. To bypass this problem,
I launch the executables with a command shell script called "myyod" which
consists of the following lines. (You should set $RUNSERIAL and $RUNPARALLEL
to use myyod instead of yod.)
@@ -197,11 +197,11 @@ not been verified.
Obtain a copy from the HDF ftp server:
http://www.hdfgroup.org/ftp/HDF5/current/src/
(link might change, so always double check the HDF group website).
(the link might change, so always double-check the HDF Group website).
$ wget http://www.hdfgroup.org/ftp/HDF5/current/src/hdf5-x.x.x.tar.gz
unpack the tarball
Unpack the tarball.
$ cd hdf5-x.x.x/
$ CC=cc FC=ftn ./configure \
@@ -216,7 +216,7 @@ scripts and allocate nodes for your job. For Hopper, all the
information can be found on:
http://www.nersc.gov/systems/hopper-cray-xe6/
save the PBS script into your HDF5 build directory. The PBS script
Save the PBS script into your HDF5 build directory. The PBS script
should contain (besides the PBS node allocation requests) the
following:
@@ -234,7 +234,7 @@ make check
Once the job runs and all is well, install the binary:
$ make install
2.5.2 Hopper known issues
2.5.2 Hopper Known Issues
------------------------------
Sometimes when building the library with make, you might get this problem:
@@ -254,12 +254,12 @@ If that happens, you are probably running with make -j <x>. In that
case, you need to clean up everything and start again as detailed above
but use serial make (do not use -j <x>).
3. Detail explanation
3. Detailed Explanation
---------------------
3.1. Installation steps (Uni/Multiple processes modes)
3.1. Installation Steps (Uni/Multiple Processes Modes)
-----------------------
During the step of configure, you must be running in the uni-process mode.
During the configure step, you must be running in the uni-process mode.
If multiple processes are doing the configure simultaneously, they will
incur errors.
@@ -267,38 +267,40 @@ In the build step (make), it depends on your make command whether it can
run correctly in multiple processes mode. If you are not sure, you should
try running it in uni-process mode.
In the test step (make check), if your system can control number of processes
running in the MPI application, you can just use "make check". But if your
system (e.g., IBM SP) has a fixed number of processes for each batch run,
you need to do the serial tests by "make check-s", requesting 1 process and
then do the parallel tests by "make check-p", requesting n processes.
In the test step (make check), if your system can control the number of
processes running in the MPI application, you can just use "make check".
But if your system (for example, IBM SP) has a fixed number of processes
for each batch run, you need to do the serial tests by "make check-s",
requesting 1 process and then do the parallel tests by "make check-p",
requesting n processes.
Lastly, "make install" should be run in the uni-process mode.
3.2. Configure details
3.2. Configure Details
----------------------
The HDF5 library can be configured to use MPI and MPI-IO for parallelism on
The HDF5 Library can be configured to use MPI and MPI-IO for parallelism on
a distributed multi-processor system. The easiest way to do this is to have
a properly installed parallel compiler (e.g., MPICH's mpicc or IBM's mpcc_r)
and supply the compiler name as the value of the CC environment variable.
For examples,
a properly installed parallel compiler (for example, MPICH's mpicc or
IBM's mpcc_r) and supply the compiler name as the value of the CC
environment variable. For example,
$ CC=mpcc_r ./configure
$ CC=/usr/local/mpi/bin/mpicc ./configure
If no such a compiler command is available then you must use your normal
C compiler along with the location(s) of MPI/MPI-IO files to be used.
If no such compiler command is available then you must use your normal
C compiler along with the location(s) of the MPI/MPI-IO files to be used.
For example,
$ CPPFLAGS=-I/usr/local/mpi/include \
LDFLAGS=-L/usr/local/mpi/lib/LINUX/ch_p4 \
./configure --enable-parallel=mpich
If a parallel library is being built then configure attempts to determine how
If a parallel library is being built, then configure attempts to determine how
to run a parallel application on one processor and on many processors. If the
compiler is `mpicc' and the user hasn't specified values for RUNSERIAL and
RUNPARALLEL then configure chooses `mpiexec' from the same directory as `mpicc':
compiler is `mpicc' and the user has not specified values for RUNSERIAL and
RUNPARALLEL, then configure chooses `mpiexec' from the same directory as
`mpicc':
RUNSERIAL: /usr/local/mpi/bin/mpiexec -np 1
RUNPARALLEL: /usr/local/mpi/bin/mpiexec -np $${NPROCS:=6}
@@ -307,45 +309,45 @@ The `$${NPROCS:=6}' will be substituted with the value of the NPROCS
environment variable at the time `make check' is run (or the value 6).
4. Parallel test suite
4. Parallel Test Suite
----------------------
The testpar/ directory contains tests for Parallel HDF5 and MPI-IO. Here are
some notes about some of the tests.
The t_mpi tests the basic functionalities of some MPI-IO features used by
t_mpi tests the basic functionalities of some MPI-IO features used by
Parallel HDF5. It usually exits with non-zero code if a required MPI-IO
feature does not succeed as expected. One exception is the testing of
accessing files larger than 2GB. If the underlying filesystem or if the
accessing files larger than 2GB. If the underlying file system or the
MPI-IO library fails to handle file sizes larger than 2GB, the test will
print informational messages stating the failure but will not exit with
non-zero code. Failure to support file size greater than 2GB is not a fatal
error for HDF5 because HDF5 can use other file-drivers such as families of
error for HDF5 because HDF5 can use other file drivers such as families of
files to bypass the file size limit.
The t_posix_compliant tests if the file system is POSIX compliant when POSIX
t_posix_compliant tests if the file system is POSIX compliant when POSIX
and MPI-IO APIs are used. This is for information only and it always exits
with 0 even when non-compliance errors have occurred. This is to prevent
the test from aborting the remaining parallel HDF5 tests unnecessarily.
the test from aborting the remaining Parallel HDF5 tests unnecessarily.
The t_cache does many small sized I/O requests and may not run well in a
t_cache does many small sized I/O requests and may not run well in a
slow file system such as NFS disk. If it takes a long time to run it, try
set the environment variable $HDF5_PARAPREFIX to a file system more suitable
for MPI-IO requests before running t_cache.
setting the environment variable $HDF5_PARAPREFIX to a file system more
suitable for MPI-IO requests before running t_cache.
By default, the parallel tests use the current directory as the test directory.
This can be changed by the environment variable $HDF5_PARAPREFIX. For example,
This can be changed by the environment variable $HDF5_PARAPREFIX. For example,
if the tests should use directory /PFS/user/me, do
HDF5_PARAPREFIX=/PFS/user/me
export HDF5_PARAPREFIX
make check
(In some batch job system, you many need to hardset HDF5_PARAPREFIX in the
(In some batch job systems, you may need to hardset HDF5_PARAPREFIX in the
shell initial files like .profile, .cshrc, etc.)
Reference
---------
1. POSIX Compliant. A good explanation is by Donald Lewin,
1. POSIX Compliant. A good explanation is by Donald Lewin.
After a write() to a regular file has successfully returned, any
successful read() from each byte position on the file that was modified
by that write() will return the data that was written by the write(). A
@@ -357,11 +359,11 @@ Reference
& Associates.
Appendix A. Sample programs
Appendix A. Sample Programs
---------------------------
Here are sample MPI-IO C and Fortran programs. You may use them to run simple
tests of your MPI compilers and the parallel file system. The MPI commands
used here are mpicc, mpif90 and mpiexec. Replace them with the commands of
used here are mpicc, mpif90, and mpiexec. Replace them with the commands of
your system.
The programs assume they run in the parallel file system. Thus they create
@@ -380,8 +382,8 @@ Example compiling and running:
==> Sample_mpio.c <==
/* Simple MPI-IO program testing if a parallel file can be created.
* Default filename can be specified via first program argument.
* Each process writes something, then reads all data back.
* Default filename can be specified via the first program argument.
* Each process writes something and then reads all data back.
*/
#include <mpi.h>
@@ -498,7 +500,7 @@ main(int ac, char **av)
! file using MPI-IO calls.
!
! USE MPI is the proper way to bring in MPI definitions but many
! MPI Fortran compiler supports the pseudo standard of INCLUDE.
! MPI Fortran compilers support the pseudo standard of INCLUDE.
! So, HDF5 uses the INCLUDE statement instead.
!

View File

@@ -1,12 +1,12 @@
HDF5 version 1.8.12-snap20 currently under development
HDF5 version 1.8.12 released on 2013-11-21
================================================================================
INTRODUCTION
============
This document describes the differences between HDF5-1.8.11 and
HDF5-1.8.12-*, and contains information on the platforms tested and
known problems in HDF5-1.8.12-*.
HDF5-1.8.12, and contains information on the platforms tested and
known problems in HDF5-1.8.12.
For more details, see the files HISTORY-1_0-1_8_0_rc3.txt
and HISTORY-1_8.txt in the release_docs/ directory of the HDF5 source.
@@ -57,11 +57,11 @@ New Features
Configuration
-------------
- New configuration option added to change the default plugin path.
configure option is --with-default-plugin=location
cmake option is -DH5_DEFAULT_PLUGINDIR:PATH=location
- Added a configuration option to change the default plugin path.
The configure option is --with-default-plugindir=location.
The cmake option is -DH5_DEFAULT_PLUGINDIR:PATH=location.
HDFFV-8513. (ADB 2013/09/04)
- Rename FFLAGS to FCFLAGS in configure (ADB 2013/08/13)
- Renamed FFLAGS to FCFLAGS in configure. (ADB 2013/08/13)
- CMake can now package a compressed examples file, the default for
Windows binaries from HDF Group. (ADB - 2013/07/22)
@@ -75,10 +75,13 @@ New Features
Tools
-----
- h5dump: Added option -N --any_path, which searches the file for paths that
match the search path. HDFFV-7989 (ADB - 2013/08/12).
- h5dump: Added optional arg 0 to -A, which excludes attributes from display.
HDFFV-8134 (ADB - 2013/08/01).
- h5repack: Added the ability to use plugin filters to read and write
files. The option uses the filter number. HDFFV-8345
(ADB - 2013/09/04).
- h5dump: Added the option -N --any_path, which searches the file for
paths that match the search path. HDFFV-7989 (ADB - 2013/08/12).
- h5dump: Added the optional arg 0 to -A, which excludes attributes
from display. HDFFV-8134 (ADB - 2013/08/01).
High-Level APIs
---------------
@@ -90,7 +93,7 @@ New Features
C++ API
-------
- Added tutorial examples to c++/examples. They can be installed by
- Added tutorial examples to C++/examples. They can be installed by
"make install-examples" and, in the installed directory, they can be
executed by running the script file run-c++-ex.sh. (BMR - 2013/09/28)
- A new class, H5::H5Location, is added to represent the location concept
@@ -118,17 +121,22 @@ Bug Fixes since HDF5-1.8.11
Configuration
-------------
- Modified H5detect.c to scan floating point types for padding bits before
analyzing the type further. This should fix problems with gcc 4.8
analyzing the type further. This should fix problems with gcc 4.8.
(NAF - 2013/09/19 - HDFFV-8523/HDFFV-8500)
- HDF5 rpaths are no longer encoded in the library files when configured
with --disable-sharedlib-rpath. (LRK-2013-09-23 - HDFFV-8276)
CMake
-----
- Fixed installation problem for Mac OS X reported on FORUM.
(ADB - 2013/11/20)
Library
-------
- Added const qualifier to source buffer parameters in H5Dgather and
H5D_scatter_func_t (H5Dscatter callback). (NAF - 2013/7/09)
- CMake can now creates *.so.{lt_version} files with the same version as
- CMake now creates *.so.{lt_version} files with the same version as
configure. (ADB - 2013/06/05 HDFFV-8437)
Parallel Library
@@ -141,11 +149,10 @@ Bug Fixes since HDF5-1.8.11
Tools
-----
- h5repack: Added ability to use plugin filters. HDFFV-8345 (ADB - 2013/09/04).
- h5dump: Added option -N --any_path, which searches the file for paths that
match the search path. HDFFV-7989 (ADB - 2013/08/12).
- h5dump: Added optional arg 0 to -A, which excludes attributes from display.
HDFFV-8134 (ADB - 2013/08/01).
- h5dump: Added the option -N --any_path, which searches the file for
paths that match the search path. HDFFV-7989 (ADB - 2013/08/12).
- h5dump: Added the optional arg 0 to -A, which excludes attributes
from display. HDFFV-8134 (ADB - 2013/08/01).
- h5dump correctly exports subsetted data to a file, using the --output
option. (ADB - 2013/06/07 HDFFV-8447)
- h5cc and other compile scripts now default to linking shared libraries
@@ -173,7 +180,7 @@ Bug Fixes since HDF5-1.8.11
- test/big sometimes failed with the message of "file selection+offset not
within extent". This has been fixed. (AKC - 2013/09/28 HDFFV-8271).
- tools/h5diff/testh5diff.sh is run in every "make check", even after it
has passed in the previous run. It should not run again if there is no
has passed in the previous run. It should not run again if there are no
code changes. Fixed. (AKC - 2013/07/19 HDFFV-8392)
Supported Platforms
@@ -253,11 +260,11 @@ They are built with the configure process unless specified otherwise.
(duck) Intel icc/icpc/ifort version 13.0.3
Mac OS X Mountain Lion 10.8.1 Apple clang/clang++ version 4.2 from Xcode 4.6.1
64-bit gfortran GNU Fortran (GCC) 4.6.2
64-bit gfortran GNU Fortran (GCC) 4.6.2
(wren) Intel icc/icpc/ifort version 13.0.3
OpenVMS IA64 V8.4 HP C V7.3-018
(Boeing) HP Fortran V8.2-104939-50H96
HP Fortran V8.2-104939-50H96
HP C++ V7.4-004
Tested Configuration Features Summary
@@ -375,6 +382,36 @@ The following platforms are not supported but have been tested for this release.
Known Problems
==============
* Several HDF5 command-line tools and tests leave behind generated files
that are not cleaned up with "make clean" or "make distclean" when software
is built in place. The issue will be addressed in the 1.8.13 release. We
recommend using a build directory to compile and test HDF5 as described
in the INSTALL file, section 4.2.
* Source directory names with spaces in them will cause failures in configure
or make on Mac (HDFFV-8152), Linux, and probably all other platforms. If a
configure command with a space is run from a build directory, it will exit
with an error message: "checking whether build environment is sane...
configure: error: unsafe srcdir value: '/scr/lrknox/hdf5 v1.8.12'". If
configure is run inside or below the directory with the space in the name,
libtool will get the directory path from the system, put the part of the
path before the space in the libdir variable in .../src/libhdf5.la, and
then fail to find the nonexistent directory. This is a known libtool issue
and the suggested workaround is to rename the directory without spaces.
(LRK - 2013/10/22)
* CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv
catches some undefined behavior in the alignment algorithm of the macro
DETECT_I in H5detect.c (HDFFV-8147). This issue will be addressed in the
next release. (SLU - 2013/10/16)
* Running make check for the tools can fail in the tools tests if make was not
run first. The tests for the tools use other tools, so all of the tools
should be built before testing the tools. (ADB - 2013/10/09)
* Make provided by Solaris fails in "make check". Solaris users should use
gmake to build and install HDF5 software. (AKC - 2013/10/08 - HDFFV-8534)
* On OpenVMS, two soft conversion functions (H5T__conv_i_f and H5T__conv_f_i)
have bugs. They convert data between floating-point numbers and integers.
But the library's default is hard conversion. The user should avoid
@@ -439,14 +476,14 @@ Known Problems
* On hopper, the build failed when RUNSERIAL and RUNPARALLEL are set
to aprun -np X, because the H5lib_settings.c file was not generated
properly. Not setting those environment variables works, because
configure was able to automatically detect that it's a Cray system
configure was able to automatically detect that it is a Cray system
and used the proper launch commands when necessary.
(MSC - 2012/04/18)
* The data conversion test dt_arith.c fails in "long double" to integer
conversion on Ubuntu 11.10 (3.0.0.13 kernel) with GCC 4.6.1 if the library
is built with optimization -O3 or -O2. The older GCC (4.5) or newer kernal
(3.2.2 on Fedora) doesn't have the problem. Users should lower the
is built with optimization -O3 or -O2. The older GCC (4.5) or newer kernel
(3.2.2 on Fedora) do not have the problem. Users should lower the
optimization level (-O1 or -O0) by defining CFLAGS in the command line of
"configure" like:

View File

@@ -78,7 +78,7 @@ These steps are described in more detail below.
1. Run CMake
The visual CMake executable is named "cmake-gui.exe" on Windows and should be
available in your Start menu. For Linux, UNIX, and Mac users the
available in your Start menu. For Linux, UNIX, and Mac users, the
executable is named "cmake-gui" and can be found where CMake was
installed.
@@ -220,7 +220,7 @@ Windows should adjust the forward slash to double backslashes, except for
the HDF_DIR environment variable.
NOTE: these files are available at the HDF web site:
http://www.hdfgroup.org/xxx/cmake-help.html
http://www.hdfgroup.org/HDF5/release/cmakebuild.html
CTestScript.cmake

View File

@@ -13,17 +13,17 @@ The following two sections are helpful if you do not use CMake to build
your applications.
========================================================================
Using Visual Studio 2010 with HDF5 Libraries built with Visual Studio 2010
Using Visual Studio 2010 with HDF5 Libraries Built with Visual Studio 2010
========================================================================
1. Set up path for external libraries and headers
1. Set up the path for external libraries and headers
The path settings will need to be in project property sheets per project.
Go to "Project" and select "Properties", find "Configuration Properties",
and then "VC++ Directories".
The path settings will need to be in the project property sheets
per project. Go to "Project" and select "Properties", find
"Configuration Properties", and then "VC++ Directories".
1.1 If you are building on 64-bit Windows, find the "Platform" dropdown
and select "x64".
and select "x64".
1.2 Add the header path to the "Include Directories" setting.
@@ -32,17 +32,17 @@ Using Visual Studio 2010 with HDF5 Libraries built with Visual Studio 2010
1.4 Select Linker->Input and beginning with the
"Additional Dependencies" line, enter the library names. The
external libraries should be listed first, followed by the HDF5
library, and then optionally the HDF5 High Level, Fortran or C++
library, and then optionally the HDF5 High Level, Fortran, or C++
libraries. For example, to compile a C++ application, enter:
szip.lib zlib.lib hdf5.lib hdf5_cpp.lib
==========================================================================
Using Visual Studio 2008 with HDF5 Libraries built with Visual Studio 2008
Using Visual Studio 2008 with HDF5 Libraries Built with Visual Studio 2008
==========================================================================
2. Set up path for external libraries and headers
2. Set up the path for external libraries and headers
Invoke Microsoft Visual Studio and go to "Tools" and select "Options",
find "Projects", and then "VC++ Directories".
@@ -51,20 +51,22 @@ Using Visual Studio 2008 with HDF5 Libraries built with Visual Studio 2008
and select "x64".
2.2 Find the box "Show directories for", choose "Include files", add the
header path (i.e. c:\Program Files\HDF_Group\HDF5\1.8.x\include)
to the included directories.
header path (in other words,
c:\Program Files\HDF_Group\HDF5\1.8.x\include) to the included
directories.
2.3 Find the box "Show directories for", choose "Library files", add the
library path (i.e. c:\Program Files\HDF_Group\HDF5\1.8.x\lib)
to the library directories.
library path (in other words,
c:\Program Files\HDF_Group\HDF5\1.8.x\lib) to the library
directories.
2.4 If using Fortran libraries, you will also need to setup the path
2.4 If using the Fortran libraries, you will also need to set up the path
for the Intel Fortran compiler.
2.5 Select Project->Properties->Linker->Input and beginning with the
"Additional Dependencies" line, enter the library names. The
external libraries should be listed first, followed by the HDF5
library, and then optionally the HDF5 High Level, Fortran or C++
library, and then optionally the HDF5 High Level, Fortran, or C++
libraries. For example, to compile a C++ application, enter:
szip.lib zlib.lib hdf5.lib hdf5_cpp.lib
@@ -75,14 +77,14 @@ Using Visual Studio 2008 with HDF5 Libraries built with Visual Studio 2008
3.1 FAQ
Many other common questions and hints are located online and being updated
in the HDF5 FAQ. For Windows-specific questions, please see:
Many common questions and hints are located online in the HDF5 FAQ.
For Windows-specific questions, please see:
http://www.hdfgroup.org/HDF5/faq/windows.html
For all other general questions, you can look in the general FAQ:
For all other general questions, look in the general FAQ:
http://hdfgroup.org/HDF5-FAQ.html
************************************************************************
Please send email to help@hdfgroup.org for further assistance.
Please send email to help@hdfgroup.org for further assistance.

View File

@@ -547,7 +547,6 @@ H5_DLL herr_t H5F_get_vfd_handle(const H5F_t *file, hid_t fapl,
H5_DLL hbool_t H5F_is_mount(const H5F_t *file);
H5_DLL hbool_t H5F_has_mount(const H5F_t *file);
H5_DLL herr_t H5F_traverse_mount(struct H5O_loc_t *oloc/*in,out*/);
H5_DLL herr_t H5F_flush_mounts(H5F_t *f, hid_t dxpl_id);
/* Functions that operate on blocks of bytes wrt super block */
H5_DLL herr_t H5F_block_read(const H5F_t *f, H5FD_mem_t type, haddr_t addr,

View File

@@ -76,9 +76,9 @@ extern "C" {
#define H5_VERS_MAJOR 1 /* For major interface/format changes */
#define H5_VERS_MINOR 8 /* For minor interface/format changes */
#define H5_VERS_RELEASE 12 /* For tweaks, bug-fixes, or development */
#define H5_VERS_SUBRELEASE "snap20" /* For pre-releases like snap0 */
#define H5_VERS_SUBRELEASE "" /* For pre-releases like snap0 */
/* Empty string for real releases. */
#define H5_VERS_INFO "HDF5 library version: 1.8.12-snap20" /* Full version string */
#define H5_VERS_INFO "HDF5 library version: 1.8.12" /* Full version string */
#define H5check() H5check_version(H5_VERS_MAJOR,H5_VERS_MINOR, \
H5_VERS_RELEASE)

View File

@@ -100,6 +100,7 @@ ADD_TEST (
COMMAND ${CMAKE_COMMAND}
-E remove
coord.h5
dtypes10.h5
sys_file1
tattr.h5
tfile1.h5
@@ -107,6 +108,8 @@ ADD_TEST (
tfile3.h5
tfile4.h5
tfile5.h5
tfile6.h5
tfile7.h5
th5o_file
th5s1.h5
tselect.h5
@@ -381,7 +384,7 @@ IF (HDF5_TEST_VFD)
objcopy
links
unlink
big
# big
mtime
fillval
mount
@@ -408,7 +411,10 @@ IF (HDF5_TEST_VFD)
testmeta
links_env
unregister
)
)
IF (NOT CYGWIN)
SET (H5_VFD_TESTS "${H5_VFD_TESTS} big")
ENDIF (NOT CYGWIN)
IF (DIRECT_VFD)
SET (VFD_LIST ${VFD_LIST} direct)

View File

@@ -130,7 +130,7 @@ CHECK_CLEANFILES+=accum.h5 cmpd_dset.h5 compact_dataset.h5 dataset.h5 dset_offse
max_compact_dataset.h5 simple.h5 set_local.h5 random_chunks.h5 \
huge_chunks.h5 chunk_cache.h5 big_chunk.h5 chunk_expand.h5 \
copy_dcpl_newfile.h5 extend.h5 istore.h5 extlinks*.h5 frspace.h5 links*.h5 \
sys_file1 tfile[1-6].h5 th5s[1-4].h5 lheap.h5 fheap.h5 ohdr.h5 \
sys_file1 tfile[1-7].h5 th5s[1-4].h5 lheap.h5 fheap.h5 ohdr.h5 \
stab.h5 extern_[1-3].h5 extern_[1-4][ab].raw gheap[0-4].h5 \
dt_arith[1-2] links.h5 links[0-6]*.h5 extlinks[0-15].h5 tmp \
big.data big[0-9][0-9][0-9][0-9][0-9].h5 \

View File

@@ -827,7 +827,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog accum.h5 cmpd_dset.h5 \
max_compact_dataset.h5 simple.h5 set_local.h5 random_chunks.h5 \
huge_chunks.h5 chunk_cache.h5 big_chunk.h5 chunk_expand.h5 \
copy_dcpl_newfile.h5 extend.h5 istore.h5 extlinks*.h5 \
frspace.h5 links*.h5 sys_file1 tfile[1-6].h5 th5s[1-4].h5 \
frspace.h5 links*.h5 sys_file1 tfile[1-7].h5 th5s[1-4].h5 \
lheap.h5 fheap.h5 ohdr.h5 stab.h5 extern_[1-3].h5 \
extern_[1-4][ab].raw gheap[0-4].h5 dt_arith[1-2] links.h5 \
links[0-6]*.h5 extlinks[0-15].h5 tmp big.data \

View File

@@ -7063,7 +7063,7 @@ test_utf_ascii_conv(void)
char *ascii_r = NULL;
const char *ascii_w = "bar!";
char *utf8_r = NULL;
char filename[1024];
char ascii2[4], utf8_2[4];
herr_t status;
@@ -7096,7 +7096,8 @@ test_utf_ascii_conv(void)
FAIL_STACK_ERROR
/* Create a file */
if((fid = H5Fcreate(FILENAME[10], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
h5_fixname(FILENAME[10], H5P_DEFAULT, filename, sizeof filename);
if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
/* Create a scalar dataspace for the dataset */
if((sid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR

View File

@@ -89,5 +89,6 @@ else
fi
# Clean up temporary files/directories and leave
$RM $PLUGIN_LIBDIR2
$RM $PLUGIN_LIBDIR1 $PLUGIN_LIBDIR2
exit $exit_code

View File

@@ -67,6 +67,8 @@ H5DIFF_BIN=`pwd`/../h5diff/$H5DIFF # The path of the h5diff tool binary
H5LS=h5ls # The h5ls tool name
H5LS_ARGS=-Svr # Arguments to the h5ls tool
H5LS_BIN=`pwd`/../h5ls/$H5LS # The path of the h5ls tool binary
RM='rm -rf'
CMP='cmp -s'
DIFF='diff -c'
CP='cp'
@@ -121,6 +123,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Print a "SKIP" message
SKIP() {
@@ -578,6 +593,8 @@ COPY_REFERENCES
COPY_EXT_LINKS
TEST_MISC
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
if test $nerrors -eq 0 ; then
echo "All $TESTNAME tests passed."

View File

@@ -38,6 +38,7 @@ EXIT_FAILURE=1
H5DIFF=h5diff # The tool name
H5DIFF_BIN=`pwd`/$H5DIFF # The path of the tool binary
RM='rm -rf'
CMP='cmp -s'
DIFF='diff -c'
CP='cp'
@@ -336,6 +337,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Parse option
# -p run ph5diff tests
# -h print help page
@@ -1096,6 +1110,9 @@ TOOLTEST h5diff_646.txt -v --use-system-epsilon -p 0.05 h5diff_basic1.h5 h5diff_
# # END
# ##############################################################################
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
if test $nerrors -eq 0 ; then
echo "All $TESTNAME tests passed."
exit $EXIT_SUCCESS

View File

@@ -30,7 +30,7 @@ H5DIFF_BIN=`pwd`/$H5DIFF # The path of the h5diff tool binary
H5IMPORT=../h5import/h5import # The h5import tool name
H5IMPORT_BIN=`pwd`/$H5IMPORT # The path of the h5import tool binary
RM='rm -rf'
CMP='cmp'
DIFF='diff -c'
CP='cp'
@@ -394,6 +394,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Print a line-line message left justified in a field of 70 characters
# beginning with the word "Testing".
#
@@ -1255,6 +1268,8 @@ TOOLTEST5 filter_fail.ddl --enable-error-stack filter_fail.h5
# test for -o -y for dataset with attributes
TOOLTEST2 tall-6.exp --enable-error-stack -y -o tall-6.txt -d /g1/g1.1/dset1.1.1 tall.h5
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
# Report test results and exit
if test $nerrors -eq 0 ; then

View File

@@ -38,7 +38,7 @@ H5DIFF_BIN=`pwd`/$H5DIFF # The path of the h5diff tool binary
H5IMPORT=../h5import/h5import # The h5import tool name
H5IMPORT_BIN=`pwd`/$H5IMPORT # The path of the h5import tool binary
RM='rm -rf'
CMP='cmp -s'
DIFF='diff -c'
CP='cp'
@@ -189,6 +189,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Print a line-line message left justified in a field of 70 characters
# beginning with the word "Testing".
#
@@ -576,6 +589,8 @@ TOOLTEST tpbitsLongLengthExceeded.ddl --enable-error-stack -d /DS32BITS -M 26,7
# Incomplete pair of packed bits request.
TOOLTEST tpbitsIncomplete.ddl --enable-error-stack -d /DS08BITS -M 0,2,2,1,0,2,2, packedbits.h5
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
# Report test results and exit
if test $nerrors -eq 0 ; then

View File

@@ -26,6 +26,7 @@ H5_LONE_COLON="@H5_LONE_COLON@"
DUMPER=h5dump # The tool name
DUMPER_BIN=`pwd`/$DUMPER # The path of the tool binary
RM='rm -rf'
CMP='cmp -s'
DIFF='diff -c'
CP='cp'
@@ -217,6 +218,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Print a line-line message left justified in a field of 70 characters
# beginning with the word "Testing".
#
@@ -372,6 +386,8 @@ TOOLTEST torderattr4.h5.xml --xml -H --sort_by=creation_order --sort_order=desce
# tests for floating point user defined printf format
TOOLTEST tfpformat.h5.xml -u -m %.7f tfpformat.h5
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
if test $nerrors -eq 0 ; then
echo "All $TESTNAME tests passed."

View File

@@ -105,7 +105,7 @@
IF (HDF5_ENABLE_USING_MEMCHECKER)
ADD_TEST (NAME H5IMPORT-${testname} COMMAND $<TARGET_FILE:h5import> ${importfile} -c ${conffile} -o ${testfile})
IF (NOT "${last_test}" STREQUAL "")
SET_TESTS_PROPERTIES (H5IMPORT-${testname} PROPERTIES DEPENDS ${last_test})
SET_TESTS_PROPERTIES (H5IMPORT-${testname} PROPERTIES DEPENDS H5IMPORT-h5importtest)
ENDIF (NOT "${last_test}" STREQUAL "")
ELSE (HDF5_ENABLE_USING_MEMCHECKER)
ADD_TEST (
@@ -118,6 +118,7 @@
${testfile}.out
${testfile}.out.err
)
SET_TESTS_PROPERTIES (H5IMPORT-${testname}-clear-objects PROPERTIES DEPENDS H5IMPORT-h5importtest)
ADD_TEST (NAME H5IMPORT-${testname} COMMAND $<TARGET_FILE:h5import> ${importfile} -c ${conffile} -o ${testfile})
SET_TESTS_PROPERTIES (H5IMPORT-${testname} PROPERTIES DEPENDS H5IMPORT-${testname}-clear-objects)
@@ -167,6 +168,7 @@
d${testfile}.dff
d${testfile}.dff.err
)
SET_TESTS_PROPERTIES (H5IMPORT-DUMP-${testname}-clear-objects PROPERTIES DEPENDS H5IMPORT-h5importtest)
IF ("${ARGN}" STREQUAL "BINARY")
ADD_TEST (
@@ -407,10 +409,25 @@
SET (last_test "H5IMPORT-clear-objects")
ENDIF (HDF5_ENABLE_USING_MEMCHECKER)
ADD_TEST (NAME H5IMPORT-h5importtest COMMAND $<TARGET_FILE:h5importtest>)
ADD_TEST (
NAME H5IMPORT-h5importtest-clear-objects
COMMAND ${CMAKE_COMMAND}
-E remove
binfp64.bin
binin8.bin
binin8w.bin
binin16.bin
binin32.bin
binuin16.bin
binuin32.bin
)
IF (NOT "${last_test}" STREQUAL "")
SET_TESTS_PROPERTIES (H5IMPORT-h5importtest PROPERTIES DEPENDS ${last_test})
SET_TESTS_PROPERTIES (H5IMPORT-h5importtest-clear-objects PROPERTIES DEPENDS ${last_test})
ENDIF (NOT "${last_test}" STREQUAL "")
SET (last_test "H5IMPORT-clear-objects")
ADD_TEST (NAME H5IMPORT-h5importtest COMMAND $<TARGET_FILE:h5importtest>)
SET_TESTS_PROPERTIES (H5IMPORT-h5importtest PROPERTIES DEPENDS H5IMPORT-h5importtest-clear-objects)
# ----- TESTING "ASCII I32 rank 3 - Output BE " ;
ADD_H5_TEST (ASCII_I32 testfiles/txtin32.txt testfiles/txtin32.conf txtin32.h5)

View File

@@ -40,4 +40,7 @@ h5import_LDFLAGS = $(LT_STATIC_EXEC) $(AM_LDFLAGS)
# All programs depend on the main hdf5 library and the tools library
LDADD=$(LIBH5TOOLS) $(LIBHDF5)
# Temporary files from h5importtest
CHECK_CLEANFILES+=*.bin
include $(top_srcdir)/config/conclude.am

View File

@@ -427,7 +427,9 @@ TRACE = perl $(top_srcdir)/bin/trace
# .chkexe files are used to mark tests that have run successfully.
# .chklog files are output from those tests.
# *.clog are from the MPE option.
CHECK_CLEANFILES = *.chkexe *.chklog *.clog
# Temporary files from h5importtest
CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.bin
# Include src and tools/lib directories
INCLUDES = -I$(top_srcdir)/src -I$(top_srcdir)/tools/lib

View File

@@ -25,6 +25,7 @@ TESTNAME=h5import
EXIT_SUCCESS=0
EXIT_FAILURE=1
RM='rm -rf'
CP='cp'
DIRNAME='dirname'
LS='ls'
@@ -147,6 +148,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
TESTING() {
SPACES=" "
echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
@@ -350,6 +364,9 @@ else
nerrors="` expr $nerrors + 1 `";
fi
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
#
# Check errors result
if test $nerrors -eq 0 ; then

View File

@@ -34,6 +34,7 @@ UNJAM=h5unjam # Tool to test
JAM_BIN="$RUNSERIAL "`pwd` # The path of the jam binary
UNJAM_BIN=`pwd` # The path of the jam binary
RM='rm -rf'
CMP='cmp -s'
DIFF='diff -c'
AWK='awk'
@@ -122,6 +123,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Print a line-line message left justified in a field of 70 characters
# beginning with the word "Testing".
#
@@ -675,7 +689,8 @@ UNJAMTEST taj3.h5 --delete tac3.h5
CHECKFILE $TESTDIR/tall.h5 tac3.h5
CLEANUP tac3.h5 taj3.h5
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
if test $nerrors -eq 0 ; then
echo "All $TESTNAME tests passed."

View File

@@ -24,6 +24,7 @@ EXIT_FAILURE=1
H5LS=h5ls # The tool name
H5LS_BIN=`pwd`/$H5LS # The path of the tool binary
RM='rm -rf'
CMP='cmp -s'
DIFF='diff -c'
CP='cp'
@@ -189,6 +190,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Print a line-line message left justified in a field of 70 characters
# beginning with the word "Testing".
TESTING() {
@@ -404,6 +418,8 @@ else
TOOLTEST tdataregbe.ls 0 -v tdatareg.h5
fi
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
if test $nerrors -eq 0 ; then
echo "All $TESTNAME tests passed."

View File

@@ -42,6 +42,7 @@ H5DIFF_BIN=`pwd`/$H5DIFF # The path of the h5diff tool binary
H5DUMP=../h5dump/h5dump # The h5dump tool name
H5DUMP_BIN=`pwd`/$H5DUMP # The path of the h5dump tool binary
RM='rm -rf'
GREP='grep'
CP='cp'
DIRNAME='dirname'
@@ -161,6 +162,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Print a $* message left justified in a field of 70 characters
#
MESSAGE() {
@@ -1088,6 +1102,9 @@ TOOLTEST HDFFV-7840 h5diff_attr1.h5
TOOLTEST_META meta_short h5repack_layout.h5 -M 8192
TOOLTEST_META meta_long h5repack_layout.h5 --metadata_block_size=8192
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
if test $nerrors -eq 0 ; then
echo "All $TESTNAME tests passed."
exit $EXIT_SUCCESS

View File

@@ -35,7 +35,7 @@ TEST_NAME=ud_plugin
FROM_DIR=`pwd`/.libs
PLUGIN_LIB="$FROM_DIR/libdynlibadd.*"
PLUGIN_LIBDIR=testdir3
RM="rm -rf"
RM='rm -rf'
GREP='grep'
CP='cp'
@@ -118,6 +118,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Print a $* message left justified in a field of 70 characters
#
MESSAGE() {
@@ -233,6 +246,10 @@ else
exit_code=$EXIT_SUCCESS
fi
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
# Clean up temporary files/directories and leave
$RM $PLUGIN_LIBDIR
exit $exit_code

View File

@@ -37,6 +37,7 @@ EXIT_FAILURE=1
STAT=h5stat # The tool name
STAT_BIN=`pwd`/$STAT # The path of the tool binary
RM='rm -rf'
CMP='cmp -s'
DIFF='diff -c'
CP='cp'
@@ -147,6 +148,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Print a line-line message left justified in a field of 70 characters
# beginning with the word "Testing".
#
@@ -294,6 +308,9 @@ TOOLTEST h5stat_numattrs3.ddl -A --numattrs=25 h5stat_threshold.h5
TOOLTEST h5stat_numattrs4.ddl -A -a 100 h5stat_newgrat.h5
#
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
if test $nerrors -eq 0 ; then
echo "All $TESTNAME tests passed."
exit $EXIT_SUCCESS

View File

@@ -31,6 +31,7 @@ H5LS=h5ls # The h5ls tool name
H5LS_ARGS=-vr # Arguments to the h5ls tool
H5LS_BIN=`pwd`/../h5ls/$H5LS # The path of the h5ls tool binary
RM='rm -rf'
CMP='cmp'
DIFF='diff -c'
CP='cp'
@@ -111,6 +112,19 @@ COPY_TESTFILES_TO_TESTDIR()
done
}
CLEAN_TESTFILES_AND_TESTDIR()
{
# skip rm if srcdir is same as destdir
# this occurs when build/test performed in source dir and
# make cp fail
SDIR=`$DIRNAME $tstfile`
INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
$RM $TESTDIR
fi
}
# Print a line-line message left justified in a field of 70 characters
# beginning with the word "Testing".
TESTING()
@@ -299,6 +313,8 @@ RUNTEST h5mkgrp_nested_lp.h5 "-lp" /one/two
RUNTEST h5mkgrp_nested_mult_p.h5 "-p" /one/two /three/four
RUNTEST h5mkgrp_nested_mult_lp.h5 "-lp" /one/two /three/four
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR
if test $nerrors -eq 0 ; then
echo "All $TESTNAME tests passed."

View File

@@ -69,6 +69,8 @@ $ ccc H5IntType.cxx
$ lib/insert/object hdf5_cplus H5IntType
$ ccc H5Library.cxx
$ lib/insert/object hdf5_cplus H5Library
$ ccc H5Location.cxx
$ lib/insert/object hdf5_cplus H5Location
$ ccc H5Object.cxx
$ lib/insert/object hdf5_cplus H5Object
$ ccc H5PredType.cxx

View File

@@ -511,7 +511,7 @@
#define H5_PACKAGE_NAME "HDF5"
/* Define to the full name and version of this package. */
#define H5_PACKAGE_STRING "HDF5 1.8.12-snap20"
#define H5_PACKAGE_STRING "HDF5 1.8.12"
/* Define to the one symbol short name of this package. */
#define H5_PACKAGE_TARNAME "hdf5"
@@ -520,7 +520,7 @@
#define H5_PACKAGE_URL ""
/* Define to the version of this package. */
#define H5_PACKAGE_VERSION "1.8.12-snap20"
#define H5_PACKAGE_VERSION "1.8.12"
/* Width for printf() for type `long long' or `__int64', use `ll' */
#define H5_PRINTF_LL_WIDTH "ll"
@@ -683,7 +683,7 @@
/* #undef H5_USING_MEMCHECKER */
/* Version number of package */
#define H5_VERSION "1.8.12-snap20"
#define H5_VERSION "1.8.12"
/* Define if vsnprintf() returns the correct value for formatted strings that
don't fit into size allowed */